Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2026-01-01 23:47:04 -05:00)
feat: Implement initial schema and add various API, service, and management command enhancements across the application.
@@ -156,7 +156,6 @@ class ParkLocationAdmin(QueryOptimizationMixin, GISModelAdmin):
"description": "OpenStreetMap identifiers for data synchronization.",
},
),

)

@admin.display(description="Park")
@@ -358,9 +357,7 @@ class ParkAdmin(
for park in queryset:
# Statistics are auto-calculated, so just touch the record
park.save(update_fields=["updated_at"])
self.message_user(
request, f"Successfully recalculated statistics for {queryset.count()} parks."
)
self.message_user(request, f"Successfully recalculated statistics for {queryset.count()} parks.")

def get_actions(self, request):
"""Add custom actions to the admin."""
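For reference, the recalculation hunk above is a standard Django admin action; a standalone sketch of the same pattern follows. The decorator wiring and function name are illustrative, while the body mirrors the lines shown above.

from django.contrib import admin

@admin.action(description="Recalculate statistics for selected parks")
def recalculate_statistics(modeladmin, request, queryset):
    # Statistics are auto-calculated on save, so touching each record is enough.
    for park in queryset:
        park.save(update_fields=["updated_at"])
    modeladmin.message_user(request, f"Successfully recalculated statistics for {queryset.count()} parks.")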
@@ -482,9 +479,7 @@ class CompanyHeadquartersInline(admin.StackedInline):
|
||||
)
|
||||
|
||||
|
||||
class CompanyHeadquartersAdmin(
|
||||
QueryOptimizationMixin, TimestampFieldsMixin, BaseModelAdmin
|
||||
):
|
||||
class CompanyHeadquartersAdmin(QueryOptimizationMixin, TimestampFieldsMixin, BaseModelAdmin):
|
||||
"""
|
||||
Admin interface for standalone CompanyHeadquarters management.
|
||||
|
||||
@@ -661,7 +656,7 @@ class CompanyAdmin(
color = colors.get(role, "#6c757d")
badges.append(
f'<span style="background-color: {color}; color: white; '
f'padding: 2px 6px; border-radius: 3px; font-size: 10px; '
f"padding: 2px 6px; border-radius: 3px; font-size: 10px; "
f'margin-right: 4px;">{role}</span>'
)
return format_html("".join(badges))
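The badge loop above joins pre-built fragments and passes the result to format_html. As a hedged aside, Django's format_html_join covers exactly this join-of-fragments case in one call; the colors mapping and role values below are illustrative.

from django.utils.html import format_html_join

def role_badges(roles, colors):
    # Each role becomes one span; interpolated values are escaped and the joined
    # result is marked safe in a single step.
    return format_html_join(
        "",
        '<span style="background-color: {}; color: white; padding: 2px 6px; '
        'border-radius: 3px; font-size: 10px; margin-right: 4px;">{}</span>',
        ((colors.get(role, "#6c757d"), role) for role in roles),
    )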
@@ -702,9 +697,7 @@ class CompanyAdmin(
|
||||
"""Refresh park count statistics for selected companies."""
|
||||
for company in queryset:
|
||||
company.save(update_fields=["updated_at"])
|
||||
self.message_user(
|
||||
request, f"Successfully updated counts for {queryset.count()} companies."
|
||||
)
|
||||
self.message_user(request, f"Successfully updated counts for {queryset.count()} companies.")
|
||||
|
||||
def get_actions(self, request):
|
||||
"""Add custom actions to the admin."""
|
||||
@@ -840,12 +833,8 @@ class ParkReviewAdmin(QueryOptimizationMixin, ExportActionMixin, BaseModelAdmin)
|
||||
"""Display moderation status with color coding."""
|
||||
if obj.moderated_by:
|
||||
if obj.is_published:
|
||||
return format_html(
|
||||
'<span style="color: green; font-weight: bold;">Approved</span>'
|
||||
)
|
||||
return format_html(
|
||||
'<span style="color: red; font-weight: bold;">Rejected</span>'
|
||||
)
|
||||
return format_html('<span style="color: green; font-weight: bold;">Approved</span>')
|
||||
return format_html('<span style="color: red; font-weight: bold;">Rejected</span>')
|
||||
return format_html('<span style="color: orange;">Pending</span>')
|
||||
|
||||
def save_model(self, request, obj, form, change):
|
||||
|
||||
@@ -22,9 +22,7 @@ class ParksConfig(AppConfig):
|
||||
from apps.parks.models import Park
|
||||
|
||||
# Register FSM transitions for Park
|
||||
apply_state_machine(
|
||||
Park, field_name="status", choice_group="statuses", domain="parks"
|
||||
)
|
||||
apply_state_machine(Park, field_name="status", choice_group="statuses", domain="parks")
|
||||
|
||||
def _register_callbacks(self):
|
||||
"""Register FSM transition callbacks for park models."""
|
||||
@@ -42,31 +40,16 @@ class ParksConfig(AppConfig):
|
||||
from apps.parks.models import Park
|
||||
|
||||
# Cache invalidation for all park status changes
|
||||
register_callback(
|
||||
Park, 'status', '*', '*',
|
||||
ParkCacheInvalidation()
|
||||
)
|
||||
register_callback(Park, "status", "*", "*", ParkCacheInvalidation())
|
||||
|
||||
# API cache invalidation
|
||||
register_callback(
|
||||
Park, 'status', '*', '*',
|
||||
APICacheInvalidation(include_geo_cache=True)
|
||||
)
|
||||
register_callback(Park, "status", "*", "*", APICacheInvalidation(include_geo_cache=True))
|
||||
|
||||
# Search text update
|
||||
register_callback(
|
||||
Park, 'status', '*', '*',
|
||||
SearchTextUpdateCallback()
|
||||
)
|
||||
register_callback(Park, "status", "*", "*", SearchTextUpdateCallback())
|
||||
|
||||
# Notification for significant status changes
register_callback(
Park, 'status', '*', 'CLOSED_PERM',
StatusChangeNotification(notify_admins=True)
)
register_callback(
Park, 'status', '*', 'DEMOLISHED',
StatusChangeNotification(notify_admins=True)
)
register_callback(Park, "status", "*", "CLOSED_PERM", StatusChangeNotification(notify_admins=True))
register_callback(Park, "status", "*", "DEMOLISHED", StatusChangeNotification(notify_admins=True))

logger.debug("Registered park transition callbacks")
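Judging only from the calls above, register_callback appears to accept a model, a field name, source and target state patterns ("*" as a wildcard), and a callback object. The sketch below is written against that inferred interface; the callback's actual base class and call signature in this codebase may differ.

# Hypothetical callback against the inferred register_callback interface.
class LogStatusChange:
    def __call__(self, instance, old_state, new_state, **kwargs):
        # Assumed hook: runs after a Park status transition is applied.
        print(f"{instance} moved from {old_state} to {new_state}")

# Mirrors the wildcard registrations above (commented out because register_callback
# is a project-internal helper):
# register_callback(Park, "status", "*", "*", LogStatusChange())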
@@ -15,101 +15,101 @@ PARK_STATUSES = [
|
||||
label="Operating",
|
||||
description="Park is currently open and operating normally",
|
||||
metadata={
|
||||
'color': 'green',
|
||||
'icon': 'check-circle',
|
||||
'css_class': 'bg-green-100 text-green-800',
|
||||
'sort_order': 1,
|
||||
'can_transition_to': [
|
||||
'CLOSED_TEMP',
|
||||
'CLOSED_PERM',
|
||||
"color": "green",
|
||||
"icon": "check-circle",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 1,
|
||||
"can_transition_to": [
|
||||
"CLOSED_TEMP",
|
||||
"CLOSED_PERM",
|
||||
],
|
||||
'requires_moderator': False,
|
||||
'is_final': False,
|
||||
'is_initial': True,
|
||||
"requires_moderator": False,
|
||||
"is_final": False,
|
||||
"is_initial": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="CLOSED_TEMP",
|
||||
label="Temporarily Closed",
|
||||
description="Park is temporarily closed for maintenance, weather, or seasonal reasons",
|
||||
metadata={
|
||||
'color': 'yellow',
|
||||
'icon': 'pause-circle',
|
||||
'css_class': 'bg-yellow-100 text-yellow-800',
|
||||
'sort_order': 2,
|
||||
'can_transition_to': [
|
||||
'CLOSED_PERM',
|
||||
"color": "yellow",
|
||||
"icon": "pause-circle",
|
||||
"css_class": "bg-yellow-100 text-yellow-800",
|
||||
"sort_order": 2,
|
||||
"can_transition_to": [
|
||||
"CLOSED_PERM",
|
||||
],
|
||||
'requires_moderator': False,
|
||||
'is_final': False,
|
||||
"requires_moderator": False,
|
||||
"is_final": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="CLOSED_PERM",
|
||||
label="Permanently Closed",
|
||||
description="Park has been permanently closed and will not reopen",
|
||||
metadata={
|
||||
'color': 'red',
|
||||
'icon': 'x-circle',
|
||||
'css_class': 'bg-red-100 text-red-800',
|
||||
'sort_order': 3,
|
||||
'can_transition_to': [
|
||||
'DEMOLISHED',
|
||||
'RELOCATED',
|
||||
"color": "red",
|
||||
"icon": "x-circle",
|
||||
"css_class": "bg-red-100 text-red-800",
|
||||
"sort_order": 3,
|
||||
"can_transition_to": [
|
||||
"DEMOLISHED",
|
||||
"RELOCATED",
|
||||
],
|
||||
'requires_moderator': True,
|
||||
'is_final': False,
|
||||
"requires_moderator": True,
|
||||
"is_final": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="UNDER_CONSTRUCTION",
|
||||
label="Under Construction",
|
||||
description="Park is currently being built or undergoing major renovation",
|
||||
metadata={
|
||||
'color': 'blue',
|
||||
'icon': 'tool',
|
||||
'css_class': 'bg-blue-100 text-blue-800',
|
||||
'sort_order': 4,
|
||||
'can_transition_to': [
|
||||
'OPERATING',
|
||||
"color": "blue",
|
||||
"icon": "tool",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 4,
|
||||
"can_transition_to": [
|
||||
"OPERATING",
|
||||
],
|
||||
'requires_moderator': False,
|
||||
'is_final': False,
|
||||
"requires_moderator": False,
|
||||
"is_final": False,
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="DEMOLISHED",
|
||||
label="Demolished",
|
||||
description="Park has been completely demolished and removed",
|
||||
metadata={
|
||||
'color': 'gray',
|
||||
'icon': 'trash',
|
||||
'css_class': 'bg-gray-100 text-gray-800',
|
||||
'sort_order': 5,
|
||||
'can_transition_to': [],
|
||||
'requires_moderator': True,
|
||||
'is_final': True,
|
||||
"color": "gray",
|
||||
"icon": "trash",
|
||||
"css_class": "bg-gray-100 text-gray-800",
|
||||
"sort_order": 5,
|
||||
"can_transition_to": [],
|
||||
"requires_moderator": True,
|
||||
"is_final": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
RichChoice(
|
||||
value="RELOCATED",
|
||||
label="Relocated",
|
||||
description="Park has been moved to a different location",
|
||||
metadata={
|
||||
'color': 'purple',
|
||||
'icon': 'arrow-right',
|
||||
'css_class': 'bg-purple-100 text-purple-800',
|
||||
'sort_order': 6,
|
||||
'can_transition_to': [],
|
||||
'requires_moderator': True,
|
||||
'is_final': True,
|
||||
"color": "purple",
|
||||
"icon": "arrow-right",
|
||||
"css_class": "bg-purple-100 text-purple-800",
|
||||
"sort_order": 6,
|
||||
"can_transition_to": [],
|
||||
"requires_moderator": True,
|
||||
"is_final": True,
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
category=ChoiceCategory.STATUS,
|
||||
),
|
||||
]
|
||||
|
||||
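Each status above carries a can_transition_to list in its metadata, so a proposed transition can be validated from a plain mapping distilled from those values. The helper below is illustrative and only restates the metadata shown above.

# Allowed transitions, taken directly from the can_transition_to metadata above.
PARK_STATUS_TRANSITIONS = {
    "OPERATING": {"CLOSED_TEMP", "CLOSED_PERM"},
    "CLOSED_TEMP": {"CLOSED_PERM"},
    "CLOSED_PERM": {"DEMOLISHED", "RELOCATED"},
    "UNDER_CONSTRUCTION": {"OPERATING"},
    "DEMOLISHED": set(),  # final state
    "RELOCATED": set(),   # final state
}

def can_transition(current: str, target: str) -> bool:
    """Return True if the status metadata allows moving from current to target."""
    return target in PARK_STATUS_TRANSITIONS.get(current, set())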
@@ -119,133 +119,88 @@ PARK_TYPES = [
|
||||
value="THEME_PARK",
|
||||
label="Theme Park",
|
||||
description="Large-scale amusement park with themed areas and attractions",
|
||||
metadata={
|
||||
'color': 'red',
|
||||
'icon': 'castle',
|
||||
'css_class': 'bg-red-100 text-red-800',
|
||||
'sort_order': 1
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "red", "icon": "castle", "css_class": "bg-red-100 text-red-800", "sort_order": 1},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="AMUSEMENT_PARK",
|
||||
label="Amusement Park",
|
||||
description="Traditional amusement park with rides and games",
|
||||
metadata={
|
||||
'color': 'blue',
|
||||
'icon': 'ferris-wheel',
|
||||
'css_class': 'bg-blue-100 text-blue-800',
|
||||
'sort_order': 2
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "blue", "icon": "ferris-wheel", "css_class": "bg-blue-100 text-blue-800", "sort_order": 2},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="WATER_PARK",
|
||||
label="Water Park",
|
||||
description="Park featuring water-based attractions and activities",
|
||||
metadata={
|
||||
'color': 'cyan',
|
||||
'icon': 'water',
|
||||
'css_class': 'bg-cyan-100 text-cyan-800',
|
||||
'sort_order': 3
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "cyan", "icon": "water", "css_class": "bg-cyan-100 text-cyan-800", "sort_order": 3},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="FAMILY_ENTERTAINMENT_CENTER",
|
||||
label="Family Entertainment Center",
|
||||
description="Indoor entertainment facility with games and family attractions",
|
||||
metadata={
|
||||
'color': 'green',
|
||||
'icon': 'family',
|
||||
'css_class': 'bg-green-100 text-green-800',
|
||||
'sort_order': 4
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "green", "icon": "family", "css_class": "bg-green-100 text-green-800", "sort_order": 4},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="CARNIVAL",
|
||||
label="Carnival",
|
||||
description="Traveling amusement show with rides, games, and entertainment",
|
||||
metadata={
|
||||
'color': 'yellow',
|
||||
'icon': 'carnival',
|
||||
'css_class': 'bg-yellow-100 text-yellow-800',
|
||||
'sort_order': 5
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "yellow", "icon": "carnival", "css_class": "bg-yellow-100 text-yellow-800", "sort_order": 5},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="FAIR",
|
||||
label="Fair",
|
||||
description="Temporary event featuring rides, games, and agricultural exhibits",
|
||||
metadata={
|
||||
'color': 'orange',
|
||||
'icon': 'fair',
|
||||
'css_class': 'bg-orange-100 text-orange-800',
|
||||
'sort_order': 6
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "orange", "icon": "fair", "css_class": "bg-orange-100 text-orange-800", "sort_order": 6},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="PIER",
|
||||
label="Pier",
|
||||
description="Seaside entertainment pier with rides and attractions",
|
||||
metadata={
|
||||
'color': 'teal',
|
||||
'icon': 'pier',
|
||||
'css_class': 'bg-teal-100 text-teal-800',
|
||||
'sort_order': 7
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "teal", "icon": "pier", "css_class": "bg-teal-100 text-teal-800", "sort_order": 7},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="BOARDWALK",
|
||||
label="Boardwalk",
|
||||
description="Waterfront entertainment area with rides and attractions",
|
||||
metadata={
|
||||
'color': 'indigo',
|
||||
'icon': 'boardwalk',
|
||||
'css_class': 'bg-indigo-100 text-indigo-800',
|
||||
'sort_order': 8
|
||||
"color": "indigo",
|
||||
"icon": "boardwalk",
|
||||
"css_class": "bg-indigo-100 text-indigo-800",
|
||||
"sort_order": 8,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="SAFARI_PARK",
|
||||
label="Safari Park",
|
||||
description="Wildlife park with drive-through animal experiences",
|
||||
metadata={
|
||||
'color': 'emerald',
|
||||
'icon': 'safari',
|
||||
'css_class': 'bg-emerald-100 text-emerald-800',
|
||||
'sort_order': 9
|
||||
"color": "emerald",
|
||||
"icon": "safari",
|
||||
"css_class": "bg-emerald-100 text-emerald-800",
|
||||
"sort_order": 9,
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="ZOO",
|
||||
label="Zoo",
|
||||
description="Zoological park with animal exhibits and educational programs",
|
||||
metadata={
|
||||
'color': 'lime',
|
||||
'icon': 'zoo',
|
||||
'css_class': 'bg-lime-100 text-lime-800',
|
||||
'sort_order': 10
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "lime", "icon": "zoo", "css_class": "bg-lime-100 text-lime-800", "sort_order": 10},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="OTHER",
|
||||
label="Other",
|
||||
description="Park type that doesn't fit into standard categories",
|
||||
metadata={
|
||||
'color': 'gray',
|
||||
'icon': 'other',
|
||||
'css_class': 'bg-gray-100 text-gray-800',
|
||||
'sort_order': 11
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
metadata={"color": "gray", "icon": "other", "css_class": "bg-gray-100 text-gray-800", "sort_order": 11},
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -256,30 +211,30 @@ PARKS_COMPANY_ROLES = [
|
||||
label="Park Operator",
|
||||
description="Company that operates and manages theme parks and amusement facilities",
|
||||
metadata={
|
||||
'color': 'blue',
|
||||
'icon': 'building-office',
|
||||
'css_class': 'bg-blue-100 text-blue-800',
|
||||
'sort_order': 1,
|
||||
'domain': 'parks',
|
||||
'permissions': ['manage_parks', 'view_operations'],
|
||||
'url_pattern': '/parks/operators/{slug}/'
|
||||
"color": "blue",
|
||||
"icon": "building-office",
|
||||
"css_class": "bg-blue-100 text-blue-800",
|
||||
"sort_order": 1,
|
||||
"domain": "parks",
|
||||
"permissions": ["manage_parks", "view_operations"],
|
||||
"url_pattern": "/parks/operators/{slug}/",
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
RichChoice(
|
||||
value="PROPERTY_OWNER",
|
||||
label="Property Owner",
|
||||
description="Company that owns the land and property where parks are located",
|
||||
metadata={
|
||||
'color': 'green',
|
||||
'icon': 'home',
|
||||
'css_class': 'bg-green-100 text-green-800',
|
||||
'sort_order': 2,
|
||||
'domain': 'parks',
|
||||
'permissions': ['manage_property', 'view_ownership'],
|
||||
'url_pattern': '/parks/owners/{slug}/'
|
||||
"color": "green",
|
||||
"icon": "home",
|
||||
"css_class": "bg-green-100 text-green-800",
|
||||
"sort_order": 2,
|
||||
"domain": "parks",
|
||||
"permissions": ["manage_property", "view_ownership"],
|
||||
"url_pattern": "/parks/owners/{slug}/",
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
category=ChoiceCategory.CLASSIFICATION,
|
||||
),
|
||||
]
|
||||
|
||||
@@ -292,7 +247,7 @@ def register_parks_choices():
|
||||
choices=PARK_STATUSES,
|
||||
domain="parks",
|
||||
description="Park operational status options",
|
||||
metadata={'domain': 'parks', 'type': 'status'}
|
||||
metadata={"domain": "parks", "type": "status"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
@@ -300,7 +255,7 @@ def register_parks_choices():
|
||||
choices=PARK_TYPES,
|
||||
domain="parks",
|
||||
description="Park type and category classifications",
|
||||
metadata={'domain': 'parks', 'type': 'park_type'}
|
||||
metadata={"domain": "parks", "type": "park_type"},
|
||||
)
|
||||
|
||||
register_choices(
|
||||
@@ -308,7 +263,7 @@ def register_parks_choices():
|
||||
choices=PARKS_COMPANY_ROLES,
|
||||
domain="parks",
|
||||
description="Company role classifications for parks domain (OPERATOR and PROPERTY_OWNER only)",
|
||||
metadata={'domain': 'parks', 'type': 'company_role'}
|
||||
metadata={"domain": "parks", "type": "company_role"},
|
||||
)
|
||||
|
||||
|
||||
|
||||
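The registrations above feed RichChoice lists into a project-specific registry. Assuming each RichChoice exposes value and label attributes, as its constructor arguments suggest, collapsing a list into the (value, label) pairs a Django field expects is a one-liner; the helper name is illustrative and the repository may already provide an equivalent.

def as_field_choices(rich_choices):
    """Collapse RichChoice entries into the (value, label) pairs Django fields expect."""
    return [(choice.value, choice.label) for choice in rich_choices]

# e.g. models.CharField(max_length=30, choices=as_field_choices(PARK_STATUSES))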
@@ -29,7 +29,7 @@ def validate_positive_integer(value):
raise ValidationError(_("Value must be a positive integer"))
return int(value)
except (TypeError, ValueError):
raise ValidationError(_("Invalid number format"))
raise ValidationError(_("Invalid number format")) from None


class ParkFilter(FilterSet):
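The validator change above appends from None so the original TypeError or ValueError is not chained onto the ValidationError. A small standalone illustration of the effect:

from django.core.exceptions import ValidationError

def parse_positive(value):
    try:
        number = int(value)
    except (TypeError, ValueError):
        # from None suppresses implicit exception chaining, so callers see only the
        # ValidationError instead of a "During handling of the above exception" trace.
        raise ValidationError("Invalid number format") from None
    if number <= 0:
        raise ValidationError("Value must be a positive integer")
    return number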
@@ -341,9 +341,7 @@ class ParkFilter(FilterSet):
if value:
return queryset.filter(coaster_count__gt=0)
else:
return queryset.filter(
models.Q(coaster_count__isnull=True) | models.Q(coaster_count=0)
)
return queryset.filter(models.Q(coaster_count__isnull=True) | models.Q(coaster_count=0))

def filter_min_rating(self, queryset, name, value):
"""Filter parks by minimum rating"""
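The has_coasters filter above treats a NULL coaster_count the same as zero by OR-ing two Q objects. A self-contained django-filter sketch of that pattern follows; the filter and field names are illustrative.

from django.db import models
import django_filters

class ParkFilter(django_filters.FilterSet):
    has_coasters = django_filters.BooleanFilter(method="filter_has_coasters")

    def filter_has_coasters(self, queryset, name, value):
        if value:
            return queryset.filter(coaster_count__gt=0)
        # NULL and 0 both mean "no coasters", so match either side of the OR.
        return queryset.filter(models.Q(coaster_count__isnull=True) | models.Q(coaster_count=0))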
@@ -256,9 +256,7 @@ class ParkForm(forms.ModelForm):

# Validate range
if latitude < -90 or latitude > 90:
raise forms.ValidationError(
"Latitude must be between -90 and 90 degrees."
)
raise forms.ValidationError("Latitude must be between -90 and 90 degrees.")

# Convert to string to preserve exact decimal places
return str(latitude)
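The range check above lives in a form clean hook. A minimal standalone form showing the same clean_latitude flow, under the assumption that the field arrives as a decimal; the form and field definitions here are illustrative.

from django import forms

class ParkLocationForm(forms.Form):
    latitude = forms.DecimalField(required=False)

    def clean_latitude(self):
        latitude = self.cleaned_data.get("latitude")
        if latitude is None:
            return latitude
        if latitude < -90 or latitude > 90:
            raise forms.ValidationError("Latitude must be between -90 and 90 degrees.")
        # Returned as a string to preserve the exact decimal places entered.
        return str(latitude)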
@@ -277,9 +275,7 @@ class ParkForm(forms.ModelForm):
|
||||
|
||||
# Validate range
|
||||
if longitude < -180 or longitude > 180:
|
||||
raise forms.ValidationError(
|
||||
"Longitude must be between -180 and 180 degrees."
|
||||
)
|
||||
raise forms.ValidationError("Longitude must be between -180 and 180 degrees.")
|
||||
|
||||
# Convert to string to preserve exact decimal places
|
||||
return str(longitude)
|
||||
@@ -314,7 +310,7 @@ class ParkForm(forms.ModelForm):
|
||||
setattr(park_location, key, value)
|
||||
|
||||
# Handle coordinates if provided
|
||||
if "latitude" in location_data and "longitude" in location_data:
|
||||
if "latitude" in location_data and "longitude" in location_data: # noqa: SIM102
|
||||
if location_data["latitude"] and location_data["longitude"]:
|
||||
park_location.set_coordinates(
|
||||
float(location_data["latitude"]),
|
||||
@@ -324,7 +320,7 @@ class ParkForm(forms.ModelForm):
|
||||
except ParkLocation.DoesNotExist:
|
||||
# Create new ParkLocation
|
||||
coordinates_data = {}
|
||||
if "latitude" in location_data and "longitude" in location_data:
|
||||
if "latitude" in location_data and "longitude" in location_data: # noqa: SIM102
|
||||
if location_data["latitude"] and location_data["longitude"]:
|
||||
coordinates_data = {
|
||||
"latitude": float(location_data["latitude"]),
|
||||
@@ -332,19 +328,13 @@ class ParkForm(forms.ModelForm):
|
||||
}
|
||||
|
||||
# Remove coordinate fields from location_data for creation
|
||||
creation_data = {
|
||||
k: v
|
||||
for k, v in location_data.items()
|
||||
if k not in ["latitude", "longitude"]
|
||||
}
|
||||
creation_data = {k: v for k, v in location_data.items() if k not in ["latitude", "longitude"]}
|
||||
creation_data.setdefault("country", "USA")
|
||||
|
||||
park_location = ParkLocation.objects.create(park=park, **creation_data)
|
||||
|
||||
if coordinates_data:
|
||||
park_location.set_coordinates(
|
||||
coordinates_data["latitude"], coordinates_data["longitude"]
|
||||
)
|
||||
park_location.set_coordinates(coordinates_data["latitude"], coordinates_data["longitude"])
|
||||
park_location.save()
|
||||
|
||||
if commit:
|
||||
|
||||
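set_coordinates is a helper specific to this codebase; from the GeoDjango usage elsewhere in the diff (Point, srid=4326, a point field on ParkLocation) it presumably builds a GEOS point. The function below is an assumption in that spirit, not the repository's actual method.

from django.contrib.gis.geos import Point

def make_point(latitude, longitude):
    # GEOS points take (x, y) = (longitude, latitude); WGS 84 is SRID 4326.
    return Point(float(longitude), float(latitude), srid=4326)

# A set_coordinates helper along these lines would assign the result to the
# location's point field before save(), e.g.
#     park_location.point = make_point(lat, lon)
#     park_location.save()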
@@ -27,9 +27,7 @@ class Command(BaseCommand):
|
||||
self.create_park_areas()
|
||||
self.create_reviews()
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully created comprehensive sample data!")
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS("Successfully created comprehensive sample data!"))
|
||||
self.print_summary()
|
||||
|
||||
except Exception as e:
|
||||
@@ -101,13 +99,9 @@ class Command(BaseCommand):
|
||||
]
|
||||
|
||||
for company_data in park_operators_data:
|
||||
company, created = ParkCompany.objects.get_or_create(
|
||||
slug=company_data["slug"], defaults=company_data
|
||||
)
|
||||
company, created = ParkCompany.objects.get_or_create(slug=company_data["slug"], defaults=company_data)
|
||||
self.created_companies[company.slug] = company
|
||||
self.stdout.write(
|
||||
f" {'Created' if created else 'Found'} park company: {company.name}"
|
||||
)
|
||||
self.stdout.write(f" {'Created' if created else 'Found'} park company: {company.name}")
|
||||
|
||||
# Ride manufacturers and designers (using rides.models.Company)
|
||||
ride_companies_data = [
|
||||
@@ -194,13 +188,9 @@ class Command(BaseCommand):
|
||||
]
|
||||
|
||||
for company_data in ride_companies_data:
|
||||
company, created = RideCompany.objects.get_or_create(
|
||||
slug=company_data["slug"], defaults=company_data
|
||||
)
|
||||
company, created = RideCompany.objects.get_or_create(slug=company_data["slug"], defaults=company_data)
|
||||
self.created_companies[company.slug] = company
|
||||
self.stdout.write(
|
||||
f" {'Created' if created else 'Found'} ride company: {company.name}"
|
||||
)
|
||||
self.stdout.write(f" {'Created' if created else 'Found'} ride company: {company.name}")
|
||||
|
||||
def create_parks(self):
|
||||
"""Create parks with proper operator relationships."""
|
||||
|
||||
@@ -31,6 +31,4 @@ class Command(BaseCommand):
|
||||
"""
|
||||
)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully fixed migration history")
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS("Successfully fixed migration history"))
|
||||
|
||||
@@ -50,13 +50,9 @@ class Command(BaseCommand):
|
||||
|
||||
companies = {}
|
||||
for company_data in companies_data:
|
||||
operator, created = Operator.objects.get_or_create(
|
||||
name=company_data["name"], defaults=company_data
|
||||
)
|
||||
operator, created = Operator.objects.get_or_create(name=company_data["name"], defaults=company_data)
|
||||
companies[operator.name] = operator
|
||||
self.stdout.write(
|
||||
f"{'Created' if created else 'Found'} company: {operator.name}"
|
||||
)
|
||||
self.stdout.write(f"{'Created' if created else 'Found'} company: {operator.name}")
|
||||
|
||||
# Create parks with their locations
|
||||
parks_data = [
|
||||
@@ -317,9 +313,7 @@ class Command(BaseCommand):
|
||||
postal_code=loc_data["postal_code"],
|
||||
)
|
||||
# Set coordinates using the helper method
|
||||
park_location.set_coordinates(
|
||||
loc_data["latitude"], loc_data["longitude"]
|
||||
)
|
||||
park_location.set_coordinates(loc_data["latitude"], loc_data["longitude"])
|
||||
park_location.save()
|
||||
|
||||
# Create areas for park
|
||||
@@ -329,8 +323,6 @@ class Command(BaseCommand):
|
||||
park=park,
|
||||
defaults={"description": area_data["description"]},
|
||||
)
|
||||
self.stdout.write(
|
||||
f"{'Created' if created else 'Found'} area: {area.name} in {park.name}"
|
||||
)
|
||||
self.stdout.write(f"{'Created' if created else 'Found'} area: {area.name} in {park.name}")
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Successfully seeded initial park data"))
|
||||
|
||||
@@ -43,19 +43,13 @@ class Command(BaseCommand):
|
||||
# Log what will be deleted
|
||||
self.stdout.write(f" Found {park_review_count} park reviews to delete")
|
||||
self.stdout.write(f" Found {ride_review_count} ride reviews to delete")
|
||||
self.stdout.write(
|
||||
f" Found {rollercoaster_stats_count} roller coaster stats to delete"
|
||||
)
|
||||
self.stdout.write(f" Found {rollercoaster_stats_count} roller coaster stats to delete")
|
||||
self.stdout.write(f" Found {ride_count} rides to delete")
|
||||
self.stdout.write(f" Found {ride_model_count} ride models to delete")
|
||||
self.stdout.write(f" Found {park_area_count} park areas to delete")
|
||||
self.stdout.write(
|
||||
f" Found {park_location_count} park locations to delete"
|
||||
)
|
||||
self.stdout.write(f" Found {park_location_count} park locations to delete")
|
||||
self.stdout.write(f" Found {park_count} parks to delete")
|
||||
self.stdout.write(
|
||||
f" Found {ride_company_count} ride companies to delete"
|
||||
)
|
||||
self.stdout.write(f" Found {ride_company_count} ride companies to delete")
|
||||
self.stdout.write(f" Found {company_count} park companies to delete")
|
||||
self.stdout.write(f" Found {test_user_count} test users to delete")
|
||||
|
||||
@@ -72,9 +66,7 @@ class Command(BaseCommand):
|
||||
# Roller coaster stats (references Ride)
|
||||
if rollercoaster_stats_count > 0:
|
||||
RollerCoasterStats.objects.all().delete()
|
||||
self.stdout.write(
|
||||
f" Deleted {rollercoaster_stats_count} roller coaster stats"
|
||||
)
|
||||
self.stdout.write(f" Deleted {rollercoaster_stats_count} roller coaster stats")
|
||||
|
||||
# Rides (references Park, RideCompany, RideModel)
|
||||
if ride_count > 0:
|
||||
@@ -116,18 +108,14 @@ class Command(BaseCommand):
|
||||
User.objects.filter(username="testuser").delete()
|
||||
self.stdout.write(f" Deleted {test_user_count} test users")
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully cleaned up existing sample data!")
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS("Successfully cleaned up existing sample data!"))
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error during data cleanup: {str(e)}",
|
||||
exc_info=True,
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f"Failed to clean up existing data: {str(e)}")
|
||||
)
|
||||
self.stdout.write(self.style.ERROR(f"Failed to clean up existing data: {str(e)}"))
|
||||
raise
|
||||
|
||||
def handle(self, *args, **options):
|
||||
@@ -137,9 +125,7 @@ class Command(BaseCommand):
|
||||
# Check if required tables exist
|
||||
if not self.check_required_tables():
|
||||
self.stdout.write(
|
||||
self.style.ERROR(
|
||||
"Required database tables are missing. Please run migrations first."
|
||||
)
|
||||
self.style.ERROR("Required database tables are missing. Please run migrations first.")
|
||||
)
|
||||
return
|
||||
|
||||
@@ -163,17 +149,11 @@ class Command(BaseCommand):
|
||||
# Add sample reviews for testing
|
||||
self.create_reviews()
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully created comprehensive sample data!")
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS("Successfully created comprehensive sample data!"))
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error during sample data creation: {str(e)}", exc_info=True
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f"Failed to create sample data: {str(e)}")
|
||||
)
|
||||
self.logger.error(f"Error during sample data creation: {str(e)}", exc_info=True)
|
||||
self.stdout.write(self.style.ERROR(f"Failed to create sample data: {str(e)}"))
|
||||
raise
|
||||
|
||||
def check_required_tables(self):
|
||||
@@ -202,11 +182,7 @@ class Command(BaseCommand):
|
||||
missing_tables.append(model._meta.label)
|
||||
|
||||
if missing_tables:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f"Missing tables for models: {', '.join(missing_tables)}"
|
||||
)
|
||||
)
|
||||
self.stdout.write(self.style.WARNING(f"Missing tables for models: {', '.join(missing_tables)}"))
|
||||
return False
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("All required tables exist."))
|
||||
@@ -357,9 +333,7 @@ class Command(BaseCommand):
|
||||
}"
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error creating park company {data['name']}: {str(e)}"
|
||||
)
|
||||
self.logger.error(f"Error creating park company {data['name']}: {str(e)}")
|
||||
raise
|
||||
|
||||
# Create companies in rides app (for manufacturers and designers)
|
||||
@@ -382,9 +356,7 @@ class Command(BaseCommand):
|
||||
}"
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error creating ride company {data['name']}: {str(e)}"
|
||||
)
|
||||
self.logger.error(f"Error creating ride company {data['name']}: {str(e)}")
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
@@ -512,9 +484,7 @@ class Command(BaseCommand):
|
||||
try:
|
||||
operator = self.park_companies[park_data["operator"]]
|
||||
property_owner = (
|
||||
self.park_companies.get(park_data["property_owner"])
|
||||
if park_data["property_owner"]
|
||||
else None
|
||||
self.park_companies.get(park_data["property_owner"]) if park_data["property_owner"] else None
|
||||
)
|
||||
|
||||
park, created = Park.objects.get_or_create(
|
||||
@@ -530,9 +500,7 @@ class Command(BaseCommand):
|
||||
},
|
||||
)
|
||||
self.parks[park_data["name"]] = park
|
||||
self.stdout.write(
|
||||
f" {'Created' if created else 'Found'} park: {park.name}"
|
||||
)
|
||||
self.stdout.write(f" {'Created' if created else 'Found'} park: {park.name}")
|
||||
|
||||
# Create location for park
|
||||
if created:
|
||||
@@ -547,9 +515,7 @@ class Command(BaseCommand):
|
||||
postal_code=loc_data["postal_code"],
|
||||
)
|
||||
# Set coordinates using the helper method
|
||||
park_location.set_coordinates(
|
||||
loc_data["latitude"], loc_data["longitude"]
|
||||
)
|
||||
park_location.set_coordinates(loc_data["latitude"], loc_data["longitude"])
|
||||
park_location.save()
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
@@ -560,9 +526,7 @@ class Command(BaseCommand):
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error creating park {park_data['name']}: {str(e)}"
|
||||
)
|
||||
self.logger.error(f"Error creating park {park_data['name']}: {str(e)}")
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
@@ -633,9 +597,7 @@ class Command(BaseCommand):
|
||||
}"
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error creating ride model {model_data['name']}: {str(e)}"
|
||||
)
|
||||
self.logger.error(f"Error creating ride model {model_data['name']}: {str(e)}")
|
||||
raise
|
||||
|
||||
# Create rides
|
||||
@@ -834,9 +796,7 @@ class Command(BaseCommand):
|
||||
for ride_data in rides_data:
|
||||
try:
|
||||
park = self.parks[ride_data["park"]]
|
||||
manufacturer = self.ride_companies.get(
|
||||
ride_data.get("manufacturer")
|
||||
)
|
||||
manufacturer = self.ride_companies.get(ride_data.get("manufacturer"))
|
||||
designer = self.ride_companies.get(ride_data.get("designer"))
|
||||
ride_model = self.ride_models.get(ride_data.get("ride_model"))
|
||||
|
||||
@@ -854,9 +814,7 @@ class Command(BaseCommand):
|
||||
},
|
||||
)
|
||||
self.rides[ride_data["name"]] = ride
|
||||
self.stdout.write(
|
||||
f" {'Created' if created else 'Found'} ride: {ride.name}"
|
||||
)
|
||||
self.stdout.write(f" {'Created' if created else 'Found'} ride: {ride.name}")
|
||||
|
||||
# Create roller coaster stats if provided
|
||||
if created and "coaster_stats" in ride_data:
|
||||
@@ -872,9 +830,7 @@ class Command(BaseCommand):
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error creating ride {ride_data['name']}: {str(e)}"
|
||||
)
|
||||
self.logger.error(f"Error creating ride {ride_data['name']}: {str(e)}")
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
@@ -1011,9 +967,7 @@ class Command(BaseCommand):
|
||||
} in {park.name}"
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(
|
||||
f"Error creating areas for park {area_group['park']}: {str(e)}"
|
||||
)
|
||||
self.logger.error(f"Error creating areas for park {area_group['park']}: {str(e)}")
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@@ -85,9 +85,7 @@ class Command(BaseCommand):
|
||||
"country": "USA",
|
||||
},
|
||||
)
|
||||
location2.set_coordinates(
|
||||
34.4244, -118.5971
|
||||
) # Six Flags Magic Mountain coordinates
|
||||
location2.set_coordinates(34.4244, -118.5971) # Six Flags Magic Mountain coordinates
|
||||
location2.save()
|
||||
|
||||
# Test distance calculation
|
||||
@@ -107,9 +105,7 @@ class Command(BaseCommand):
# Find parks within 100km of a point
# Same as Disneyland
search_point = Point(-117.9190, 33.8121, srid=4326)
nearby_locations = ParkLocation.objects.filter(
point__distance_lte=(search_point, D(km=100))
)
nearby_locations = ParkLocation.objects.filter(point__distance_lte=(search_point, D(km=100)))
self.stdout.write(f" Found {nearby_locations.count()} parks within 100km")
for loc in nearby_locations:
self.stdout.write(f" - {loc.park.name} in {loc.city}, {loc.state}")
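The distance_lte lookup above is the standard GeoDjango radius filter. Adding a Distance annotation makes it easy to order results nearest-first; the ParkLocation import path is assumed to match the apps.parks.models imports used elsewhere in this diff.

from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D

from apps.parks.models import ParkLocation  # assumed import path

search_point = Point(-117.9190, 33.8121, srid=4326)  # same point as the command above

nearby = (
    ParkLocation.objects.filter(point__distance_lte=(search_point, D(km=100)))
    .annotate(distance=Distance("point", search_point))
    .order_by("distance")
)
for loc in nearby:
    print(loc.park.name, round(loc.distance.km, 1), "km")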
@@ -20,11 +20,7 @@ class Command(BaseCommand):
|
||||
total_coasters = park.rides.filter(operating_rides, category="RC").count()
|
||||
|
||||
# Update park counts
|
||||
Park.objects.filter(id=park.id).update(
|
||||
total_rides=total_rides, total_roller_coasters=total_coasters
|
||||
)
|
||||
Park.objects.filter(id=park.id).update(total_rides=total_rides, total_roller_coasters=total_coasters)
|
||||
updated += 1
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f"Successfully updated counts for {updated} parks")
|
||||
)
|
||||
self.stdout.write(self.style.SUCCESS(f"Successfully updated counts for {updated} parks"))
|
||||
|
||||
@@ -30,23 +30,15 @@ class ParkQuerySet(StatusQuerySet, ReviewableQuerySet, LocationQuerySet):
distinct=True,
),
area_count=Count("areas", distinct=True),
review_count=Count(
"reviews", filter=Q(reviews__is_published=True), distinct=True
),
average_rating_calculated=Avg(
"reviews__rating", filter=Q(reviews__is_published=True)
),
review_count=Count("reviews", filter=Q(reviews__is_published=True), distinct=True),
average_rating_calculated=Avg("reviews__rating", filter=Q(reviews__is_published=True)),
latest_ride_opening=Max("rides__opening_date"),
oldest_ride_opening=Min("rides__opening_date"),
)

def optimized_for_list(self):
"""Optimize for park list display."""
return (
self.select_related("operator", "property_owner")
.prefetch_related("location")
.with_complete_stats()
)
return self.select_related("operator", "property_owner").prefetch_related("location").with_complete_stats()

def optimized_for_detail(self):
"""Optimize for park detail display."""
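The annotations above use filtered aggregates (Count and Avg with filter=Q(...)) so only published reviews feed the statistics, and distinct=True keeps the multi-join from inflating counts. A compact standalone equivalent, assuming the related names shown (reviews, areas):

from django.db.models import Avg, Count, Q

from apps.parks.models import Park  # import path as used elsewhere in this diff

published = Q(reviews__is_published=True)

parks = Park.objects.annotate(
    area_count=Count("areas", distinct=True),
    review_count=Count("reviews", filter=published, distinct=True),
    average_rating_calculated=Avg("reviews__rating", filter=published),
)
for park in parks[:5]:
    print(park.name, park.review_count, park.average_rating_calculated)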
@@ -59,9 +51,9 @@ class ParkQuerySet(StatusQuerySet, ReviewableQuerySet, LocationQuerySet):
|
||||
"areas",
|
||||
Prefetch(
|
||||
"rides",
|
||||
queryset=Ride.objects.select_related(
|
||||
"manufacturer", "designer", "ride_model", "park_area"
|
||||
).order_by("name"),
|
||||
queryset=Ride.objects.select_related("manufacturer", "designer", "ride_model", "park_area").order_by(
|
||||
"name"
|
||||
),
|
||||
),
|
||||
Prefetch(
|
||||
"reviews",
|
||||
@@ -82,9 +74,7 @@ class ParkQuerySet(StatusQuerySet, ReviewableQuerySet, LocationQuerySet):
|
||||
|
||||
def with_minimum_coasters(self, *, min_coasters: int = 5):
|
||||
"""Filter parks with minimum number of coasters."""
|
||||
return self.with_complete_stats().filter(
|
||||
coaster_count_calculated__gte=min_coasters
|
||||
)
|
||||
return self.with_complete_stats().filter(coaster_count_calculated__gte=min_coasters)
|
||||
|
||||
def large_parks(self, *, min_acres: float = 100.0):
|
||||
"""Filter for large parks."""
|
||||
@@ -123,16 +113,10 @@ class ParkQuerySet(StatusQuerySet, ReviewableQuerySet, LocationQuerySet):
|
||||
"""Optimized search for autocomplete."""
|
||||
return (
|
||||
self.filter(
|
||||
Q(name__icontains=query)
|
||||
| Q(location__city__icontains=query)
|
||||
| Q(location__state__icontains=query)
|
||||
Q(name__icontains=query) | Q(location__city__icontains=query) | Q(location__state__icontains=query)
|
||||
)
|
||||
.select_related("operator", "location")
|
||||
.only(
|
||||
"id", "name", "slug",
|
||||
"location__city", "location__state",
|
||||
"operator__name"
|
||||
)[:limit]
|
||||
.only("id", "name", "slug", "location__city", "location__state", "operator__name")[:limit]
|
||||
)
|
||||
|
||||
def with_location(self):
|
||||
@@ -247,9 +231,7 @@ class ParkReviewManager(BaseManager):
|
||||
return self.get_queryset().for_park(park_id=park_id)
|
||||
|
||||
def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10):
|
||||
return self.get_queryset().by_rating_range(
|
||||
min_rating=min_rating, max_rating=max_rating
|
||||
)
|
||||
return self.get_queryset().by_rating_range(min_rating=min_rating, max_rating=max_rating)
|
||||
|
||||
def moderation_required(self):
|
||||
return self.get_queryset().moderation_required()
|
||||
@@ -275,17 +257,12 @@ class CompanyQuerySet(BaseQuerySet):
|
||||
return self.annotate(
|
||||
operated_parks_count=Count("operated_parks", distinct=True),
|
||||
owned_parks_count=Count("owned_parks", distinct=True),
|
||||
total_parks_involvement=Count("operated_parks", distinct=True)
|
||||
+ Count("owned_parks", distinct=True),
|
||||
total_parks_involvement=Count("operated_parks", distinct=True) + Count("owned_parks", distinct=True),
|
||||
)
|
||||
|
||||
def major_operators(self, *, min_parks: int = 5):
|
||||
"""Filter for major park operators."""
|
||||
return (
|
||||
self.operators()
|
||||
.with_park_counts()
|
||||
.filter(operated_parks_count__gte=min_parks)
|
||||
)
|
||||
return self.operators().with_park_counts().filter(operated_parks_count__gte=min_parks)
|
||||
|
||||
def optimized_for_list(self):
|
||||
"""Optimize for company list display."""
|
||||
@@ -313,7 +290,7 @@ class CompanyManager(BaseManager):
|
||||
self.get_queryset()
|
||||
.manufacturers()
|
||||
.annotate(ride_count=Count("manufactured_rides", distinct=True))
|
||||
.only('id', 'name', 'slug', 'roles', 'description')
|
||||
.only("id", "name", "slug", "roles", "description")
|
||||
.order_by("name")
|
||||
)
|
||||
|
||||
@@ -323,7 +300,7 @@ class CompanyManager(BaseManager):
|
||||
self.get_queryset()
|
||||
.filter(roles__contains=["DESIGNER"])
|
||||
.annotate(ride_count=Count("designed_rides", distinct=True))
|
||||
.only('id', 'name', 'slug', 'roles', 'description')
|
||||
.only("id", "name", "slug", "roles", "description")
|
||||
.order_by("name")
|
||||
)
|
||||
|
||||
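The designers() lookup above relies on roles__contains=["DESIGNER"], which reads like the Postgres ArrayField containment lookup (a JSONField would accept the same query), and the 0023_add_company_roles_gin_index migration named later in the diff is presumably what keeps it fast. A short usage sketch, with the Company import and field type assumed:

# Containment: every listed value must appear in the row's roles array, in any order.
designers = Company.objects.filter(roles__contains=["DESIGNER"])
operators_who_also_own = Company.objects.filter(roles__contains=["OPERATOR", "PROPERTY_OWNER"])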
@@ -333,6 +310,6 @@ class CompanyManager(BaseManager):
|
||||
self.get_queryset()
|
||||
.operators()
|
||||
.with_park_counts()
|
||||
.only('id', 'name', 'slug', 'roles', 'description')
|
||||
.only("id", "name", "slug", "roles", "description")
|
||||
.order_by("name")
|
||||
)
|
||||
|
||||
@@ -102,16 +102,12 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
(
|
||||
"size_acres",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=10, null=True
|
||||
),
|
||||
models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True),
|
||||
),
|
||||
("website", models.URLField(blank=True)),
|
||||
(
|
||||
"average_rating",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=3, null=True
|
||||
),
|
||||
models.DecimalField(blank=True, decimal_places=2, max_digits=3, null=True),
|
||||
),
|
||||
("ride_count", models.IntegerField(blank=True, null=True)),
|
||||
("coaster_count", models.IntegerField(blank=True, null=True)),
|
||||
@@ -266,16 +262,12 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
(
|
||||
"size_acres",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=10, null=True
|
||||
),
|
||||
models.DecimalField(blank=True, decimal_places=2, max_digits=10, null=True),
|
||||
),
|
||||
("website", models.URLField(blank=True)),
|
||||
(
|
||||
"average_rating",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=3, null=True
|
||||
),
|
||||
models.DecimalField(blank=True, decimal_places=2, max_digits=3, null=True),
|
||||
),
|
||||
("ride_count", models.IntegerField(blank=True, null=True)),
|
||||
("coaster_count", models.IntegerField(blank=True, null=True)),
|
||||
@@ -678,9 +670,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="parklocation",
|
||||
index=models.Index(
|
||||
fields=["city", "state"], name="parks_parkl_city_7cc873_idx"
|
||||
),
|
||||
index=models.Index(fields=["city", "state"], name="parks_parkl_city_7cc873_idx"),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="parkreview",
|
||||
|
||||
@@ -35,9 +35,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
(
|
||||
"state_province",
|
||||
models.CharField(
|
||||
blank=True, help_text="State/Province/Region", max_length=100
|
||||
),
|
||||
models.CharField(blank=True, help_text="State/Province/Region", max_length=100),
|
||||
),
|
||||
(
|
||||
"country",
|
||||
@@ -49,9 +47,7 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
(
|
||||
"postal_code",
|
||||
models.CharField(
|
||||
blank=True, help_text="ZIP or postal code", max_length=20
|
||||
),
|
||||
models.CharField(blank=True, help_text="ZIP or postal code", max_length=20),
|
||||
),
|
||||
(
|
||||
"mailing_address",
|
||||
|
||||
@@ -133,21 +133,15 @@ class Migration(migrations.Migration):
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="parkphoto",
|
||||
index=models.Index(
|
||||
fields=["park", "is_primary"], name="parks_parkp_park_id_eda26e_idx"
|
||||
),
|
||||
index=models.Index(fields=["park", "is_primary"], name="parks_parkp_park_id_eda26e_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="parkphoto",
|
||||
index=models.Index(
|
||||
fields=["park", "is_approved"], name="parks_parkp_park_id_5fe576_idx"
|
||||
),
|
||||
index=models.Index(fields=["park", "is_approved"], name="parks_parkp_park_id_5fe576_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="parkphoto",
|
||||
index=models.Index(
|
||||
fields=["created_at"], name="parks_parkp_created_033dc3_idx"
|
||||
),
|
||||
index=models.Index(fields=["created_at"], name="parks_parkp_created_033dc3_idx"),
|
||||
),
|
||||
migrations.AddConstraint(
|
||||
model_name="parkphoto",
|
||||
|
||||
@@ -11,24 +11,29 @@ def populate_computed_fields(apps, schema_editor):
|
||||
|
||||
try:
|
||||
# Use raw SQL to update opening_year from opening_date
|
||||
schema_editor.execute("""
|
||||
schema_editor.execute(
|
||||
"""
|
||||
UPDATE parks_park
|
||||
SET opening_year = EXTRACT(YEAR FROM opening_date)
|
||||
WHERE opening_date IS NOT NULL;
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
# Use raw SQL to populate search_text
|
||||
# This is a simplified version - we'll populate it with just name and description
|
||||
schema_editor.execute("""
|
||||
schema_editor.execute(
|
||||
"""
|
||||
UPDATE parks_park
|
||||
SET search_text = LOWER(
|
||||
COALESCE(name, '') || ' ' ||
|
||||
COALESCE(description, '')
|
||||
);
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
# Update search_text to include operator names using a join
|
||||
schema_editor.execute("""
|
||||
schema_editor.execute(
|
||||
"""
|
||||
UPDATE parks_park
|
||||
SET search_text = LOWER(
|
||||
COALESCE(parks_park.name, '') || ' ' ||
|
||||
@@ -37,7 +42,8 @@ def populate_computed_fields(apps, schema_editor):
|
||||
)
|
||||
FROM parks_company
|
||||
WHERE parks_park.operator_id = parks_company.id;
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
finally:
|
||||
# Re-enable pghistory triggers
|
||||
@@ -46,8 +52,8 @@ def populate_computed_fields(apps, schema_editor):
|
||||
|
||||
def reverse_populate_computed_fields(apps, schema_editor):
|
||||
"""Clear computed fields (reverse operation)"""
|
||||
Park = apps.get_model('parks', 'Park')
|
||||
Park.objects.update(opening_year=None, search_text='')
|
||||
Park = apps.get_model("parks", "Park")
|
||||
Park.objects.update(opening_year=None, search_text="")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
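populate_computed_fields and its reverse above are wired into the migration as a RunPython pair. The sketch below shows the standard wiring with a simplified ORM-based forward step instead of the repository's raw SQL; the dependency entry is a placeholder, not the real migration graph.

from django.db import migrations

def populate_computed_fields(apps, schema_editor):
    # Historical model access keeps the migration decoupled from the current models.
    Park = apps.get_model("parks", "Park")
    for park in Park.objects.filter(opening_date__isnull=False):
        park.opening_year = park.opening_date.year
        park.save(update_fields=["opening_year"])

def reverse_populate_computed_fields(apps, schema_editor):
    Park = apps.get_model("parks", "Park")
    Park.objects.update(opening_year=None, search_text="")

class Migration(migrations.Migration):
    dependencies = [("parks", "0001_initial")]  # placeholder dependency
    operations = [
        migrations.RunPython(populate_computed_fields, reverse_populate_computed_fields),
    ]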
@@ -13,37 +13,34 @@ class Migration(migrations.Migration):
|
||||
# Composite indexes for common filter combinations
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_status_park_type_idx ON parks_park (status, park_type);",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_status_park_type_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_status_park_type_idx;",
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_opening_year_status_idx ON parks_park (opening_year, status) WHERE opening_year IS NOT NULL;",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_opening_year_status_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_opening_year_status_idx;",
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_size_rating_idx ON parks_park (size_acres, average_rating) WHERE size_acres IS NOT NULL AND average_rating IS NOT NULL;",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_size_rating_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_size_rating_idx;",
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_ride_coaster_count_idx ON parks_park (ride_count, coaster_count) WHERE ride_count IS NOT NULL AND coaster_count IS NOT NULL;",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_ride_coaster_count_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_ride_coaster_count_idx;",
|
||||
),
|
||||
|
||||
# Full-text search index for search_text field
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_search_text_gin_idx ON parks_park USING gin(to_tsvector('english', search_text));",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_search_text_gin_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_search_text_gin_idx;",
|
||||
),
|
||||
|
||||
# Trigram index for fuzzy search on search_text
|
||||
migrations.RunSQL(
|
||||
"CREATE EXTENSION IF NOT EXISTS pg_trgm;",
|
||||
reverse_sql="-- Cannot drop extension as it might be used elsewhere"
|
||||
reverse_sql="-- Cannot drop extension as it might be used elsewhere",
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_search_text_trgm_idx ON parks_park USING gin(search_text gin_trgm_ops);",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_search_text_trgm_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_search_text_trgm_idx;",
|
||||
),
|
||||
|
||||
# Indexes for location-based filtering (assuming location relationship exists)
|
||||
migrations.RunSQL(
|
||||
"""
|
||||
@@ -51,27 +48,23 @@ class Migration(migrations.Migration):
|
||||
ON parks_parklocation (country, state)
|
||||
WHERE country IS NOT NULL AND state IS NOT NULL;
|
||||
""",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_parklocation_country_state_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_parklocation_country_state_idx;",
|
||||
),
|
||||
|
||||
# Index for operator-based filtering
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_operator_status_idx ON parks_park (operator_id, status);",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_operator_status_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_operator_status_idx;",
|
||||
),
|
||||
|
||||
# Partial indexes for common status filters
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_operating_parks_idx ON parks_park (name, opening_year) WHERE status IN ('OPERATING', 'CLOSED_TEMP');",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_operating_parks_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_operating_parks_idx;",
|
||||
),
|
||||
|
||||
# Index for ordering by name (already exists but ensuring it's optimized)
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_name_lower_idx ON parks_park (LOWER(name));",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_name_lower_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_name_lower_idx;",
|
||||
),
|
||||
|
||||
# Covering index for common query patterns
|
||||
migrations.RunSQL(
|
||||
"""
|
||||
@@ -80,6 +73,6 @@ class Migration(migrations.Migration):
|
||||
INCLUDE (name, slug, size_acres, average_rating, ride_count, coaster_count, operator_id)
|
||||
WHERE status IN ('OPERATING', 'CLOSED_TEMP');
|
||||
""",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_hybrid_covering_idx;"
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_hybrid_covering_idx;",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -47,6 +47,6 @@ class Migration(migrations.Migration):
|
||||
reverse_sql="""
|
||||
-- This is irreversible, but we can drop and recreate without timezone
|
||||
DROP FUNCTION IF EXISTS pgtrigger_insert_insert_66883() CASCADE;
|
||||
"""
|
||||
""",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -47,6 +47,6 @@ class Migration(migrations.Migration):
|
||||
reverse_sql="""
|
||||
-- This is irreversible, but we can drop and recreate without timezone
|
||||
DROP FUNCTION IF EXISTS pgtrigger_update_update_19f56() CASCADE;
|
||||
"""
|
||||
""",
|
||||
),
|
||||
]
|
||||
|
||||
@@ -14,7 +14,7 @@ from django.db import migrations
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('parks', '0022_alter_company_roles_alter_companyevent_roles'),
|
||||
("parks", "0022_alter_company_roles_alter_companyevent_roles"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
|
||||
@@ -11,16 +11,16 @@ from django.db import migrations, models
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('parks', '0023_add_company_roles_gin_index'),
|
||||
("parks", "0023_add_company_roles_gin_index"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='park',
|
||||
name='timezone',
|
||||
model_name="park",
|
||||
name="timezone",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
default='UTC',
|
||||
default="UTC",
|
||||
help_text="Timezone identifier for park operations (e.g., 'America/New_York')",
|
||||
max_length=50,
|
||||
),
|
||||
|
||||
@@ -61,16 +61,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="description",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Detailed company description"
|
||||
),
|
||||
field=models.TextField(blank=True, help_text="Detailed company description"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="founded_year",
|
||||
field=models.PositiveIntegerField(
|
||||
blank=True, help_text="Year the company was founded", null=True
|
||||
),
|
||||
field=models.PositiveIntegerField(blank=True, help_text="Year the company was founded", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
@@ -80,16 +76,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="parks_count",
|
||||
field=models.IntegerField(
|
||||
default=0, help_text="Number of parks operated (auto-calculated)"
|
||||
),
|
||||
field=models.IntegerField(default=0, help_text="Number of parks operated (auto-calculated)"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="rides_count",
|
||||
field=models.IntegerField(
|
||||
default=0, help_text="Number of rides manufactured (auto-calculated)"
|
||||
),
|
||||
field=models.IntegerField(default=0, help_text="Number of rides manufactured (auto-calculated)"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
@@ -114,9 +106,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="slug",
|
||||
field=models.SlugField(
|
||||
help_text="URL-friendly identifier", max_length=255, unique=True
|
||||
),
|
||||
field=models.SlugField(help_text="URL-friendly identifier", max_length=255, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
@@ -126,16 +116,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="description",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Detailed company description"
|
||||
),
|
||||
field=models.TextField(blank=True, help_text="Detailed company description"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="founded_year",
|
||||
field=models.PositiveIntegerField(
|
||||
blank=True, help_text="Year the company was founded", null=True
|
||||
),
|
||||
field=models.PositiveIntegerField(blank=True, help_text="Year the company was founded", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
@@ -145,16 +131,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="parks_count",
|
||||
field=models.IntegerField(
|
||||
default=0, help_text="Number of parks operated (auto-calculated)"
|
||||
),
|
||||
field=models.IntegerField(default=0, help_text="Number of parks operated (auto-calculated)"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="rides_count",
|
||||
field=models.IntegerField(
|
||||
default=0, help_text="Number of rides manufactured (auto-calculated)"
|
||||
),
|
||||
field=models.IntegerField(default=0, help_text="Number of rides manufactured (auto-calculated)"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
@@ -179,9 +161,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="slug",
|
||||
field=models.SlugField(
|
||||
db_index=False, help_text="URL-friendly identifier", max_length=255
|
||||
),
|
||||
field=models.SlugField(db_index=False, help_text="URL-friendly identifier", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
@@ -229,9 +209,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="coaster_count",
|
||||
field=models.IntegerField(
|
||||
blank=True, help_text="Total coaster count", null=True
|
||||
),
|
||||
field=models.IntegerField(blank=True, help_text="Total coaster count", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
@@ -251,16 +229,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="operating_season",
|
||||
field=models.CharField(
|
||||
blank=True, help_text="Operating season", max_length=255
|
||||
),
|
||||
field=models.CharField(blank=True, help_text="Operating season", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="ride_count",
|
||||
field=models.IntegerField(
|
||||
blank=True, help_text="Total ride count", null=True
|
||||
),
|
||||
field=models.IntegerField(blank=True, help_text="Total ride count", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
@@ -276,9 +250,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="slug",
|
||||
field=models.SlugField(
|
||||
help_text="URL-friendly identifier", max_length=255, unique=True
|
||||
),
|
||||
field=models.SlugField(help_text="URL-friendly identifier", max_length=255, unique=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
@@ -300,16 +272,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkarea",
|
||||
name="closing_date",
|
||||
field=models.DateField(
|
||||
blank=True, help_text="Date this area closed (if applicable)", null=True
|
||||
),
|
||||
field=models.DateField(blank=True, help_text="Date this area closed (if applicable)", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkarea",
|
||||
name="description",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Detailed description of the area"
|
||||
),
|
||||
field=models.TextField(blank=True, help_text="Detailed description of the area"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkarea",
|
||||
@@ -319,9 +287,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkarea",
|
||||
name="opening_date",
|
||||
field=models.DateField(
|
||||
blank=True, help_text="Date this area opened", null=True
|
||||
),
|
||||
field=models.DateField(blank=True, help_text="Date this area opened", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkarea",
|
||||
@@ -336,23 +302,17 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkarea",
|
||||
name="slug",
|
||||
field=models.SlugField(
|
||||
help_text="URL-friendly identifier (unique within park)", max_length=255
|
||||
),
|
||||
field=models.SlugField(help_text="URL-friendly identifier (unique within park)", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkareaevent",
|
||||
name="closing_date",
|
||||
field=models.DateField(
|
||||
blank=True, help_text="Date this area closed (if applicable)", null=True
|
||||
),
|
||||
field=models.DateField(blank=True, help_text="Date this area closed (if applicable)", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkareaevent",
|
||||
name="description",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Detailed description of the area"
|
||||
),
|
||||
field=models.TextField(blank=True, help_text="Detailed description of the area"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkareaevent",
|
||||
@@ -362,9 +322,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkareaevent",
|
||||
name="opening_date",
|
||||
field=models.DateField(
|
||||
blank=True, help_text="Date this area opened", null=True
|
||||
),
|
||||
field=models.DateField(blank=True, help_text="Date this area opened", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkareaevent",
|
||||
@@ -406,9 +364,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="coaster_count",
|
||||
field=models.IntegerField(
|
||||
blank=True, help_text="Total coaster count", null=True
|
||||
),
|
||||
field=models.IntegerField(blank=True, help_text="Total coaster count", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
@@ -428,16 +384,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="operating_season",
|
||||
field=models.CharField(
|
||||
blank=True, help_text="Operating season", max_length=255
|
||||
),
|
||||
field=models.CharField(blank=True, help_text="Operating season", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="ride_count",
|
||||
field=models.IntegerField(
|
||||
blank=True, help_text="Total ride count", null=True
|
||||
),
|
||||
field=models.IntegerField(blank=True, help_text="Total ride count", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
@@ -453,9 +405,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="slug",
|
||||
field=models.SlugField(
|
||||
db_index=False, help_text="URL-friendly identifier", max_length=255
|
||||
),
|
||||
field=models.SlugField(db_index=False, help_text="URL-friendly identifier", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
@@ -496,9 +446,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkphoto",
|
||||
name="caption",
|
||||
field=models.CharField(
|
||||
blank=True, help_text="Photo caption or description", max_length=255
|
||||
),
|
||||
field=models.CharField(blank=True, help_text="Photo caption or description", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkphoto",
|
||||
@@ -549,9 +497,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkphotoevent",
|
||||
name="caption",
|
||||
field=models.CharField(
|
||||
blank=True, help_text="Photo caption or description", max_length=255
|
||||
),
|
||||
field=models.CharField(blank=True, help_text="Photo caption or description", max_length=255),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkphotoevent",
|
||||
@@ -602,16 +548,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkreview",
|
||||
name="is_published",
|
||||
field=models.BooleanField(
|
||||
default=True, help_text="Whether this review is publicly visible"
|
||||
),
|
||||
field=models.BooleanField(default=True, help_text="Whether this review is publicly visible"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkreview",
|
||||
name="moderated_at",
|
||||
field=models.DateTimeField(
|
||||
blank=True, help_text="When this review was moderated", null=True
|
||||
),
|
||||
field=models.DateTimeField(blank=True, help_text="When this review was moderated", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkreview",
|
||||
@@ -628,9 +570,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkreview",
|
||||
name="moderation_notes",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Internal notes from moderators"
|
||||
),
|
||||
field=models.TextField(blank=True, help_text="Internal notes from moderators"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkreview",
|
||||
@@ -681,16 +621,12 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkreviewevent",
|
||||
name="is_published",
|
||||
field=models.BooleanField(
|
||||
default=True, help_text="Whether this review is publicly visible"
|
||||
),
|
||||
field=models.BooleanField(default=True, help_text="Whether this review is publicly visible"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkreviewevent",
|
||||
name="moderated_at",
|
||||
field=models.DateTimeField(
|
||||
blank=True, help_text="When this review was moderated", null=True
|
||||
),
|
||||
field=models.DateTimeField(blank=True, help_text="When this review was moderated", null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkreviewevent",
|
||||
@@ -709,9 +645,7 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name="parkreviewevent",
|
||||
name="moderation_notes",
|
||||
field=models.TextField(
|
||||
blank=True, help_text="Internal notes from moderators"
|
||||
),
|
||||
field=models.TextField(blank=True, help_text="Internal notes from moderators"),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkreviewevent",
@@ -9,7 +9,7 @@ while maintaining backward compatibility through the Company alias.
"""

# Import choices to trigger registration
from ..choices import *
from ..choices import *  # noqa: F403
from .areas import ParkArea
from .companies import Company, CompanyHeadquarters
from .location import ParkLocation
@@ -21,16 +21,10 @@ class ParkArea(TrackedModel):
|
||||
help_text="Park this area belongs to",
|
||||
)
|
||||
name = models.CharField(max_length=255, help_text="Name of the park area")
|
||||
slug = models.SlugField(
|
||||
max_length=255, help_text="URL-friendly identifier (unique within park)"
|
||||
)
|
||||
slug = models.SlugField(max_length=255, help_text="URL-friendly identifier (unique within park)")
|
||||
description = models.TextField(blank=True, help_text="Detailed description of the area")
|
||||
opening_date = models.DateField(
|
||||
null=True, blank=True, help_text="Date this area opened"
|
||||
)
|
||||
closing_date = models.DateField(
|
||||
null=True, blank=True, help_text="Date this area closed (if applicable)"
|
||||
)
|
||||
opening_date = models.DateField(null=True, blank=True, help_text="Date this area opened")
|
||||
closing_date = models.DateField(null=True, blank=True, help_text="Date this area closed (if applicable)")
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.slug:
|
||||
|
||||
@@ -26,15 +26,9 @@ class Company(TrackedModel):
|
||||
website = models.URLField(blank=True, help_text="Company website URL")
|
||||
|
||||
# Operator-specific fields
|
||||
founded_year = models.PositiveIntegerField(
|
||||
blank=True, null=True, help_text="Year the company was founded"
|
||||
)
|
||||
parks_count = models.IntegerField(
|
||||
default=0, help_text="Number of parks operated (auto-calculated)"
|
||||
)
|
||||
rides_count = models.IntegerField(
|
||||
default=0, help_text="Number of rides manufactured (auto-calculated)"
|
||||
)
|
||||
founded_year = models.PositiveIntegerField(blank=True, null=True, help_text="Year the company was founded")
|
||||
parks_count = models.IntegerField(default=0, help_text="Number of parks operated (auto-calculated)")
|
||||
rides_count = models.IntegerField(default=0, help_text="Number of rides manufactured (auto-calculated)")
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.slug:
|
||||
@@ -72,9 +66,7 @@ class CompanyHeadquarters(models.Model):
|
||||
blank=True,
|
||||
help_text="Mailing address if publicly available",
|
||||
)
|
||||
city = models.CharField(
|
||||
max_length=100, db_index=True, help_text="Headquarters city"
|
||||
)
|
||||
city = models.CharField(max_length=100, db_index=True, help_text="Headquarters city")
|
||||
state_province = models.CharField(
|
||||
max_length=100,
|
||||
blank=True,
|
||||
@@ -87,9 +79,7 @@ class CompanyHeadquarters(models.Model):
|
||||
db_index=True,
|
||||
help_text="Country where headquarters is located",
|
||||
)
|
||||
postal_code = models.CharField(
|
||||
max_length=20, blank=True, help_text="ZIP or postal code"
|
||||
)
|
||||
postal_code = models.CharField(max_length=20, blank=True, help_text="ZIP or postal code")
|
||||
|
||||
# Optional mailing address if different or more complete
|
||||
mailing_address = models.TextField(
|
||||
|
||||
@@ -9,9 +9,7 @@ class ParkLocation(models.Model):
|
||||
Represents the geographic location and address of a park, with PostGIS support.
|
||||
"""
|
||||
|
||||
park = models.OneToOneField(
|
||||
"parks.Park", on_delete=models.CASCADE, related_name="location"
|
||||
)
|
||||
park = models.OneToOneField("parks.Park", on_delete=models.CASCADE, related_name="location")
|
||||
|
||||
# Spatial Data
|
||||
point = models.PointField(
|
||||
@@ -27,10 +25,7 @@ class ParkLocation(models.Model):
|
||||
state = models.CharField(max_length=100, db_index=True)
|
||||
country = models.CharField(max_length=100, default="USA")
|
||||
continent = models.CharField(
|
||||
max_length=50,
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Continent where the park is located"
|
||||
max_length=50, blank=True, db_index=True, help_text="Continent where the park is located"
|
||||
)
|
||||
postal_code = models.CharField(max_length=20, blank=True)
|
||||
|
||||
|
||||
@@ -22,9 +22,7 @@ def park_photo_upload_path(instance: models.Model, filename: str) -> str:
|
||||
if park is None:
|
||||
raise ValueError("Park cannot be None")
|
||||
|
||||
return MediaService.generate_upload_path(
|
||||
domain="park", identifier=park.slug, filename=filename
|
||||
)
|
||||
return MediaService.generate_upload_path(domain="park", identifier=park.slug, filename=filename)
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
@@ -39,23 +37,15 @@ class ParkPhoto(TrackedModel):
|
||||
)
|
||||
|
||||
image = models.ForeignKey(
|
||||
'django_cloudflareimages_toolkit.CloudflareImage',
|
||||
"django_cloudflareimages_toolkit.CloudflareImage",
|
||||
on_delete=models.CASCADE,
|
||||
help_text="Park photo stored on Cloudflare Images"
|
||||
help_text="Park photo stored on Cloudflare Images",
|
||||
)
|
||||
|
||||
caption = models.CharField(
|
||||
max_length=255, blank=True, help_text="Photo caption or description"
|
||||
)
|
||||
alt_text = models.CharField(
|
||||
max_length=255, blank=True, help_text="Alternative text for accessibility"
|
||||
)
|
||||
is_primary = models.BooleanField(
|
||||
default=False, help_text="Whether this is the primary photo for the park"
|
||||
)
|
||||
is_approved = models.BooleanField(
|
||||
default=False, help_text="Whether this photo has been approved by moderators"
|
||||
)
|
||||
caption = models.CharField(max_length=255, blank=True, help_text="Photo caption or description")
|
||||
alt_text = models.CharField(max_length=255, blank=True, help_text="Alternative text for accessibility")
|
||||
is_primary = models.BooleanField(default=False, help_text="Whether this is the primary photo for the park")
|
||||
is_approved = models.BooleanField(default=False, help_text="Whether this photo has been approved by moderators")
|
||||
|
||||
# Metadata
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
@@ -100,9 +90,7 @@ class ParkPhoto(TrackedModel):
|
||||
|
||||
# Set default caption if not provided
|
||||
if not self.caption and self.uploaded_by:
|
||||
self.caption = MediaService.generate_default_caption(
|
||||
self.uploaded_by.username
|
||||
)
|
||||
self.caption = MediaService.generate_default_caption(self.uploaded_by.username)
|
||||
|
||||
# If this is marked as primary, unmark other primary photos for this park
|
||||
if self.is_primary:
|
||||
|
||||
@@ -45,7 +45,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
max_length=30,
|
||||
default="THEME_PARK",
|
||||
db_index=True,
|
||||
help_text="Type/category of the park"
|
||||
help_text="Type/category of the park",
|
||||
)
|
||||
|
||||
# Location relationship - reverse relation from ParkLocation
|
||||
@@ -118,23 +118,18 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
|
||||
# Computed fields for hybrid filtering
|
||||
opening_year = models.IntegerField(
|
||||
null=True,
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Year the park opened (computed from opening_date)"
|
||||
null=True, blank=True, db_index=True, help_text="Year the park opened (computed from opening_date)"
|
||||
)
|
||||
search_text = models.TextField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Searchable text combining name, description, location, and operator"
|
||||
blank=True, db_index=True, help_text="Searchable text combining name, description, location, and operator"
|
||||
)
|
||||
|
||||
# Timezone for park operations
|
||||
timezone = models.CharField(
|
||||
max_length=50,
|
||||
default='UTC',
|
||||
default="UTC",
|
||||
blank=True,
|
||||
help_text="Timezone identifier for park operations (e.g., 'America/New_York')"
|
||||
help_text="Timezone identifier for park operations (e.g., 'America/New_York')",
|
||||
)
|
||||
|
||||
class Meta:
|
||||
@@ -171,8 +166,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
),
|
||||
models.CheckConstraint(
|
||||
name="park_coaster_count_non_negative",
|
||||
check=models.Q(coaster_count__isnull=True)
|
||||
| models.Q(coaster_count__gte=0),
|
||||
check=models.Q(coaster_count__isnull=True) | models.Q(coaster_count__gte=0),
|
||||
violation_error_message="Coaster count must be non-negative",
|
||||
),
|
||||
# Business rule: Coaster count cannot exceed ride count
|
||||
@@ -204,9 +198,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
self.transition_to_under_construction(user=user)
|
||||
self.save()
|
||||
|
||||
def close_permanently(
|
||||
self, *, closing_date=None, user: Optional["AbstractBaseUser"] = None
|
||||
) -> None:
|
||||
def close_permanently(self, *, closing_date=None, user: Optional["AbstractBaseUser"] = None) -> None:
|
||||
"""Transition park to CLOSED_PERM status."""
|
||||
self.transition_to_closed_perm(user=user)
|
||||
if closing_date:
|
||||
@@ -279,7 +271,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
|
||||
# Add location information if available
|
||||
try:
|
||||
if hasattr(self, 'location') and self.location:
|
||||
if hasattr(self, "location") and self.location:
|
||||
if self.location.city:
|
||||
search_parts.append(self.location.city)
|
||||
if self.location.state:
|
||||
@@ -299,16 +291,14 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
search_parts.append(self.property_owner.name)
|
||||
|
||||
# Combine all parts into searchable text
|
||||
self.search_text = ' '.join(filter(None, search_parts)).lower()
|
||||
self.search_text = " ".join(filter(None, search_parts)).lower()
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
if self.operator and "OPERATOR" not in self.operator.roles:
|
||||
raise ValidationError({"operator": "Company must have the OPERATOR role."})
|
||||
if self.property_owner and "PROPERTY_OWNER" not in self.property_owner.roles:
|
||||
raise ValidationError(
|
||||
{"property_owner": "Company must have the PROPERTY_OWNER role."}
|
||||
)
|
||||
raise ValidationError({"property_owner": "Company must have the PROPERTY_OWNER role."})
|
||||
|
||||
def get_absolute_url(self) -> str:
|
||||
return reverse("parks:park_detail", kwargs={"slug": self.slug})
|
||||
@@ -325,7 +315,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
"""Returns coordinates as a list [latitude, longitude]"""
|
||||
if hasattr(self, "location") and self.location:
|
||||
coords = self.location.coordinates
|
||||
if coords and isinstance(coords, (tuple, list)):
|
||||
if coords and isinstance(coords, tuple | list):
|
||||
return list(coords)
|
||||
return None
|
||||
|
||||
@@ -349,9 +339,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
content_type = ContentType.objects.get_for_model(cls)
|
||||
print(f"Searching HistoricalSlug with content_type: {content_type}")
|
||||
historical = (
|
||||
HistoricalSlug.objects.filter(content_type=content_type, slug=slug)
|
||||
.order_by("-created_at")
|
||||
.first()
|
||||
HistoricalSlug.objects.filter(content_type=content_type, slug=slug).order_by("-created_at").first()
|
||||
)
|
||||
|
||||
if historical:
|
||||
@@ -373,11 +361,7 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
print("Searching pghistory events")
|
||||
event_model = getattr(cls, "event_model", None)
|
||||
if event_model:
|
||||
historical_event = (
|
||||
event_model.objects.filter(slug=slug)
|
||||
.order_by("-pgh_created_at")
|
||||
.first()
|
||||
)
|
||||
historical_event = event_model.objects.filter(slug=slug).order_by("-pgh_created_at").first()
|
||||
|
||||
if historical_event:
|
||||
print(
|
||||
@@ -394,4 +378,4 @@ class Park(StateMachineMixin, TrackedModel):
|
||||
else:
|
||||
print("No pghistory event found")
|
||||
|
||||
raise cls.DoesNotExist("No park found with this slug")
|
||||
raise cls.DoesNotExist("No park found with this slug") from None
|
||||
|
||||
@@ -40,12 +40,8 @@ class ParkReview(TrackedModel):
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
# Moderation
|
||||
is_published = models.BooleanField(
|
||||
default=True, help_text="Whether this review is publicly visible"
|
||||
)
|
||||
moderation_notes = models.TextField(
|
||||
blank=True, help_text="Internal notes from moderators"
|
||||
)
|
||||
is_published = models.BooleanField(default=True, help_text="Whether this review is publicly visible")
|
||||
moderation_notes = models.TextField(blank=True, help_text="Internal notes from moderators")
|
||||
moderated_by = models.ForeignKey(
|
||||
"accounts.User",
|
||||
on_delete=models.SET_NULL,
|
||||
@@ -54,9 +50,7 @@ class ParkReview(TrackedModel):
|
||||
related_name="moderated_park_reviews",
|
||||
help_text="Moderator who reviewed this",
|
||||
)
|
||||
moderated_at = models.DateTimeField(
|
||||
null=True, blank=True, help_text="When this review was moderated"
|
||||
)
|
||||
moderated_at = models.DateTimeField(null=True, blank=True, help_text="When this review was moderated")
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
verbose_name = "Park Review"
|
||||
@@ -82,10 +76,7 @@ class ParkReview(TrackedModel):
name="park_review_moderation_consistency",
check=models.Q(moderated_by__isnull=True, moderated_at__isnull=True)
| models.Q(moderated_by__isnull=False, moderated_at__isnull=False),
violation_error_message=(
"Moderated reviews must have both moderator and moderation "
"timestamp"
),
violation_error_message=("Moderated reviews must have both moderator and moderation " "timestamp"),
),
]
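The constraint above requires that moderated_by and moderated_at are either both null or both set. A minimal stand-alone sketch of the same rule in plain Python, useful for reasoning about which rows the database would reject (the helper name is illustrative, not part of the codebase):

def moderation_is_consistent(moderated_by, moderated_at) -> bool:
    # Mirrors the CheckConstraint: both unset, or both set.
    return (moderated_by is None) == (moderated_at is None)

assert moderation_is_consistent(None, None) is True              # not yet moderated
assert moderation_is_consistent("admin", "2024-01-01") is True   # fully moderated
assert moderation_is_consistent("admin", None) is False          # rejected by the constraint
assert moderation_is_consistent(None, "2024-01-01") is False     # rejected by the constraint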
|
||||
|
||||
|
||||
@@ -10,9 +10,7 @@ def get_base_park_queryset() -> QuerySet[Park]:
.prefetch_related("photos", "rides")
.annotate(
current_ride_count=Count("rides", distinct=True),
current_coaster_count=Count(
"rides", filter=Q(rides__category="RC"), distinct=True
),
current_coaster_count=Count("rides", filter=Q(rides__category="RC"), distinct=True),
)
.order_by("name")
)
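The annotated base queryset exposes current_ride_count and current_coaster_count on every Park row. A hedged usage sketch, assuming a configured Django project and that the selector is importable from the parks selectors module (the import path is an assumption):

# Sketch only: requires a configured Django project with this app installed.
from apps.parks.selectors import get_base_park_queryset  # path assumed

for park in get_base_park_queryset()[:5]:
    # Annotations are plain attributes on each instance.
    print(park.name, park.current_ride_count, park.current_coaster_count)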
|
||||
|
||||
@@ -47,9 +47,7 @@ def park_list_with_stats(*, filters: dict[str, Any] | None = None) -> QuerySet[P
|
||||
queryset = queryset.filter(location__country=filters["country"])
|
||||
if "search" in filters:
|
||||
search_term = filters["search"]
|
||||
queryset = queryset.filter(
|
||||
Q(name__icontains=search_term) | Q(description__icontains=search_term)
|
||||
)
|
||||
queryset = queryset.filter(Q(name__icontains=search_term) | Q(description__icontains=search_term))
|
||||
|
||||
return queryset.order_by("name")
|
||||
|
||||
@@ -74,15 +72,11 @@ def park_detail_optimized(*, slug: str) -> Park:
|
||||
"areas",
|
||||
Prefetch(
|
||||
"rides",
|
||||
queryset=Ride.objects.select_related(
|
||||
"manufacturer", "designer", "ride_model"
|
||||
),
|
||||
queryset=Ride.objects.select_related("manufacturer", "designer", "ride_model"),
|
||||
),
|
||||
Prefetch(
|
||||
"reviews",
|
||||
queryset=ParkReview.objects.select_related("user").filter(
|
||||
is_published=True
|
||||
),
|
||||
queryset=ParkReview.objects.select_related("user").filter(is_published=True),
|
||||
),
|
||||
"photos",
|
||||
)
|
||||
@@ -90,9 +84,7 @@ def park_detail_optimized(*, slug: str) -> Park:
|
||||
)
|
||||
|
||||
|
||||
def parks_near_location(
|
||||
*, point: Point, distance_km: float = 50, limit: int = 10
|
||||
) -> QuerySet[Park]:
|
||||
def parks_near_location(*, point: Point, distance_km: float = 50, limit: int = 10) -> QuerySet[Park]:
|
||||
"""
|
||||
Get parks near a specific geographic location.
|
||||
|
||||
@@ -176,16 +168,10 @@ def parks_with_recent_reviews(*, days: int = 30) -> QuerySet[Park]:
|
||||
cutoff_date = timezone.now() - timedelta(days=days)
|
||||
|
||||
return (
|
||||
Park.objects.filter(
|
||||
reviews__created_at__gte=cutoff_date, reviews__is_published=True
|
||||
)
|
||||
Park.objects.filter(reviews__created_at__gte=cutoff_date, reviews__is_published=True)
|
||||
.select_related("operator")
|
||||
.prefetch_related("location")
|
||||
.annotate(
|
||||
recent_review_count=Count(
|
||||
"reviews", filter=Q(reviews__created_at__gte=cutoff_date)
|
||||
)
|
||||
)
|
||||
.annotate(recent_review_count=Count("reviews", filter=Q(reviews__created_at__gte=cutoff_date)))
|
||||
.order_by("-recent_review_count")
|
||||
.distinct()
|
||||
)
|
||||
@@ -204,9 +190,7 @@ def park_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet[Park]:
|
||||
"""
|
||||
return (
|
||||
Park.objects.filter(
|
||||
Q(name__icontains=query)
|
||||
| Q(location__city__icontains=query)
|
||||
| Q(location__region__icontains=query)
|
||||
Q(name__icontains=query) | Q(location__city__icontains=query) | Q(location__region__icontains=query)
|
||||
)
|
||||
.select_related("operator")
|
||||
.prefetch_related("location")
|
||||
|
||||
@@ -212,9 +212,9 @@ class ParkService:
|
||||
)
|
||||
|
||||
# Calculate average rating
|
||||
avg_rating = ParkReview.objects.filter(
|
||||
park=park, is_published=True
|
||||
).aggregate(avg_rating=Avg("rating"))["avg_rating"]
|
||||
avg_rating = ParkReview.objects.filter(park=park, is_published=True).aggregate(avg_rating=Avg("rating"))[
|
||||
"avg_rating"
|
||||
]
|
||||
|
||||
# Update park fields
|
||||
park.ride_count = ride_stats["total_rides"] or 0
|
||||
|
||||
@@ -26,9 +26,7 @@ class ParkFilterService:
|
||||
def __init__(self):
|
||||
self.cache_prefix = "park_filter"
|
||||
|
||||
def get_filter_counts(
|
||||
self, base_queryset: QuerySet | None = None
|
||||
) -> dict[str, Any]:
|
||||
def get_filter_counts(self, base_queryset: QuerySet | None = None) -> dict[str, Any]:
|
||||
"""
|
||||
Get counts for various filter options to show users what's available.
|
||||
|
||||
@@ -76,9 +74,7 @@ class ParkFilterService:
|
||||
).count(),
|
||||
}
|
||||
|
||||
def _get_top_operators(
|
||||
self, queryset: QuerySet, limit: int = 10
|
||||
) -> list[dict[str, Any]]:
|
||||
def _get_top_operators(self, queryset: QuerySet, limit: int = 10) -> list[dict[str, Any]]:
|
||||
"""Get the top operators by number of parks."""
|
||||
return list(
|
||||
queryset.values("operator__name", "operator__id")
|
||||
@@ -87,9 +83,7 @@ class ParkFilterService:
|
||||
.order_by("-park_count")[:limit]
|
||||
)
|
||||
|
||||
def _get_country_counts(
|
||||
self, queryset: QuerySet, limit: int = 10
|
||||
) -> list[dict[str, Any]]:
|
||||
def _get_country_counts(self, queryset: QuerySet, limit: int = 10) -> list[dict[str, Any]]:
|
||||
"""Get countries with the most parks."""
|
||||
return list(
|
||||
queryset.filter(location__country__isnull=False)
|
||||
@@ -123,21 +117,18 @@ class ParkFilterService:
|
||||
|
||||
if len(query) >= 2: # Only search for queries of 2+ characters
|
||||
# Park name suggestions
|
||||
park_names = Park.objects.filter(name__icontains=query).values_list(
|
||||
"name", flat=True
|
||||
)[:5]
|
||||
park_names = Park.objects.filter(name__icontains=query).values_list("name", flat=True)[:5]
|
||||
suggestions["parks"] = list(park_names)
|
||||
|
||||
# Operator suggestions
|
||||
operator_names = Company.objects.filter(
|
||||
roles__contains=["OPERATOR"], name__icontains=query
|
||||
).values_list("name", flat=True)[:5]
|
||||
operator_names = Company.objects.filter(roles__contains=["OPERATOR"], name__icontains=query).values_list(
|
||||
"name", flat=True
|
||||
)[:5]
|
||||
suggestions["operators"] = list(operator_names)
|
||||
|
||||
# Location suggestions (cities and countries)
|
||||
locations = Park.objects.filter(
|
||||
Q(location__city__icontains=query)
|
||||
| Q(location__country__icontains=query)
|
||||
Q(location__city__icontains=query) | Q(location__country__icontains=query)
|
||||
).values_list("location__city", "location__country")[:5]
|
||||
|
||||
location_suggestions = []
|
||||
@@ -264,14 +255,10 @@ class ParkFilterService:
|
||||
|
||||
# Apply location filters
|
||||
if filters.get("country_filter"):
|
||||
queryset = queryset.filter(
|
||||
location__country__icontains=filters["country_filter"]
|
||||
)
|
||||
queryset = queryset.filter(location__country__icontains=filters["country_filter"])
|
||||
|
||||
if filters.get("state_filter"):
|
||||
queryset = queryset.filter(
|
||||
location__state__icontains=filters["state_filter"]
|
||||
)
|
||||
queryset = queryset.filter(location__state__icontains=filters["state_filter"])
|
||||
|
||||
# Apply ordering
|
||||
if filters.get("ordering"):
|
||||
|
||||
@@ -21,8 +21,8 @@ class SmartParkLoader:
|
||||
"""
|
||||
|
||||
# Cache configuration
|
||||
CACHE_TIMEOUT = getattr(settings, 'HYBRID_FILTER_CACHE_TIMEOUT', 300) # 5 minutes
|
||||
CACHE_KEY_PREFIX = 'hybrid_parks'
|
||||
CACHE_TIMEOUT = getattr(settings, "HYBRID_FILTER_CACHE_TIMEOUT", 300) # 5 minutes
|
||||
CACHE_KEY_PREFIX = "hybrid_parks"
|
||||
|
||||
# Progressive loading thresholds
|
||||
INITIAL_LOAD_SIZE = 50
|
||||
@@ -34,17 +34,22 @@ class SmartParkLoader:
|
||||
|
||||
def _get_optimized_queryset(self) -> models.QuerySet:
|
||||
"""Get optimized base queryset with all necessary prefetches."""
|
||||
return Park.objects.select_related(
|
||||
'operator',
|
||||
'property_owner',
|
||||
'banner_image',
|
||||
'card_image',
|
||||
).prefetch_related(
|
||||
'location', # ParkLocation relationship
|
||||
).filter(
|
||||
# Only include operating and temporarily closed parks by default
|
||||
status__in=['OPERATING', 'CLOSED_TEMP']
|
||||
).order_by('name')
|
||||
return (
|
||||
Park.objects.select_related(
|
||||
"operator",
|
||||
"property_owner",
|
||||
"banner_image",
|
||||
"card_image",
|
||||
)
|
||||
.prefetch_related(
|
||||
"location", # ParkLocation relationship
|
||||
)
|
||||
.filter(
|
||||
# Only include operating and temporarily closed parks by default
|
||||
status__in=["OPERATING", "CLOSED_TEMP"]
|
||||
)
|
||||
.order_by("name")
|
||||
)
|
||||
|
||||
def get_initial_load(self, filters: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
"""
|
||||
@@ -56,7 +61,7 @@ class SmartParkLoader:
|
||||
Returns:
|
||||
Dictionary containing parks data and metadata
|
||||
"""
|
||||
cache_key = self._generate_cache_key('initial', filters)
|
||||
cache_key = self._generate_cache_key("initial", filters)
|
||||
cached_result = cache.get(cache_key)
|
||||
|
||||
if cached_result:
|
||||
@@ -74,21 +79,21 @@ class SmartParkLoader:
if total_count <= self.MAX_CLIENT_SIDE_RECORDS:
# Load all data for client-side filtering
parks = list(queryset.all())
strategy = 'client_side'
strategy = "client_side"
has_more = False
else:
# Load initial batch for server-side pagination
parks = list(queryset[:self.INITIAL_LOAD_SIZE])
strategy = 'server_side'
parks = list(queryset[: self.INITIAL_LOAD_SIZE])
strategy = "server_side"
has_more = total_count > self.INITIAL_LOAD_SIZE

result = {
'parks': parks,
'total_count': total_count,
'strategy': strategy,
'has_more': has_more,
'next_offset': len(parks) if has_more else None,
'filter_metadata': self._get_filter_metadata(queryset),
"parks": parks,
"total_count": total_count,
"strategy": strategy,
"has_more": has_more,
"next_offset": len(parks) if has_more else None,
"filter_metadata": self._get_filter_metadata(queryset),
}
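The hunk above picks between two strategies: when the filtered count fits under MAX_CLIENT_SIDE_RECORDS, everything is returned for client-side filtering; otherwise only the first INITIAL_LOAD_SIZE rows ship and the client pages through the rest. A minimal sketch of the same decision on plain lists; the threshold value of 500 and the helper are illustrative, only INITIAL_LOAD_SIZE = 50 comes from the diff:

MAX_CLIENT_SIDE_RECORDS = 500  # assumed value for the sketch
INITIAL_LOAD_SIZE = 50

def initial_load(rows: list) -> dict:
    total = len(rows)
    if total <= MAX_CLIENT_SIDE_RECORDS:
        parks, strategy, has_more = rows, "client_side", False
    else:
        parks, strategy, has_more = rows[:INITIAL_LOAD_SIZE], "server_side", True
    return {
        "parks": parks,
        "total_count": total,
        "strategy": strategy,
        "has_more": has_more,
        "next_offset": len(parks) if has_more else None,
    }

print(initial_load(list(range(120)))["strategy"])   # client_side
print(initial_load(list(range(5000)))["strategy"])  # server_side, next_offset == 50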
|
||||
|
||||
# Cache the result
|
||||
@@ -96,11 +101,7 @@ class SmartParkLoader:
|
||||
|
||||
return result
|
||||
|
||||
def get_progressive_load(
|
||||
self,
|
||||
offset: int,
|
||||
filters: dict[str, Any] | None = None
|
||||
) -> dict[str, Any]:
|
||||
def get_progressive_load(self, offset: int, filters: dict[str, Any] | None = None) -> dict[str, Any]:
|
||||
"""
|
||||
Get next batch of parks for progressive loading.
|
||||
|
||||
@@ -111,7 +112,7 @@ class SmartParkLoader:
|
||||
Returns:
|
||||
Dictionary containing parks data and metadata
|
||||
"""
|
||||
cache_key = self._generate_cache_key(f'progressive_{offset}', filters)
|
||||
cache_key = self._generate_cache_key(f"progressive_{offset}", filters)
|
||||
cached_result = cache.get(cache_key)
|
||||
|
||||
if cached_result:
|
||||
@@ -131,10 +132,10 @@ class SmartParkLoader:
|
||||
has_more = end_offset < total_count
|
||||
|
||||
result = {
|
||||
'parks': parks,
|
||||
'total_count': total_count,
|
||||
'has_more': has_more,
|
||||
'next_offset': end_offset if has_more else None,
|
||||
"parks": parks,
|
||||
"total_count": total_count,
|
||||
"has_more": has_more,
|
||||
"next_offset": end_offset if has_more else None,
|
||||
}
|
||||
|
||||
# Cache the result
|
||||
@@ -152,7 +153,7 @@ class SmartParkLoader:
|
||||
Returns:
|
||||
Dictionary containing filter metadata
|
||||
"""
|
||||
cache_key = self._generate_cache_key('metadata', filters)
|
||||
cache_key = self._generate_cache_key("metadata", filters)
|
||||
cached_result = cache.get(cache_key)
|
||||
|
||||
if cached_result:
|
||||
@@ -174,72 +175,72 @@ class SmartParkLoader:
|
||||
"""Apply filters to the queryset."""
|
||||
|
||||
# Status filter
|
||||
if 'status' in filters and filters['status']:
|
||||
if isinstance(filters['status'], list):
|
||||
queryset = queryset.filter(status__in=filters['status'])
|
||||
if "status" in filters and filters["status"]:
|
||||
if isinstance(filters["status"], list):
|
||||
queryset = queryset.filter(status__in=filters["status"])
|
||||
else:
|
||||
queryset = queryset.filter(status=filters['status'])
|
||||
queryset = queryset.filter(status=filters["status"])
|
||||
|
||||
# Park type filter
|
||||
if 'park_type' in filters and filters['park_type']:
|
||||
if isinstance(filters['park_type'], list):
|
||||
queryset = queryset.filter(park_type__in=filters['park_type'])
|
||||
if "park_type" in filters and filters["park_type"]:
|
||||
if isinstance(filters["park_type"], list):
|
||||
queryset = queryset.filter(park_type__in=filters["park_type"])
|
||||
else:
|
||||
queryset = queryset.filter(park_type=filters['park_type'])
|
||||
queryset = queryset.filter(park_type=filters["park_type"])
|
||||
|
||||
# Country filter
|
||||
if 'country' in filters and filters['country']:
|
||||
queryset = queryset.filter(location__country__in=filters['country'])
|
||||
if "country" in filters and filters["country"]:
|
||||
queryset = queryset.filter(location__country__in=filters["country"])
|
||||
|
||||
# State filter
|
||||
if 'state' in filters and filters['state']:
|
||||
queryset = queryset.filter(location__state__in=filters['state'])
|
||||
if "state" in filters and filters["state"]:
|
||||
queryset = queryset.filter(location__state__in=filters["state"])
|
||||
|
||||
# Opening year range
|
||||
if 'opening_year_min' in filters and filters['opening_year_min']:
|
||||
queryset = queryset.filter(opening_year__gte=filters['opening_year_min'])
|
||||
if "opening_year_min" in filters and filters["opening_year_min"]:
|
||||
queryset = queryset.filter(opening_year__gte=filters["opening_year_min"])
|
||||
|
||||
if 'opening_year_max' in filters and filters['opening_year_max']:
|
||||
queryset = queryset.filter(opening_year__lte=filters['opening_year_max'])
|
||||
if "opening_year_max" in filters and filters["opening_year_max"]:
|
||||
queryset = queryset.filter(opening_year__lte=filters["opening_year_max"])
|
||||
|
||||
# Size range
|
||||
if 'size_min' in filters and filters['size_min']:
|
||||
queryset = queryset.filter(size_acres__gte=filters['size_min'])
|
||||
if "size_min" in filters and filters["size_min"]:
|
||||
queryset = queryset.filter(size_acres__gte=filters["size_min"])
|
||||
|
||||
if 'size_max' in filters and filters['size_max']:
|
||||
queryset = queryset.filter(size_acres__lte=filters['size_max'])
|
||||
if "size_max" in filters and filters["size_max"]:
|
||||
queryset = queryset.filter(size_acres__lte=filters["size_max"])
|
||||
|
||||
# Rating range
|
||||
if 'rating_min' in filters and filters['rating_min']:
|
||||
queryset = queryset.filter(average_rating__gte=filters['rating_min'])
|
||||
if "rating_min" in filters and filters["rating_min"]:
|
||||
queryset = queryset.filter(average_rating__gte=filters["rating_min"])
|
||||
|
||||
if 'rating_max' in filters and filters['rating_max']:
|
||||
queryset = queryset.filter(average_rating__lte=filters['rating_max'])
|
||||
if "rating_max" in filters and filters["rating_max"]:
|
||||
queryset = queryset.filter(average_rating__lte=filters["rating_max"])
|
||||
|
||||
# Ride count range
|
||||
if 'ride_count_min' in filters and filters['ride_count_min']:
|
||||
queryset = queryset.filter(ride_count__gte=filters['ride_count_min'])
|
||||
if "ride_count_min" in filters and filters["ride_count_min"]:
|
||||
queryset = queryset.filter(ride_count__gte=filters["ride_count_min"])
|
||||
|
||||
if 'ride_count_max' in filters and filters['ride_count_max']:
|
||||
queryset = queryset.filter(ride_count__lte=filters['ride_count_max'])
|
||||
if "ride_count_max" in filters and filters["ride_count_max"]:
|
||||
queryset = queryset.filter(ride_count__lte=filters["ride_count_max"])
|
||||
|
||||
# Coaster count range
|
||||
if 'coaster_count_min' in filters and filters['coaster_count_min']:
|
||||
queryset = queryset.filter(coaster_count__gte=filters['coaster_count_min'])
|
||||
if "coaster_count_min" in filters and filters["coaster_count_min"]:
|
||||
queryset = queryset.filter(coaster_count__gte=filters["coaster_count_min"])
|
||||
|
||||
if 'coaster_count_max' in filters and filters['coaster_count_max']:
|
||||
queryset = queryset.filter(coaster_count__lte=filters['coaster_count_max'])
|
||||
if "coaster_count_max" in filters and filters["coaster_count_max"]:
|
||||
queryset = queryset.filter(coaster_count__lte=filters["coaster_count_max"])
|
||||
|
||||
# Operator filter
|
||||
if 'operator' in filters and filters['operator']:
|
||||
if isinstance(filters['operator'], list):
|
||||
queryset = queryset.filter(operator__slug__in=filters['operator'])
|
||||
if "operator" in filters and filters["operator"]:
|
||||
if isinstance(filters["operator"], list):
|
||||
queryset = queryset.filter(operator__slug__in=filters["operator"])
|
||||
else:
|
||||
queryset = queryset.filter(operator__slug=filters['operator'])
|
||||
queryset = queryset.filter(operator__slug=filters["operator"])
|
||||
|
||||
# Search query
if 'search' in filters and filters['search']:
search_term = filters['search'].lower()
if "search" in filters and filters["search"]:
search_term = filters["search"].lower()
queryset = queryset.filter(search_text__icontains=search_term)

return queryset
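_apply_filters accepts both scalar and list values for the categorical keys, plus min/max bounds for the numeric ranges. A hedged example of the kind of filters dict the method expects; the keys come from the branches above, the concrete values are made up:

filters = {
    "status": ["OPERATING", "CLOSED_TEMP"],  # list -> status__in
    "country": ["USA"],
    "opening_year_min": 1990,
    "coaster_count_min": 5,
    "search": "cedar",                        # matched against search_text
}
# queryset = loader._apply_filters(Park.objects.all(), filters)  # sketch, not executed here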
|
||||
@@ -249,150 +250,125 @@ class SmartParkLoader:
|
||||
|
||||
# Get distinct values for categorical filters with counts
|
||||
countries_data = list(
|
||||
queryset.values('location__country')
|
||||
queryset.values("location__country")
|
||||
.exclude(location__country__isnull=True)
|
||||
.annotate(count=models.Count('id'))
|
||||
.order_by('location__country')
|
||||
.annotate(count=models.Count("id"))
|
||||
.order_by("location__country")
|
||||
)
|
||||
|
||||
states_data = list(
|
||||
queryset.values('location__state')
|
||||
queryset.values("location__state")
|
||||
.exclude(location__state__isnull=True)
|
||||
.annotate(count=models.Count('id'))
|
||||
.order_by('location__state')
|
||||
.annotate(count=models.Count("id"))
|
||||
.order_by("location__state")
|
||||
)
|
||||
|
||||
park_types_data = list(
|
||||
queryset.values('park_type')
|
||||
queryset.values("park_type")
|
||||
.exclude(park_type__isnull=True)
|
||||
.annotate(count=models.Count('id'))
|
||||
.order_by('park_type')
|
||||
.annotate(count=models.Count("id"))
|
||||
.order_by("park_type")
|
||||
)
|
||||
|
||||
statuses_data = list(
|
||||
queryset.values('status')
|
||||
.annotate(count=models.Count('id'))
|
||||
.order_by('status')
|
||||
)
|
||||
statuses_data = list(queryset.values("status").annotate(count=models.Count("id")).order_by("status"))
|
||||
|
||||
operators_data = list(
|
||||
queryset.select_related('operator')
|
||||
.values('operator__id', 'operator__name', 'operator__slug')
|
||||
queryset.select_related("operator")
|
||||
.values("operator__id", "operator__name", "operator__slug")
|
||||
.exclude(operator__isnull=True)
|
||||
.annotate(count=models.Count('id'))
|
||||
.order_by('operator__name')
|
||||
.annotate(count=models.Count("id"))
|
||||
.order_by("operator__name")
|
||||
)
|
||||
|
||||
# Convert to frontend-expected format with value/label/count
|
||||
countries = [
|
||||
{
|
||||
'value': item['location__country'],
|
||||
'label': item['location__country'],
|
||||
'count': item['count']
|
||||
}
|
||||
{"value": item["location__country"], "label": item["location__country"], "count": item["count"]}
|
||||
for item in countries_data
|
||||
]
|
||||
|
||||
states = [
|
||||
{
|
||||
'value': item['location__state'],
|
||||
'label': item['location__state'],
|
||||
'count': item['count']
|
||||
}
|
||||
{"value": item["location__state"], "label": item["location__state"], "count": item["count"]}
|
||||
for item in states_data
|
||||
]
|
||||
|
||||
park_types = [
|
||||
{
|
||||
'value': item['park_type'],
|
||||
'label': item['park_type'],
|
||||
'count': item['count']
|
||||
}
|
||||
for item in park_types_data
|
||||
{"value": item["park_type"], "label": item["park_type"], "count": item["count"]} for item in park_types_data
|
||||
]
|
||||
|
||||
statuses = [
|
||||
{
|
||||
'value': item['status'],
|
||||
'label': self._get_status_label(item['status']),
|
||||
'count': item['count']
|
||||
}
|
||||
{"value": item["status"], "label": self._get_status_label(item["status"]), "count": item["count"]}
|
||||
for item in statuses_data
|
||||
]
|
||||
|
||||
operators = [
|
||||
{
|
||||
'value': item['operator__slug'],
|
||||
'label': item['operator__name'],
|
||||
'count': item['count']
|
||||
}
|
||||
{"value": item["operator__slug"], "label": item["operator__name"], "count": item["count"]}
|
||||
for item in operators_data
|
||||
]
|
||||
|
||||
# Get ranges for numerical filters
|
||||
aggregates = queryset.aggregate(
|
||||
opening_year_min=models.Min('opening_year'),
|
||||
opening_year_max=models.Max('opening_year'),
|
||||
size_min=models.Min('size_acres'),
|
||||
size_max=models.Max('size_acres'),
|
||||
rating_min=models.Min('average_rating'),
|
||||
rating_max=models.Max('average_rating'),
|
||||
ride_count_min=models.Min('ride_count'),
|
||||
ride_count_max=models.Max('ride_count'),
|
||||
coaster_count_min=models.Min('coaster_count'),
|
||||
coaster_count_max=models.Max('coaster_count'),
|
||||
opening_year_min=models.Min("opening_year"),
|
||||
opening_year_max=models.Max("opening_year"),
|
||||
size_min=models.Min("size_acres"),
|
||||
size_max=models.Max("size_acres"),
|
||||
rating_min=models.Min("average_rating"),
|
||||
rating_max=models.Max("average_rating"),
|
||||
ride_count_min=models.Min("ride_count"),
|
||||
ride_count_max=models.Max("ride_count"),
|
||||
coaster_count_min=models.Min("coaster_count"),
|
||||
coaster_count_max=models.Max("coaster_count"),
|
||||
)
|
||||
|
||||
return {
|
||||
'categorical': {
|
||||
'countries': countries,
|
||||
'states': states,
|
||||
'park_types': park_types,
|
||||
'statuses': statuses,
|
||||
'operators': operators,
|
||||
"categorical": {
|
||||
"countries": countries,
|
||||
"states": states,
|
||||
"park_types": park_types,
|
||||
"statuses": statuses,
|
||||
"operators": operators,
|
||||
},
|
||||
'ranges': {
|
||||
'opening_year': {
|
||||
'min': aggregates['opening_year_min'],
|
||||
'max': aggregates['opening_year_max'],
|
||||
'step': 1,
|
||||
'unit': 'year'
|
||||
"ranges": {
|
||||
"opening_year": {
|
||||
"min": aggregates["opening_year_min"],
|
||||
"max": aggregates["opening_year_max"],
|
||||
"step": 1,
|
||||
"unit": "year",
|
||||
},
|
||||
'size_acres': {
|
||||
'min': float(aggregates['size_min']) if aggregates['size_min'] else None,
|
||||
'max': float(aggregates['size_max']) if aggregates['size_max'] else None,
|
||||
'step': 1.0,
|
||||
'unit': 'acres'
|
||||
"size_acres": {
|
||||
"min": float(aggregates["size_min"]) if aggregates["size_min"] else None,
|
||||
"max": float(aggregates["size_max"]) if aggregates["size_max"] else None,
|
||||
"step": 1.0,
|
||||
"unit": "acres",
|
||||
},
|
||||
'average_rating': {
|
||||
'min': float(aggregates['rating_min']) if aggregates['rating_min'] else None,
|
||||
'max': float(aggregates['rating_max']) if aggregates['rating_max'] else None,
|
||||
'step': 0.1,
|
||||
'unit': 'stars'
|
||||
"average_rating": {
|
||||
"min": float(aggregates["rating_min"]) if aggregates["rating_min"] else None,
|
||||
"max": float(aggregates["rating_max"]) if aggregates["rating_max"] else None,
|
||||
"step": 0.1,
|
||||
"unit": "stars",
|
||||
},
|
||||
'ride_count': {
|
||||
'min': aggregates['ride_count_min'],
|
||||
'max': aggregates['ride_count_max'],
|
||||
'step': 1,
|
||||
'unit': 'rides'
|
||||
"ride_count": {
|
||||
"min": aggregates["ride_count_min"],
|
||||
"max": aggregates["ride_count_max"],
|
||||
"step": 1,
|
||||
"unit": "rides",
|
||||
},
|
||||
'coaster_count': {
|
||||
'min': aggregates['coaster_count_min'],
|
||||
'max': aggregates['coaster_count_max'],
|
||||
'step': 1,
|
||||
'unit': 'coasters'
|
||||
"coaster_count": {
|
||||
"min": aggregates["coaster_count_min"],
|
||||
"max": aggregates["coaster_count_max"],
|
||||
"step": 1,
|
||||
"unit": "coasters",
|
||||
},
|
||||
},
|
||||
'total_count': queryset.count(),
|
||||
"total_count": queryset.count(),
|
||||
}
|
||||
|
||||
def _get_status_label(self, status: str) -> str:
|
||||
"""Convert status code to human-readable label."""
|
||||
status_labels = {
|
||||
'OPERATING': 'Operating',
|
||||
'CLOSED_TEMP': 'Temporarily Closed',
|
||||
'CLOSED_PERM': 'Permanently Closed',
|
||||
'UNDER_CONSTRUCTION': 'Under Construction',
|
||||
"OPERATING": "Operating",
|
||||
"CLOSED_TEMP": "Temporarily Closed",
|
||||
"CLOSED_PERM": "Permanently Closed",
|
||||
"UNDER_CONSTRUCTION": "Under Construction",
|
||||
}
|
||||
if status in status_labels:
|
||||
return status_labels[status]
|
||||
@@ -405,23 +381,23 @@ class SmartParkLoader:

if filters:
# Create a consistent string representation of filters
filter_str = '_'.join(f"{k}:{v}" for k, v in sorted(filters.items()) if v)
filter_str = "_".join(f"{k}:{v}" for k, v in sorted(filters.items()) if v)
key_parts.append(filter_str)

return '_'.join(key_parts)
return "_".join(key_parts)
|
||||
|
||||
def invalidate_cache(self, filters: dict[str, Any] | None = None) -> None:
|
||||
"""Invalidate cached data for the given filters."""
|
||||
# This is a simplified implementation
|
||||
# In production, you might want to use cache versioning or tags
|
||||
cache_keys = [
|
||||
self._generate_cache_key('initial', filters),
|
||||
self._generate_cache_key('metadata', filters),
|
||||
self._generate_cache_key("initial", filters),
|
||||
self._generate_cache_key("metadata", filters),
|
||||
]
|
||||
|
||||
# Also invalidate progressive load caches
|
||||
for offset in range(0, 1000, self.PROGRESSIVE_LOAD_SIZE):
|
||||
cache_keys.append(self._generate_cache_key(f'progressive_{offset}', filters))
|
||||
cache_keys.append(self._generate_cache_key(f"progressive_{offset}", filters))
|
||||
|
||||
cache.delete_many(cache_keys)
|
||||
|
||||
|
||||
@@ -245,9 +245,7 @@ class ParkLocationService:
|
||||
return park_location
|
||||
|
||||
@classmethod
|
||||
def update_park_location(
|
||||
cls, park_location: ParkLocation, **updates
|
||||
) -> ParkLocation:
|
||||
def update_park_location(cls, park_location: ParkLocation, **updates) -> ParkLocation:
|
||||
"""
|
||||
Update park location with validation.
|
||||
|
||||
@@ -278,9 +276,7 @@ class ParkLocationService:
|
||||
return park_location
|
||||
|
||||
@classmethod
|
||||
def find_nearby_parks(
|
||||
cls, latitude: float, longitude: float, radius_km: float = 50
|
||||
) -> list[ParkLocation]:
|
||||
def find_nearby_parks(cls, latitude: float, longitude: float, radius_km: float = 50) -> list[ParkLocation]:
|
||||
"""
|
||||
Find parks near given coordinates using PostGIS.
|
||||
|
||||
@@ -298,9 +294,7 @@ class ParkLocationService:
center_point = Point(longitude, latitude, srid=4326)

return list(
ParkLocation.objects.filter(
point__distance_lte=(center_point, Distance(km=radius_km))
)
ParkLocation.objects.filter(point__distance_lte=(center_point, Distance(km=radius_km)))
.select_related("park", "park__operator")
.order_by("point__distance")
)
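The query above filters ParkLocation rows by distance from a PostGIS point and orders them by proximity. A hedged usage sketch, assuming a GeoDjango/PostGIS setup and that the service is importable from the parks services module (the import path is an assumption):

# Sketch only: requires GeoDjango + PostGIS and this app installed.
from apps.parks.services import ParkLocationService  # path assumed

# Roughly Sandusky, OH; radius in kilometres.
nearby = ParkLocationService.find_nearby_parks(latitude=41.48, longitude=-82.68, radius_km=75)
for location in nearby:
    print(location.park.name, location.park.operator)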
|
||||
@@ -349,9 +343,7 @@ class ParkLocationService:
|
||||
return park_location
|
||||
|
||||
@classmethod
|
||||
def _transform_osm_result(
|
||||
cls, osm_item: dict[str, Any]
|
||||
) -> dict[str, Any] | None:
|
||||
def _transform_osm_result(cls, osm_item: dict[str, Any]) -> dict[str, Any] | None:
|
||||
"""Transform OSM search result to our standard format."""
|
||||
try:
|
||||
address = osm_item.get("address", {})
|
||||
@@ -369,12 +361,7 @@ class ParkLocationService:
|
||||
or ""
|
||||
)
|
||||
|
||||
state = (
|
||||
address.get("state")
|
||||
or address.get("province")
|
||||
or address.get("region")
|
||||
or ""
|
||||
)
|
||||
state = address.get("state") or address.get("province") or address.get("region") or ""
|
||||
|
||||
country = address.get("country", "")
|
||||
postal_code = address.get("postcode", "")
|
||||
@@ -432,9 +419,7 @@ class ParkLocationService:
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def _transform_osm_reverse_result(
|
||||
cls, osm_result: dict[str, Any]
|
||||
) -> dict[str, Any]:
|
||||
def _transform_osm_reverse_result(cls, osm_result: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Transform OSM reverse geocoding result to our standard format."""
|
||||
address = osm_result.get("address", {})
|
||||
|
||||
@@ -443,20 +428,9 @@ class ParkLocationService:
street_name = address.get("road", "")
street_address = f"{street_number} {street_name}".strip()

city = (
address.get("city")
or address.get("town")
or address.get("village")
or address.get("municipality")
or ""
)
city = address.get("city") or address.get("town") or address.get("village") or address.get("municipality") or ""

state = (
address.get("state")
or address.get("province")
or address.get("region")
or ""
)
state = address.get("state") or address.get("province") or address.get("region") or ""

country = address.get("country", "")
postal_code = address.get("postcode", "")
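The chained or expressions above pick the first non-empty value from the OSM address dict, since OSM payloads use different keys for the same concept. A tiny stand-alone illustration with a made-up payload:

address = {"town": "Sandusky", "state": "", "region": "Ohio", "country": "United States"}

city = address.get("city") or address.get("town") or address.get("village") or address.get("municipality") or ""
state = address.get("state") or address.get("province") or address.get("region") or ""

print(city, state)  # Sandusky Ohio  (empty "state" falls through to "region")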
|
||||
|
||||
@@ -79,9 +79,7 @@ class ParkMediaService:
|
||||
return photo
|
||||
|
||||
@staticmethod
|
||||
def get_park_photos(
|
||||
park: Park, approved_only: bool = True, primary_first: bool = True
|
||||
) -> list[ParkPhoto]:
|
||||
def get_park_photos(park: Park, approved_only: bool = True, primary_first: bool = True) -> list[ParkPhoto]:
|
||||
"""
|
||||
Get photos for a park.
|
||||
|
||||
@@ -190,9 +188,7 @@ class ParkMediaService:
|
||||
photo.image.delete(save=False)
|
||||
photo.delete()
|
||||
|
||||
logger.info(
|
||||
f"Photo {photo_id} deleted from park {park_slug} by user {deleted_by.username}"
|
||||
)
|
||||
logger.info(f"Photo {photo_id} deleted from park {park_slug} by user {deleted_by.username}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete photo {photo.pk}: {str(e)}")
|
||||
@@ -238,7 +234,5 @@ class ParkMediaService:
|
||||
if ParkMediaService.approve_photo(photo, approved_by):
|
||||
approved_count += 1
|
||||
|
||||
logger.info(
|
||||
f"Bulk approved {approved_count} photos by user {approved_by.username}"
|
||||
)
|
||||
logger.info(f"Bulk approved {approved_count} photos by user {approved_by.username}")
|
||||
return approved_count
|
||||
|
||||
@@ -133,9 +133,7 @@ class ParkService:
|
||||
return park
|
||||
|
||||
@staticmethod
|
||||
def delete_park(
|
||||
*, park_id: int, deleted_by: Optional["AbstractUser"] = None
|
||||
) -> bool:
|
||||
def delete_park(*, park_id: int, deleted_by: Optional["AbstractUser"] = None) -> bool:
|
||||
"""
|
||||
Soft delete a park by setting status to DEMOLISHED.
|
||||
|
||||
@@ -219,9 +217,9 @@ class ParkService:
|
||||
)
|
||||
|
||||
# Calculate average rating
|
||||
avg_rating = ParkReview.objects.filter(
|
||||
park=park, is_published=True
|
||||
).aggregate(avg_rating=Avg("rating"))["avg_rating"]
|
||||
avg_rating = ParkReview.objects.filter(park=park, is_published=True).aggregate(avg_rating=Avg("rating"))[
|
||||
"avg_rating"
|
||||
]
|
||||
|
||||
# Update park fields
|
||||
park.ride_count = ride_stats["total_rides"] or 0
|
||||
|
||||
@@ -148,12 +148,8 @@ class RoadTripService:
|
||||
|
||||
# Configuration from Django settings
|
||||
self.cache_timeout = getattr(settings, "ROADTRIP_CACHE_TIMEOUT", 3600 * 24)
|
||||
self.route_cache_timeout = getattr(
|
||||
settings, "ROADTRIP_ROUTE_CACHE_TIMEOUT", 3600 * 6
|
||||
)
|
||||
self.user_agent = getattr(
|
||||
settings, "ROADTRIP_USER_AGENT", "ThrillWiki Road Trip Planner"
|
||||
)
|
||||
self.route_cache_timeout = getattr(settings, "ROADTRIP_ROUTE_CACHE_TIMEOUT", 3600 * 6)
|
||||
self.user_agent = getattr(settings, "ROADTRIP_USER_AGENT", "ThrillWiki Road Trip Planner")
|
||||
self.request_timeout = getattr(settings, "ROADTRIP_REQUEST_TIMEOUT", 10)
|
||||
self.max_retries = getattr(settings, "ROADTRIP_MAX_RETRIES", 3)
|
||||
self.backoff_factor = getattr(settings, "ROADTRIP_BACKOFF_FACTOR", 2)
|
||||
@@ -179,9 +175,7 @@ class RoadTripService:

for attempt in range(self.max_retries):
try:
response = self.session.get(
url, params=params, timeout=self.request_timeout
)
response = self.session.get(url, params=params, timeout=self.request_timeout)
response.raise_for_status()
return response.json()

@@ -192,9 +186,7 @@ class RoadTripService:
wait_time = self.backoff_factor**attempt
time.sleep(wait_time)
else:
raise OSMAPIException(
f"Failed to make request after {self.max_retries} attempts: {e}"
)
raise OSMAPIException(f"Failed to make request after {self.max_retries} attempts: {e}") from e
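With the defaults set earlier in this diff (max_retries = 3, backoff_factor = 2), the retry loop sleeps backoff_factor ** attempt seconds after a failed attempt and raises OSMAPIException once the attempts are exhausted. A quick stand-alone check of that wait schedule, assuming only non-final failed attempts sleep, as the else branch above suggests:

max_retries = 3
backoff_factor = 2

# Attempts are 0-based; under the assumption above, the final attempt raises instead of sleeping.
waits = [backoff_factor**attempt for attempt in range(max_retries - 1)]
print(waits)  # [1, 2] -> roughly 3 seconds of backoff before giving up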
|
||||
|
||||
def geocode_address(self, address: str) -> Coordinates | None:
|
||||
"""
|
||||
@@ -243,9 +235,7 @@ class RoadTripService:
|
||||
self.cache_timeout,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Geocoded '{address}' to {coords.latitude}, {coords.longitude}"
|
||||
)
|
||||
logger.info(f"Geocoded '{address}' to {coords.latitude}, {coords.longitude}")
|
||||
return coords
|
||||
else:
|
||||
logger.warning(f"No geocoding results for address: {address}")
|
||||
@@ -255,9 +245,7 @@ class RoadTripService:
|
||||
logger.error(f"Geocoding failed for '{address}': {e}")
|
||||
return None
|
||||
|
||||
def calculate_route(
|
||||
self, start_coords: Coordinates, end_coords: Coordinates
|
||||
) -> RouteInfo | None:
|
||||
def calculate_route(self, start_coords: Coordinates, end_coords: Coordinates) -> RouteInfo | None:
|
||||
"""
|
||||
Calculate route between two coordinate points using OSRM.
|
||||
|
||||
@@ -327,9 +315,7 @@ class RoadTripService:
|
||||
return route_info
|
||||
else:
|
||||
# Fallback to straight-line distance calculation
|
||||
logger.warning(
|
||||
"OSRM routing failed, falling back to straight-line distance"
|
||||
)
|
||||
logger.warning("OSRM routing failed, falling back to straight-line distance")
|
||||
return self._calculate_straight_line_route(start_coords, end_coords)
|
||||
|
||||
except Exception as e:
|
||||
@@ -337,9 +323,7 @@ class RoadTripService:
|
||||
# Fallback to straight-line distance
|
||||
return self._calculate_straight_line_route(start_coords, end_coords)
|
||||
|
||||
def _calculate_straight_line_route(
|
||||
self, start_coords: Coordinates, end_coords: Coordinates
|
||||
) -> RouteInfo:
|
||||
def _calculate_straight_line_route(self, start_coords: Coordinates, end_coords: Coordinates) -> RouteInfo:
|
||||
"""
|
||||
Calculate straight-line distance as fallback when routing fails.
|
||||
"""
|
||||
@@ -356,10 +340,7 @@ class RoadTripService:
dlat = lat2 - lat1
dlon = lon2 - lon1

a = (
math.sin(dlat / 2) ** 2
+ math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
)
a = math.sin(dlat / 2) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
c = 2 * math.asin(math.sqrt(a))

# Earth's radius in kilometers
@@ -376,9 +357,7 @@ class RoadTripService:
geometry=None,
)
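The single-expression a above is the haversine term; together with c = 2 * asin(sqrt(a)) and Earth's radius it gives the great-circle distance used as the routing fallback. A self-contained version with a worked example (the helper name and coordinates are illustrative):

import math

def straight_line_km(lat1_deg, lon1_deg, lat2_deg, lon2_deg) -> float:
    # Haversine great-circle distance between two lat/lon points.
    lat1, lon1, lat2, lon2 = map(math.radians, (lat1_deg, lon1_deg, lat2_deg, lon2_deg))
    dlat = lat2 - lat1
    dlon = lon2 - lon1
    a = math.sin(dlat / 2) ** 2 + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
    c = 2 * math.asin(math.sqrt(a))
    return 6371 * c  # mean Earth radius in km

# Cedar Point to Kings Island: on the order of 270 km as the crow flies.
print(round(straight_line_km(41.4822, -82.6835, 39.3428, -84.2686)))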
|
||||
|
||||
def find_parks_along_route(
self, start_park: "Park", end_park: "Park", max_detour_km: float = 50
) -> list["Park"]:
def find_parks_along_route(self, start_park: "Park", end_park: "Park", max_detour_km: float = 50) -> list["Park"]:
"""
Find parks along a route within specified detour distance.

@@ -443,9 +422,7 @@ class RoadTripService:
return parks_along_route

def _calculate_detour_distance(
self, start: Coordinates, end: Coordinates, waypoint: Coordinates
) -> float | None:
def _calculate_detour_distance(self, start: Coordinates, end: Coordinates, waypoint: Coordinates) -> float | None:
"""
Calculate the detour distance when visiting a waypoint.
"""

@@ -508,9 +485,7 @@ class RoadTripService:
return best_trip

def _optimize_trip_nearest_neighbor(
self, park_list: list["Park"]
) -> RoadTrip | None:
def _optimize_trip_nearest_neighbor(self, park_list: list["Park"]) -> RoadTrip | None:
"""
Optimize trip using nearest neighbor heuristic (for larger lists).
"""

@@ -536,9 +511,7 @@ class RoadTripService:
if not park_coords:
continue

route = self.calculate_route(
Coordinates(*current_coords), Coordinates(*park_coords)
)
route = self.calculate_route(Coordinates(*current_coords), Coordinates(*park_coords))

if route and route.distance_km < min_distance:
min_distance = route.distance_km

@@ -553,9 +526,7 @@ class RoadTripService:
return self._create_trip_from_order(ordered_parks)
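The nearest-neighbour optimizer touched above greedily extends the trip with whichever unvisited park is closest to the current position. A compact sketch of that heuristic over plain (lat, lon) tuples, reusing the haversine_km helper sketched earlier (illustrative only, not the service's actual implementation):

    def nearest_neighbor_order(coords):
        """Greedy ordering: always hop to the closest remaining point."""
        ordered = [coords[0]]
        remaining = list(coords[1:])
        while remaining:
            current = ordered[-1]
            nearest = min(remaining, key=lambda point: haversine_km(*current, *point))
            remaining.remove(nearest)
            ordered.append(nearest)
        return ordered

This is O(n^2) and only approximates the optimal order, which is why the service reserves it for larger park lists.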
def _create_trip_from_order(
self, ordered_parks: list["Park"]
) -> RoadTrip | None:
def _create_trip_from_order(self, ordered_parks: list["Park"]) -> RoadTrip | None:
"""
Create a RoadTrip object from an ordered list of parks.
"""

@@ -576,9 +547,7 @@ class RoadTripService:
if not from_coords or not to_coords:
continue

route = self.calculate_route(
Coordinates(*from_coords), Coordinates(*to_coords)
)
route = self.calculate_route(Coordinates(*from_coords), Coordinates(*to_coords))

if route:
legs.append(TripLeg(from_park=from_park, to_park=to_park, route=route))

@@ -595,9 +564,7 @@ class RoadTripService:
total_duration_minutes=total_duration,
)

def get_park_distances(
self, center_park: "Park", radius_km: float = 100
) -> list[dict[str, Any]]:
def get_park_distances(self, center_park: "Park", radius_km: float = 100) -> list[dict[str, Any]]:
"""
Get all parks within radius of a center park with distances.

@@ -621,9 +588,7 @@ class RoadTripService:
search_distance = Distance(km=radius_km)

nearby_parks = (
Park.objects.filter(
location__point__distance_lte=(center_point, search_distance)
)
Park.objects.filter(location__point__distance_lte=(center_point, search_distance))
.exclude(id=center_park.id)
.select_related("location")
)

@@ -635,9 +600,7 @@ class RoadTripService:
if not park_coords:
continue

route = self.calculate_route(
Coordinates(*center_coords), Coordinates(*park_coords)
)
route = self.calculate_route(Coordinates(*center_coords), Coordinates(*park_coords))

if route:
results.append(

@@ -691,9 +654,7 @@ class RoadTripService:
if coords:
location.set_coordinates(coords.latitude, coords.longitude)
location.save()
logger.info(
f"Geocoded park '{park.name}' to {coords.latitude}, {coords.longitude}"
)
logger.info(f"Geocoded park '{park.name}' to {coords.latitude}, {coords.longitude}")
return True

return False
@@ -15,6 +15,7 @@ logger = logging.getLogger(__name__)
# Computed Field Maintenance Signals
# =============================================================================

def update_park_search_text(park):
"""
Update park's search_text computed field.

@@ -27,17 +28,17 @@ def update_park_search_text(park):
try:
park._populate_computed_fields()
park.save(update_fields=['search_text'])
park.save(update_fields=["search_text"])
logger.debug(f"Updated search_text for park {park.pk}")
except Exception as e:
logger.exception(f"Failed to update search_text for park {park.pk}: {e}")

# Status values that count as "active" rides for counting purposes
ACTIVE_STATUSES = {'OPERATING', 'SEASONAL', 'UNDER_CONSTRUCTION'}
ACTIVE_STATUSES = {"OPERATING", "SEASONAL", "UNDER_CONSTRUCTION"}

# Status values that should decrement ride counts
INACTIVE_STATUSES = {'CLOSED_PERM', 'DEMOLISHED', 'RELOCATED', 'REMOVED'}
INACTIVE_STATUSES = {"CLOSED_PERM", "DEMOLISHED", "RELOCATED", "REMOVED"}
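The handlers below call a should_update_counts(old_status, new_status) helper that this hunk does not show. Assuming it only checks whether a transition crosses the active/inactive boundary defined by the two sets above, a minimal sketch might look like this (hypothetical, for illustration only):

    def should_update_counts(old_status, new_status):
        """Recount rides only when a status change moves a ride into or out of the active set."""
        was_active = old_status in ACTIVE_STATUSES
        is_active = new_status in ACTIVE_STATUSES
        return was_active != is_active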
def update_park_ride_counts(park, old_status=None, new_status=None):

@@ -54,11 +55,11 @@ def update_park_ride_counts(park, old_status=None, new_status=None):
return

# Get park ID
park_id = park.pk if hasattr(park, 'pk') else park
park_id = park.pk if hasattr(park, "pk") else park

try:
# Fetch the park if we only have an ID
if not hasattr(park, 'rides'):
if not hasattr(park, "rides"):
park = Park.objects.get(id=park_id)

# Build the query for active rides

@@ -72,14 +73,9 @@ def update_park_ride_counts(park, old_status=None, new_status=None):
coaster_count = park.rides.filter(operating_rides, category__in=["RC", "WC"]).count()

# Update park counts
Park.objects.filter(id=park_id).update(
ride_count=ride_count, coaster_count=coaster_count
)
Park.objects.filter(id=park_id).update(ride_count=ride_count, coaster_count=coaster_count)

logger.debug(
f"Updated park {park_id} counts: "
f"ride_count={ride_count}, coaster_count={coaster_count}"
)
logger.debug(f"Updated park {park_id} counts: " f"ride_count={ride_count}, coaster_count={coaster_count}")

except Park.DoesNotExist:
logger.warning(f"Park {park_id} does not exist, cannot update counts")

@@ -124,14 +120,12 @@ def ride_saved(sender, instance, created, **kwargs):
return

# Check if status changed using model's tracker if available
if hasattr(instance, 'tracker') and hasattr(instance.tracker, 'has_changed'):
if instance.tracker.has_changed('status'):
old_status = instance.tracker.previous('status')
if hasattr(instance, "tracker") and hasattr(instance.tracker, "has_changed"):
if instance.tracker.has_changed("status"):
old_status = instance.tracker.previous("status")
new_status = instance.status
if should_update_counts(old_status, new_status):
logger.info(
f"Ride {instance.pk} status changed: {old_status} → {new_status}"
)
logger.info(f"Ride {instance.pk} status changed: {old_status} → {new_status}")
update_park_ride_counts(instance.park, old_status, new_status)
else:
# Fallback: always update counts on save

@@ -151,6 +145,7 @@ def ride_deleted(sender, instance, **kwargs):
# FSM transition signal handlers

def handle_ride_status_transition(instance, source, target, user, **kwargs):
"""
Handle ride status FSM transitions.

@@ -165,10 +160,7 @@ def handle_ride_status_transition(instance, source, target, user, **kwargs):
user: The user who initiated the transition.
"""
if should_update_counts(source, target):
logger.info(
f"FSM transition: Ride {instance.pk} {source} → {target} "
f"by {user if user else 'system'}"
)
logger.info(f"FSM transition: Ride {instance.pk} {source} → {target} " f"by {user if user else 'system'}")
update_park_ride_counts(instance.park, source, target)

@@ -176,7 +168,8 @@ def handle_ride_status_transition(instance, source, target, user, **kwargs):
# Computed Field Maintenance Signal Handlers
# =============================================================================

@receiver(post_save, sender='parks.ParkLocation')
@receiver(post_save, sender="parks.ParkLocation")
def update_park_search_text_on_location_change(sender, instance, **kwargs):
"""
Update park search_text when location changes.

@@ -186,13 +179,13 @@ def update_park_search_text_on_location_change(sender, instance, **kwargs):
location information.
"""
try:
if hasattr(instance, 'park') and instance.park:
if hasattr(instance, "park") and instance.park:
update_park_search_text(instance.park)
except Exception as e:
logger.exception(f"Failed to update park search_text on location change: {e}")

@receiver(post_save, sender='parks.Company')
@receiver(post_save, sender="parks.Company")
def update_park_search_text_on_company_change(sender, instance, **kwargs):
"""
Update park search_text when operator/owner name changes.
@@ -5,48 +5,48 @@ register = template.Library()
|
||||
|
||||
# Status configuration mapping for parks and rides
|
||||
STATUS_CONFIG = {
|
||||
'OPERATING': {
|
||||
'label': 'Operating',
|
||||
'classes': 'bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200',
|
||||
'icon': True,
|
||||
"OPERATING": {
|
||||
"label": "Operating",
|
||||
"classes": "bg-green-100 text-green-800 dark:bg-green-900 dark:text-green-200",
|
||||
"icon": True,
|
||||
},
|
||||
'CLOSED_TEMP': {
|
||||
'label': 'Temporarily Closed',
|
||||
'classes': 'bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200',
|
||||
'icon': True,
|
||||
"CLOSED_TEMP": {
|
||||
"label": "Temporarily Closed",
|
||||
"classes": "bg-yellow-100 text-yellow-800 dark:bg-yellow-900 dark:text-yellow-200",
|
||||
"icon": True,
|
||||
},
|
||||
'CLOSED_PERM': {
|
||||
'label': 'Permanently Closed',
|
||||
'classes': 'bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200',
|
||||
'icon': True,
|
||||
"CLOSED_PERM": {
|
||||
"label": "Permanently Closed",
|
||||
"classes": "bg-red-100 text-red-800 dark:bg-red-900 dark:text-red-200",
|
||||
"icon": True,
|
||||
},
|
||||
'CONSTRUCTION': {
|
||||
'label': 'Under Construction',
|
||||
'classes': 'bg-orange-100 text-orange-800 dark:bg-orange-900 dark:text-orange-200',
|
||||
'icon': True,
|
||||
"CONSTRUCTION": {
|
||||
"label": "Under Construction",
|
||||
"classes": "bg-orange-100 text-orange-800 dark:bg-orange-900 dark:text-orange-200",
|
||||
"icon": True,
|
||||
},
|
||||
'DEMOLISHED': {
|
||||
'label': 'Demolished',
|
||||
'classes': 'bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300',
|
||||
'icon': True,
|
||||
"DEMOLISHED": {
|
||||
"label": "Demolished",
|
||||
"classes": "bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300",
|
||||
"icon": True,
|
||||
},
|
||||
'RELOCATED': {
|
||||
'label': 'Relocated',
|
||||
'classes': 'bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-200',
|
||||
'icon': True,
|
||||
"RELOCATED": {
|
||||
"label": "Relocated",
|
||||
"classes": "bg-purple-100 text-purple-800 dark:bg-purple-900 dark:text-purple-200",
|
||||
"icon": True,
|
||||
},
|
||||
'SBNO': {
|
||||
'label': 'Standing But Not Operating',
|
||||
'classes': 'bg-amber-100 text-amber-800 dark:bg-amber-900 dark:text-amber-200',
|
||||
'icon': True,
|
||||
"SBNO": {
|
||||
"label": "Standing But Not Operating",
|
||||
"classes": "bg-amber-100 text-amber-800 dark:bg-amber-900 dark:text-amber-200",
|
||||
"icon": True,
|
||||
},
|
||||
}
|
||||
|
||||
# Default config for unknown statuses
|
||||
DEFAULT_STATUS_CONFIG = {
|
||||
'label': 'Unknown',
|
||||
'classes': 'bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300',
|
||||
'icon': False,
|
||||
"label": "Unknown",
|
||||
"classes": "bg-gray-100 text-gray-800 dark:bg-gray-700 dark:text-gray-300",
|
||||
"icon": False,
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -31,39 +31,28 @@ class ParkTransitionTests(TestCase):
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.user = User.objects.create_user(
|
||||
username='testuser',
|
||||
email='test@example.com',
|
||||
password='testpass123',
|
||||
role='USER'
|
||||
username="testuser", email="test@example.com", password="testpass123", role="USER"
|
||||
)
|
||||
self.moderator = User.objects.create_user(
|
||||
username='moderator',
|
||||
email='moderator@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="moderator", email="moderator@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
self.admin = User.objects.create_user(
|
||||
username='admin',
|
||||
email='admin@example.com',
|
||||
password='testpass123',
|
||||
role='ADMIN'
|
||||
username="admin", email="admin@example.com", password="testpass123", role="ADMIN"
|
||||
)
|
||||
|
||||
# Create operator company
|
||||
self.operator = Company.objects.create(
|
||||
name='Test Operator',
|
||||
description='Test operator company',
|
||||
roles=['OPERATOR']
|
||||
name="Test Operator", description="Test operator company", roles=["OPERATOR"]
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING', **kwargs):
|
||||
def _create_park(self, status="OPERATING", **kwargs):
|
||||
"""Helper to create a Park with specified status."""
|
||||
defaults = {
|
||||
'name': 'Test Park',
|
||||
'slug': 'test-park',
|
||||
'description': 'A test park',
|
||||
'operator': self.operator,
|
||||
'timezone': 'America/New_York'
|
||||
"name": "Test Park",
|
||||
"slug": "test-park",
|
||||
"description": "A test park",
|
||||
"operator": self.operator,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(status=status, **defaults)
|
||||
@@ -74,25 +63,25 @@ class ParkTransitionTests(TestCase):
|
||||
|
||||
def test_operating_to_closed_temp_transition(self):
|
||||
"""Test transition from OPERATING to CLOSED_TEMP."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
park.transition_to_closed_temp(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_TEMP')
|
||||
self.assertEqual(park.status, "CLOSED_TEMP")
|
||||
|
||||
def test_operating_to_closed_perm_transition(self):
|
||||
"""Test transition from OPERATING to CLOSED_PERM."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
park.transition_to_closed_perm(user=self.moderator)
|
||||
park.closing_date = date.today()
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
self.assertIsNotNone(park.closing_date)
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
@@ -101,14 +90,14 @@ class ParkTransitionTests(TestCase):
|
||||
|
||||
def test_under_construction_to_operating_transition(self):
|
||||
"""Test transition from UNDER_CONSTRUCTION to OPERATING."""
|
||||
park = self._create_park(status='UNDER_CONSTRUCTION')
|
||||
self.assertEqual(park.status, 'UNDER_CONSTRUCTION')
|
||||
park = self._create_park(status="UNDER_CONSTRUCTION")
|
||||
self.assertEqual(park.status, "UNDER_CONSTRUCTION")
|
||||
|
||||
park.transition_to_operating(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Closed temp transitions
|
||||
@@ -116,24 +105,24 @@ class ParkTransitionTests(TestCase):
|
||||
|
||||
def test_closed_temp_to_operating_transition(self):
|
||||
"""Test transition from CLOSED_TEMP to OPERATING (reopen)."""
|
||||
park = self._create_park(status='CLOSED_TEMP')
|
||||
park = self._create_park(status="CLOSED_TEMP")
|
||||
|
||||
park.transition_to_operating(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
def test_closed_temp_to_closed_perm_transition(self):
|
||||
"""Test transition from CLOSED_TEMP to CLOSED_PERM."""
|
||||
park = self._create_park(status='CLOSED_TEMP')
|
||||
park = self._create_park(status="CLOSED_TEMP")
|
||||
|
||||
park.transition_to_closed_perm(user=self.moderator)
|
||||
park.closing_date = date.today()
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Closed perm transitions (to final states)
|
||||
@@ -141,23 +130,23 @@ class ParkTransitionTests(TestCase):
|
||||
|
||||
def test_closed_perm_to_demolished_transition(self):
|
||||
"""Test transition from CLOSED_PERM to DEMOLISHED."""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
park.transition_to_demolished(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'DEMOLISHED')
|
||||
self.assertEqual(park.status, "DEMOLISHED")
|
||||
|
||||
def test_closed_perm_to_relocated_transition(self):
|
||||
"""Test transition from CLOSED_PERM to RELOCATED."""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
park.transition_to_relocated(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'RELOCATED')
|
||||
self.assertEqual(park.status, "RELOCATED")
|
||||
|
||||
# -------------------------------------------------------------------------
|
||||
# Invalid transitions (final states)
|
||||
@@ -165,28 +154,28 @@ class ParkTransitionTests(TestCase):
|
||||
|
||||
def test_demolished_cannot_transition(self):
|
||||
"""Test that DEMOLISHED state cannot transition further."""
|
||||
park = self._create_park(status='DEMOLISHED')
|
||||
park = self._create_park(status="DEMOLISHED")
|
||||
|
||||
with self.assertRaises(TransitionNotAllowed):
|
||||
park.transition_to_operating(user=self.moderator)
|
||||
|
||||
def test_relocated_cannot_transition(self):
|
||||
"""Test that RELOCATED state cannot transition further."""
|
||||
park = self._create_park(status='RELOCATED')
|
||||
park = self._create_park(status="RELOCATED")
|
||||
|
||||
with self.assertRaises(TransitionNotAllowed):
|
||||
park.transition_to_operating(user=self.moderator)
|
||||
|
||||
def test_operating_cannot_directly_demolish(self):
|
||||
"""Test that OPERATING cannot directly transition to DEMOLISHED."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
with self.assertRaises(TransitionNotAllowed):
|
||||
park.transition_to_demolished(user=self.moderator)
|
||||
|
||||
def test_operating_cannot_directly_relocate(self):
|
||||
"""Test that OPERATING cannot directly transition to RELOCATED."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
with self.assertRaises(TransitionNotAllowed):
|
||||
park.transition_to_relocated(user=self.moderator)
|
||||
@@ -197,69 +186,69 @@ class ParkTransitionTests(TestCase):
|
||||
|
||||
def test_reopen_wrapper_method(self):
|
||||
"""Test the reopen() wrapper method."""
|
||||
park = self._create_park(status='CLOSED_TEMP')
|
||||
park = self._create_park(status="CLOSED_TEMP")
|
||||
|
||||
park.reopen(user=self.user)
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
def test_close_temporarily_wrapper_method(self):
|
||||
"""Test the close_temporarily() wrapper method."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
park.close_temporarily(user=self.user)
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_TEMP')
|
||||
self.assertEqual(park.status, "CLOSED_TEMP")
|
||||
|
||||
def test_close_permanently_wrapper_method(self):
|
||||
"""Test the close_permanently() wrapper method."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
closing = date(2025, 12, 31)
|
||||
|
||||
park.close_permanently(closing_date=closing, user=self.moderator)
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
self.assertEqual(park.closing_date, closing)
|
||||
|
||||
def test_close_permanently_without_date(self):
|
||||
"""Test close_permanently() without closing_date."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
park.close_permanently(user=self.moderator)
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
self.assertIsNone(park.closing_date)
|
||||
|
||||
def test_demolish_wrapper_method(self):
|
||||
"""Test the demolish() wrapper method."""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
park.demolish(user=self.moderator)
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'DEMOLISHED')
|
||||
self.assertEqual(park.status, "DEMOLISHED")
|
||||
|
||||
def test_relocate_wrapper_method(self):
|
||||
"""Test the relocate() wrapper method."""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
park.relocate(user=self.moderator)
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'RELOCATED')
|
||||
self.assertEqual(park.status, "RELOCATED")
|
||||
|
||||
def test_start_construction_wrapper_method(self):
|
||||
"""Test the start_construction() wrapper method if applicable."""
|
||||
# This depends on allowed transitions - skip if not allowed
|
||||
try:
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
park.start_construction(user=self.moderator)
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'UNDER_CONSTRUCTION')
|
||||
self.assertEqual(park.status, "UNDER_CONSTRUCTION")
|
||||
except TransitionNotAllowed:
|
||||
# If transition from OPERATING to UNDER_CONSTRUCTION is not allowed
|
||||
pass
|
||||
@@ -276,52 +265,44 @@ class ParkTransitionHistoryTests(TestCase):
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.moderator = User.objects.create_user(
|
||||
username='moderator',
|
||||
email='moderator@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="moderator", email="moderator@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
self.operator = Company.objects.create(
|
||||
name='Test Operator',
|
||||
description='Test operator company',
|
||||
roles=['OPERATOR']
|
||||
name="Test Operator", description="Test operator company", roles=["OPERATOR"]
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING'):
|
||||
def _create_park(self, status="OPERATING"):
|
||||
"""Helper to create a Park."""
|
||||
return Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A test park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A test park",
|
||||
operator=self.operator,
|
||||
status=status,
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
def test_transition_creates_state_log(self):
|
||||
"""Test that transitions create StateLog entries."""
|
||||
from django_fsm_log.models import StateLog
|
||||
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
park.transition_to_closed_temp(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
log = StateLog.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id
|
||||
).first()
|
||||
log = StateLog.objects.filter(content_type=park_ct, object_id=park.id).first()
|
||||
|
||||
self.assertIsNotNone(log)
|
||||
self.assertEqual(log.state, 'CLOSED_TEMP')
|
||||
self.assertEqual(log.state, "CLOSED_TEMP")
|
||||
self.assertEqual(log.by, self.moderator)
|
||||
|
||||
def test_multiple_transitions_create_multiple_logs(self):
|
||||
"""Test that multiple transitions create multiple log entries."""
|
||||
from django_fsm_log.models import StateLog
|
||||
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
|
||||
# First transition
|
||||
@@ -332,29 +313,23 @@ class ParkTransitionHistoryTests(TestCase):
|
||||
park.transition_to_operating(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
logs = StateLog.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id
|
||||
).order_by('timestamp')
|
||||
logs = StateLog.objects.filter(content_type=park_ct, object_id=park.id).order_by("timestamp")
|
||||
|
||||
self.assertEqual(logs.count(), 2)
|
||||
self.assertEqual(logs[0].state, 'CLOSED_TEMP')
|
||||
self.assertEqual(logs[1].state, 'OPERATING')
|
||||
self.assertEqual(logs[0].state, "CLOSED_TEMP")
|
||||
self.assertEqual(logs[1].state, "OPERATING")
|
||||
|
||||
def test_transition_log_includes_user(self):
|
||||
"""Test that transition logs include the user who made the change."""
|
||||
from django_fsm_log.models import StateLog
|
||||
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
park.transition_to_closed_perm(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
log = StateLog.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id
|
||||
).first()
|
||||
log = StateLog.objects.filter(content_type=park_ct, object_id=park.id).first()
|
||||
|
||||
self.assertEqual(log.by, self.moderator)
|
||||
|
||||
@@ -370,24 +345,20 @@ class ParkBusinessLogicTests(TestCase):
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.operator = Company.objects.create(
|
||||
name='Test Operator',
|
||||
description='Test operator company',
|
||||
roles=['OPERATOR']
|
||||
name="Test Operator", description="Test operator company", roles=["OPERATOR"]
|
||||
)
|
||||
self.property_owner = Company.objects.create(
|
||||
name='Property Owner',
|
||||
description='Property owner company',
|
||||
roles=['PROPERTY_OWNER']
|
||||
name="Property Owner", description="Property owner company", roles=["PROPERTY_OWNER"]
|
||||
)
|
||||
|
||||
def test_park_creates_with_valid_operator(self):
|
||||
"""Test park can be created with valid operator."""
|
||||
park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A test park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A test park",
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
self.assertEqual(park.operator, self.operator)
|
||||
@@ -395,35 +366,32 @@ class ParkBusinessLogicTests(TestCase):
|
||||
def test_park_slug_auto_generated(self):
|
||||
"""Test that park slug is auto-generated from name."""
|
||||
park = Park.objects.create(
|
||||
name='My Amazing Theme Park',
|
||||
description='A test park',
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
name="My Amazing Theme Park", description="A test park", operator=self.operator, timezone="America/New_York"
|
||||
)
|
||||
|
||||
self.assertEqual(park.slug, 'my-amazing-theme-park')
|
||||
self.assertEqual(park.slug, "my-amazing-theme-park")
|
||||
|
||||
def test_park_url_generated(self):
|
||||
"""Test that frontend URL is generated on save."""
|
||||
park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A test park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A test park",
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
self.assertIn('test-park', park.url)
|
||||
self.assertIn("test-park", park.url)
|
||||
|
||||
def test_opening_year_computed_from_opening_date(self):
|
||||
"""Test that opening_year is computed from opening_date."""
|
||||
park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A test park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A test park",
|
||||
operator=self.operator,
|
||||
opening_date=date(2020, 6, 15),
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
self.assertEqual(park.opening_year, 2020)
|
||||
@@ -431,26 +399,26 @@ class ParkBusinessLogicTests(TestCase):
|
||||
def test_search_text_populated(self):
|
||||
"""Test that search_text is populated on save."""
|
||||
park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A wonderful theme park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A wonderful theme park",
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
self.assertIn('test park', park.search_text)
|
||||
self.assertIn('wonderful theme park', park.search_text)
|
||||
self.assertIn('test operator', park.search_text)
|
||||
self.assertIn("test park", park.search_text)
|
||||
self.assertIn("wonderful theme park", park.search_text)
|
||||
self.assertIn("test operator", park.search_text)
|
||||
|
||||
def test_park_with_property_owner(self):
|
||||
"""Test park with separate property owner."""
|
||||
park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A test park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A test park",
|
||||
operator=self.operator,
|
||||
property_owner=self.property_owner,
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
self.assertEqual(park.operator, self.operator)
|
||||
@@ -468,9 +436,7 @@ class ParkSlugHistoryTests(TestCase):
|
||||
def setUp(self):
|
||||
"""Set up test fixtures."""
|
||||
self.operator = Company.objects.create(
|
||||
name='Test Operator',
|
||||
description='Test operator company',
|
||||
roles=['OPERATOR']
|
||||
name="Test Operator", description="Test operator company", roles=["OPERATOR"]
|
||||
)
|
||||
|
||||
def test_historical_slug_created_on_name_change(self):
|
||||
@@ -480,25 +446,18 @@ class ParkSlugHistoryTests(TestCase):
|
||||
from apps.core.history import HistoricalSlug
|
||||
|
||||
park = Park.objects.create(
|
||||
name='Original Name',
|
||||
description='A test park',
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
name="Original Name", description="A test park", operator=self.operator, timezone="America/New_York"
|
||||
)
|
||||
|
||||
original_slug = park.slug
|
||||
|
||||
# Change name
|
||||
park.name = 'New Name'
|
||||
park.name = "New Name"
|
||||
park.save()
|
||||
|
||||
# Check historical slug was created
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
historical = HistoricalSlug.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id,
|
||||
slug=original_slug
|
||||
).first()
|
||||
historical = HistoricalSlug.objects.filter(content_type=park_ct, object_id=park.id, slug=original_slug).first()
|
||||
|
||||
self.assertIsNotNone(historical)
|
||||
self.assertEqual(historical.slug, original_slug)
|
||||
@@ -506,14 +465,14 @@ class ParkSlugHistoryTests(TestCase):
|
||||
def test_get_by_slug_finds_current_slug(self):
|
||||
"""Test get_by_slug finds park by current slug."""
|
||||
park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
description='A test park',
|
||||
name="Test Park",
|
||||
slug="test-park",
|
||||
description="A test park",
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
timezone="America/New_York",
|
||||
)
|
||||
|
||||
found_park, is_historical = Park.get_by_slug('test-park')
|
||||
found_park, is_historical = Park.get_by_slug("test-park")
|
||||
|
||||
self.assertEqual(found_park, park)
|
||||
self.assertFalse(is_historical)
|
||||
@@ -522,16 +481,13 @@ class ParkSlugHistoryTests(TestCase):
|
||||
"""Test get_by_slug finds park by historical slug."""
|
||||
|
||||
park = Park.objects.create(
|
||||
name='Original Name',
|
||||
description='A test park',
|
||||
operator=self.operator,
|
||||
timezone='America/New_York'
|
||||
name="Original Name", description="A test park", operator=self.operator, timezone="America/New_York"
|
||||
)
|
||||
|
||||
original_slug = park.slug
|
||||
|
||||
# Change name to create historical slug
|
||||
park.name = 'New Name'
|
||||
park.name = "New Name"
|
||||
park.save()
|
||||
|
||||
# Find by historical slug
|
||||
|
||||
@@ -22,33 +22,24 @@ class ParkOpeningWorkflowTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.user = User.objects.create_user(
|
||||
username='park_user',
|
||||
email='park_user@example.com',
|
||||
password='testpass123',
|
||||
role='USER'
|
||||
username="park_user", email="park_user@example.com", password="testpass123", role="USER"
|
||||
)
|
||||
cls.moderator = User.objects.create_user(
|
||||
username='park_mod',
|
||||
email='park_mod@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="park_mod", email="park_mod@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING', **kwargs):
|
||||
def _create_park(self, status="OPERATING", **kwargs):
|
||||
"""Helper to create a park."""
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator {status}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator {status}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park {status}',
|
||||
'slug': f'test-park-{status.lower()}-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park {status}",
|
||||
"slug": f"test-park-{status.lower()}-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
@@ -59,16 +50,16 @@ class ParkOpeningWorkflowTests(TestCase):
|
||||
|
||||
Flow: UNDER_CONSTRUCTION → OPERATING
|
||||
"""
|
||||
park = self._create_park(status='UNDER_CONSTRUCTION')
|
||||
park = self._create_park(status="UNDER_CONSTRUCTION")
|
||||
|
||||
self.assertEqual(park.status, 'UNDER_CONSTRUCTION')
|
||||
self.assertEqual(park.status, "UNDER_CONSTRUCTION")
|
||||
|
||||
# Park opens
|
||||
park.transition_to_operating(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
|
||||
class ParkTemporaryClosureWorkflowTests(TestCase):
|
||||
@@ -77,26 +68,20 @@ class ParkTemporaryClosureWorkflowTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.user = User.objects.create_user(
|
||||
username='temp_closure_user',
|
||||
email='temp_closure@example.com',
|
||||
password='testpass123',
|
||||
role='USER'
|
||||
username="temp_closure_user", email="temp_closure@example.com", password="testpass123", role="USER"
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING', **kwargs):
|
||||
def _create_park(self, status="OPERATING", **kwargs):
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator Temp {timezone.now().timestamp()}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator Temp {timezone.now().timestamp()}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park Temp {timezone.now().timestamp()}',
|
||||
'slug': f'test-park-temp-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park Temp {timezone.now().timestamp()}",
|
||||
"slug": f"test-park-temp-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
@@ -107,23 +92,23 @@ class ParkTemporaryClosureWorkflowTests(TestCase):
|
||||
|
||||
Flow: OPERATING → CLOSED_TEMP → OPERATING
|
||||
"""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
# Close temporarily (e.g., off-season)
|
||||
park.transition_to_closed_temp(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_TEMP')
|
||||
self.assertEqual(park.status, "CLOSED_TEMP")
|
||||
|
||||
# Reopen
|
||||
park.transition_to_operating(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
|
||||
class ParkPermanentClosureWorkflowTests(TestCase):
|
||||
@@ -132,26 +117,20 @@ class ParkPermanentClosureWorkflowTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.moderator = User.objects.create_user(
|
||||
username='perm_mod',
|
||||
email='perm_mod@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="perm_mod", email="perm_mod@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING', **kwargs):
|
||||
def _create_park(self, status="OPERATING", **kwargs):
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator Perm {timezone.now().timestamp()}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator Perm {timezone.now().timestamp()}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park Perm {timezone.now().timestamp()}',
|
||||
'slug': f'test-park-perm-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park Perm {timezone.now().timestamp()}",
|
||||
"slug": f"test-park-perm-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
@@ -162,7 +141,7 @@ class ParkPermanentClosureWorkflowTests(TestCase):
|
||||
|
||||
Flow: OPERATING → CLOSED_PERM
|
||||
"""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
# Close permanently
|
||||
park.transition_to_closed_perm(user=self.moderator)
|
||||
@@ -170,7 +149,7 @@ class ParkPermanentClosureWorkflowTests(TestCase):
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
self.assertIsNotNone(park.closing_date)
|
||||
|
||||
def test_park_permanent_closure_from_temp(self):
|
||||
@@ -179,7 +158,7 @@ class ParkPermanentClosureWorkflowTests(TestCase):
|
||||
|
||||
Flow: OPERATING → CLOSED_TEMP → CLOSED_PERM
|
||||
"""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
# Temporary closure
|
||||
park.transition_to_closed_temp(user=self.moderator)
|
||||
@@ -191,7 +170,7 @@ class ParkPermanentClosureWorkflowTests(TestCase):
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
|
||||
|
||||
class ParkDemolitionWorkflowTests(TestCase):
|
||||
@@ -200,26 +179,20 @@ class ParkDemolitionWorkflowTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.moderator = User.objects.create_user(
|
||||
username='demo_mod',
|
||||
email='demo_mod@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="demo_mod", email="demo_mod@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
|
||||
def _create_park(self, status='CLOSED_PERM', **kwargs):
|
||||
def _create_park(self, status="CLOSED_PERM", **kwargs):
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator Demo {timezone.now().timestamp()}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator Demo {timezone.now().timestamp()}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park Demo {timezone.now().timestamp()}',
|
||||
'slug': f'test-park-demo-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park Demo {timezone.now().timestamp()}",
|
||||
"slug": f"test-park-demo-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
@@ -230,20 +203,20 @@ class ParkDemolitionWorkflowTests(TestCase):
|
||||
|
||||
Flow: OPERATING → CLOSED_PERM → DEMOLISHED
|
||||
"""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
# Demolish
|
||||
park.transition_to_demolished(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'DEMOLISHED')
|
||||
self.assertEqual(park.status, "DEMOLISHED")
|
||||
|
||||
def test_demolished_is_final_state(self):
|
||||
"""Test that demolished parks cannot transition further."""
|
||||
from django_fsm import TransitionNotAllowed
|
||||
|
||||
park = self._create_park(status='DEMOLISHED')
|
||||
park = self._create_park(status="DEMOLISHED")
|
||||
|
||||
# Cannot transition from demolished
|
||||
with self.assertRaises(TransitionNotAllowed):
|
||||
@@ -256,26 +229,20 @@ class ParkRelocationWorkflowTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.moderator = User.objects.create_user(
|
||||
username='reloc_mod',
|
||||
email='reloc_mod@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="reloc_mod", email="reloc_mod@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
|
||||
def _create_park(self, status='CLOSED_PERM', **kwargs):
|
||||
def _create_park(self, status="CLOSED_PERM", **kwargs):
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator Reloc {timezone.now().timestamp()}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator Reloc {timezone.now().timestamp()}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park Reloc {timezone.now().timestamp()}',
|
||||
'slug': f'test-park-reloc-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park Reloc {timezone.now().timestamp()}",
|
||||
"slug": f"test-park-reloc-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
@@ -286,20 +253,20 @@ class ParkRelocationWorkflowTests(TestCase):
|
||||
|
||||
Flow: OPERATING → CLOSED_PERM → RELOCATED
|
||||
"""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
# Relocate
|
||||
park.transition_to_relocated(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'RELOCATED')
|
||||
self.assertEqual(park.status, "RELOCATED")
|
||||
|
||||
def test_relocated_is_final_state(self):
|
||||
"""Test that relocated parks cannot transition further."""
|
||||
from django_fsm import TransitionNotAllowed
|
||||
|
||||
park = self._create_park(status='RELOCATED')
|
||||
park = self._create_park(status="RELOCATED")
|
||||
|
||||
# Cannot transition from relocated
|
||||
with self.assertRaises(TransitionNotAllowed):
|
||||
@@ -312,71 +279,62 @@ class ParkWrapperMethodTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.user = User.objects.create_user(
|
||||
username='wrapper_user',
|
||||
email='wrapper@example.com',
|
||||
password='testpass123',
|
||||
role='USER'
|
||||
username="wrapper_user", email="wrapper@example.com", password="testpass123", role="USER"
|
||||
)
|
||||
cls.moderator = User.objects.create_user(
|
||||
username='wrapper_mod',
|
||||
email='wrapper_mod@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="wrapper_mod", email="wrapper_mod@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING', **kwargs):
|
||||
def _create_park(self, status="OPERATING", **kwargs):
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator Wrapper {timezone.now().timestamp()}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator Wrapper {timezone.now().timestamp()}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park Wrapper {timezone.now().timestamp()}',
|
||||
'slug': f'test-park-wrapper-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park Wrapper {timezone.now().timestamp()}",
|
||||
"slug": f"test-park-wrapper-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
|
||||
def test_close_temporarily_wrapper(self):
|
||||
"""Test close_temporarily wrapper method."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
|
||||
# Use wrapper method if it exists
|
||||
if hasattr(park, 'close_temporarily'):
|
||||
if hasattr(park, "close_temporarily"):
|
||||
park.close_temporarily(user=self.user)
|
||||
else:
|
||||
park.transition_to_closed_temp(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_TEMP')
|
||||
self.assertEqual(park.status, "CLOSED_TEMP")
|
||||
|
||||
def test_reopen_wrapper(self):
|
||||
"""Test reopen wrapper method."""
|
||||
park = self._create_park(status='CLOSED_TEMP')
|
||||
park = self._create_park(status="CLOSED_TEMP")
|
||||
|
||||
# Use wrapper method if it exists
|
||||
if hasattr(park, 'reopen'):
|
||||
if hasattr(park, "reopen"):
|
||||
park.reopen(user=self.user)
|
||||
else:
|
||||
park.transition_to_operating(user=self.user)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'OPERATING')
|
||||
self.assertEqual(park.status, "OPERATING")
|
||||
|
||||
def test_close_permanently_wrapper(self):
|
||||
"""Test close_permanently wrapper method."""
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
closing_date = timezone.now().date()
|
||||
|
||||
# Use wrapper method if it exists
|
||||
if hasattr(park, 'close_permanently'):
|
||||
if hasattr(park, "close_permanently"):
|
||||
park.close_permanently(closing_date=closing_date, user=self.moderator)
|
||||
else:
|
||||
park.transition_to_closed_perm(user=self.moderator)
|
||||
@@ -384,35 +342,35 @@ class ParkWrapperMethodTests(TestCase):
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'CLOSED_PERM')
|
||||
self.assertEqual(park.status, "CLOSED_PERM")
|
||||
|
||||
def test_demolish_wrapper(self):
|
||||
"""Test demolish wrapper method."""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
# Use wrapper method if it exists
|
||||
if hasattr(park, 'demolish'):
|
||||
if hasattr(park, "demolish"):
|
||||
park.demolish(user=self.moderator)
|
||||
else:
|
||||
park.transition_to_demolished(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'DEMOLISHED')
|
||||
self.assertEqual(park.status, "DEMOLISHED")
|
||||
|
||||
def test_relocate_wrapper(self):
|
||||
"""Test relocate wrapper method."""
|
||||
park = self._create_park(status='CLOSED_PERM')
|
||||
park = self._create_park(status="CLOSED_PERM")
|
||||
|
||||
# Use wrapper method if it exists
|
||||
if hasattr(park, 'relocate'):
|
||||
if hasattr(park, "relocate"):
|
||||
park.relocate(user=self.moderator)
|
||||
else:
|
||||
park.transition_to_relocated(user=self.moderator)
|
||||
park.save()
|
||||
|
||||
park.refresh_from_db()
|
||||
self.assertEqual(park.status, 'RELOCATED')
|
||||
self.assertEqual(park.status, "RELOCATED")
|
||||
|
||||
|
||||
class ParkStateLogTests(TestCase):
|
||||
@@ -421,32 +379,23 @@ class ParkStateLogTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
cls.user = User.objects.create_user(
|
||||
username='log_user',
|
||||
email='log_user@example.com',
|
||||
password='testpass123',
|
||||
role='USER'
|
||||
username="log_user", email="log_user@example.com", password="testpass123", role="USER"
|
||||
)
|
||||
cls.moderator = User.objects.create_user(
|
||||
username='log_mod',
|
||||
email='log_mod@example.com',
|
||||
password='testpass123',
|
||||
role='MODERATOR'
|
||||
username="log_mod", email="log_mod@example.com", password="testpass123", role="MODERATOR"
|
||||
)
|
||||
|
||||
def _create_park(self, status='OPERATING', **kwargs):
|
||||
def _create_park(self, status="OPERATING", **kwargs):
|
||||
from apps.parks.models import Company, Park
|
||||
|
||||
operator = Company.objects.create(
|
||||
name=f'Operator Log {timezone.now().timestamp()}',
|
||||
roles=['OPERATOR']
|
||||
)
|
||||
operator = Company.objects.create(name=f"Operator Log {timezone.now().timestamp()}", roles=["OPERATOR"])
|
||||
|
||||
defaults = {
|
||||
'name': f'Test Park Log {timezone.now().timestamp()}',
|
||||
'slug': f'test-park-log-{timezone.now().timestamp()}',
|
||||
'operator': operator,
|
||||
'status': status,
|
||||
'timezone': 'America/New_York'
|
||||
"name": f"Test Park Log {timezone.now().timestamp()}",
|
||||
"slug": f"test-park-log-{timezone.now().timestamp()}",
|
||||
"operator": operator,
|
||||
"status": status,
|
||||
"timezone": "America/New_York",
|
||||
}
|
||||
defaults.update(kwargs)
|
||||
return Park.objects.create(**defaults)
|
||||
@@ -456,7 +405,7 @@ class ParkStateLogTests(TestCase):
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django_fsm_log.models import StateLog
|
||||
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
|
||||
# Perform transition
|
||||
@@ -464,13 +413,10 @@ class ParkStateLogTests(TestCase):
|
||||
park.save()
|
||||
|
||||
# Check log was created
|
||||
log = StateLog.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id
|
||||
).first()
|
||||
log = StateLog.objects.filter(content_type=park_ct, object_id=park.id).first()
|
||||
|
||||
self.assertIsNotNone(log, "StateLog entry should be created")
|
||||
self.assertEqual(log.state, 'CLOSED_TEMP')
|
||||
self.assertEqual(log.state, "CLOSED_TEMP")
|
||||
self.assertEqual(log.by, self.user)
|
||||
|
||||
def test_multiple_transitions_logged(self):
|
||||
@@ -478,7 +424,7 @@ class ParkStateLogTests(TestCase):
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django_fsm_log.models import StateLog
|
||||
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
|
||||
# First transition: OPERATING -> CLOSED_TEMP
|
||||
@@ -490,15 +436,12 @@ class ParkStateLogTests(TestCase):
|
||||
park.save()
|
||||
|
||||
# Check multiple logs created
|
||||
logs = StateLog.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id
|
||||
).order_by('timestamp')
|
||||
logs = StateLog.objects.filter(content_type=park_ct, object_id=park.id).order_by("timestamp")
|
||||
|
||||
self.assertEqual(logs.count(), 2, "Should have 2 log entries")
|
||||
self.assertEqual(logs[0].state, 'CLOSED_TEMP')
|
||||
self.assertEqual(logs[0].state, "CLOSED_TEMP")
|
||||
self.assertEqual(logs[0].by, self.user)
|
||||
self.assertEqual(logs[1].state, 'CLOSED_PERM')
|
||||
self.assertEqual(logs[1].state, "CLOSED_PERM")
|
||||
self.assertEqual(logs[1].by, self.moderator)
|
||||
|
||||
def test_full_lifecycle_logged(self):
|
||||
@@ -506,7 +449,7 @@ class ParkStateLogTests(TestCase):
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django_fsm_log.models import StateLog
|
||||
|
||||
park = self._create_park(status='OPERATING')
|
||||
park = self._create_park(status="OPERATING")
|
||||
park_ct = ContentType.objects.get_for_model(park)
|
||||
|
||||
# Full lifecycle: OPERATING -> CLOSED_TEMP -> OPERATING -> CLOSED_PERM -> DEMOLISHED
|
||||
@@ -523,11 +466,8 @@ class ParkStateLogTests(TestCase):
|
||||
park.save()
|
||||
|
||||
# Check all logs created
|
||||
logs = StateLog.objects.filter(
|
||||
content_type=park_ct,
|
||||
object_id=park.id
|
||||
).order_by('timestamp')
|
||||
logs = StateLog.objects.filter(content_type=park_ct, object_id=park.id).order_by("timestamp")
|
||||
|
||||
self.assertEqual(logs.count(), 4, "Should have 4 log entries")
|
||||
states = [log.state for log in logs]
|
||||
self.assertEqual(states, ['CLOSED_TEMP', 'OPERATING', 'CLOSED_PERM', 'DEMOLISHED'])
|
||||
self.assertEqual(states, ["CLOSED_TEMP", "OPERATING", "CLOSED_PERM", "DEMOLISHED"])
|
||||
|
||||
@@ -55,9 +55,7 @@ class ParkQueryOptimizationTests(TestCase):
|
||||
# Should be a small number of queries (main query + prefetch)
|
||||
# The exact count depends on prefetch_related configuration
|
||||
self.assertLessEqual(
|
||||
len(context.captured_queries),
|
||||
5,
|
||||
f"Expected <= 5 queries, got {len(context.captured_queries)}"
|
||||
len(context.captured_queries), 5, f"Expected <= 5 queries, got {len(context.captured_queries)}"
|
||||
)
|
||||
|
||||
def test_optimized_for_detail_query_count(self):
|
||||
@@ -72,9 +70,7 @@ class ParkQueryOptimizationTests(TestCase):
|
||||
|
||||
# Should be a reasonable number of queries
|
||||
self.assertLessEqual(
|
||||
len(context.captured_queries),
|
||||
10,
|
||||
f"Expected <= 10 queries, got {len(context.captured_queries)}"
|
||||
len(context.captured_queries), 10, f"Expected <= 10 queries, got {len(context.captured_queries)}"
|
||||
)

def test_with_location_includes_location(self):
@@ -94,10 +90,10 @@ class ParkQueryOptimizationTests(TestCase):
if result.exists():
first = result.first()
# Should include these fields
self.assertIn('id', first)
self.assertIn('name', first)
self.assertIn('slug', first)
self.assertIn('status', first)
self.assertIn("id", first)
self.assertIn("name", first)
self.assertIn("slug", first)
self.assertIn("status", first)

def test_search_autocomplete_limits_results(self):
"""Verify search_autocomplete respects limit parameter."""
@@ -148,7 +144,7 @@ class CompanyQueryOptimizationTests(TestCase):
if result.exists():
first = result.first()
# Should have ride_count attribute
self.assertTrue(hasattr(first, 'ride_count'))
self.assertTrue(hasattr(first, "ride_count"))

def test_operators_with_park_count_includes_annotation(self):
"""Verify operators_with_park_count adds park count annotations."""
@@ -156,7 +152,7 @@ class CompanyQueryOptimizationTests(TestCase):
if result.exists():
first = result.first()
# Should have operated_parks_count attribute
self.assertTrue(hasattr(first, 'operated_parks_count'))
self.assertTrue(hasattr(first, "operated_parks_count"))


class ComputedFieldMaintenanceTests(TestCase):

@@ -19,12 +19,8 @@ class ParkFilterTests(TestCase):
def setUpTestData(cls):
"""Set up test data for all filter tests"""
# Create operators
cls.operator1 = Company.objects.create(
name="Thrilling Adventures Inc", slug="thrilling-adventures"
)
cls.operator2 = Company.objects.create(
name="Family Fun Corp", slug="family-fun"
)
cls.operator1 = Company.objects.create(name="Thrilling Adventures Inc", slug="thrilling-adventures")
cls.operator2 = Company.objects.create(name="Family Fun Corp", slug="family-fun")

# Create parks with various attributes for testing all filters
cls.park1 = Park.objects.create(

@@ -89,9 +89,7 @@ class ParkModelTests(TestCase):
# Check pghistory records
event_model = getattr(Park, "event_model", None)
if event_model:
historical_records = event_model.objects.filter(
pgh_obj_id=park.id
).order_by("-pgh_created_at")
historical_records = event_model.objects.filter(pgh_obj_id=park.id).order_by("-pgh_created_at")
print("\nPG History records:")
for record in historical_records:
print(f"- Event ID: {record.pgh_id}")
@@ -104,17 +102,13 @@ class ParkModelTests(TestCase):
# Try to find by old slug
found_park, is_historical = Park.get_by_slug(original_slug)
self.assertEqual(found_park.id, park.id)
print(
f"Found park by old slug: {found_park.slug}, is_historical: {is_historical}"
)
print(f"Found park by old slug: {found_park.slug}, is_historical: {is_historical}")
self.assertTrue(is_historical)

# Try current slug
found_park, is_historical = Park.get_by_slug(new_slug)
self.assertEqual(found_park.id, park.id)
print(
f"Found park by new slug: {found_park.slug}, is_historical: {is_historical}"
)
print(f"Found park by new slug: {found_park.slug}, is_historical: {is_historical}")
self.assertFalse(is_historical)

def test_status_color_mapping(self):
@@ -141,15 +135,9 @@ class ParkModelTests(TestCase):
class ParkAreaModelTests(TestCase):
def setUp(self):
"""Set up test data"""
self.operator = Company.objects.create(
name="Test Company 2", slug="test-company-2"
)
self.park = Park.objects.create(
name="Test Park", status="OPERATING", operator=self.operator
)
self.area = ParkArea.objects.create(
park=self.park, name="Test Area", description="A test area"
)
self.operator = Company.objects.create(name="Test Company 2", slug="test-company-2")
self.park = Park.objects.create(name="Test Park", status="OPERATING", operator=self.operator)
self.area = ParkArea.objects.create(park=self.park, name="Test Area", description="A test area")

def test_area_creation(self):
"""Test basic area creation and fields"""

@@ -42,9 +42,7 @@ logger = logging.getLogger(__name__)
# Constants
PARK_DETAIL_URL = "parks:park_detail"
PARK_LIST_ITEM_TEMPLATE = "parks/partials/park_list_item.html"
REQUIRED_FIELDS_ERROR = (
"Please correct the errors below. Required fields are marked with an asterisk (*)."
)
REQUIRED_FIELDS_ERROR = "Please correct the errors below. Required fields are marked with an asterisk (*)."
TRIP_PARKS_TEMPLATE = "parks/partials/trip_parks_list.html"
TRIP_SUMMARY_TEMPLATE = "parks/partials/trip_summary.html"
SAVED_TRIPS_TEMPLATE = "parks/partials/saved_trips.html"
@@ -87,18 +85,10 @@ def normalize_osm_result(result: dict) -> dict:
neighborhood = address.get("neighbourhood", "")

# Build city from available components
city = (
address.get("city")
or address.get("town")
or address.get("village")
or address.get("municipality")
or ""
)
city = address.get("city") or address.get("town") or address.get("village") or address.get("municipality") or ""

# Get detailed state/region information
state = (
address.get("state") or address.get("province") or address.get("region") or ""
)
state = address.get("state") or address.get("province") or address.get("region") or ""

# Get postal code with fallbacks
postal_code = address.get("postcode") or address.get("postal_code") or ""
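# Minimal sketch of the fallback chain above, run against a hand-made Nominatim-style
# `address` dict (example values are hypothetical); missing keys fall through to the
# next alternative and end in "".
address = {"town": "Sandusky", "state": "Ohio", "postcode": "44870"}
city = address.get("city") or address.get("town") or address.get("village") or address.get("municipality") or ""
state = address.get("state") or address.get("province") or address.get("region") or ""
postal_code = address.get("postcode") or address.get("postal_code") or ""
assert (city, state, postal_code) == ("Sandusky", "Ohio", "44870")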

@@ -170,9 +160,7 @@ def get_park_areas(request: HttpRequest) -> HttpResponse:
park = Park.objects.get(id=park_id)
areas = park.areas.all()
options = ['<option value="">No specific area</option>']
options.extend(
[f'<option value="{area.id}">{area.name}</option>' for area in areas]
)
options.extend([f'<option value="{area.id}">{area.name}</option>' for area in areas])
return HttpResponse("\n".join(options))
except Park.DoesNotExist:
return HttpResponse('<option value="">Invalid park selected</option>')
@@ -201,11 +189,7 @@ def location_search(request: HttpRequest) -> JsonResponse:
if response.status_code == 200:
results = response.json()
normalized_results = [normalize_osm_result(result) for result in results]
valid_results = [
r
for r in normalized_results
if r["lat"] is not None and r["lon"] is not None
]
valid_results = [r for r in normalized_results if r["lat"] is not None and r["lon"] is not None]
return JsonResponse({"results": valid_results})

return JsonResponse({"results": []})
@@ -226,13 +210,9 @@ def reverse_geocode(request: HttpRequest) -> JsonResponse:
lon = lon.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)

if lat < -90 or lat > 90:
return JsonResponse(
{"error": "Latitude must be between -90 and 90"}, status=400
)
return JsonResponse({"error": "Latitude must be between -90 and 90"}, status=400)
if lon < -180 or lon > 180:
return JsonResponse(
{"error": "Longitude must be between -180 and 180"}, status=400
)
return JsonResponse({"error": "Longitude must be between -180 and 180"}, status=400)

response = requests.get(
"https://nominatim.openstreetmap.org/reverse",
@@ -306,9 +286,7 @@ class ParkListView(HTMXFilterableMixin, ListView):
try:
# Initialize filterset if not exists
if not hasattr(self, "filterset"):
self.filterset = self.filter_class(
self.request.GET, queryset=self.model.objects.none()
)
self.filterset = self.filter_class(self.request.GET, queryset=self.model.objects.none())

context = super().get_context_data(**kwargs)

@@ -323,20 +301,14 @@ class ParkListView(HTMXFilterableMixin, ListView):
"search_query": self.request.GET.get("search", ""),
"filter_counts": filter_counts,
"popular_filters": popular_filters,
"total_results": (
context.get("paginator").count
if context.get("paginator")
else 0
),
"total_results": (context.get("paginator").count if context.get("paginator") else 0),
}
)

# Add filter suggestions for search queries
search_query = self.request.GET.get("search", "")
if search_query:
context["filter_suggestions"] = (
self.filter_service.get_filter_suggestions(search_query)
)
context["filter_suggestions"] = self.filter_service.get_filter_suggestions(search_query)

return context

@@ -353,9 +325,7 @@ class ParkListView(HTMXFilterableMixin, ListView):
messages.error(self.request, f"Error applying filters: {str(e)}")
# Ensure filterset exists in error case
if not hasattr(self, "filterset"):
self.filterset = self.filter_class(
self.request.GET, queryset=self.model.objects.none()
)
self.filterset = self.filter_class(self.request.GET, queryset=self.model.objects.none())
return {
"filter": self.filterset,
"error": "Unable to apply filters. Please try adjusting your criteria.",
@@ -427,9 +397,7 @@ class ParkListView(HTMXFilterableMixin, ListView):

return urlencode(url_params)

def _get_pagination_urls(
self, page_obj, filter_params: dict[str, Any]
) -> dict[str, str]:
def _get_pagination_urls(self, page_obj, filter_params: dict[str, Any]) -> dict[str, str]:
"""Generate pagination URLs that preserve filter state."""

base_query = self._build_filter_query_string(filter_params)
@@ -476,9 +444,7 @@ def search_parks(request: HttpRequest) -> HttpResponse:

# Get current view mode from request
current_view_mode = request.GET.get("view_mode", "grid")
park_filter = ParkFilter(
{"search": search_query}, queryset=get_base_park_queryset()
)
park_filter = ParkFilter({"search": search_query}, queryset=get_base_park_queryset())

parks = park_filter.qs
if request.GET.get("quick_search"):
@@ -747,10 +713,7 @@ def htmx_optimize_route(request: HttpRequest) -> HttpResponse:
rlat1, rlon1, rlat2, rlon2 = map(math.radians, [lat1, lon1, lat2, lon2])
dlat = rlat2 - rlat1
dlon = rlon2 - rlon1
a = (
math.sin(dlat / 2) ** 2
+ math.cos(rlat1) * math.cos(rlat2) * math.sin(dlon / 2) ** 2
)
a = math.sin(dlat / 2) ** 2 + math.cos(rlat1) * math.cos(rlat2) * math.sin(dlon / 2) ** 2
c = 2 * math.asin(min(1, math.sqrt(a)))
miles = 3958.8 * c
return miles
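# Minimal standalone sketch of the haversine computation above: great-circle distance
# on a sphere, with 3958.8 as the Earth's mean radius in miles and inputs in decimal degrees.
import math

def haversine_miles(lat1, lon1, lat2, lon2):
    rlat1, rlon1, rlat2, rlon2 = map(math.radians, [lat1, lon1, lat2, lon2])
    dlat = rlat2 - rlat1
    dlon = rlon2 - rlon1
    a = math.sin(dlat / 2) ** 2 + math.cos(rlat1) * math.cos(rlat2) * math.sin(dlon / 2) ** 2
    return 3958.8 * 2 * math.asin(min(1, math.sqrt(a)))

# One degree of latitude spans roughly 69.1 miles:
print(round(haversine_miles(0.0, 0.0, 1.0, 0.0), 1))  # 69.1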

@@ -762,18 +725,14 @@ def htmx_optimize_route(request: HttpRequest) -> HttpResponse:
lat = getattr(loc, "latitude", None) if loc else None
lon = getattr(loc, "longitude", None) if loc else None
if lat is not None and lon is not None:
waypoints.append(
{"id": p.id, "name": p.name, "latitude": lat, "longitude": lon}
)
waypoints.append({"id": p.id, "name": p.name, "latitude": lat, "longitude": lon})

# sum straight-line distances between consecutive waypoints
for i in range(len(waypoints) - 1):
a = waypoints[i]
b = waypoints[i + 1]
try:
total_miles += haversine_miles(
a["latitude"], a["longitude"], b["latitude"], b["longitude"]
)
total_miles += haversine_miles(a["latitude"], a["longitude"], b["latitude"], b["longitude"])
except Exception as e:
log_exception(
logger,
@@ -807,9 +766,7 @@ def htmx_optimize_route(request: HttpRequest) -> HttpResponse:
"total_rides": sum(getattr(p, "ride_count", 0) or 0 for p in parks),
}

html = render_to_string(
TRIP_SUMMARY_TEMPLATE, {"summary": summary}, request=request
)
html = render_to_string(TRIP_SUMMARY_TEMPLATE, {"summary": summary}, request=request)
resp = HttpResponse(html)
# Include waypoints payload in HX-Trigger so client can render route on the map
resp["HX-Trigger"] = json.dumps({"tripOptimized": {"parks": waypoints}})
@@ -843,9 +800,7 @@ def htmx_save_trip(request: HttpRequest) -> HttpResponse:
# attempt to associate parks if the Trip model supports it
with contextlib.suppress(Exception):
trip.parks.set([p.id for p in parks])
trips = list(
Trip.objects.filter(owner=request.user).order_by("-created_at")[:10]
)
trips = list(Trip.objects.filter(owner=request.user).order_by("-created_at")[:10])
except Exception:
trips = []

@@ -892,14 +847,10 @@ class ParkCreateView(LoginRequiredMixin, CreateView):
def normalize_coordinates(self, form: ParkForm) -> None:
if form.cleaned_data.get("latitude"):
lat = Decimal(str(form.cleaned_data["latitude"]))
form.cleaned_data["latitude"] = lat.quantize(
Decimal("0.000001"), rounding=ROUND_DOWN
)
form.cleaned_data["latitude"] = lat.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
if form.cleaned_data.get("longitude"):
lon = Decimal(str(form.cleaned_data["longitude"]))
form.cleaned_data["longitude"] = lon.quantize(
Decimal("0.000001"), rounding=ROUND_DOWN
)
form.cleaned_data["longitude"] = lon.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)

def form_valid(self, form: ParkForm) -> HttpResponse:
self.normalize_coordinates(form)
@@ -942,8 +893,7 @@ class ParkCreateView(LoginRequiredMixin, CreateView):
)
messages.success(
self.request,
f"Successfully created {self.object.name}. "
f"Added {service_result['uploaded_count']} photo(s).",
f"Successfully created {self.object.name}. " f"Added {service_result['uploaded_count']} photo(s).",
)
return HttpResponseRedirect(self.get_success_url())

@@ -960,8 +910,7 @@ class ParkCreateView(LoginRequiredMixin, CreateView):
)
messages.success(
self.request,
"Your park submission has been sent for review. "
"You will be notified when it is approved.",
"Your park submission has been sent for review. " "You will be notified when it is approved.",
)
return HttpResponseRedirect(reverse("parks:park_list"))

@@ -1016,14 +965,10 @@ class ParkUpdateView(LoginRequiredMixin, UpdateView):
def normalize_coordinates(self, form: ParkForm) -> None:
if form.cleaned_data.get("latitude"):
lat = Decimal(str(form.cleaned_data["latitude"]))
form.cleaned_data["latitude"] = lat.quantize(
Decimal("0.000001"), rounding=ROUND_DOWN
)
form.cleaned_data["latitude"] = lat.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
if form.cleaned_data.get("longitude"):
lon = Decimal(str(form.cleaned_data["longitude"]))
form.cleaned_data["longitude"] = lon.quantize(
Decimal("0.000001"), rounding=ROUND_DOWN
)
form.cleaned_data["longitude"] = lon.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)

def form_valid(self, form: ParkForm) -> HttpResponse:
self.normalize_coordinates(form)
@@ -1068,8 +1013,7 @@ class ParkUpdateView(LoginRequiredMixin, UpdateView):
)
messages.success(
self.request,
f"Successfully updated {self.object.name}. "
f"Added {service_result['uploaded_count']} new photo(s).",
f"Successfully updated {self.object.name}. " f"Added {service_result['uploaded_count']} new photo(s).",
)
return HttpResponseRedirect(self.get_success_url())

@@ -1090,9 +1034,7 @@ class ParkUpdateView(LoginRequiredMixin, UpdateView):
f"Your changes to {self.object.name} have been sent for review. "
"You will be notified when they are approved.",
)
return HttpResponseRedirect(
reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug})
)
return HttpResponseRedirect(reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug}))

elif service_result["status"] == "failed":
messages.error(
@@ -1143,11 +1085,7 @@ class ParkDetailView(
def get_queryset(self) -> QuerySet[Park]:
return cast(
QuerySet[Park],
super()
.get_queryset()
.prefetch_related(
"rides", "rides__manufacturer", "photos", "areas", "location"
),
super().get_queryset().prefetch_related("rides", "rides__manufacturer", "photos", "areas", "location"),
)

def get_context_data(self, **kwargs: Any) -> dict[str, Any]:

@@ -119,9 +119,7 @@ class CreateTripView(RoadTripViewMixin, View):

# Get parks
parks = list(
Park.objects.filter(
id__in=park_ids, location__isnull=False
).select_related("location", "operator")
Park.objects.filter(id__in=park_ids, location__isnull=False).select_related("location", "operator")
)

if len(parks) != len(park_ids):
@@ -159,9 +157,7 @@ class CreateTripView(RoadTripViewMixin, View):
{
"status": "success",
"data": trip_data,
"trip_url": reverse(
"parks:roadtrip_detail", kwargs={"trip_id": "temp"}
),
"trip_url": reverse("parks:roadtrip_detail", kwargs={"trip_id": "temp"}),
}
)

@@ -258,12 +254,8 @@ class FindParksAlongRouteView(RoadTripViewMixin, View):

# Get start and end parks
try:
start_park = Park.objects.select_related("location").get(
id=start_park_id, location__isnull=False
)
end_park = Park.objects.select_related("location").get(
id=end_park_id, location__isnull=False
)
start_park = Park.objects.select_related("location").get(id=start_park_id, location__isnull=False)
end_park = Park.objects.select_related("location").get(id=end_park_id, location__isnull=False)
except Park.DoesNotExist:
return render(
request,
@@ -272,21 +264,21 @@ class FindParksAlongRouteView(RoadTripViewMixin, View):
)

# Find parks along route
parks_along_route = self.roadtrip_service.find_parks_along_route(
start_park, end_park, max_detour_km
)
parks_along_route = self.roadtrip_service.find_parks_along_route(start_park, end_park, max_detour_km)

# Return JSON if requested
if request.headers.get("Accept") == "application/json" or request.content_type == "application/json":
return JsonResponse({
"status": "success",
"data": {
"parks": [self._park_to_dict(p) for p in parks_along_route],
"start_park": self._park_to_dict(start_park),
"end_park": self._park_to_dict(end_park),
"count": len(parks_along_route)
return JsonResponse(
{
"status": "success",
"data": {
"parks": [self._park_to_dict(p) for p in parks_along_route],
"start_park": self._park_to_dict(start_park),
"end_park": self._park_to_dict(end_park),
"count": len(parks_along_route),
},
}
})
)

return render(
request,
@@ -375,9 +367,7 @@ class GeocodeAddressView(RoadTripViewMixin, View):
"longitude": coordinates.longitude,
},
"address": address,
"nearby_parks": [
loc.to_dict() for loc in map_response.locations[:20]
],
"nearby_parks": [loc.to_dict() for loc in map_response.locations[:20]],
"radius_km": radius_km,
},
}
@@ -418,12 +408,8 @@ class ParkDistanceCalculatorView(RoadTripViewMixin, View):

# Get parks
try:
park1 = Park.objects.select_related("location").get(
id=park1_id, location__isnull=False
)
park2 = Park.objects.select_related("location").get(
id=park2_id, location__isnull=False
)
park1 = Park.objects.select_related("location").get(id=park1_id, location__isnull=False)
park2 = Park.objects.select_related("location").get(id=park2_id, location__isnull=False)
except Park.DoesNotExist:
return JsonResponse(
{
@@ -448,9 +434,7 @@ class ParkDistanceCalculatorView(RoadTripViewMixin, View):

from services.roadtrip import Coordinates

route = self.roadtrip_service.calculate_route(
Coordinates(*coords1), Coordinates(*coords2)
)
route = self.roadtrip_service.calculate_route(Coordinates(*coords1), Coordinates(*coords2))

if not route:
return JsonResponse(
@@ -471,15 +455,11 @@ class ParkDistanceCalculatorView(RoadTripViewMixin, View):
"formatted_duration": route.formatted_duration,
"park1": {
"name": park1.name,
"formatted_location": getattr(
park1, "formatted_location", ""
),
"formatted_location": getattr(park1, "formatted_location", ""),
},
"park2": {
"name": park2.name,
"formatted_location": getattr(
park2, "formatted_location", ""
),
"formatted_location": getattr(park2, "formatted_location", ""),
},
},
}