mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2025-12-20 09:51:09 -05:00
@@ -1,403 +0,0 @@
|
||||
from django.contrib import admin
|
||||
from django.contrib.gis.admin import GISModelAdmin
|
||||
from django.utils.html import format_html
|
||||
import pghistory.models
|
||||
from .models import (
|
||||
Park,
|
||||
ParkArea,
|
||||
ParkLocation,
|
||||
Company,
|
||||
CompanyHeadquarters,
|
||||
ParkReview,
|
||||
)
|
||||
|
||||
|
||||
class ParkLocationInline(admin.StackedInline):
|
||||
"""Inline admin for ParkLocation"""
|
||||
|
||||
model = ParkLocation
|
||||
extra = 0
|
||||
fields = (
|
||||
("city", "state", "country"),
|
||||
"street_address",
|
||||
"postal_code",
|
||||
"point",
|
||||
("highway_exit", "best_arrival_time"),
|
||||
"parking_notes",
|
||||
"seasonal_notes",
|
||||
("osm_id", "osm_type"),
|
||||
)
|
||||
|
||||
|
||||
class ParkLocationAdmin(GISModelAdmin):
|
||||
"""Admin for standalone ParkLocation management"""
|
||||
|
||||
list_display = (
|
||||
"park",
|
||||
"city",
|
||||
"state",
|
||||
"country",
|
||||
"latitude",
|
||||
"longitude",
|
||||
)
|
||||
list_filter = ("country", "state")
|
||||
search_fields = (
|
||||
"park__name",
|
||||
"city",
|
||||
"state",
|
||||
"country",
|
||||
"street_address",
|
||||
)
|
||||
readonly_fields = ("latitude", "longitude", "coordinates")
|
||||
fieldsets = (
|
||||
("Park", {"fields": ("park",)}),
|
||||
(
|
||||
"Address",
|
||||
{
|
||||
"fields": (
|
||||
"street_address",
|
||||
"city",
|
||||
"state",
|
||||
"country",
|
||||
"postal_code",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Geographic Coordinates",
|
||||
{
|
||||
"fields": ("point", "latitude", "longitude", "coordinates"),
|
||||
"description": "Set coordinates by clicking on the map or entering latitude/longitude",
|
||||
},
|
||||
),
|
||||
(
|
||||
"Travel Information",
|
||||
{
|
||||
"fields": (
|
||||
"highway_exit",
|
||||
"best_arrival_time",
|
||||
"parking_notes",
|
||||
"seasonal_notes",
|
||||
),
|
||||
"classes": ("collapse",),
|
||||
},
|
||||
),
|
||||
(
|
||||
"OpenStreetMap Integration",
|
||||
{"fields": ("osm_id", "osm_type"), "classes": ("collapse",)},
|
||||
),
|
||||
)
|
||||
|
||||
@admin.display(description="Latitude")
|
||||
def latitude(self, obj):
|
||||
return obj.latitude
|
||||
|
||||
@admin.display(description="Longitude")
|
||||
def longitude(self, obj):
|
||||
return obj.longitude
|
||||
|
||||
|
||||
class ParkAdmin(admin.ModelAdmin):
|
||||
list_display = (
|
||||
"name",
|
||||
"formatted_location",
|
||||
"status",
|
||||
"operator",
|
||||
"property_owner",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
)
|
||||
list_filter = ("status", "location__country", "location__state")
|
||||
search_fields = (
|
||||
"name",
|
||||
"description",
|
||||
"location__city",
|
||||
"location__state",
|
||||
"location__country",
|
||||
)
|
||||
readonly_fields = ("created_at", "updated_at")
|
||||
prepopulated_fields = {"slug": ("name",)}
|
||||
inlines = [ParkLocationInline]
|
||||
|
||||
@admin.display(description="Location")
|
||||
def formatted_location(self, obj):
|
||||
"""Display formatted location string"""
|
||||
return obj.formatted_location
|
||||
|
||||
|
||||
class ParkAreaAdmin(admin.ModelAdmin):
|
||||
list_display = ("name", "park", "created_at", "updated_at")
|
||||
list_filter = ("park",)
|
||||
search_fields = ("name", "description", "park__name")
|
||||
readonly_fields = ("created_at", "updated_at")
|
||||
prepopulated_fields = {"slug": ("name",)}
|
||||
|
||||
|
||||
class CompanyHeadquartersInline(admin.StackedInline):
|
||||
"""Inline admin for CompanyHeadquarters"""
|
||||
|
||||
model = CompanyHeadquarters
|
||||
extra = 0
|
||||
fields = (
|
||||
("city", "state_province", "country"),
|
||||
"street_address",
|
||||
"postal_code",
|
||||
"mailing_address",
|
||||
)
|
||||
|
||||
|
||||
class CompanyHeadquartersAdmin(admin.ModelAdmin):
|
||||
"""Admin for standalone CompanyHeadquarters management"""
|
||||
|
||||
list_display = (
|
||||
"company",
|
||||
"location_display",
|
||||
"city",
|
||||
"country",
|
||||
"created_at",
|
||||
)
|
||||
list_filter = ("country", "state_province")
|
||||
search_fields = (
|
||||
"company__name",
|
||||
"city",
|
||||
"state_province",
|
||||
"country",
|
||||
"street_address",
|
||||
)
|
||||
readonly_fields = ("created_at", "updated_at")
|
||||
fieldsets = (
|
||||
("Company", {"fields": ("company",)}),
|
||||
(
|
||||
"Address",
|
||||
{
|
||||
"fields": (
|
||||
"street_address",
|
||||
"city",
|
||||
"state_province",
|
||||
"country",
|
||||
"postal_code",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Additional Information",
|
||||
{"fields": ("mailing_address",), "classes": ("collapse",)},
|
||||
),
|
||||
(
|
||||
"Metadata",
|
||||
{"fields": ("created_at", "updated_at"), "classes": ("collapse",)},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class CompanyAdmin(admin.ModelAdmin):
|
||||
"""Enhanced Company admin with headquarters inline"""
|
||||
|
||||
list_display = (
|
||||
"name",
|
||||
"roles_display",
|
||||
"headquarters_location",
|
||||
"website",
|
||||
"founded_year",
|
||||
)
|
||||
list_filter = ("roles",)
|
||||
search_fields = ("name", "description")
|
||||
readonly_fields = ("created_at", "updated_at")
|
||||
prepopulated_fields = {"slug": ("name",)}
|
||||
inlines = [CompanyHeadquartersInline]
|
||||
|
||||
@admin.display(description="Roles")
|
||||
def roles_display(self, obj):
|
||||
"""Display roles as a formatted string"""
|
||||
return ", ".join(obj.roles) if obj.roles else "No roles"
|
||||
|
||||
@admin.display(description="Headquarters")
|
||||
def headquarters_location(self, obj):
|
||||
"""Display headquarters location if available"""
|
||||
if hasattr(obj, "headquarters"):
|
||||
return obj.headquarters.location_display
|
||||
return "No headquarters"
|
||||
|
||||
|
||||
@admin.register(ParkReview)
|
||||
class ParkReviewAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for park reviews"""
|
||||
|
||||
list_display = (
|
||||
"park",
|
||||
"user",
|
||||
"rating",
|
||||
"title",
|
||||
"visit_date",
|
||||
"is_published",
|
||||
"created_at",
|
||||
"moderation_status",
|
||||
)
|
||||
list_filter = (
|
||||
"rating",
|
||||
"is_published",
|
||||
"visit_date",
|
||||
"created_at",
|
||||
"park",
|
||||
"moderated_by",
|
||||
)
|
||||
search_fields = (
|
||||
"title",
|
||||
"content",
|
||||
"user__username",
|
||||
"park__name",
|
||||
)
|
||||
readonly_fields = ("created_at", "updated_at")
|
||||
date_hierarchy = "created_at"
|
||||
ordering = ("-created_at",)
|
||||
|
||||
fieldsets = (
|
||||
(
|
||||
"Review Details",
|
||||
{
|
||||
"fields": (
|
||||
"user",
|
||||
"park",
|
||||
"rating",
|
||||
"title",
|
||||
"content",
|
||||
"visit_date",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Publication Status",
|
||||
{
|
||||
"fields": ("is_published",),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Moderation",
|
||||
{
|
||||
"fields": (
|
||||
"moderated_by",
|
||||
"moderated_at",
|
||||
"moderation_notes",
|
||||
),
|
||||
"classes": ("collapse",),
|
||||
},
|
||||
),
|
||||
(
|
||||
"Metadata",
|
||||
{
|
||||
"fields": ("created_at", "updated_at"),
|
||||
"classes": ("collapse",),
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@admin.display(description="Moderation Status", boolean=True)
|
||||
def moderation_status(self, obj):
|
||||
"""Display moderation status with color coding"""
|
||||
if obj.moderated_by:
|
||||
return format_html(
|
||||
'<span style="color: {};">{}</span>',
|
||||
"green" if obj.is_published else "red",
|
||||
"Approved" if obj.is_published else "Rejected",
|
||||
)
|
||||
return format_html('<span style="color: orange;">Pending</span>')
|
||||
|
||||
def save_model(self, request, obj, form, change):
|
||||
"""Auto-set moderation info when status changes"""
|
||||
if change and "is_published" in form.changed_data:
|
||||
from django.utils import timezone
|
||||
|
||||
obj.moderated_by = request.user
|
||||
obj.moderated_at = timezone.now()
|
||||
super().save_model(request, obj, form, change)
|
||||
|
||||
|
||||
@admin.register(pghistory.models.Events)
|
||||
class PgHistoryEventsAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for pghistory Events"""
|
||||
|
||||
list_display = (
|
||||
"pgh_id",
|
||||
"pgh_created_at",
|
||||
"pgh_label",
|
||||
"pgh_model",
|
||||
"pgh_obj_id",
|
||||
"pgh_context_display",
|
||||
)
|
||||
list_filter = (
|
||||
"pgh_label",
|
||||
"pgh_model",
|
||||
"pgh_created_at",
|
||||
)
|
||||
search_fields = (
|
||||
"pgh_obj_id",
|
||||
"pgh_context",
|
||||
)
|
||||
readonly_fields = (
|
||||
"pgh_id",
|
||||
"pgh_created_at",
|
||||
"pgh_label",
|
||||
"pgh_model",
|
||||
"pgh_obj_id",
|
||||
"pgh_context",
|
||||
"pgh_data",
|
||||
)
|
||||
date_hierarchy = "pgh_created_at"
|
||||
ordering = ("-pgh_created_at",)
|
||||
|
||||
fieldsets = (
|
||||
(
|
||||
"Event Information",
|
||||
{
|
||||
"fields": (
|
||||
"pgh_id",
|
||||
"pgh_created_at",
|
||||
"pgh_label",
|
||||
"pgh_model",
|
||||
"pgh_obj_id",
|
||||
)
|
||||
},
|
||||
),
|
||||
(
|
||||
"Context & Data",
|
||||
{
|
||||
"fields": (
|
||||
"pgh_context",
|
||||
"pgh_data",
|
||||
),
|
||||
"classes": ("collapse",),
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
@admin.display(description="Context")
|
||||
def pgh_context_display(self, obj):
|
||||
"""Display context information in a readable format"""
|
||||
if obj.pgh_context:
|
||||
if isinstance(obj.pgh_context, dict):
|
||||
context_items = []
|
||||
for key, value in obj.pgh_context.items():
|
||||
context_items.append(f"{key}: {value}")
|
||||
return ", ".join(context_items)
|
||||
return str(obj.pgh_context)
|
||||
return "No context"
|
||||
|
||||
def has_add_permission(self, request):
|
||||
"""Disable manual creation of history events"""
|
||||
return False
|
||||
|
||||
def has_change_permission(self, request, obj=None):
|
||||
"""Make history events read-only"""
|
||||
return False
|
||||
|
||||
def has_delete_permission(self, request, obj=None):
|
||||
"""Prevent deletion of history events"""
|
||||
return getattr(request.user, "is_superuser", False)
|
||||
|
||||
|
||||
# Register the models with their admin classes
|
||||
admin.site.register(Park, ParkAdmin)
|
||||
admin.site.register(ParkArea, ParkAreaAdmin)
|
||||
admin.site.register(ParkLocation, ParkLocationAdmin)
|
||||
admin.site.register(Company, CompanyAdmin)
|
||||
admin.site.register(CompanyHeadquarters, CompanyHeadquartersAdmin)
|
||||
@@ -1,9 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ParksConfig(AppConfig):
|
||||
default_auto_field = "django.db.models.BigAutoField"
|
||||
name = "apps.parks"
|
||||
|
||||
def ready(self):
|
||||
import apps.parks.signals # noqa: F401 - Register signals
|
||||
@@ -1,288 +0,0 @@
|
||||
"""
|
||||
Rich Choice Objects for Parks Domain
|
||||
|
||||
This module defines all choice objects for the parks domain, replacing
|
||||
the legacy tuple-based choices with rich choice objects.
|
||||
"""
|
||||
|
||||
from apps.core.choices import RichChoice, ChoiceCategory
|
||||
from apps.core.choices.registry import register_choices
|
||||
|
||||
|
||||
# Park Status Choices
|
||||
PARK_STATUSES = [
|
||||
RichChoice(
|
||||
value="OPERATING",
|
||||
label="Operating",
|
||||
description="Park is currently open and operating normally",
|
||||
metadata={
|
||||
'color': 'green',
|
||||
'icon': 'check-circle',
|
||||
'css_class': 'bg-green-100 text-green-800',
|
||||
'sort_order': 1
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
),
|
||||
RichChoice(
|
||||
value="CLOSED_TEMP",
|
||||
label="Temporarily Closed",
|
||||
description="Park is temporarily closed for maintenance, weather, or seasonal reasons",
|
||||
metadata={
|
||||
'color': 'yellow',
|
||||
'icon': 'pause-circle',
|
||||
'css_class': 'bg-yellow-100 text-yellow-800',
|
||||
'sort_order': 2
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
),
|
||||
RichChoice(
|
||||
value="CLOSED_PERM",
|
||||
label="Permanently Closed",
|
||||
description="Park has been permanently closed and will not reopen",
|
||||
metadata={
|
||||
'color': 'red',
|
||||
'icon': 'x-circle',
|
||||
'css_class': 'bg-red-100 text-red-800',
|
||||
'sort_order': 3
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
),
|
||||
RichChoice(
|
||||
value="UNDER_CONSTRUCTION",
|
||||
label="Under Construction",
|
||||
description="Park is currently being built or undergoing major renovation",
|
||||
metadata={
|
||||
'color': 'blue',
|
||||
'icon': 'tool',
|
||||
'css_class': 'bg-blue-100 text-blue-800',
|
||||
'sort_order': 4
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
),
|
||||
RichChoice(
|
||||
value="DEMOLISHED",
|
||||
label="Demolished",
|
||||
description="Park has been completely demolished and removed",
|
||||
metadata={
|
||||
'color': 'gray',
|
||||
'icon': 'trash',
|
||||
'css_class': 'bg-gray-100 text-gray-800',
|
||||
'sort_order': 5
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
),
|
||||
RichChoice(
|
||||
value="RELOCATED",
|
||||
label="Relocated",
|
||||
description="Park has been moved to a different location",
|
||||
metadata={
|
||||
'color': 'purple',
|
||||
'icon': 'arrow-right',
|
||||
'css_class': 'bg-purple-100 text-purple-800',
|
||||
'sort_order': 6
|
||||
},
|
||||
category=ChoiceCategory.STATUS
|
||||
),
|
||||
]
|
||||
|
||||
# Park Type Choices
|
||||
PARK_TYPES = [
|
||||
RichChoice(
|
||||
value="THEME_PARK",
|
||||
label="Theme Park",
|
||||
description="Large-scale amusement park with themed areas and attractions",
|
||||
metadata={
|
||||
'color': 'red',
|
||||
'icon': 'castle',
|
||||
'css_class': 'bg-red-100 text-red-800',
|
||||
'sort_order': 1
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="AMUSEMENT_PARK",
|
||||
label="Amusement Park",
|
||||
description="Traditional amusement park with rides and games",
|
||||
metadata={
|
||||
'color': 'blue',
|
||||
'icon': 'ferris-wheel',
|
||||
'css_class': 'bg-blue-100 text-blue-800',
|
||||
'sort_order': 2
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="WATER_PARK",
|
||||
label="Water Park",
|
||||
description="Park featuring water-based attractions and activities",
|
||||
metadata={
|
||||
'color': 'cyan',
|
||||
'icon': 'water',
|
||||
'css_class': 'bg-cyan-100 text-cyan-800',
|
||||
'sort_order': 3
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="FAMILY_ENTERTAINMENT_CENTER",
|
||||
label="Family Entertainment Center",
|
||||
description="Indoor entertainment facility with games and family attractions",
|
||||
metadata={
|
||||
'color': 'green',
|
||||
'icon': 'family',
|
||||
'css_class': 'bg-green-100 text-green-800',
|
||||
'sort_order': 4
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="CARNIVAL",
|
||||
label="Carnival",
|
||||
description="Traveling amusement show with rides, games, and entertainment",
|
||||
metadata={
|
||||
'color': 'yellow',
|
||||
'icon': 'carnival',
|
||||
'css_class': 'bg-yellow-100 text-yellow-800',
|
||||
'sort_order': 5
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="FAIR",
|
||||
label="Fair",
|
||||
description="Temporary event featuring rides, games, and agricultural exhibits",
|
||||
metadata={
|
||||
'color': 'orange',
|
||||
'icon': 'fair',
|
||||
'css_class': 'bg-orange-100 text-orange-800',
|
||||
'sort_order': 6
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="PIER",
|
||||
label="Pier",
|
||||
description="Seaside entertainment pier with rides and attractions",
|
||||
metadata={
|
||||
'color': 'teal',
|
||||
'icon': 'pier',
|
||||
'css_class': 'bg-teal-100 text-teal-800',
|
||||
'sort_order': 7
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="BOARDWALK",
|
||||
label="Boardwalk",
|
||||
description="Waterfront entertainment area with rides and attractions",
|
||||
metadata={
|
||||
'color': 'indigo',
|
||||
'icon': 'boardwalk',
|
||||
'css_class': 'bg-indigo-100 text-indigo-800',
|
||||
'sort_order': 8
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="SAFARI_PARK",
|
||||
label="Safari Park",
|
||||
description="Wildlife park with drive-through animal experiences",
|
||||
metadata={
|
||||
'color': 'emerald',
|
||||
'icon': 'safari',
|
||||
'css_class': 'bg-emerald-100 text-emerald-800',
|
||||
'sort_order': 9
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="ZOO",
|
||||
label="Zoo",
|
||||
description="Zoological park with animal exhibits and educational programs",
|
||||
metadata={
|
||||
'color': 'lime',
|
||||
'icon': 'zoo',
|
||||
'css_class': 'bg-lime-100 text-lime-800',
|
||||
'sort_order': 10
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="OTHER",
|
||||
label="Other",
|
||||
description="Park type that doesn't fit into standard categories",
|
||||
metadata={
|
||||
'color': 'gray',
|
||||
'icon': 'other',
|
||||
'css_class': 'bg-gray-100 text-gray-800',
|
||||
'sort_order': 11
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
]
|
||||
|
||||
# Company Role Choices for Parks Domain (OPERATOR and PROPERTY_OWNER only)
|
||||
PARKS_COMPANY_ROLES = [
|
||||
RichChoice(
|
||||
value="OPERATOR",
|
||||
label="Park Operator",
|
||||
description="Company that operates and manages theme parks and amusement facilities",
|
||||
metadata={
|
||||
'color': 'blue',
|
||||
'icon': 'building-office',
|
||||
'css_class': 'bg-blue-100 text-blue-800',
|
||||
'sort_order': 1,
|
||||
'domain': 'parks',
|
||||
'permissions': ['manage_parks', 'view_operations'],
|
||||
'url_pattern': '/parks/operators/{slug}/'
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
RichChoice(
|
||||
value="PROPERTY_OWNER",
|
||||
label="Property Owner",
|
||||
description="Company that owns the land and property where parks are located",
|
||||
metadata={
|
||||
'color': 'green',
|
||||
'icon': 'home',
|
||||
'css_class': 'bg-green-100 text-green-800',
|
||||
'sort_order': 2,
|
||||
'domain': 'parks',
|
||||
'permissions': ['manage_property', 'view_ownership'],
|
||||
'url_pattern': '/parks/owners/{slug}/'
|
||||
},
|
||||
category=ChoiceCategory.CLASSIFICATION
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
def register_parks_choices():
|
||||
"""Register all parks domain choices with the global registry"""
|
||||
|
||||
register_choices(
|
||||
name="statuses",
|
||||
choices=PARK_STATUSES,
|
||||
domain="parks",
|
||||
description="Park operational status options",
|
||||
metadata={'domain': 'parks', 'type': 'status'}
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="types",
|
||||
choices=PARK_TYPES,
|
||||
domain="parks",
|
||||
description="Park type and category classifications",
|
||||
metadata={'domain': 'parks', 'type': 'park_type'}
|
||||
)
|
||||
|
||||
register_choices(
|
||||
name="company_roles",
|
||||
choices=PARKS_COMPANY_ROLES,
|
||||
domain="parks",
|
||||
description="Company role classifications for parks domain (OPERATOR and PROPERTY_OWNER only)",
|
||||
metadata={'domain': 'parks', 'type': 'company_role'}
|
||||
)
|
||||
|
||||
|
||||
# Auto-register choices when module is imported
|
||||
register_parks_choices()
|
||||
@@ -1,391 +0,0 @@
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.db import models
|
||||
from django.contrib.gis.geos import Point
|
||||
from django.contrib.gis.measure import Distance
|
||||
from django_filters import (
|
||||
NumberFilter,
|
||||
ModelChoiceFilter,
|
||||
DateFromToRangeFilter,
|
||||
ChoiceFilter,
|
||||
FilterSet,
|
||||
CharFilter,
|
||||
BooleanFilter,
|
||||
OrderingFilter,
|
||||
)
|
||||
from .models import Park, Company
|
||||
from .querysets import get_base_park_queryset
|
||||
from apps.core.choices.registry import get_choices
|
||||
import requests
|
||||
|
||||
|
||||
def validate_positive_integer(value):
|
||||
"""Validate that a value is a positive integer"""
|
||||
try:
|
||||
value = float(value)
|
||||
if not value.is_integer() or value < 0:
|
||||
raise ValidationError(_("Value must be a positive integer"))
|
||||
return int(value)
|
||||
except (TypeError, ValueError):
|
||||
raise ValidationError(_("Invalid number format"))
|
||||
|
||||
|
||||
class ParkFilter(FilterSet):
|
||||
"""Filter set for parks with search and validation capabilities"""
|
||||
|
||||
class Meta:
|
||||
model = Park
|
||||
fields = []
|
||||
|
||||
# Search field with better description
|
||||
search = CharFilter(
|
||||
method="filter_search",
|
||||
label=_("Search Parks"),
|
||||
help_text=_("Search by park name, description, or location"),
|
||||
)
|
||||
|
||||
# Status filter with clearer label
|
||||
status = ChoiceFilter(
|
||||
field_name="status",
|
||||
choices=lambda: [(choice.value, choice.label) for choice in get_choices("park_statuses", "parks")],
|
||||
empty_label=_("Any status"),
|
||||
label=_("Operating Status"),
|
||||
help_text=_("Filter parks by their current operating status"),
|
||||
)
|
||||
|
||||
# Operator filters with helpful descriptions
|
||||
operator = ModelChoiceFilter(
|
||||
field_name="operator",
|
||||
queryset=Company.objects.filter(roles__contains=["OPERATOR"]),
|
||||
empty_label=_("Any operator"),
|
||||
label=_("Operating Company"),
|
||||
help_text=_("Filter parks by their operating company"),
|
||||
)
|
||||
has_operator = BooleanFilter(
|
||||
method="filter_has_operator",
|
||||
label=_("Operator Status"),
|
||||
help_text=_("Show parks with or without an operating company"),
|
||||
)
|
||||
|
||||
# Ride and attraction filters
|
||||
min_rides = NumberFilter(
|
||||
field_name="ride_count",
|
||||
lookup_expr="gte",
|
||||
validators=[validate_positive_integer],
|
||||
label=_("Minimum Rides"),
|
||||
help_text=_("Show parks with at least this many rides"),
|
||||
)
|
||||
min_coasters = NumberFilter(
|
||||
field_name="coaster_count",
|
||||
lookup_expr="gte",
|
||||
validators=[validate_positive_integer],
|
||||
label=_("Minimum Roller Coasters"),
|
||||
help_text=_("Show parks with at least this many roller coasters"),
|
||||
)
|
||||
|
||||
# Size filter
|
||||
min_size = NumberFilter(
|
||||
field_name="size_acres",
|
||||
lookup_expr="gte",
|
||||
validators=[validate_positive_integer],
|
||||
label=_("Minimum Size (acres)"),
|
||||
help_text=_("Show parks of at least this size in acres"),
|
||||
)
|
||||
|
||||
# Opening date filter with better label
|
||||
opening_date = DateFromToRangeFilter(
|
||||
field_name="opening_date",
|
||||
label=_("Opening Date Range"),
|
||||
help_text=_("Filter parks by their opening date"),
|
||||
)
|
||||
|
||||
# Location-based filters
|
||||
location_search = CharFilter(
|
||||
method="filter_location_search",
|
||||
label=_("Location Search"),
|
||||
help_text=_("Search by city, state, country, or address"),
|
||||
)
|
||||
|
||||
near_location = CharFilter(
|
||||
method="filter_near_location",
|
||||
label=_("Near Location"),
|
||||
help_text=_("Find parks near a specific location"),
|
||||
)
|
||||
|
||||
radius_km = NumberFilter(
|
||||
method="filter_radius",
|
||||
label=_("Radius (km)"),
|
||||
help_text=_("Search radius in kilometers (use with 'Near Location')"),
|
||||
)
|
||||
|
||||
country_filter = CharFilter(
|
||||
method="filter_country",
|
||||
label=_("Country"),
|
||||
help_text=_("Filter parks by country"),
|
||||
)
|
||||
|
||||
state_filter = CharFilter(
|
||||
method="filter_state",
|
||||
label=_("State/Region"),
|
||||
help_text=_("Filter parks by state or region"),
|
||||
)
|
||||
|
||||
# Practical filter fields that people actually use
|
||||
park_type = ChoiceFilter(
|
||||
method="filter_park_type",
|
||||
label=_("Park Type"),
|
||||
help_text=_("Filter by popular park categories"),
|
||||
choices=[
|
||||
("disney", _("Disney Parks")),
|
||||
("universal", _("Universal Parks")),
|
||||
("six_flags", _("Six Flags")),
|
||||
("cedar_fair", _("Cedar Fair")),
|
||||
("independent", _("Independent Parks")),
|
||||
],
|
||||
empty_label=_("All parks"),
|
||||
)
|
||||
|
||||
has_coasters = BooleanFilter(
|
||||
method="filter_has_coasters",
|
||||
label=_("Has Roller Coasters"),
|
||||
help_text=_("Show only parks with roller coasters"),
|
||||
)
|
||||
|
||||
min_rating = ChoiceFilter(
|
||||
method="filter_min_rating",
|
||||
label=_("Minimum Rating"),
|
||||
help_text=_("Show parks with at least this rating"),
|
||||
choices=[
|
||||
("3", _("3+ stars")),
|
||||
("4", _("4+ stars")),
|
||||
("4.5", _("4.5+ stars")),
|
||||
],
|
||||
empty_label=_("Any rating"),
|
||||
)
|
||||
|
||||
big_parks_only = BooleanFilter(
|
||||
method="filter_big_parks",
|
||||
label=_("Major Parks Only"),
|
||||
help_text=_("Show only large theme parks (10+ rides)"),
|
||||
)
|
||||
|
||||
# Simple, useful ordering
|
||||
ordering = OrderingFilter(
|
||||
fields=(
|
||||
("name", "name"),
|
||||
("average_rating", "rating"),
|
||||
("coaster_count", "coasters"),
|
||||
("ride_count", "rides"),
|
||||
),
|
||||
field_labels={
|
||||
"name": _("Name (A-Z)"),
|
||||
"-name": _("Name (Z-A)"),
|
||||
"-average_rating": _("Highest Rated"),
|
||||
"-coaster_count": _("Most Coasters"),
|
||||
"-ride_count": _("Most Rides"),
|
||||
},
|
||||
label=_("Sort by"),
|
||||
)
|
||||
|
||||
def filter_search(self, queryset, name, value):
|
||||
"""Custom search implementation"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
search_fields = [
|
||||
"name__icontains",
|
||||
"description__icontains",
|
||||
"location__city__icontains",
|
||||
"location__state__icontains",
|
||||
"location__country__icontains",
|
||||
]
|
||||
|
||||
queries = [models.Q(**{field: value}) for field in search_fields]
|
||||
query = queries.pop()
|
||||
for item in queries:
|
||||
query |= item
|
||||
|
||||
return queryset.filter(query).distinct()
|
||||
|
||||
def filter_has_operator(self, queryset, name, value):
|
||||
"""Filter parks based on whether they have an operator"""
|
||||
return queryset.filter(operator__isnull=not value)
|
||||
|
||||
@property
|
||||
def qs(self):
|
||||
"""
|
||||
Override qs property to ensure we always use base queryset with annotations
|
||||
"""
|
||||
if not hasattr(self, "_qs"):
|
||||
# Start with optimized base queryset
|
||||
base_qs = (
|
||||
get_base_park_queryset()
|
||||
.select_related("operator", "property_owner", "location")
|
||||
.prefetch_related("photos", "rides__manufacturer")
|
||||
)
|
||||
|
||||
if not self.is_bound:
|
||||
self._qs = base_qs
|
||||
return self._qs
|
||||
|
||||
if not self.form.is_valid():
|
||||
self._qs = base_qs.none()
|
||||
return self._qs
|
||||
|
||||
self._qs = base_qs
|
||||
for name, value in self.form.cleaned_data.items():
|
||||
if value in [None, "", 0] and name not in [
|
||||
"has_operator",
|
||||
"has_coasters",
|
||||
"big_parks_only",
|
||||
]:
|
||||
continue
|
||||
self._qs = self.filters[name].filter(self._qs, value)
|
||||
self._qs = self._qs.distinct()
|
||||
return self._qs
|
||||
|
||||
def filter_location_search(self, queryset, name, value):
|
||||
"""Filter parks by location fields"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
location_query = (
|
||||
models.Q(location__city__icontains=value)
|
||||
| models.Q(location__state__icontains=value)
|
||||
| models.Q(location__country__icontains=value)
|
||||
| models.Q(location__street_address__icontains=value)
|
||||
)
|
||||
|
||||
return queryset.filter(location_query).distinct()
|
||||
|
||||
def filter_near_location(self, queryset, name, value):
|
||||
"""Filter parks near a specific location using geocoding"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
# Try to geocode the location
|
||||
coordinates = self._geocode_location(value)
|
||||
if not coordinates:
|
||||
return queryset
|
||||
|
||||
lat, lng = coordinates
|
||||
point = Point(lng, lat, srid=4326)
|
||||
|
||||
# Get radius from form data, default to 50km
|
||||
radius = self.data.get("radius_km", 50)
|
||||
try:
|
||||
radius = float(radius)
|
||||
except (ValueError, TypeError):
|
||||
radius = 50
|
||||
|
||||
# Filter by distance
|
||||
distance = Distance(km=radius)
|
||||
return (
|
||||
queryset.filter(location__point__distance_lte=(point, distance))
|
||||
.annotate(distance=models.Value(0, output_field=models.FloatField()))
|
||||
.order_by("distance")
|
||||
.distinct()
|
||||
)
|
||||
|
||||
def filter_radius(self, queryset, name, value):
|
||||
"""Radius filter - handled by filter_near_location"""
|
||||
return queryset
|
||||
|
||||
def filter_country(self, queryset, name, value):
|
||||
"""Filter parks by country"""
|
||||
if not value:
|
||||
return queryset
|
||||
return queryset.filter(location__country__icontains=value).distinct()
|
||||
|
||||
def filter_state(self, queryset, name, value):
|
||||
"""Filter parks by state/region"""
|
||||
if not value:
|
||||
return queryset
|
||||
return queryset.filter(location__state__icontains=value).distinct()
|
||||
|
||||
def filter_park_type(self, queryset, name, value):
|
||||
"""Filter parks by popular company/brand"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
# Map common park types to operator name patterns
|
||||
type_filters = {
|
||||
"disney": models.Q(operator__name__icontains="Disney"),
|
||||
"universal": models.Q(operator__name__icontains="Universal"),
|
||||
"six_flags": models.Q(operator__name__icontains="Six Flags"),
|
||||
"cedar_fair": models.Q(operator__name__icontains="Cedar Fair")
|
||||
| models.Q(operator__name__icontains="Cedar Point")
|
||||
| models.Q(operator__name__icontains="Kings Island")
|
||||
| models.Q(operator__name__icontains="Canada's Wonderland"),
|
||||
"independent": ~(
|
||||
models.Q(operator__name__icontains="Disney")
|
||||
| models.Q(operator__name__icontains="Universal")
|
||||
| models.Q(operator__name__icontains="Six Flags")
|
||||
| models.Q(operator__name__icontains="Cedar Fair")
|
||||
| models.Q(operator__name__icontains="Cedar Point")
|
||||
),
|
||||
}
|
||||
|
||||
if value in type_filters:
|
||||
return queryset.filter(type_filters[value])
|
||||
|
||||
return queryset
|
||||
|
||||
def filter_has_coasters(self, queryset, name, value):
|
||||
"""Filter parks based on whether they have roller coasters"""
|
||||
if value is None:
|
||||
return queryset
|
||||
|
||||
if value:
|
||||
return queryset.filter(coaster_count__gt=0)
|
||||
else:
|
||||
return queryset.filter(
|
||||
models.Q(coaster_count__isnull=True) | models.Q(coaster_count=0)
|
||||
)
|
||||
|
||||
def filter_min_rating(self, queryset, name, value):
|
||||
"""Filter parks by minimum rating"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
try:
|
||||
min_rating = float(value)
|
||||
return queryset.filter(average_rating__gte=min_rating)
|
||||
except (ValueError, TypeError):
|
||||
return queryset
|
||||
|
||||
def filter_big_parks(self, queryset, name, value):
|
||||
"""Filter to show only major parks with many rides"""
|
||||
if not value:
|
||||
return queryset
|
||||
|
||||
return queryset.filter(ride_count__gte=10)
|
||||
|
||||
def _geocode_location(self, location_string):
|
||||
"""
|
||||
Geocode a location string using OpenStreetMap Nominatim.
|
||||
Returns (lat, lng) tuple or None if geocoding fails.
|
||||
"""
|
||||
try:
|
||||
response = requests.get(
|
||||
"https://nominatim.openstreetmap.org/search",
|
||||
params={
|
||||
"q": location_string,
|
||||
"format": "json",
|
||||
"limit": 1,
|
||||
"countrycodes": "us,ca,gb,fr,de,es,it,jp,au", # Popular countries
|
||||
},
|
||||
headers={"User-Agent": "ThrillWiki/1.0"},
|
||||
timeout=5,
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
if data:
|
||||
result = data[0]
|
||||
return float(result["lat"]), float(result["lon"])
|
||||
except Exception:
|
||||
# Silently fail geocoding - just return None
|
||||
pass
|
||||
|
||||
return None
|
||||
@@ -1,351 +0,0 @@
|
||||
from django import forms
|
||||
from decimal import Decimal, InvalidOperation, ROUND_DOWN
|
||||
from autocomplete.core import register
|
||||
from autocomplete.shortcuts import ModelAutocomplete
|
||||
from autocomplete.widgets import AutocompleteWidget
|
||||
from .models import Park
|
||||
from .models.location import ParkLocation
|
||||
|
||||
|
||||
@register
|
||||
class ParkAutocomplete(ModelAutocomplete):
|
||||
"""Autocomplete for searching parks.
|
||||
|
||||
Features:
|
||||
- Name-based search with partial matching
|
||||
- Prefetches related owner data
|
||||
- Applies standard park queryset filtering
|
||||
- Includes park status and location in results
|
||||
"""
|
||||
|
||||
model = Park
|
||||
search_attrs = ["name"] # We'll match on park names
|
||||
|
||||
|
||||
class ParkSearchForm(forms.Form):
|
||||
"""Form for searching parks with autocomplete."""
|
||||
|
||||
park = forms.ModelChoiceField(
|
||||
queryset=Park.objects.all(),
|
||||
required=False,
|
||||
widget=AutocompleteWidget(
|
||||
ac_class=ParkAutocomplete,
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
"placeholder": "Search parks...",
|
||||
},
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class ParkForm(forms.ModelForm):
|
||||
"""Form for creating and updating Park objects with location support"""
|
||||
|
||||
# Location fields
|
||||
latitude = forms.DecimalField(
|
||||
max_digits=9,
|
||||
decimal_places=6,
|
||||
required=False,
|
||||
widget=forms.HiddenInput(),
|
||||
)
|
||||
longitude = forms.DecimalField(
|
||||
max_digits=10,
|
||||
decimal_places=6,
|
||||
required=False,
|
||||
widget=forms.HiddenInput(),
|
||||
)
|
||||
street_address = forms.CharField(
|
||||
max_length=255,
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
city = forms.CharField(
|
||||
max_length=255,
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
state = forms.CharField(
|
||||
max_length=255,
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
country = forms.CharField(
|
||||
max_length=255,
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
postal_code = forms.CharField(
|
||||
max_length=20,
|
||||
required=False,
|
||||
widget=forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Park
|
||||
fields = [
|
||||
"name",
|
||||
"description",
|
||||
"operator",
|
||||
"property_owner",
|
||||
"status",
|
||||
"opening_date",
|
||||
"closing_date",
|
||||
"operating_season",
|
||||
"size_acres",
|
||||
"website",
|
||||
# Location fields handled separately
|
||||
"latitude",
|
||||
"longitude",
|
||||
"street_address",
|
||||
"city",
|
||||
"state",
|
||||
"country",
|
||||
"postal_code",
|
||||
]
|
||||
widgets = {
|
||||
"name": forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
"description": forms.Textarea(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-textarea "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
"rows": 2,
|
||||
}
|
||||
),
|
||||
"operator": forms.Select(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-select "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
"property_owner": forms.Select(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-select "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
"status": forms.Select(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-select "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
)
|
||||
}
|
||||
),
|
||||
"opening_date": forms.DateInput(
|
||||
attrs={
|
||||
"type": "date",
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
"closing_date": forms.DateInput(
|
||||
attrs={
|
||||
"type": "date",
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
}
|
||||
),
|
||||
"operating_season": forms.TextInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
"placeholder": "e.g., Year-round, Summer only, etc.",
|
||||
}
|
||||
),
|
||||
"size_acres": forms.NumberInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
"step": "0.01",
|
||||
"min": "0",
|
||||
}
|
||||
),
|
||||
"website": forms.URLInput(
|
||||
attrs={
|
||||
"class": (
|
||||
"w-full border-gray-300 rounded-lg form-input "
|
||||
"dark:border-gray-600 dark:bg-gray-700 dark:text-white"
|
||||
),
|
||||
"placeholder": "https://example.com",
|
||||
}
|
||||
),
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
# Pre-fill location fields if editing existing park
|
||||
if self.instance and self.instance.pk and self.instance.location.exists():
|
||||
location = self.instance.location.first()
|
||||
self.fields["latitude"].initial = location.latitude
|
||||
self.fields["longitude"].initial = location.longitude
|
||||
self.fields["street_address"].initial = location.street_address
|
||||
self.fields["city"].initial = location.city
|
||||
self.fields["state"].initial = location.state
|
||||
self.fields["country"].initial = location.country
|
||||
self.fields["postal_code"].initial = location.postal_code
|
||||
|
||||
def clean_latitude(self):
|
||||
latitude = self.cleaned_data.get("latitude")
|
||||
if latitude is not None:
|
||||
try:
|
||||
# Convert to Decimal for precise handling
|
||||
latitude = Decimal(str(latitude))
|
||||
# Round to exactly 6 decimal places
|
||||
latitude = latitude.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
|
||||
|
||||
# Validate range
|
||||
if latitude < -90 or latitude > 90:
|
||||
raise forms.ValidationError(
|
||||
"Latitude must be between -90 and 90 degrees."
|
||||
)
|
||||
|
||||
# Convert to string to preserve exact decimal places
|
||||
return str(latitude)
|
||||
except (InvalidOperation, TypeError) as e:
|
||||
raise forms.ValidationError("Invalid latitude value.") from e
|
||||
return latitude
|
||||
|
||||
def clean_longitude(self):
|
||||
longitude = self.cleaned_data.get("longitude")
|
||||
if longitude is not None:
|
||||
try:
|
||||
# Convert to Decimal for precise handling
|
||||
longitude = Decimal(str(longitude))
|
||||
# Round to exactly 6 decimal places
|
||||
longitude = longitude.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
|
||||
|
||||
# Validate range
|
||||
if longitude < -180 or longitude > 180:
|
||||
raise forms.ValidationError(
|
||||
"Longitude must be between -180 and 180 degrees."
|
||||
)
|
||||
|
||||
# Convert to string to preserve exact decimal places
|
||||
return str(longitude)
|
||||
except (InvalidOperation, TypeError) as e:
|
||||
raise forms.ValidationError("Invalid longitude value.") from e
|
||||
return longitude
|
||||
|
||||
def save(self, commit=True):
|
||||
park = super().save(commit=False)
|
||||
|
||||
# Prepare location data
|
||||
location_data = {
|
||||
"name": park.name,
|
||||
"location_type": "park",
|
||||
"latitude": self.cleaned_data.get("latitude"),
|
||||
"longitude": self.cleaned_data.get("longitude"),
|
||||
"street_address": self.cleaned_data.get("street_address"),
|
||||
"city": self.cleaned_data.get("city"),
|
||||
"state": self.cleaned_data.get("state"),
|
||||
"country": self.cleaned_data.get("country"),
|
||||
"postal_code": self.cleaned_data.get("postal_code"),
|
||||
}
|
||||
|
||||
# Handle location: update if exists, create if not
|
||||
try:
|
||||
park_location = park.location
|
||||
# Update existing location
|
||||
for key, value in location_data.items():
|
||||
if key in ["latitude", "longitude"] and value:
|
||||
continue # Handle coordinates separately
|
||||
if hasattr(park_location, key):
|
||||
setattr(park_location, key, value)
|
||||
|
||||
# Handle coordinates if provided
|
||||
if "latitude" in location_data and "longitude" in location_data:
|
||||
if location_data["latitude"] and location_data["longitude"]:
|
||||
park_location.set_coordinates(
|
||||
float(location_data["latitude"]),
|
||||
float(location_data["longitude"]),
|
||||
)
|
||||
park_location.save()
|
||||
except ParkLocation.DoesNotExist:
|
||||
# Create new ParkLocation
|
||||
coordinates_data = {}
|
||||
if "latitude" in location_data and "longitude" in location_data:
|
||||
if location_data["latitude"] and location_data["longitude"]:
|
||||
coordinates_data = {
|
||||
"latitude": float(location_data["latitude"]),
|
||||
"longitude": float(location_data["longitude"]),
|
||||
}
|
||||
|
||||
# Remove coordinate fields from location_data for creation
|
||||
creation_data = {
|
||||
k: v
|
||||
for k, v in location_data.items()
|
||||
if k not in ["latitude", "longitude"]
|
||||
}
|
||||
creation_data.setdefault("country", "USA")
|
||||
|
||||
park_location = ParkLocation.objects.create(park=park, **creation_data)
|
||||
|
||||
if coordinates_data:
|
||||
park_location.set_coordinates(
|
||||
coordinates_data["latitude"], coordinates_data["longitude"]
|
||||
)
|
||||
park_location.save()
|
||||
|
||||
if commit:
|
||||
park.save()
|
||||
|
||||
return park
|
||||
@@ -1,55 +0,0 @@
|
||||
from decimal import Decimal, ROUND_DOWN, InvalidOperation
|
||||
|
||||
|
||||
def normalize_coordinate(value, max_digits, decimal_places):
|
||||
"""Normalize coordinate to have exactly 6 decimal places"""
|
||||
try:
|
||||
if value is None:
|
||||
return None
|
||||
|
||||
# Convert to Decimal for precise handling
|
||||
value = Decimal(str(value))
|
||||
# Round to exactly 6 decimal places
|
||||
value = value.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
|
||||
|
||||
return float(value)
|
||||
except (TypeError, ValueError, InvalidOperation):
|
||||
return None
|
||||
|
||||
|
||||
def get_english_name(tags):
|
||||
"""Extract English name from OSM tags, falling back to default name"""
|
||||
# Try name:en first
|
||||
if "name:en" in tags:
|
||||
return tags["name:en"]
|
||||
# Then try int_name (international name)
|
||||
if "int_name" in tags:
|
||||
return tags["int_name"]
|
||||
# Fall back to default name
|
||||
return tags.get("name")
|
||||
|
||||
|
||||
def normalize_osm_result(result):
|
||||
"""Normalize OpenStreetMap result to use English names and normalized coordinates"""
|
||||
# Normalize coordinates
|
||||
result["lat"] = normalize_coordinate(float(result["lat"]), 9, 6)
|
||||
result["lon"] = normalize_coordinate(float(result["lon"]), 10, 6)
|
||||
|
||||
# Get address details
|
||||
address = result.get("address", {})
|
||||
|
||||
# Normalize place names to English where possible
|
||||
if "namedetails" in result:
|
||||
# For main display name
|
||||
result["display_name"] = get_english_name(result["namedetails"])
|
||||
|
||||
# For address components
|
||||
if "city" in address and "city_tags" in result:
|
||||
address["city"] = get_english_name(result["city_tags"])
|
||||
if "state" in address and "state_tags" in result:
|
||||
address["state"] = get_english_name(result["state_tags"])
|
||||
if "country" in address and "country_tags" in result:
|
||||
address["country"] = get_english_name(result["country_tags"])
|
||||
|
||||
result["address"] = address
|
||||
return result
|
||||
@@ -1,316 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
# Import models from both apps
|
||||
from apps.parks.models import Company as ParkCompany
|
||||
from apps.rides.models.company import Company as RideCompany
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Creates comprehensive sample data for the ThrillWiki theme park application"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.created_companies = {}
|
||||
self.created_parks = {}
|
||||
self.created_rides = {}
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.stdout.write("Starting sample data creation...")
|
||||
|
||||
try:
|
||||
with transaction.atomic():
|
||||
self.create_companies()
|
||||
self.create_parks()
|
||||
self.create_ride_models()
|
||||
self.create_rides()
|
||||
self.create_park_areas()
|
||||
self.create_reviews()
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("Successfully created comprehensive sample data!")
|
||||
)
|
||||
self.print_summary()
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f"Error creating sample data: {e}"))
|
||||
raise
|
||||
|
||||
def create_companies(self):
|
||||
"""Create companies with different roles following entity relationship rules"""
|
||||
self.stdout.write("Creating companies...")
|
||||
|
||||
# Park operators and property owners (using parks.models.Company)
|
||||
park_operators_data = [
|
||||
{
|
||||
"name": "The Walt Disney Company",
|
||||
"slug": "walt-disney-company",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "World's largest entertainment company and theme park operator.",
|
||||
"website": "https://www.disney.com/",
|
||||
"founded_year": 1923,
|
||||
},
|
||||
{
|
||||
"name": "Universal Parks & Resorts",
|
||||
"slug": "universal-parks-resorts",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "Division of Comcast NBCUniversal, operating major theme parks worldwide.",
|
||||
"website": "https://www.universalparks.com/",
|
||||
"founded_year": 1964,
|
||||
},
|
||||
{
|
||||
"name": "Six Flags Entertainment Corporation",
|
||||
"slug": "six-flags-entertainment",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "World's largest regional theme park company.",
|
||||
"website": "https://www.sixflags.com/",
|
||||
"founded_year": 1961,
|
||||
},
|
||||
{
|
||||
"name": "Cedar Fair Entertainment Company",
|
||||
"slug": "cedar-fair-entertainment",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "One of North America's largest operators of regional amusement parks.",
|
||||
"website": "https://www.cedarfair.com/",
|
||||
"founded_year": 1983,
|
||||
},
|
||||
{
|
||||
"name": "Herschend Family Entertainment",
|
||||
"slug": "herschend-family-entertainment",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "Largest family-owned themed attractions corporation in the United States.",
|
||||
"website": "https://www.hfecorp.com/",
|
||||
"founded_year": 1950,
|
||||
},
|
||||
{
|
||||
"name": "SeaWorld Parks & Entertainment",
|
||||
"slug": "seaworld-parks-entertainment",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "Theme park and entertainment company focusing on nature-based themes.",
|
||||
"website": "https://www.seaworldentertainment.com/",
|
||||
"founded_year": 1959,
|
||||
},
|
||||
{
|
||||
"name": "Merlin Entertainments",
|
||||
"slug": "merlin-entertainments",
|
||||
"roles": ["OPERATOR", "PROPERTY_OWNER"],
|
||||
"description": "European theme park operator with LEGOLAND and Madame Tussauds brands.",
|
||||
"website": "https://www.merlinentertainments.com/",
|
||||
"founded_year": 1998,
|
||||
},
|
||||
]
|
||||
|
||||
for company_data in park_operators_data:
|
||||
company, created = ParkCompany.objects.get_or_create(
|
||||
slug=company_data["slug"], defaults=company_data
|
||||
)
|
||||
self.created_companies[company.slug] = company
|
||||
self.stdout.write(
|
||||
f" {'Created' if created else 'Found'} park company: {company.name}"
|
||||
)
|
||||
|
||||
# Ride manufacturers and designers (using rides.models.Company)
|
||||
ride_companies_data = [
|
||||
{
|
||||
"name": "Bolliger & Mabillard",
|
||||
"slug": "bolliger-mabillard",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "Swiss roller coaster manufacturer known for inverted and diving coasters.",
|
||||
"website": "https://www.bolliger-mabillard.com/",
|
||||
"founded_date": "1988-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Intamin Amusement Rides",
|
||||
"slug": "intamin-amusement-rides",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "Liechtenstein-based manufacturer of roller coasters and thrill rides.",
|
||||
"website": "https://www.intamin.com/",
|
||||
"founded_date": "1967-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Arrow Dynamics",
|
||||
"slug": "arrow-dynamics",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "American manufacturer known for corkscrew coasters and mine trains.",
|
||||
"website": "https://en.wikipedia.org/wiki/Arrow_Dynamics",
|
||||
"founded_date": "1946-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Vekoma Rides Manufacturing",
|
||||
"slug": "vekoma-rides-manufacturing",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "Dutch manufacturer of roller coasters and family rides.",
|
||||
"website": "https://www.vekoma.com/",
|
||||
"founded_date": "1926-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Rocky Mountain Construction",
|
||||
"slug": "rocky-mountain-construction",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "American manufacturer specializing in I-Box track and Raptor track coasters.",
|
||||
"website": "https://www.rockymtnconstruction.com/",
|
||||
"founded_date": "2001-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Mack Rides",
|
||||
"slug": "mack-rides",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "German manufacturer known for water rides and powered coasters.",
|
||||
"website": "https://www.mack-rides.com/",
|
||||
"founded_date": "1780-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Chance Rides",
|
||||
"slug": "chance-rides",
|
||||
"roles": ["MANUFACTURER"],
|
||||
"description": "American manufacturer of thrill rides and amusement park equipment.",
|
||||
"website": "https://www.chancerides.com/",
|
||||
"founded_date": "1961-01-01",
|
||||
},
|
||||
{
|
||||
"name": "S&S Worldwide",
|
||||
"slug": "s-s-worldwide",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "American manufacturer known for drop towers and 4D free-fly coasters.",
|
||||
"website": "https://www.s-s.com/",
|
||||
"founded_date": "1990-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Zierer Rides",
|
||||
"slug": "zierer-rides",
|
||||
"roles": ["MANUFACTURER"],
|
||||
"description": "German manufacturer of kiddie rides and family coasters.",
|
||||
"website": "https://www.zierer.com/",
|
||||
"founded_date": "1950-01-01",
|
||||
},
|
||||
{
|
||||
"name": "Gerstlauer",
|
||||
"slug": "gerstlauer",
|
||||
"roles": ["MANUFACTURER", "DESIGNER"],
|
||||
"description": "German manufacturer known for Euro-Fighter and spinning coasters.",
|
||||
"website": "https://www.gerstlauer-rides.de/",
|
||||
"founded_date": "1982-01-01",
|
||||
},
|
||||
]
|
||||
|
||||
for company_data in ride_companies_data:
|
||||
company, created = RideCompany.objects.get_or_create(
|
||||
slug=company_data["slug"], defaults=company_data
|
||||
)
|
||||
self.created_companies[company.slug] = company
|
||||
self.stdout.write(
|
||||
f" {'Created' if created else 'Found'} ride company: {company.name}"
|
||||
)
|
||||
|
||||
def create_parks(self):
|
||||
"""Create parks with proper operator relationships"""
|
||||
self.stdout.write("Creating parks...")
|
||||
|
||||
# TODO: Implement park creation - parks_data defined but not used yet
|
||||
parks_data = [ # noqa: F841
|
||||
{
|
||||
"name": "Magic Kingdom",
|
||||
"slug": "magic-kingdom",
|
||||
"operator_slug": "walt-disney-company",
|
||||
"property_owner_slug": "walt-disney-company",
|
||||
"description": "The first theme park at Walt Disney World Resort in Florida, opened in 1971.",
|
||||
"opening_date": "1971-10-01",
|
||||
"size_acres": 142,
|
||||
"website": "https://disneyworld.disney.go.com/destinations/magic-kingdom/",
|
||||
"location": {
|
||||
"street_address": "1180 Seven Seas Dr",
|
||||
"city": "Lake Buena Vista",
|
||||
"state_province": "Florida",
|
||||
"country": "USA",
|
||||
"postal_code": "32830",
|
||||
"latitude": 28.4177,
|
||||
"longitude": -81.5812,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "Universal Studios Florida",
|
||||
"slug": "universal-studios-florida",
|
||||
"operator_slug": "universal-parks-resorts",
|
||||
"property_owner_slug": "universal-parks-resorts",
|
||||
"description": "Movie and television-based theme park in Orlando, Florida.",
|
||||
"opening_date": "1990-06-07",
|
||||
"size_acres": 108,
|
||||
"website": "https://www.universalorlando.com/web/en/us/theme-parks/universal-studios-florida",
|
||||
"location": {
|
||||
"street_address": "6000 Universal Blvd",
|
||||
"city": "Orlando",
|
||||
"state_province": "Florida",
|
||||
"country": "USA",
|
||||
"postal_code": "32819",
|
||||
"latitude": 28.4749,
|
||||
"longitude": -81.4687,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "Cedar Point",
|
||||
"slug": "cedar-point",
|
||||
"operator_slug": "cedar-fair-entertainment",
|
||||
"property_owner_slug": "cedar-fair-entertainment",
|
||||
"description": 'Known as the "Roller Coaster Capital of the World".',
|
||||
"opening_date": "1870-06-01",
|
||||
"size_acres": 364,
|
||||
"website": "https://www.cedarpoint.com/",
|
||||
"location": {
|
||||
"street_address": "1 Cedar Point Dr",
|
||||
"city": "Sandusky",
|
||||
"state_province": "Ohio",
|
||||
"country": "USA",
|
||||
"postal_code": "44870",
|
||||
"latitude": 41.4822,
|
||||
"longitude": -82.6835,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "Six Flags Magic Mountain",
|
||||
"slug": "six-flags-magic-mountain",
|
||||
"operator_slug": "six-flags-entertainment",
|
||||
"property_owner_slug": "six-flags-entertainment",
|
||||
"description": "Known for its world-record 19 roller coasters.",
|
||||
"opening_date": "1971-05-29",
|
||||
"size_acres": 262,
|
||||
"website": "https://www.sixflags.com/magicmountain",
|
||||
"location": {
|
||||
"street_address": "26101 Magic Mountain Pkwy",
|
||||
"city": "Valencia",
|
||||
"state_province": "California",
|
||||
"country": "USA",
|
||||
"postal_code": "91355",
|
||||
"latitude": 34.4253,
|
||||
"longitude": -118.5971,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "Europa-Park",
|
||||
"slug": "europa-park",
|
||||
"operator_slug": "merlin-entertainments",
|
||||
"property_owner_slug": "merlin-entertainments",
|
||||
"description": "One of the most popular theme parks in Europe, located in Germany.",
|
||||
"opening_date": "1975-07-12",
|
||||
"size_acres": 234,
|
||||
"website": "https://www.europapark.de/",
|
||||
"location": {
|
||||
"street_address": "Europa-Park-Straße 2",
|
||||
"city": "Rust",
|
||||
"state_province": "Baden-Württemberg",
|
||||
"country": "Germany",
|
||||
"postal_code": "77977",
|
||||
"latitude": 48.2667,
|
||||
"longitude": 7.7167,
|
||||
},
|
||||
},
|
||||
{
|
||||
"name": "Alton Towers",
|
||||
"slug": "alton-towers",
|
||||
"operator_slug": "merlin-entertainments",
|
||||
"property_owner_slug": "merlin-entertainments",
|
||||
"description": "Major theme park and former country estate in Staffordshire, England.",
|
||||
"opening_date": "1980-04-23",
|
||||
"size_acres": 500,
|
||||
# Add other fields as needed
|
||||
},
|
||||
]
|
||||
@@ -1,36 +0,0 @@
from django.core.management.base import BaseCommand
from django.db import connection


class Command(BaseCommand):
    help = "Fix migration history"

    def handle(self, *args, **options):
        with connection.cursor() as cursor:
            # Drop existing historical tables
            cursor.execute(
                """
                DROP TABLE IF EXISTS parks_historicalpark CASCADE;
                DROP TABLE IF EXISTS parks_historicalparkarea CASCADE;
                """
            )

            # Delete all existing parks migrations
            cursor.execute(
                """
                DELETE FROM django_migrations
                WHERE app = 'parks';
                """
            )

            # Insert the new initial migration
            cursor.execute(
                """
                INSERT INTO django_migrations (app, name, applied)
                VALUES ('parks', '0001_initial', NOW());
                """
            )

        self.stdout.write(
            self.style.SUCCESS("Successfully fixed migration history")
        )
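For context, a command like this would typically be run once and then verified against Django's migration bookkeeping. A minimal invocation sketch follows; the command name "fix_migration_history" is assumed from the file's purpose and may differ from the actual module name in the repository.

# Sketch only: run the cleanup command, then confirm the recorded state.
from django.core.management import call_command

call_command("fix_migration_history")    # rewrites django_migrations for the parks app
call_command("showmigrations", "parks")  # verify: only 0001_initial should show as applied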
@@ -1,351 +0,0 @@
|
||||
{
|
||||
"parks": [
|
||||
{
|
||||
"name": "Walt Disney World Magic Kingdom",
|
||||
"location": "Orlando, Florida",
|
||||
"country": "US",
|
||||
"opening_date": "1971-10-01",
|
||||
"status": "OPERATING",
|
||||
"description": "The most visited theme park in the world, Magic Kingdom is Walt Disney World's first theme park.",
|
||||
"website": "https://disneyworld.disney.go.com/destinations/magic-kingdom/",
|
||||
"owner": "The Walt Disney Company",
|
||||
"size_acres": "142.00",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1524008279394-3aed4643b30b"
|
||||
],
|
||||
"rides": [
|
||||
{
|
||||
"name": "Space Mountain",
|
||||
"category": "RC",
|
||||
"opening_date": "1975-01-15",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Walt Disney Imagineering",
|
||||
"description": "A high-speed roller coaster in the dark through space.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1536768139911-e290a59011e4"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "183.00",
|
||||
"length_ft": "3196.00",
|
||||
"speed_mph": "27.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 180
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Big Thunder Mountain Railroad",
|
||||
"category": "RC",
|
||||
"opening_date": "1980-09-23",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Walt Disney Imagineering",
|
||||
"description": "A mine train roller coaster through the Old West.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1513889961551-628c1e5e2ee9"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "104.00",
|
||||
"length_ft": "2671.00",
|
||||
"speed_mph": "30.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 197
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Seven Dwarfs Mine Train",
|
||||
"category": "RC",
|
||||
"opening_date": "2014-05-28",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Vekoma",
|
||||
"description": "A family roller coaster featuring unique swinging cars.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1590144662036-33bf0ebd2c7f"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "112.00",
|
||||
"length_ft": "2000.00",
|
||||
"speed_mph": "34.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 180
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Haunted Mansion",
|
||||
"category": "DR",
|
||||
"opening_date": "1971-10-01",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Walt Disney Imagineering",
|
||||
"description": "A dark ride through a haunted estate.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1597466599360-3b9775841aec"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Pirates of the Caribbean",
|
||||
"category": "DR",
|
||||
"opening_date": "1973-12-15",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Walt Disney Imagineering",
|
||||
"description": "A boat ride through pirate-filled Caribbean waters.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1506126799754-92bc47fc5d78"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Cedar Point",
|
||||
"location": "Sandusky, Ohio",
|
||||
"country": "US",
|
||||
"opening_date": "1870-06-01",
|
||||
"status": "OPERATING",
|
||||
"description": "Known as the Roller Coaster Capital of the World.",
|
||||
"website": "https://www.cedarpoint.com",
|
||||
"owner": "Cedar Fair",
|
||||
"size_acres": "364.00",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1536768139911-e290a59011e4"
|
||||
],
|
||||
"rides": [
|
||||
{
|
||||
"name": "Steel Vengeance",
|
||||
"category": "RC",
|
||||
"opening_date": "2018-05-05",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Rocky Mountain Construction",
|
||||
"description": "A hybrid roller coaster featuring multiple inversions.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1543674892-7d64d45df18b"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "205.00",
|
||||
"length_ft": "5740.00",
|
||||
"speed_mph": "74.00",
|
||||
"inversions": 4,
|
||||
"ride_time_seconds": 150
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Millennium Force",
|
||||
"category": "RC",
|
||||
"opening_date": "2000-05-13",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Intamin",
|
||||
"description": "A giga coaster with stunning views of Lake Erie.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1605559911160-a3d95d213904"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "310.00",
|
||||
"length_ft": "6595.00",
|
||||
"speed_mph": "93.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 120
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Top Thrill Dragster",
|
||||
"category": "RC",
|
||||
"opening_date": "2003-05-04",
|
||||
"status": "SBNO",
|
||||
"manufacturer": "Intamin",
|
||||
"description": "A strata coaster featuring a 420-foot top hat element.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1578912996078-305d92249aa6"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "420.00",
|
||||
"length_ft": "2800.00",
|
||||
"speed_mph": "120.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 50
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Maverick",
|
||||
"category": "RC",
|
||||
"opening_date": "2007-05-26",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Intamin",
|
||||
"description": "A launched roller coaster with multiple inversions.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1581309638082-877cb8132535"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "105.00",
|
||||
"length_ft": "4450.00",
|
||||
"speed_mph": "70.00",
|
||||
"inversions": 2,
|
||||
"ride_time_seconds": 150
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Universal's Islands of Adventure",
|
||||
"location": "Orlando, Florida",
|
||||
"country": "US",
|
||||
"opening_date": "1999-05-28",
|
||||
"status": "OPERATING",
|
||||
"description": "A theme park featuring cutting-edge technology and thrilling attractions.",
|
||||
"website": "https://www.universalorlando.com/web/en/us/theme-parks/islands-of-adventure",
|
||||
"owner": "NBCUniversal",
|
||||
"size_acres": "110.00",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1597466599360-3b9775841aec"
|
||||
],
|
||||
"rides": [
|
||||
{
|
||||
"name": "Jurassic World VelociCoaster",
|
||||
"category": "RC",
|
||||
"opening_date": "2021-06-10",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Intamin",
|
||||
"description": "A high-speed launch coaster featuring velociraptors.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1536768139911-e290a59011e4"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "155.00",
|
||||
"length_ft": "4700.00",
|
||||
"speed_mph": "70.00",
|
||||
"inversions": 4,
|
||||
"ride_time_seconds": 145
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Hagrid's Magical Creatures Motorbike Adventure",
|
||||
"category": "RC",
|
||||
"opening_date": "2019-06-13",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Intamin",
|
||||
"description": "A story coaster through the Forbidden Forest.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1513889961551-628c1e5e2ee9"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "65.00",
|
||||
"length_ft": "5053.00",
|
||||
"speed_mph": "50.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 180
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "The Amazing Adventures of Spider-Man",
|
||||
"category": "DR",
|
||||
"opening_date": "1999-05-28",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Oceaneering International",
|
||||
"description": "A 3D dark ride featuring Spider-Man.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1590144662036-33bf0ebd2c7f"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Alton Towers",
|
||||
"location": "Alton, England",
|
||||
"country": "GB",
|
||||
"opening_date": "1980-04-04",
|
||||
"status": "OPERATING",
|
||||
"description": "The UK's largest theme park, built around a historic stately home.",
|
||||
"website": "https://www.altontowers.com",
|
||||
"owner": "Merlin Entertainments",
|
||||
"size_acres": "910.00",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1506126799754-92bc47fc5d78"
|
||||
],
|
||||
"rides": [
|
||||
{
|
||||
"name": "Nemesis",
|
||||
"category": "RC",
|
||||
"opening_date": "1994-03-19",
|
||||
"status": "CLOSED",
|
||||
"manufacturer": "Bolliger & Mabillard",
|
||||
"description": "An inverted roller coaster through ravines.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1543674892-7d64d45df18b"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "43.00",
|
||||
"length_ft": "2349.00",
|
||||
"speed_mph": "50.00",
|
||||
"inversions": 4,
|
||||
"ride_time_seconds": 80
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Oblivion",
|
||||
"category": "RC",
|
||||
"opening_date": "1998-03-14",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Bolliger & Mabillard",
|
||||
"description": "The world's first vertical drop roller coaster.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1605559911160-a3d95d213904"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "65.00",
|
||||
"length_ft": "1804.00",
|
||||
"speed_mph": "68.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 100
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "Europa-Park",
|
||||
"location": "Rust, Germany",
|
||||
"country": "DE",
|
||||
"opening_date": "1975-07-12",
|
||||
"status": "OPERATING",
|
||||
"description": "Germany's largest theme park, featuring European-themed areas.",
|
||||
"website": "https://www.europapark.de",
|
||||
"owner": "Mack Rides",
|
||||
"size_acres": "235.00",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1536768139911-e290a59011e4"
|
||||
],
|
||||
"rides": [
|
||||
{
|
||||
"name": "Silver Star",
|
||||
"category": "RC",
|
||||
"opening_date": "2002-03-23",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Bolliger & Mabillard",
|
||||
"description": "A hypercoaster with stunning views.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1536768139911-e290a59011e4"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "239.00",
|
||||
"length_ft": "4003.00",
|
||||
"speed_mph": "79.00",
|
||||
"inversions": 0,
|
||||
"ride_time_seconds": 180
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Blue Fire",
|
||||
"category": "RC",
|
||||
"opening_date": "2009-04-04",
|
||||
"status": "OPERATING",
|
||||
"manufacturer": "Mack Rides",
|
||||
"description": "A launched roller coaster with multiple inversions.",
|
||||
"photos": [
|
||||
"https://images.unsplash.com/photo-1513889961551-628c1e5e2ee9"
|
||||
],
|
||||
"stats": {
|
||||
"height_ft": "125.00",
|
||||
"length_ft": "3465.00",
|
||||
"speed_mph": "62.00",
|
||||
"inversions": 4,
|
||||
"ride_time_seconds": 150
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,334 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from apps.parks.models import Park, ParkArea, ParkLocation, Company as Operator
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Seeds initial park data with major theme parks worldwide"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
# Create major theme park companies
|
||||
companies_data = [
|
||||
{
|
||||
"name": "The Walt Disney Company",
|
||||
"website": "https://www.disney.com/",
|
||||
"headquarters": "Burbank, California",
|
||||
"description": "The world's largest entertainment company and theme park operator.",
|
||||
},
|
||||
{
|
||||
"name": "Universal Parks & Resorts",
|
||||
"website": "https://www.universalparks.com/",
|
||||
"headquarters": "Orlando, Florida",
|
||||
"description": "A division of Comcast NBCUniversal, operating major theme parks worldwide.",
|
||||
},
|
||||
{
|
||||
"name": "Six Flags Entertainment Corporation",
|
||||
"website": "https://www.sixflags.com/",
|
||||
"headquarters": "Arlington, Texas",
|
||||
"description": "The world's largest regional theme park company.",
|
||||
},
|
||||
{
|
||||
"name": "Cedar Fair Entertainment Company",
|
||||
"website": "https://www.cedarfair.com/",
|
||||
"headquarters": "Sandusky, Ohio",
|
||||
"description": "One of North America's largest operators of regional amusement parks.",
|
||||
},
|
||||
{
|
||||
"name": "Herschend Family Entertainment",
|
||||
"website": "https://www.hfecorp.com/",
|
||||
"headquarters": "Atlanta, Georgia",
|
||||
"description": "The largest family-owned themed attractions corporation in the United States.",
|
||||
},
|
||||
{
|
||||
"name": "SeaWorld Parks & Entertainment",
|
||||
"website": "https://www.seaworldentertainment.com/",
|
||||
"headquarters": "Orlando, Florida",
|
||||
"description": "Theme park and entertainment company focusing on nature-based themes.",
|
||||
},
|
||||
]
|
||||
|
||||
companies = {}
|
||||
for company_data in companies_data:
|
||||
operator, created = Operator.objects.get_or_create(
|
||||
name=company_data["name"], defaults=company_data
|
||||
)
|
||||
companies[operator.name] = operator
|
||||
self.stdout.write(
|
||||
f"{'Created' if created else 'Found'} company: {operator.name}"
|
||||
)
|
||||
|
||||
# Create parks with their locations
|
||||
parks_data = [
|
||||
{
|
||||
"name": "Magic Kingdom",
|
||||
"company": "The Walt Disney Company",
|
||||
"description": "The first theme park at Walt Disney World Resort in Florida, opened in 1971.",
|
||||
"opening_date": "1971-10-01",
|
||||
"size_acres": 142,
|
||||
"location": {
|
||||
"street_address": "1180 Seven Seas Dr",
|
||||
"city": "Lake Buena Vista",
|
||||
"state": "Florida",
|
||||
"country": "United States",
|
||||
"postal_code": "32830",
|
||||
"latitude": 28.4177,
|
||||
"longitude": -81.5812,
|
||||
},
|
||||
"areas": [
|
||||
{
|
||||
"name": "Main Street, U.S.A.",
|
||||
"description": "Victorian-era themed entrance corridor",
|
||||
},
|
||||
{
|
||||
"name": "Adventureland",
|
||||
"description": "Exotic tropical places themed area",
|
||||
},
|
||||
{
|
||||
"name": "Frontierland",
|
||||
"description": "American Old West themed area",
|
||||
},
|
||||
{
|
||||
"name": "Liberty Square",
|
||||
"description": "Colonial America themed area",
|
||||
},
|
||||
{
|
||||
"name": "Fantasyland",
|
||||
"description": "Fairy tale themed area",
|
||||
},
|
||||
{
|
||||
"name": "Tomorrowland",
|
||||
"description": "Future themed area",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "Universal Studios Florida",
|
||||
"company": "Universal Parks & Resorts",
|
||||
"description": "Movie and television-based theme park in Orlando, Florida.",
|
||||
"opening_date": "1990-06-07",
|
||||
"size_acres": 108,
|
||||
"location": {
|
||||
"street_address": "6000 Universal Blvd",
|
||||
"city": "Orlando",
|
||||
"state": "Florida",
|
||||
"country": "United States",
|
||||
"postal_code": "32819",
|
||||
"latitude": 28.4749,
|
||||
"longitude": -81.4687,
|
||||
},
|
||||
"areas": [
|
||||
{
|
||||
"name": "Production Central",
|
||||
"description": "Main entrance area with movie-themed attractions",
|
||||
},
|
||||
{
|
||||
"name": "New York",
|
||||
"description": "Themed after New York City streets",
|
||||
},
|
||||
{
|
||||
"name": "San Francisco",
|
||||
"description": "Themed after San Francisco's waterfront",
|
||||
},
|
||||
{
|
||||
"name": "The Wizarding World of Harry Potter - Diagon Alley",
|
||||
"description": "Themed after the Harry Potter series",
|
||||
},
|
||||
{
|
||||
"name": "Springfield",
|
||||
"description": "Themed after The Simpsons hometown",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "Cedar Point",
|
||||
"company": "Cedar Fair Entertainment Company",
|
||||
"description": 'Known as the "Roller Coaster Capital of the World".',
|
||||
"opening_date": "1870-06-01",
|
||||
"size_acres": 364,
|
||||
"location": {
|
||||
"street_address": "1 Cedar Point Dr",
|
||||
"city": "Sandusky",
|
||||
"state": "Ohio",
|
||||
"country": "United States",
|
||||
"postal_code": "44870",
|
||||
"latitude": 41.4822,
|
||||
"longitude": -82.6835,
|
||||
},
|
||||
"areas": [
|
||||
{
|
||||
"name": "Frontiertown",
|
||||
"description": "Western-themed area with multiple roller coasters",
|
||||
},
|
||||
{
|
||||
"name": "Millennium Island",
|
||||
"description": "Home to the Millennium Force roller coaster",
|
||||
},
|
||||
{
|
||||
"name": "Cedar Point Shores",
|
||||
"description": "Waterpark area",
|
||||
},
|
||||
{
|
||||
"name": "Top Thrill Dragster",
|
||||
"description": "Area surrounding the iconic launched coaster",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "Silver Dollar City",
|
||||
"company": "Herschend Family Entertainment",
|
||||
"description": "An 1880s-themed park featuring over 40 rides and attractions.",
|
||||
"opening_date": "1960-05-01",
|
||||
"size_acres": 61,
|
||||
"location": {
|
||||
"street_address": "399 Silver Dollar City Parkway",
|
||||
"city": "Branson",
|
||||
"state": "Missouri",
|
||||
"country": "United States",
|
||||
"postal_code": "65616",
|
||||
"latitude": 36.668497,
|
||||
"longitude": -93.339074,
|
||||
},
|
||||
"areas": [
|
||||
{
|
||||
"name": "Grand Exposition",
|
||||
"description": "Home to many family rides and attractions",
|
||||
},
|
||||
{
|
||||
"name": "Wildfire",
|
||||
"description": "Named after the famous B&M coaster",
|
||||
},
|
||||
{
|
||||
"name": "Wilson's Farm",
|
||||
"description": "Farm-themed attractions and dining",
|
||||
},
|
||||
{
|
||||
"name": "Riverfront",
|
||||
"description": "Water-themed attractions area",
|
||||
},
|
||||
{
|
||||
"name": "The Valley",
|
||||
"description": "Home to Time Traveler and other major attractions",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "Six Flags Magic Mountain",
|
||||
"company": "Six Flags Entertainment Corporation",
|
||||
"description": "Known for its world-record 19 roller coasters.",
|
||||
"opening_date": "1971-05-29",
|
||||
"size_acres": 262,
|
||||
"location": {
|
||||
"street_address": "26101 Magic Mountain Pkwy",
|
||||
"city": "Valencia",
|
||||
"state": "California",
|
||||
"country": "United States",
|
||||
"postal_code": "91355",
|
||||
"latitude": 34.4253,
|
||||
"longitude": -118.5971,
|
||||
},
|
||||
"areas": [
|
||||
{
|
||||
"name": "Six Flags Plaza",
|
||||
"description": "Main entrance area",
|
||||
},
|
||||
{
|
||||
"name": "DC Universe",
|
||||
"description": "DC Comics themed area",
|
||||
},
|
||||
{
|
||||
"name": "Screampunk District",
|
||||
"description": "Steampunk themed area",
|
||||
},
|
||||
{
|
||||
"name": "The Underground",
|
||||
"description": "Urban themed area",
|
||||
},
|
||||
{
|
||||
"name": "Goliath Territory",
|
||||
"description": "Area surrounding the Goliath hypercoaster",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"name": "SeaWorld Orlando",
|
||||
"company": "SeaWorld Parks & Entertainment",
|
||||
"description": "Marine zoological park combined with thrill rides and shows.",
|
||||
"opening_date": "1973-12-15",
|
||||
"size_acres": 200,
|
||||
"location": {
|
||||
"street_address": "7007 Sea World Dr",
|
||||
"city": "Orlando",
|
||||
"state": "Florida",
|
||||
"country": "United States",
|
||||
"postal_code": "32821",
|
||||
"latitude": 28.4115,
|
||||
"longitude": -81.4617,
|
||||
},
|
||||
"areas": [
|
||||
{
|
||||
"name": "Sea Harbor",
|
||||
"description": "Main entrance and shopping area",
|
||||
},
|
||||
{
|
||||
"name": "Shark Encounter",
|
||||
"description": "Shark exhibit and themed area",
|
||||
},
|
||||
{
|
||||
"name": "Antarctica: Empire of the Penguin",
|
||||
"description": "Penguin-themed area",
|
||||
},
|
||||
{
|
||||
"name": "Manta",
|
||||
"description": "Area themed around the Manta flying roller coaster",
|
||||
},
|
||||
{
|
||||
"name": "Sesame Street Land",
|
||||
"description": "Kid-friendly area based on Sesame Street",
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
# Create parks and their areas
|
||||
for park_data in parks_data:
|
||||
company = companies[park_data["company"]]
|
||||
park, created = Park.objects.get_or_create(
|
||||
name=park_data["name"],
|
||||
defaults={
|
||||
"description": park_data["description"],
|
||||
"status": "OPERATING",
|
||||
"opening_date": park_data["opening_date"],
|
||||
"size_acres": park_data["size_acres"],
|
||||
"owner": company,
|
||||
},
|
||||
)
|
||||
self.stdout.write(f"{'Created' if created else 'Found'} park: {park.name}")
|
||||
|
||||
# Create location for park
|
||||
if created:
|
||||
loc_data = park_data["location"]
|
||||
park_location = ParkLocation.objects.create(
|
||||
park=park,
|
||||
street_address=loc_data["street_address"],
|
||||
city=loc_data["city"],
|
||||
state=loc_data["state"],
|
||||
country=loc_data["country"],
|
||||
postal_code=loc_data["postal_code"],
|
||||
)
|
||||
# Set coordinates using the helper method
|
||||
park_location.set_coordinates(
|
||||
loc_data["latitude"], loc_data["longitude"]
|
||||
)
|
||||
park_location.save()
|
||||
|
||||
# Create areas for park
|
||||
for area_data in park_data["areas"]:
|
||||
area, created = ParkArea.objects.get_or_create(
|
||||
name=area_data["name"],
|
||||
park=park,
|
||||
defaults={"description": area_data["description"]},
|
||||
)
|
||||
self.stdout.write(
|
||||
f"{'Created' if created else 'Found'} area: {area.name} in {park.name}"
|
||||
)
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("Successfully seeded initial park data"))
|
||||
File diff suppressed because it is too large
@@ -1,118 +0,0 @@
from django.core.management.base import BaseCommand
from apps.parks.models import Park, ParkLocation, Company


class Command(BaseCommand):
    help = "Test ParkLocation model functionality"

    def handle(self, *args, **options):
        self.stdout.write("🧪 Testing ParkLocation Model Functionality")
        self.stdout.write("=" * 50)

        # Create a test company (operator)
        operator, created = Company.objects.get_or_create(
            name="Test Theme Parks Inc",
            defaults={"slug": "test-theme-parks-inc", "roles": ["OPERATOR"]},
        )
        self.stdout.write(f"✅ Created operator: {operator.name}")

        # Create a test park
        park, created = Park.objects.get_or_create(
            name="Test Magic Kingdom",
            defaults={
                "slug": "test-magic-kingdom",
                "description": "A test theme park for location testing",
                "operator": operator,
            },
        )
        self.stdout.write(f"✅ Created park: {park.name}")

        # Create a park location
        location, created = ParkLocation.objects.get_or_create(
            park=park,
            defaults={
                "street_address": "1313 Disneyland Dr",
                "city": "Anaheim",
                "state": "California",
                "country": "USA",
                "postal_code": "92802",
                "highway_exit": "I-5 Exit 110B",
                "parking_notes": "Large parking structure available",
                "seasonal_notes": "Open year-round",
            },
        )
        self.stdout.write(f"✅ Created location: {location}")

        # Test coordinate setting
        self.stdout.write("\n🔍 Testing coordinate functionality:")
        location.set_coordinates(33.8121, -117.9190)  # Disneyland coordinates
        location.save()

        self.stdout.write(f"  Latitude: {location.latitude}")
        self.stdout.write(f"  Longitude: {location.longitude}")
        self.stdout.write(f"  Coordinates: {location.coordinates}")
        self.stdout.write(f"  Formatted Address: {location.formatted_address}")

        # Test Park model integration
        self.stdout.write("\n🔍 Testing Park model integration:")
        self.stdout.write(f"  Park formatted location: {park.formatted_location}")
        self.stdout.write(f"  Park coordinates: {park.coordinates}")

        # Create another location for distance testing
        operator2, created = Company.objects.get_or_create(
            name="Six Flags Entertainment",
            defaults={
                "slug": "six-flags-entertainment",
                "roles": ["OPERATOR"],
            },
        )

        park2, created = Park.objects.get_or_create(
            name="Six Flags Magic Mountain",
            defaults={
                "slug": "six-flags-magic-mountain",
                "description": "Another test theme park",
                "operator": operator2,
            },
        )

        location2, created = ParkLocation.objects.get_or_create(
            park=park2,
            defaults={
                "city": "Valencia",
                "state": "California",
                "country": "USA",
            },
        )
        location2.set_coordinates(
            34.4244, -118.5971
        )  # Six Flags Magic Mountain coordinates
        location2.save()

        # Test distance calculation
        self.stdout.write("\n🔍 Testing distance calculation:")
        distance = location.distance_to(location2)
        if distance:
            self.stdout.write(f"  Distance between parks: {distance:.2f} km")
        else:
            self.stdout.write("  ❌ Distance calculation failed")

        # Test spatial indexing
        self.stdout.write("\n🔍 Testing spatial queries:")
        try:
            from django.contrib.gis.measure import D
            from django.contrib.gis.geos import Point

            # Find parks within 100km of a point
            # Same as Disneyland
            search_point = Point(-117.9190, 33.8121, srid=4326)
            nearby_locations = ParkLocation.objects.filter(
                point__distance_lte=(search_point, D(km=100))
            )
            self.stdout.write(f"  Found {nearby_locations.count()} parks within 100km")
            for loc in nearby_locations:
                self.stdout.write(f"  - {loc.park.name} in {loc.city}, {loc.state}")
        except Exception as e:
            self.stdout.write(f"  ⚠️ Spatial queries not fully functional: {e}")

        self.stdout.write("\n✅ ParkLocation model tests completed successfully!")
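The spatial query in the try block above filters with point__distance_lte. A related GeoDjango pattern, not used in this command, is to annotate each row with its distance from a point and order by it. The sketch below assumes a PostGIS backend and the same ParkLocation.point field; on other backends the returned distance units may differ.

# Sketch, not repository code: nearest ParkLocation rows to a reference point.
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.geos import Point
from apps.parks.models import ParkLocation

origin = Point(-117.9190, 33.8121, srid=4326)  # longitude, latitude
closest = (
    ParkLocation.objects.exclude(point__isnull=True)
    .annotate(distance=Distance("point", origin))
    .order_by("distance")[:5]
)
for loc in closest:
    # On PostGIS the annotation is a Distance measure, so .km is available.
    print(loc.park.name, round(loc.distance.km, 1), "km")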
@@ -1,29 +0,0 @@
from django.core.management.base import BaseCommand
from django.db.models import Q
from apps.parks.models import Park


class Command(BaseCommand):
    help = "Update total_rides and total_roller_coasters counts for all parks"

    def handle(self, *args, **options):
        parks = Park.objects.all()
        operating_rides = Q(status="OPERATING")
        updated = 0

        for park in parks:
            # Count total operating rides
            total_rides = park.rides.filter(operating_rides).count()

            # Count total operating roller coasters
            total_coasters = park.rides.filter(operating_rides, category="RC").count()

            # Update park counts
            Park.objects.filter(id=park.id).update(
                total_rides=total_rides, total_roller_coasters=total_coasters
            )
            updated += 1

        self.stdout.write(
            self.style.SUCCESS(f"Successfully updated counts for {updated} parks")
        )
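The loop above issues two COUNT queries plus one UPDATE per park. A possible single-pass variant (a sketch under the assumption that the rides reverse relation and the total_rides / total_roller_coasters fields are as used above, not the repository's actual code) computes both counts with annotations and persists them with bulk_update:

# Sketch only: annotate once, then write the counts back in one bulk_update.
from django.db.models import Count, Q
from apps.parks.models import Park

parks = list(
    Park.objects.annotate(
        rides_total=Count("rides", filter=Q(rides__status="OPERATING"), distinct=True),
        coasters_total=Count(
            "rides",
            filter=Q(rides__status="OPERATING", rides__category="RC"),
            distinct=True,
        ),
    )
)
for park in parks:
    park.total_rides = park.rides_total
    park.total_roller_coasters = park.coasters_total
Park.objects.bulk_update(parks, ["total_rides", "total_roller_coasters"])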
@@ -1,302 +0,0 @@
"""
Custom managers and QuerySets for Parks models.
Optimized queries following Django styleguide patterns.
"""

from django.db.models import Q, Count, Avg, Max, Min, Prefetch

from apps.core.managers import (
    BaseQuerySet,
    BaseManager,
    LocationQuerySet,
    LocationManager,
    ReviewableQuerySet,
    ReviewableManager,
    StatusQuerySet,
    StatusManager,
)


class ParkQuerySet(StatusQuerySet, ReviewableQuerySet, LocationQuerySet):
    """Optimized QuerySet for Park model."""

    def with_complete_stats(self):
        """Add comprehensive park statistics."""
        return self.annotate(
            ride_count_calculated=Count("rides", distinct=True),
            coaster_count_calculated=Count(
                "rides",
                filter=Q(rides__category__in=["RC", "WC"]),
                distinct=True,
            ),
            area_count=Count("areas", distinct=True),
            review_count=Count(
                "reviews", filter=Q(reviews__is_published=True), distinct=True
            ),
            average_rating_calculated=Avg(
                "reviews__rating", filter=Q(reviews__is_published=True)
            ),
            latest_ride_opening=Max("rides__opening_date"),
            oldest_ride_opening=Min("rides__opening_date"),
        )

    def optimized_for_list(self):
        """Optimize for park list display."""
        return (
            self.select_related("operator", "property_owner")
            .prefetch_related("location")
            .with_complete_stats()
        )

    def optimized_for_detail(self):
        """Optimize for park detail display."""
        from apps.rides.models import Ride
        from .models import ParkReview

        return self.select_related("operator", "property_owner").prefetch_related(
            "location",
            "areas",
            Prefetch(
                "rides",
                queryset=Ride.objects.select_related(
                    "manufacturer", "designer", "ride_model", "park_area"
                ).order_by("name"),
            ),
            Prefetch(
                "reviews",
                queryset=ParkReview.objects.select_related("user")
                .filter(is_published=True)
                .order_by("-created_at")[:10],
            ),
            "photos",
        )

    def by_operator(self, *, operator_id: int):
        """Filter parks by operator."""
        return self.filter(operator_id=operator_id)

    def by_property_owner(self, *, owner_id: int):
        """Filter parks by property owner."""
        return self.filter(property_owner_id=owner_id)

    def with_minimum_coasters(self, *, min_coasters: int = 5):
        """Filter parks with minimum number of coasters."""
        return self.with_complete_stats().filter(
            coaster_count_calculated__gte=min_coasters
        )

    def large_parks(self, *, min_acres: float = 100.0):
        """Filter for large parks."""
        return self.filter(size_acres__gte=min_acres)

    def seasonal_parks(self):
        """Filter for parks with seasonal operation."""
        return self.exclude(operating_season__exact="")

    def for_map_display(self, *, bounds=None):
        """Optimize for map display with minimal data."""
        queryset = self.select_related("operator").prefetch_related("location")

        if bounds:
            queryset = queryset.within_bounds(
                north=bounds.north,
                south=bounds.south,
                east=bounds.east,
                west=bounds.west,
            )

        return queryset.values(
            "id",
            "name",
            "slug",
            "status",
            "location__latitude",
            "location__longitude",
            "location__city",
            "location__state",
            "location__country",
            "operator__name",
        )

    def search_autocomplete(self, *, query: str, limit: int = 10):
        """Optimized search for autocomplete."""
        return (
            self.filter(
                Q(name__icontains=query)
                | Q(location__city__icontains=query)
                | Q(location__state__icontains=query)
            )
            .select_related("operator", "location")
            .values(
                "id",
                "name",
                "slug",
                "location__city",
                "location__state",
                "operator__name",
            )[:limit]
        )


class ParkManager(StatusManager, ReviewableManager, LocationManager):
    """Custom manager for Park model."""

    def get_queryset(self):
        return ParkQuerySet(self.model, using=self._db)

    def with_complete_stats(self):
        return self.get_queryset().with_complete_stats()

    def optimized_for_list(self):
        return self.get_queryset().optimized_for_list()

    def optimized_for_detail(self):
        return self.get_queryset().optimized_for_detail()

    def by_operator(self, *, operator_id: int):
        return self.get_queryset().by_operator(operator_id=operator_id)

    def large_parks(self, *, min_acres: float = 100.0):
        return self.get_queryset().large_parks(min_acres=min_acres)

    def for_map_display(self, *, bounds=None):
        return self.get_queryset().for_map_display(bounds=bounds)


class ParkAreaQuerySet(BaseQuerySet):
    """QuerySet for ParkArea model."""

    def with_ride_counts(self):
        """Add ride count annotations."""
        return self.annotate(
            ride_count=Count("rides", distinct=True),
            coaster_count=Count(
                "rides",
                filter=Q(rides__category__in=["RC", "WC"]),
                distinct=True,
            ),
        )

    def optimized_for_list(self):
        """Optimize for area list display."""
        return self.select_related("park").with_ride_counts()

    def by_park(self, *, park_id: int):
        """Filter areas by park."""
        return self.filter(park_id=park_id)

    def with_rides(self):
        """Filter areas that have rides."""
        return self.filter(rides__isnull=False).distinct()


class ParkAreaManager(BaseManager):
    """Manager for ParkArea model."""

    def get_queryset(self):
        return ParkAreaQuerySet(self.model, using=self._db)

    def with_ride_counts(self):
        return self.get_queryset().with_ride_counts()

    def by_park(self, *, park_id: int):
        return self.get_queryset().by_park(park_id=park_id)


class ParkReviewQuerySet(ReviewableQuerySet):
    """QuerySet for ParkReview model."""

    def for_park(self, *, park_id: int):
        """Filter reviews for a specific park."""
        return self.filter(park_id=park_id)

    def by_user(self, *, user_id: int):
        """Filter reviews by user."""
        return self.filter(user_id=user_id)

    def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10):
        """Filter reviews by rating range."""
        return self.filter(rating__gte=min_rating, rating__lte=max_rating)

    def optimized_for_display(self):
        """Optimize for review display."""
        return self.select_related("user", "park", "moderated_by")

    def recent_reviews(self, *, days: int = 30):
        """Get recent reviews."""
        return self.recent(days=days)

    def moderation_required(self):
        """Filter reviews requiring moderation."""
        return self.filter(Q(is_published=False) | Q(moderated_at__isnull=True))


class ParkReviewManager(BaseManager):
    """Manager for ParkReview model."""

    def get_queryset(self):
        return ParkReviewQuerySet(self.model, using=self._db)

    def for_park(self, *, park_id: int):
        return self.get_queryset().for_park(park_id=park_id)

    def by_rating_range(self, *, min_rating: int = 1, max_rating: int = 10):
        return self.get_queryset().by_rating_range(
            min_rating=min_rating, max_rating=max_rating
        )

    def moderation_required(self):
        return self.get_queryset().moderation_required()


class CompanyQuerySet(BaseQuerySet):
    """QuerySet for Company model."""

    def operators(self):
        """Filter for companies that operate parks."""
        return self.filter(roles__contains=["OPERATOR"])

    def property_owners(self):
        """Filter for companies that own park properties."""
        return self.filter(roles__contains=["PROPERTY_OWNER"])

    def manufacturers(self):
        """Filter for ride manufacturers."""
        return self.filter(roles__contains=["MANUFACTURER"])

    def with_park_counts(self):
        """Add park count annotations."""
        return self.annotate(
            operated_parks_count=Count("operated_parks", distinct=True),
            owned_parks_count=Count("owned_parks", distinct=True),
            total_parks_involvement=Count("operated_parks", distinct=True)
            + Count("owned_parks", distinct=True),
        )

    def major_operators(self, *, min_parks: int = 5):
        """Filter for major park operators."""
        return (
            self.operators()
            .with_park_counts()
            .filter(operated_parks_count__gte=min_parks)
        )

    def optimized_for_list(self):
        """Optimize for company list display."""
        return self.with_park_counts()


class CompanyManager(BaseManager):
    """Manager for Company model."""

    def get_queryset(self):
        return CompanyQuerySet(self.model, using=self._db)

    def operators(self):
        return self.get_queryset().operators()

    def manufacturers(self):
        return self.get_queryset().manufacturers()

    def major_operators(self, *, min_parks: int = 5):
        return self.get_queryset().major_operators(min_parks=min_parks)
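For orientation, a short usage sketch of the managers above, assuming the models wire them up (for example, Park declaring objects = ParkManager()); that wiring happens outside this file, so treat the calls below as illustrative rather than guaranteed.

# Sketch only: exercising the custom manager/queryset API defined above.
from apps.parks.models import Park, Company

# List view: select_related/prefetch plus the stats annotations in one queryset.
parks = Park.objects.optimized_for_list().filter(status="OPERATING")

# QuerySet-only methods are reached through get_queryset() on the manager.
big_coaster_parks = Park.objects.get_queryset().with_minimum_coasters(min_coasters=5)

# Operators running five or more parks, using the annotated operated_parks_count.
majors = Company.objects.major_operators(min_parks=5)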
@@ -1,62 +0,0 @@
# Generated manually for enhanced filtering performance

from django.db import migrations


class Migration(migrations.Migration):
    atomic = False  # Required for CREATE INDEX CONCURRENTLY

    dependencies = [
        ("parks", "0001_initial"),  # Adjust this to the latest migration
    ]

    operations = [
        # Add indexes for commonly filtered fields
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_status_idx ON parks_park (status);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_status_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_operator_id_idx ON parks_park (operator_id);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_operator_id_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_average_rating_idx ON parks_park (average_rating);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_average_rating_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_size_acres_idx ON parks_park (size_acres);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_size_acres_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_coaster_count_idx ON parks_park (coaster_count);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_coaster_count_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_ride_count_idx ON parks_park (ride_count);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_ride_count_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_updated_at_idx ON parks_park (updated_at);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_updated_at_idx;",
        ),
        # Composite indexes for common filter combinations
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_status_rating_idx ON parks_park (status, average_rating);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_status_rating_idx;",
        ),
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_operator_status_idx ON parks_park (operator_id, status);",
            reverse_sql="DROP INDEX IF EXISTS parks_park_operator_status_idx;",
        ),
        # Index for parks with coasters (coaster_count > 0)
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_has_coasters_idx ON parks_park (coaster_count) WHERE coaster_count > 0;",
            reverse_sql="DROP INDEX IF EXISTS parks_park_has_coasters_idx;",
        ),
        # Index for big parks (ride_count >= 10)
        migrations.RunSQL(
            "CREATE INDEX CONCURRENTLY IF NOT EXISTS parks_park_big_parks_idx ON parks_park (ride_count) WHERE ride_count >= 10;",
            reverse_sql="DROP INDEX IF EXISTS parks_park_big_parks_idx;",
        ),
    ]
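Because CREATE INDEX CONCURRENTLY cannot run inside a transaction, the migration above sets atomic = False and relies on IF NOT EXISTS to stay re-runnable. A possible post-migration check (a sketch, not repository code) lists the indexes actually present on parks_park via PostgreSQL's pg_indexes view:

# Sketch only: confirm the parks_park indexes created by the migration above.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute(
        "SELECT indexname FROM pg_indexes "
        "WHERE tablename = 'parks_park' AND indexname LIKE 'parks_park_%_idx';"
    )
    for (indexname,) in cursor.fetchall():
        print(indexname)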
@@ -1,718 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-08-15 22:01
|
||||
|
||||
import django.contrib.gis.db.models.fields
|
||||
import django.contrib.postgres.fields
|
||||
import django.core.validators
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0007_auto_20250421_0444"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Company",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(max_length=255, unique=True)),
|
||||
(
|
||||
"roles",
|
||||
django.contrib.postgres.fields.ArrayField(
|
||||
base_field=models.CharField(
|
||||
choices=[
|
||||
("OPERATOR", "Park Operator"),
|
||||
("PROPERTY_OWNER", "Property Owner"),
|
||||
],
|
||||
max_length=20,
|
||||
),
|
||||
blank=True,
|
||||
default=list,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
("description", models.TextField(blank=True)),
|
||||
("website", models.URLField(blank=True)),
|
||||
(
|
||||
"founded_year",
|
||||
models.PositiveIntegerField(blank=True, null=True),
|
||||
),
|
||||
("parks_count", models.IntegerField(default=0)),
|
||||
("rides_count", models.IntegerField(default=0)),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Companies",
|
||||
"ordering": ["name"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Park",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(max_length=255, unique=True)),
|
||||
("description", models.TextField(blank=True)),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("OPERATING", "Operating"),
|
||||
("CLOSED_TEMP", "Temporarily Closed"),
|
||||
("CLOSED_PERM", "Permanently Closed"),
|
||||
("UNDER_CONSTRUCTION", "Under Construction"),
|
||||
("DEMOLISHED", "Demolished"),
|
||||
("RELOCATED", "Relocated"),
|
||||
],
|
||||
default="OPERATING",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("opening_date", models.DateField(blank=True, null=True)),
|
||||
("closing_date", models.DateField(blank=True, null=True)),
|
||||
(
|
||||
"operating_season",
|
||||
models.CharField(blank=True, max_length=255),
|
||||
),
|
||||
(
|
||||
"size_acres",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=10, null=True
|
||||
),
|
||||
),
|
||||
("website", models.URLField(blank=True)),
|
||||
(
|
||||
"average_rating",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=3, null=True
|
||||
),
|
||||
),
|
||||
("ride_count", models.IntegerField(blank=True, null=True)),
|
||||
("coaster_count", models.IntegerField(blank=True, null=True)),
|
||||
(
|
||||
"created_at",
|
||||
models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"operator",
|
||||
models.ForeignKey(
|
||||
help_text="Company that operates this park",
|
||||
limit_choices_to={"roles__contains": ["OPERATOR"]},
|
||||
on_delete=django.db.models.deletion.PROTECT,
|
||||
related_name="operated_parks",
|
||||
to="parks.company",
|
||||
),
|
||||
),
|
||||
(
|
||||
"property_owner",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Company that owns the property (if different from operator)",
|
||||
limit_choices_to={"roles__contains": ["PROPERTY_OWNER"]},
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.PROTECT,
|
||||
related_name="owned_parks",
|
||||
to="parks.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"ordering": ["name"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkArea",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("opening_date", models.DateField(blank=True, null=True)),
|
||||
("closing_date", models.DateField(blank=True, null=True)),
|
||||
(
|
||||
"park",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="areas",
|
||||
to="parks.park",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkAreaEvent",
|
||||
fields=[
|
||||
(
|
||||
"pgh_id",
|
||||
models.AutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(db_index=False, max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
("opening_date", models.DateField(blank=True, null=True)),
|
||||
("closing_date", models.DateField(blank=True, null=True)),
|
||||
(
|
||||
"park",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="parks.park",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="parks.parkarea",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkEvent",
|
||||
fields=[
|
||||
(
|
||||
"pgh_id",
|
||||
models.AutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("name", models.CharField(max_length=255)),
|
||||
("slug", models.SlugField(db_index=False, max_length=255)),
|
||||
("description", models.TextField(blank=True)),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("OPERATING", "Operating"),
|
||||
("CLOSED_TEMP", "Temporarily Closed"),
|
||||
("CLOSED_PERM", "Permanently Closed"),
|
||||
("UNDER_CONSTRUCTION", "Under Construction"),
|
||||
("DEMOLISHED", "Demolished"),
|
||||
("RELOCATED", "Relocated"),
|
||||
],
|
||||
default="OPERATING",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("opening_date", models.DateField(blank=True, null=True)),
|
||||
("closing_date", models.DateField(blank=True, null=True)),
|
||||
(
|
||||
"operating_season",
|
||||
models.CharField(blank=True, max_length=255),
|
||||
),
|
||||
(
|
||||
"size_acres",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=10, null=True
|
||||
),
|
||||
),
|
||||
("website", models.URLField(blank=True)),
|
||||
(
|
||||
"average_rating",
|
||||
models.DecimalField(
|
||||
blank=True, decimal_places=2, max_digits=3, null=True
|
||||
),
|
||||
),
|
||||
("ride_count", models.IntegerField(blank=True, null=True)),
|
||||
("coaster_count", models.IntegerField(blank=True, null=True)),
|
||||
(
|
||||
"created_at",
|
||||
models.DateTimeField(auto_now_add=True, null=True),
|
||||
),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"operator",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
help_text="Company that operates this park",
|
||||
limit_choices_to={"roles__contains": ["OPERATOR"]},
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="parks.company",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="parks.park",
|
||||
),
|
||||
),
|
||||
(
|
||||
"property_owner",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Company that owns the property (if different from operator)",
|
||||
limit_choices_to={"roles__contains": ["PROPERTY_OWNER"]},
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="parks.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkLocation",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"point",
|
||||
django.contrib.gis.db.models.fields.PointField(
|
||||
blank=True,
|
||||
help_text="Geographic coordinates (longitude, latitude)",
|
||||
null=True,
|
||||
srid=4326,
|
||||
),
|
||||
),
|
||||
(
|
||||
"street_address",
|
||||
models.CharField(blank=True, max_length=255),
|
||||
),
|
||||
("city", models.CharField(db_index=True, max_length=100)),
|
||||
("state", models.CharField(db_index=True, max_length=100)),
|
||||
("country", models.CharField(default="USA", max_length=100)),
|
||||
("postal_code", models.CharField(blank=True, max_length=20)),
|
||||
("highway_exit", models.CharField(blank=True, max_length=100)),
|
||||
("parking_notes", models.TextField(blank=True)),
|
||||
("best_arrival_time", models.TimeField(blank=True, null=True)),
|
||||
("seasonal_notes", models.TextField(blank=True)),
|
||||
("osm_id", models.BigIntegerField(blank=True, null=True)),
|
||||
(
|
||||
"osm_type",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="Type of OpenStreetMap object (node, way, or relation)",
|
||||
max_length=10,
|
||||
),
|
||||
),
|
||||
(
|
||||
"park",
|
||||
models.OneToOneField(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="location",
|
||||
to="parks.park",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Park Location",
|
||||
"verbose_name_plural": "Park Locations",
|
||||
"ordering": ["park__name"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkReview",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"rating",
|
||||
models.PositiveSmallIntegerField(
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(1),
|
||||
django.core.validators.MaxValueValidator(10),
|
||||
]
|
||||
),
|
||||
),
|
||||
("title", models.CharField(max_length=200)),
|
||||
("content", models.TextField()),
|
||||
("visit_date", models.DateField()),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("is_published", models.BooleanField(default=True)),
|
||||
("moderation_notes", models.TextField(blank=True)),
|
||||
("moderated_at", models.DateTimeField(blank=True, null=True)),
|
||||
(
|
||||
"moderated_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="moderated_park_reviews",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"park",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="reviews",
|
||||
to="parks.park",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="park_reviews",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"ordering": ["-created_at"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkReviewEvent",
|
||||
fields=[
|
||||
(
|
||||
"pgh_id",
|
||||
models.AutoField(primary_key=True, serialize=False),
|
||||
),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
(
|
||||
"rating",
|
||||
models.PositiveSmallIntegerField(
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(1),
|
||||
django.core.validators.MaxValueValidator(10),
|
||||
]
|
||||
),
|
||||
),
|
||||
("title", models.CharField(max_length=200)),
|
||||
("content", models.TextField()),
|
||||
("visit_date", models.DateField()),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
("is_published", models.BooleanField(default=True)),
|
||||
("moderation_notes", models.TextField(blank=True)),
|
||||
("moderated_at", models.DateTimeField(blank=True, null=True)),
|
||||
(
|
||||
"moderated_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"park",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="parks.park",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="parks.parkreview",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CompanyHeadquarters",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"street_address",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="Mailing address if publicly available",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"city",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
help_text="Headquarters city",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
(
|
||||
"state_province",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="State/Province/Region",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
(
|
||||
"country",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
default="USA",
|
||||
help_text="Country where headquarters is located",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
(
|
||||
"postal_code",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="ZIP or postal code",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"mailing_address",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
help_text="Complete mailing address if different from basic address",
|
||||
),
|
||||
),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"company",
|
||||
models.OneToOneField(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="headquarters",
|
||||
to="parks.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Company Headquarters",
|
||||
"verbose_name_plural": "Company Headquarters",
|
||||
"ordering": ["company__name"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["city", "country"],
|
||||
name="parks_compa_city_cf9a4e_idx",
|
||||
)
|
||||
],
|
||||
},
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "parks_parkevent" ("average_rating", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_66883",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkevent" ("average_rating", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_19f56",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkarea",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkareaevent" ("closing_date", "created_at", "description", "id", "name", "opening_date", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at") VALUES (NEW."closing_date", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."updated_at"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_13457",
                    table="parks_parkarea",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkarea",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkareaevent" ("closing_date", "created_at", "description", "id", "name", "opening_date", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at") VALUES (NEW."closing_date", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."updated_at"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_6e5aa",
                    table="parks_parkarea",
                    when="AFTER",
                ),
            ),
        ),
        migrations.AddIndex(
            model_name="parklocation",
            index=models.Index(
                fields=["city", "state"], name="parks_parkl_city_7cc873_idx"
            ),
        ),
        migrations.AlterUniqueTogether(
            name="parkreview",
            unique_together={("park", "user")},
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkreview",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkreviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rating", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_a99bc",
                    table="parks_parkreview",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkreview",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkreviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rating", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_0e40d",
                    table="parks_parkreview",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,16 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-15 22:05

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0001_initial"),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name="parkarea",
            unique_together={("park", "slug")},
        ),
    ]
@@ -1,128 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-16 17:42

import django.db.models.functions.datetime
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0002_alter_parkarea_unique_together"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.AddConstraint(
            model_name="park",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    ("closing_date__isnull", True),
                    ("opening_date__isnull", True),
                    ("closing_date__gte", models.F("opening_date")),
                    _connector="OR",
                ),
                name="park_closing_after_opening",
                violation_error_message="Closing date must be after opening date",
            ),
        ),
        migrations.AddConstraint(
            model_name="park",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    ("size_acres__isnull", True),
                    ("size_acres__gt", 0),
                    _connector="OR",
                ),
                name="park_size_positive",
                violation_error_message="Park size must be positive",
            ),
        ),
        migrations.AddConstraint(
            model_name="park",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    ("average_rating__isnull", True),
                    models.Q(("average_rating__gte", 1), ("average_rating__lte", 10)),
                    _connector="OR",
                ),
                name="park_rating_range",
                violation_error_message="Average rating must be between 1 and 10",
            ),
        ),
        migrations.AddConstraint(
            model_name="park",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    ("ride_count__isnull", True),
                    ("ride_count__gte", 0),
                    _connector="OR",
                ),
                name="park_ride_count_non_negative",
                violation_error_message="Ride count must be non-negative",
            ),
        ),
        migrations.AddConstraint(
            model_name="park",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    ("coaster_count__isnull", True),
                    ("coaster_count__gte", 0),
                    _connector="OR",
                ),
                name="park_coaster_count_non_negative",
                violation_error_message="Coaster count must be non-negative",
            ),
        ),
        migrations.AddConstraint(
            model_name="park",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    ("coaster_count__isnull", True),
                    ("ride_count__isnull", True),
                    ("coaster_count__lte", models.F("ride_count")),
                    _connector="OR",
                ),
                name="park_coaster_count_lte_ride_count",
                violation_error_message="Coaster count cannot exceed total ride count",
            ),
        ),
        migrations.AddConstraint(
            model_name="parkreview",
            constraint=models.CheckConstraint(
                condition=models.Q(("rating__gte", 1), ("rating__lte", 10)),
                name="park_review_rating_range",
                violation_error_message="Rating must be between 1 and 10",
            ),
        ),
        migrations.AddConstraint(
            model_name="parkreview",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    (
                        "visit_date__lte",
                        django.db.models.functions.datetime.Now(),
                    )
                ),
                name="park_review_visit_date_not_future",
                violation_error_message="Visit date cannot be in the future",
            ),
        ),
        migrations.AddConstraint(
            model_name="parkreview",
            constraint=models.CheckConstraint(
                condition=models.Q(
                    models.Q(
                        ("moderated_at__isnull", True),
                        ("moderated_by__isnull", True),
                    ),
                    models.Q(
                        ("moderated_at__isnull", False),
                        ("moderated_by__isnull", False),
                    ),
                    _connector="OR",
                ),
                name="park_review_moderation_consistency",
                violation_error_message="Moderated reviews must have both moderator and moderation timestamp",
            ),
        ),
    ]
@@ -1,109 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-16 17:46

import django.contrib.postgres.fields
import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0003_add_business_constraints"),
        ("pghistory", "0007_auto_20250421_0444"),
    ]

    operations = [
        migrations.CreateModel(
            name="CompanyEvent",
            fields=[
                (
                    "pgh_id",
                    models.AutoField(primary_key=True, serialize=False),
                ),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("name", models.CharField(max_length=255)),
                ("slug", models.SlugField(db_index=False, max_length=255)),
                (
                    "roles",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.CharField(
                            choices=[
                                ("OPERATOR", "Park Operator"),
                                ("PROPERTY_OWNER", "Property Owner"),
                            ],
                            max_length=20,
                        ),
                        blank=True,
                        default=list,
                        size=None,
                    ),
                ),
                ("description", models.TextField(blank=True)),
                ("website", models.URLField(blank=True)),
                (
                    "founded_year",
                    models.PositiveIntegerField(blank=True, null=True),
                ),
                ("parks_count", models.IntegerField(default=0)),
                ("rides_count", models.IntegerField(default=0)),
            ],
            options={
                "abstract": False,
            },
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="company",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_companyevent" ("created_at", "description", "founded_year", "id", "name", "parks_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_year", NEW."id", NEW."name", NEW."parks_count", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_35b57",
                    table="parks_company",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="company",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_companyevent" ("created_at", "description", "founded_year", "id", "name", "parks_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_year", NEW."id", NEW."name", NEW."parks_count", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="[AWS-SECRET-REMOVED]",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_d3286",
                    table="parks_company",
                    when="AFTER",
                ),
            ),
        ),
        migrations.AddField(
            model_name="companyevent",
            name="pgh_context",
            field=models.ForeignKey(
                db_constraint=False,
                null=True,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                to="pghistory.context",
            ),
        ),
        migrations.AddField(
            model_name="companyevent",
            name="pgh_obj",
            field=models.ForeignKey(
                db_constraint=False,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="events",
                to="parks.company",
            ),
        ),
    ]
@@ -1,12 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-21 00:20

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0001_add_filter_indexes"),
        ("parks", "0004_fix_pghistory_triggers"),
    ]

    operations = []
@@ -1,162 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-24 18:23

import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0005_merge_20250820_2020"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="company",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="company",
            name="update_update",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="update_update",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parkarea",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parkarea",
            name="update_update",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parkreview",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parkreview",
            name="update_update",
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="company",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_companyevent" ("created_at", "description", "founded_year", "id", "name", "parks_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_year", NEW."id", NEW."name", NEW."parks_count", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="0ed33eeca3344c43d8124d1f12e3acd3e6fdef02",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_35b57",
                    table="parks_company",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="company",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_companyevent" ("created_at", "description", "founded_year", "id", "name", "parks_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rides_count", "roles", "slug", "updated_at", "website") VALUES (NEW."created_at", NEW."description", NEW."founded_year", NEW."id", NEW."name", NEW."parks_count", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rides_count", NEW."roles", NEW."slug", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="ce9d8347090a033d0a9550419b80a1c4a339216c",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_d3286",
                    table="parks_company",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkevent" ("average_rating", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="0a4fc95f70ad65df16aa5eaf2939266260c49213",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_66883",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkevent" ("average_rating", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="24a57b318c082585e7c79da37677be7032600db9",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_19f56",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkarea",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkareaevent" ("closing_date", "created_at", "description", "id", "name", "opening_date", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at") VALUES (NEW."closing_date", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."updated_at"); RETURN NULL;',
                    hash="fa64ee07f872bf2214b2c1b638b028429752bac4",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_13457",
                    table="parks_parkarea",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkarea",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkareaevent" ("closing_date", "created_at", "description", "id", "name", "opening_date", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "updated_at") VALUES (NEW."closing_date", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."updated_at"); RETURN NULL;',
                    hash="59fa84527a4fd0fa51685058b6037fa22163a095",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_6e5aa",
                    table="parks_parkarea",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkreview",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkreviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rating", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;',
                    hash="fb501d2b3a0d903a03f1a1ff0ae8dd79b189791f",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_a99bc",
                    table="parks_parkreview",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkreview",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkreviewevent" ("content", "created_at", "id", "is_published", "moderated_at", "moderated_by_id", "moderation_notes", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "title", "updated_at", "user_id", "visit_date") VALUES (NEW."content", NEW."created_at", NEW."id", NEW."is_published", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rating", NEW."title", NEW."updated_at", NEW."user_id", NEW."visit_date"); RETURN NULL;',
                    hash="254ab0f9ccc0488ea313f1c50a2c35603f7ef02d",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_0e40d",
                    table="parks_parkreview",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,231 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-24 19:25

import django.contrib.gis.db.models.fields
import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0006_remove_company_insert_insert_and_more"),
        ("pghistory", "0007_auto_20250421_0444"),
    ]

    operations = [
        migrations.CreateModel(
            name="CompanyHeadquartersEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                (
                    "street_address",
                    models.CharField(
                        blank=True,
                        help_text="Mailing address if publicly available",
                        max_length=255,
                    ),
                ),
                (
                    "city",
                    models.CharField(help_text="Headquarters city", max_length=100),
                ),
                (
                    "state_province",
                    models.CharField(
                        blank=True, help_text="State/Province/Region", max_length=100
                    ),
                ),
                (
                    "country",
                    models.CharField(
                        default="USA",
                        help_text="Country where headquarters is located",
                        max_length=100,
                    ),
                ),
                (
                    "postal_code",
                    models.CharField(
                        blank=True, help_text="ZIP or postal code", max_length=20
                    ),
                ),
                (
                    "mailing_address",
                    models.TextField(
                        blank=True,
                        help_text="Complete mailing address if different from basic address",
                    ),
                ),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="ParkLocationEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                (
                    "point",
                    django.contrib.gis.db.models.fields.PointField(
                        blank=True,
                        help_text="Geographic coordinates (longitude, latitude)",
                        null=True,
                        srid=4326,
                    ),
                ),
                ("street_address", models.CharField(blank=True, max_length=255)),
                ("city", models.CharField(max_length=100)),
                ("state", models.CharField(max_length=100)),
                ("country", models.CharField(default="USA", max_length=100)),
                ("postal_code", models.CharField(blank=True, max_length=20)),
                ("highway_exit", models.CharField(blank=True, max_length=100)),
                ("parking_notes", models.TextField(blank=True)),
                ("best_arrival_time", models.TimeField(blank=True, null=True)),
                ("seasonal_notes", models.TextField(blank=True)),
                ("osm_id", models.BigIntegerField(blank=True, null=True)),
                (
                    "osm_type",
                    models.CharField(
                        blank=True,
                        help_text="Type of OpenStreetMap object (node, way, or relation)",
                        max_length=10,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="companyheadquarters",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_companyheadquartersevent" ("city", "company_id", "country", "created_at", "id", "mailing_address", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "postal_code", "state_province", "street_address", "updated_at") VALUES (NEW."city", NEW."company_id", NEW."country", NEW."created_at", NEW."id", NEW."mailing_address", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."postal_code", NEW."state_province", NEW."street_address", NEW."updated_at"); RETURN NULL;',
                    hash="acf99673091ec3717f404fdccefd6e0cb228c82e",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_72259",
                    table="parks_companyheadquarters",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="companyheadquarters",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_companyheadquartersevent" ("city", "company_id", "country", "created_at", "id", "mailing_address", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "postal_code", "state_province", "street_address", "updated_at") VALUES (NEW."city", NEW."company_id", NEW."country", NEW."created_at", NEW."id", NEW."mailing_address", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."postal_code", NEW."state_province", NEW."street_address", NEW."updated_at"); RETURN NULL;',
                    hash="bbbff3a1c9748d3ce1b2bf1b705d03ea40530c9b",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_c5392",
                    table="parks_companyheadquarters",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parklocation",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parklocationevent" ("best_arrival_time", "city", "country", "highway_exit", "id", "osm_id", "osm_type", "park_id", "parking_notes", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "point", "postal_code", "seasonal_notes", "state", "street_address") VALUES (NEW."best_arrival_time", NEW."city", NEW."country", NEW."highway_exit", NEW."id", NEW."osm_id", NEW."osm_type", NEW."park_id", NEW."parking_notes", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."point", NEW."postal_code", NEW."seasonal_notes", NEW."state", NEW."street_address"); RETURN NULL;',
                    hash="fcc717a6f7408f959ac1f6406d4ba42b674e3c55",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_f8c53",
                    table="parks_parklocation",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parklocation",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parklocationevent" ("best_arrival_time", "city", "country", "highway_exit", "id", "osm_id", "osm_type", "park_id", "parking_notes", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "point", "postal_code", "seasonal_notes", "state", "street_address") VALUES (NEW."best_arrival_time", NEW."city", NEW."country", NEW."highway_exit", NEW."id", NEW."osm_id", NEW."osm_type", NEW."park_id", NEW."parking_notes", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."point", NEW."postal_code", NEW."seasonal_notes", NEW."state", NEW."street_address"); RETURN NULL;',
                    hash="dedb3937ae968cc9f2b30309fbf72d9168039efe",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_6dd0d",
                    table="parks_parklocation",
                    when="AFTER",
                ),
            ),
        ),
        migrations.AddField(
            model_name="companyheadquartersevent",
            name="company",
            field=models.ForeignKey(
                db_constraint=False,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                related_query_name="+",
                to="parks.company",
            ),
        ),
        migrations.AddField(
            model_name="companyheadquartersevent",
            name="pgh_context",
            field=models.ForeignKey(
                db_constraint=False,
                null=True,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                to="pghistory.context",
            ),
        ),
        migrations.AddField(
            model_name="companyheadquartersevent",
            name="pgh_obj",
            field=models.ForeignKey(
                db_constraint=False,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="events",
                to="parks.companyheadquarters",
            ),
        ),
        migrations.AddField(
            model_name="parklocationevent",
            name="park",
            field=models.ForeignKey(
                db_constraint=False,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                related_query_name="+",
                to="parks.park",
            ),
        ),
        migrations.AddField(
            model_name="parklocationevent",
            name="pgh_context",
            field=models.ForeignKey(
                db_constraint=False,
                null=True,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                to="pghistory.context",
            ),
        ),
        migrations.AddField(
            model_name="parklocationevent",
            name="pgh_obj",
            field=models.ForeignKey(
                db_constraint=False,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="events",
                to="parks.parklocation",
            ),
        ),
    ]
@@ -1,188 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-26 17:39

import apps.parks.models.media
import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("parks", "0007_companyheadquartersevent_parklocationevent_and_more"),
        ("pghistory", "0007_auto_20250421_0444"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="ParkPhoto",
            fields=[
                (
                    "id",
                    models.BigAutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                (
                    "image",
                    models.ImageField(
                        max_length=255,
                        upload_to=apps.parks.models.media.park_photo_upload_path,
                    ),
                ),
                ("caption", models.CharField(blank=True, max_length=255)),
                ("alt_text", models.CharField(blank=True, max_length=255)),
                ("is_primary", models.BooleanField(default=False)),
                ("is_approved", models.BooleanField(default=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("date_taken", models.DateTimeField(blank=True, null=True)),
                (
                    "park",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="photos",
                        to="parks.park",
                    ),
                ),
                (
                    "uploaded_by",
                    models.ForeignKey(
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="uploaded_park_photos",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "ordering": ["-is_primary", "-created_at"],
            },
        ),
        migrations.CreateModel(
            name="ParkPhotoEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.BigIntegerField()),
                (
                    "image",
                    models.ImageField(
                        max_length=255,
                        upload_to=apps.parks.models.media.park_photo_upload_path,
                    ),
                ),
                ("caption", models.CharField(blank=True, max_length=255)),
                ("alt_text", models.CharField(blank=True, max_length=255)),
                ("is_primary", models.BooleanField(default=False)),
                ("is_approved", models.BooleanField(default=False)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("date_taken", models.DateTimeField(blank=True, null=True)),
                (
                    "park",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        related_query_name="+",
                        to="parks.park",
                    ),
                ),
                (
                    "pgh_context",
                    models.ForeignKey(
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        to="pghistory.context",
                    ),
                ),
                (
                    "pgh_obj",
                    models.ForeignKey(
                        db_constraint=False,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="events",
                        to="parks.parkphoto",
                    ),
                ),
                (
                    "uploaded_by",
                    models.ForeignKey(
                        db_constraint=False,
                        null=True,
                        on_delete=django.db.models.deletion.DO_NOTHING,
                        related_name="+",
                        related_query_name="+",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.AddIndex(
            model_name="parkphoto",
            index=models.Index(
                fields=["park", "is_primary"], name="parks_parkp_park_id_eda26e_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="parkphoto",
            index=models.Index(
                fields=["park", "is_approved"], name="parks_parkp_park_id_5fe576_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="parkphoto",
            index=models.Index(
                fields=["created_at"], name="parks_parkp_created_033dc3_idx"
            ),
        ),
        migrations.AddConstraint(
            model_name="parkphoto",
            constraint=models.UniqueConstraint(
                condition=models.Q(("is_primary", True)),
                fields=("park",),
                name="unique_primary_park_photo",
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkphoto",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkphotoevent" ("alt_text", "caption", "created_at", "date_taken", "id", "image", "is_approved", "is_primary", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated_at", "uploaded_by_id") VALUES (NEW."alt_text", NEW."caption", NEW."created_at", NEW."date_taken", NEW."id", NEW."image", NEW."is_approved", NEW."is_primary", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."updated_at", NEW."uploaded_by_id"); RETURN NULL;',
                    hash="eeeb8afb335eb66cb4550a0f5abfaf7280472827",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_e2033",
                    table="parks_parkphoto",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkphoto",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkphotoevent" ("alt_text", "caption", "created_at", "date_taken", "id", "image", "is_approved", "is_primary", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated_at", "uploaded_by_id") VALUES (NEW."alt_text", NEW."caption", NEW."created_at", NEW."date_taken", NEW."id", NEW."image", NEW."is_approved", NEW."is_primary", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."updated_at", NEW."uploaded_by_id"); RETURN NULL;',
                    hash="bd95069068ba9e1a78708a0a9cc73d6507fab691",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_42711",
                    table="parks_parkphoto",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,105 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-28 18:35

import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("parks", "0008_parkphoto_parkphotoevent_and_more"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="update_update",
        ),
        migrations.AddField(
            model_name="park",
            name="banner_image",
            field=models.ForeignKey(
                blank=True,
                help_text="Photo to use as banner image for this park",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="parks_using_as_banner",
                to="parks.parkphoto",
            ),
        ),
        migrations.AddField(
            model_name="park",
            name="card_image",
            field=models.ForeignKey(
                blank=True,
                help_text="Photo to use as card image for this park",
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name="parks_using_as_card",
                to="parks.parkphoto",
            ),
        ),
        migrations.AddField(
            model_name="parkevent",
            name="banner_image",
            field=models.ForeignKey(
                blank=True,
                db_constraint=False,
                help_text="Photo to use as banner image for this park",
                null=True,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                related_query_name="+",
                to="parks.parkphoto",
            ),
        ),
        migrations.AddField(
            model_name="parkevent",
            name="card_image",
            field=models.ForeignKey(
                blank=True,
                db_constraint=False,
                help_text="Photo to use as card image for this park",
                null=True,
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                related_query_name="+",
                to="parks.parkphoto",
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="291a6e8efb89a33ee43bff05f44598a7814a05f0",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_66883",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."website"); RETURN NULL;',
                    hash="a689acf5a74ebd3aa7ad333881edb99778185da2",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_19f56",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,62 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-28 22:59

import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("parks", "0010_add_banner_card_image_fields"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="update_update",
        ),
        migrations.AddField(
            model_name="park",
            name="url",
            field=models.URLField(blank=True, help_text="Frontend URL for this park"),
        ),
        migrations.AddField(
            model_name="parkevent",
            name="url",
            field=models.URLField(blank=True, help_text="Frontend URL for this park"),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
                    hash="f677e88234ebc3dc93c46d4756cb0723f5468cbe",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_66883",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
                    hash="6fc430a517628d48341e8981fa38529031c3f35b",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_19f56",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,75 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-30 21:42

import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("django_cloudflareimages_toolkit", "0001_initial"),
        ("parks", "0011_remove_park_insert_insert_remove_park_update_update_and_more"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="parkphoto",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parkphoto",
            name="update_update",
        ),
        migrations.AlterField(
            model_name="parkphoto",
            name="image",
            field=models.ForeignKey(
                help_text="Park photo stored on Cloudflare Images",
                on_delete=django.db.models.deletion.CASCADE,
                to="django_cloudflareimages_toolkit.cloudflareimage",
            ),
        ),
        migrations.AlterField(
            model_name="parkphotoevent",
            name="image",
            field=models.ForeignKey(
                db_constraint=False,
                help_text="Park photo stored on Cloudflare Images",
                on_delete=django.db.models.deletion.DO_NOTHING,
                related_name="+",
                related_query_name="+",
                to="django_cloudflareimages_toolkit.cloudflareimage",
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkphoto",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkphotoevent" ("alt_text", "caption", "created_at", "date_taken", "id", "image_id", "is_approved", "is_primary", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated_at", "uploaded_by_id") VALUES (NEW."alt_text", NEW."caption", NEW."created_at", NEW."date_taken", NEW."id", NEW."image_id", NEW."is_approved", NEW."is_primary", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."updated_at", NEW."uploaded_by_id"); RETURN NULL;',
                    hash="403652164d3e615dae5a14052a56db2851c5cf05",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_e2033",
                    table="parks_parkphoto",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="parkphoto",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkphotoevent" ("alt_text", "caption", "created_at", "date_taken", "id", "image_id", "is_approved", "is_primary", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated_at", "uploaded_by_id") VALUES (NEW."alt_text", NEW."caption", NEW."created_at", NEW."date_taken", NEW."id", NEW."image_id", NEW."is_approved", NEW."is_primary", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."updated_at", NEW."uploaded_by_id"); RETURN NULL;',
                    hash="29c60ad09c570b8c03ad6c17052a8f9874314895",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_42711",
                    table="parks_parkphoto",
                    when="AFTER",
                ),
            ),
        ),
    ]
@@ -1,153 +0,0 @@
# Generated by Django 5.2.5 on 2025-08-31 01:46

import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("parks", "0012_remove_parkphoto_insert_insert_and_more"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="park",
            name="update_update",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parklocation",
            name="insert_insert",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="parklocation",
            name="update_update",
        ),
        migrations.AddField(
            model_name="park",
            name="park_type",
            field=models.CharField(
                choices=[
                    ("THEME_PARK", "Theme Park"),
                    ("AMUSEMENT_PARK", "Amusement Park"),
                    ("WATER_PARK", "Water Park"),
                    ("FAMILY_ENTERTAINMENT_CENTER", "Family Entertainment Center"),
                    ("CARNIVAL", "Carnival"),
                    ("FAIR", "Fair"),
                    ("PIER", "Pier"),
                    ("BOARDWALK", "Boardwalk"),
                    ("SAFARI_PARK", "Safari Park"),
                    ("ZOO", "Zoo"),
                    ("OTHER", "Other"),
                ],
                db_index=True,
                default="THEME_PARK",
                help_text="Type/category of the park",
                max_length=30,
            ),
        ),
        migrations.AddField(
            model_name="parkevent",
            name="park_type",
            field=models.CharField(
                choices=[
                    ("THEME_PARK", "Theme Park"),
                    ("AMUSEMENT_PARK", "Amusement Park"),
                    ("WATER_PARK", "Water Park"),
                    ("FAMILY_ENTERTAINMENT_CENTER", "Family Entertainment Center"),
                    ("CARNIVAL", "Carnival"),
                    ("FAIR", "Fair"),
                    ("PIER", "Pier"),
                    ("BOARDWALK", "Boardwalk"),
                    ("SAFARI_PARK", "Safari Park"),
                    ("ZOO", "Zoo"),
                    ("OTHER", "Other"),
                ],
                default="THEME_PARK",
                help_text="Type/category of the park",
                max_length=30,
            ),
        ),
        migrations.AddField(
            model_name="parklocation",
            name="continent",
            field=models.CharField(
                blank=True,
                db_index=True,
                help_text="Continent where the park is located",
                max_length=50,
            ),
        ),
        migrations.AddField(
            model_name="parklocationevent",
            name="continent",
            field=models.CharField(
                blank=True,
                help_text="Continent where the park is located",
                max_length=50,
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
                    hash="406615e99a0bd58eadc2ad3023c988364c61f65a",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_66883",
                    table="parks_park",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="park",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "operating_season", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."operating_season", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
|
||||
hash="ca8b337b9b1f1a937a4dd88cde8b31231d0fdff5",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_19f56",
|
||||
table="parks_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="parklocation",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "parks_parklocationevent" ("best_arrival_time", "city", "continent", "country", "highway_exit", "id", "osm_id", "osm_type", "park_id", "parking_notes", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "point", "postal_code", "seasonal_notes", "state", "street_address") VALUES (NEW."best_arrival_time", NEW."city", NEW."continent", NEW."country", NEW."highway_exit", NEW."id", NEW."osm_id", NEW."osm_type", NEW."park_id", NEW."parking_notes", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."point", NEW."postal_code", NEW."seasonal_notes", NEW."state", NEW."street_address"); RETURN NULL;',
|
||||
hash="aecd083c917cea3170e944c73c4906a78eccd676",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_f8c53",
|
||||
table="parks_parklocation",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="parklocation",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "parks_parklocationevent" ("best_arrival_time", "city", "continent", "country", "highway_exit", "id", "osm_id", "osm_type", "park_id", "parking_notes", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "point", "postal_code", "seasonal_notes", "state", "street_address") VALUES (NEW."best_arrival_time", NEW."city", NEW."continent", NEW."country", NEW."highway_exit", NEW."id", NEW."osm_id", NEW."osm_type", NEW."park_id", NEW."parking_notes", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."point", NEW."postal_code", NEW."seasonal_notes", NEW."state", NEW."street_address"); RETURN NULL;',
|
||||
hash="a70bc26b34235fe4342009d491d80b990ee3ed7e",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_6dd0d",
|
||||
table="parks_parklocation",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,88 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-14 19:01
|
||||
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0013_remove_park_insert_insert_remove_park_update_update_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="park",
|
||||
name="insert_insert",
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="park",
|
||||
name="update_update",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="park",
|
||||
name="opening_year",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Year the park opened (computed from opening_date)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="park",
|
||||
name="search_text",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Searchable text combining name, description, location, and operator",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="opening_year",
|
||||
field=models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Year the park opened (computed from opening_date)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="search_text",
|
||||
field=models.TextField(
|
||||
blank=True,
|
||||
help_text="Searchable text combining name, description, location, and operator",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "opening_year", "operating_season", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "search_text", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."opening_year", NEW."operating_season", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."search_text", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
|
||||
hash="39ac89dc193467b8b41f06ff15903f0a3e22f6b0",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_66883",
|
||||
table="parks_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "opening_year", "operating_season", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "search_text", "size_acres", "slug", "status", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."opening_year", NEW."operating_season", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."search_text", NEW."size_acres", NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
|
||||
hash="af7925b4ef24b42c66b7795b9e0c6c8f510e597c",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_19f56",
|
||||
table="parks_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,64 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-14 19:01
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def populate_computed_fields(apps, schema_editor):
|
||||
"""Populate computed fields for existing parks using raw SQL with disabled triggers"""
|
||||
|
||||
# Temporarily disable pghistory triggers
|
||||
schema_editor.execute("ALTER TABLE parks_park DISABLE TRIGGER ALL;")
|
||||
|
||||
try:
|
||||
# Use raw SQL to update opening_year from opening_date
|
||||
schema_editor.execute("""
|
||||
UPDATE parks_park
|
||||
SET opening_year = EXTRACT(YEAR FROM opening_date)
|
||||
WHERE opening_date IS NOT NULL;
|
||||
""")
|
||||
|
||||
# Use raw SQL to populate search_text
|
||||
# Start with a simplified value built from just the name and description;
# operator names are appended by the next statement.
|
||||
schema_editor.execute("""
|
||||
UPDATE parks_park
|
||||
SET search_text = LOWER(
|
||||
COALESCE(name, '') || ' ' ||
|
||||
COALESCE(description, '')
|
||||
);
|
||||
""")
|
||||
|
||||
# Update search_text to include operator names using a join
|
||||
schema_editor.execute("""
|
||||
UPDATE parks_park
|
||||
SET search_text = LOWER(
|
||||
COALESCE(parks_park.name, '') || ' ' ||
|
||||
COALESCE(parks_park.description, '') || ' ' ||
|
||||
COALESCE(parks_company.name, '')
|
||||
)
|
||||
FROM parks_company
|
||||
WHERE parks_park.operator_id = parks_company.id;
|
||||
""")
|
||||
|
||||
finally:
|
||||
# Re-enable pghistory triggers
|
||||
schema_editor.execute("ALTER TABLE parks_park ENABLE TRIGGER ALL;")
|
||||
|
||||
|
||||
def reverse_populate_computed_fields(apps, schema_editor):
|
||||
"""Clear computed fields (reverse operation)"""
|
||||
Park = apps.get_model('parks', 'Park')
|
||||
Park.objects.update(opening_year=None, search_text='')
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0014_add_hybrid_filtering_fields"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(
|
||||
populate_computed_fields,
|
||||
reverse_populate_computed_fields,
|
||||
),
|
||||
]
|
||||
@@ -1,85 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-14 19:12
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0015_populate_hybrid_filtering_fields"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# Composite indexes for common filter combinations
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_status_park_type_idx ON parks_park (status, park_type);",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_status_park_type_idx;"
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_opening_year_status_idx ON parks_park (opening_year, status) WHERE opening_year IS NOT NULL;",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_opening_year_status_idx;"
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_size_rating_idx ON parks_park (size_acres, average_rating) WHERE size_acres IS NOT NULL AND average_rating IS NOT NULL;",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_size_rating_idx;"
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_ride_coaster_count_idx ON parks_park (ride_count, coaster_count) WHERE ride_count IS NOT NULL AND coaster_count IS NOT NULL;",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_ride_coaster_count_idx;"
|
||||
),
|
||||
|
||||
# Full-text search index for search_text field
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_search_text_gin_idx ON parks_park USING gin(to_tsvector('english', search_text));",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_search_text_gin_idx;"
|
||||
),
|
||||
|
||||
# Trigram index for fuzzy search on search_text
|
||||
migrations.RunSQL(
|
||||
"CREATE EXTENSION IF NOT EXISTS pg_trgm;",
|
||||
reverse_sql="-- Cannot drop extension as it might be used elsewhere"
|
||||
),
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_search_text_trgm_idx ON parks_park USING gin(search_text gin_trgm_ops);",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_search_text_trgm_idx;"
|
||||
),
|
||||
|
||||
# Indexes for location-based filtering (assuming location relationship exists)
|
||||
migrations.RunSQL(
|
||||
"""
|
||||
CREATE INDEX IF NOT EXISTS parks_parklocation_country_state_idx
|
||||
ON parks_parklocation (country, state)
|
||||
WHERE country IS NOT NULL AND state IS NOT NULL;
|
||||
""",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_parklocation_country_state_idx;"
|
||||
),
|
||||
|
||||
# Index for operator-based filtering
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_operator_status_idx ON parks_park (operator_id, status);",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_operator_status_idx;"
|
||||
),
|
||||
|
||||
# Partial indexes for common status filters
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_operating_parks_idx ON parks_park (name, opening_year) WHERE status IN ('OPERATING', 'CLOSED_TEMP');",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_operating_parks_idx;"
|
||||
),
|
||||
|
||||
# Functional index on LOWER(name) for case-insensitive filtering and ordering
|
||||
migrations.RunSQL(
|
||||
"CREATE INDEX IF NOT EXISTS parks_park_name_lower_idx ON parks_park (LOWER(name));",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_name_lower_idx;"
|
||||
),
|
||||
|
||||
# Covering index for common query patterns
|
||||
migrations.RunSQL(
|
||||
"""
|
||||
CREATE INDEX IF NOT EXISTS parks_park_hybrid_covering_idx
|
||||
ON parks_park (status, park_type, opening_year)
|
||||
INCLUDE (name, slug, size_acres, average_rating, ride_count, coaster_count, operator_id)
|
||||
WHERE status IN ('OPERATING', 'CLOSED_TEMP');
|
||||
""",
|
||||
reverse_sql="DROP INDEX IF EXISTS parks_park_hybrid_covering_idx;"
|
||||
),
|
||||
]
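# Illustrative ORM queries these indexes are intended to serve (assumed usage,
# not part of the migration itself):
#
#     Park.objects.filter(status="OPERATING", park_type="THEME_PARK")  # composite index
#     Park.objects.filter(search_text__trigram_similar="cedar pont")   # trigram index
#
#     from django.contrib.postgres.search import SearchVector
#     Park.objects.annotate(
#         search=SearchVector("search_text", config="english")
#     ).filter(search="roller coaster")                                 # full-text GIN index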
|
||||
@@ -1,73 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-15 00:50
|
||||
|
||||
import django.utils.timezone
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0016_add_hybrid_filtering_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="park",
|
||||
name="insert_insert",
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="park",
|
||||
name="update_update",
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="park",
|
||||
name="timezone",
|
||||
field=models.CharField(
|
||||
default=django.utils.timezone.now,
|
||||
help_text="Timezone identifier for park operations (e.g., 'America/New_York')",
|
||||
max_length=50,
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="timezone",
|
||||
field=models.CharField(
|
||||
default=django.utils.timezone.now,
|
||||
help_text="Timezone identifier for park operations (e.g., 'America/New_York')",
|
||||
max_length=50,
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "opening_year", "operating_season", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "search_text", "size_acres", "slug", "status", "timezone", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."opening_year", NEW."operating_season", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."search_text", NEW."size_acres", NEW."slug", NEW."status", NEW."timezone", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
|
||||
hash="9da686bd8a1881fe7a3fdfebc14411680fe47527",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_66883",
|
||||
table="parks_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "parks_parkevent" ("average_rating", "banner_image_id", "card_image_id", "closing_date", "coaster_count", "created_at", "description", "id", "name", "opening_date", "opening_year", "operating_season", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "property_owner_id", "ride_count", "search_text", "size_acres", "slug", "status", "timezone", "updated_at", "url", "website") VALUES (NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date", NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date", NEW."opening_year", NEW."operating_season", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."property_owner_id", NEW."ride_count", NEW."search_text", NEW."size_acres", NEW."slug", NEW."status", NEW."timezone", NEW."updated_at", NEW."url", NEW."website"); RETURN NULL;',
|
||||
hash="787e3176b96b506020f056ee1122d90d25e4cb0d",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_19f56",
|
||||
table="parks_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,12 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-15 01:03
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0017_add_timezone_to_pghistory_triggers"),
|
||||
]
|
||||
|
||||
operations = []
|
||||
@@ -1,52 +0,0 @@
|
||||
# Generated manually to fix pghistory timezone issue
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0018_auto_20250914_2103"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
-- Drop the existing trigger function
|
||||
DROP FUNCTION IF EXISTS pgtrigger_insert_insert_66883() CASCADE;
|
||||
|
||||
-- Recreate the trigger function with timezone field
|
||||
CREATE OR REPLACE FUNCTION pgtrigger_insert_insert_66883()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO "parks_parkevent" (
|
||||
"average_rating", "banner_image_id", "card_image_id", "closing_date",
|
||||
"coaster_count", "created_at", "description", "id", "name", "opening_date",
|
||||
"opening_year", "operating_season", "operator_id", "park_type",
|
||||
"pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id",
|
||||
"property_owner_id", "ride_count", "search_text", "size_acres",
|
||||
"slug", "status", "updated_at", "url", "website", "timezone"
|
||||
) VALUES (
|
||||
NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date",
|
||||
NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date",
|
||||
NEW."opening_year", NEW."operating_season", NEW."operator_id", NEW."park_type",
|
||||
_pgh_attach_context(), NOW(), 'insert', NEW."id",
|
||||
NEW."property_owner_id", NEW."ride_count", NEW."search_text", NEW."size_acres",
|
||||
NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website", NEW."timezone"
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Recreate the trigger
|
||||
CREATE TRIGGER pgtrigger_insert_insert_66883
|
||||
AFTER INSERT ON parks_park
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION pgtrigger_insert_insert_66883();
|
||||
""",
|
||||
reverse_sql="""
|
||||
-- This is irreversible, but we can drop and recreate without timezone
|
||||
DROP FUNCTION IF EXISTS pgtrigger_insert_insert_66883() CASCADE;
|
||||
"""
|
||||
),
|
||||
]
|
||||
@@ -1,52 +0,0 @@
|
||||
# Generated manually to fix pghistory UPDATE timezone issue
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0019_fix_pghistory_timezone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunSQL(
|
||||
sql="""
|
||||
-- Drop the existing UPDATE trigger function
|
||||
DROP FUNCTION IF EXISTS pgtrigger_update_update_19f56() CASCADE;
|
||||
|
||||
-- Recreate the UPDATE trigger function with timezone field
|
||||
CREATE OR REPLACE FUNCTION pgtrigger_update_update_19f56()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
INSERT INTO "parks_parkevent" (
|
||||
"average_rating", "banner_image_id", "card_image_id", "closing_date",
|
||||
"coaster_count", "created_at", "description", "id", "name", "opening_date",
|
||||
"opening_year", "operating_season", "operator_id", "park_type",
|
||||
"pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id",
|
||||
"property_owner_id", "ride_count", "search_text", "size_acres",
|
||||
"slug", "status", "updated_at", "url", "website", "timezone"
|
||||
) VALUES (
|
||||
NEW."average_rating", NEW."banner_image_id", NEW."card_image_id", NEW."closing_date",
|
||||
NEW."coaster_count", NEW."created_at", NEW."description", NEW."id", NEW."name", NEW."opening_date",
|
||||
NEW."opening_year", NEW."operating_season", NEW."operator_id", NEW."park_type",
|
||||
_pgh_attach_context(), NOW(), 'update', NEW."id",
|
||||
NEW."property_owner_id", NEW."ride_count", NEW."search_text", NEW."size_acres",
|
||||
NEW."slug", NEW."status", NEW."updated_at", NEW."url", NEW."website", NEW."timezone"
|
||||
);
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Recreate the UPDATE trigger
|
||||
CREATE TRIGGER pgtrigger_update_update_19f56
|
||||
AFTER UPDATE ON parks_park
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION pgtrigger_update_update_19f56();
|
||||
""",
|
||||
reverse_sql="""
|
||||
-- This is irreversible, but we can drop and recreate without timezone
|
||||
DROP FUNCTION IF EXISTS pgtrigger_update_update_19f56() CASCADE;
|
||||
"""
|
||||
),
|
||||
]
|
||||
@@ -1,103 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-15 17:35
|
||||
|
||||
import apps.core.choices.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0020_fix_pghistory_update_timezone"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="park_type",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="types",
|
||||
choices=[
|
||||
("THEME_PARK", "Theme Park"),
|
||||
("AMUSEMENT_PARK", "Amusement Park"),
|
||||
("WATER_PARK", "Water Park"),
|
||||
("FAMILY_ENTERTAINMENT_CENTER", "Family Entertainment Center"),
|
||||
("CARNIVAL", "Carnival"),
|
||||
("FAIR", "Fair"),
|
||||
("PIER", "Pier"),
|
||||
("BOARDWALK", "Boardwalk"),
|
||||
("SAFARI_PARK", "Safari Park"),
|
||||
("ZOO", "Zoo"),
|
||||
("OTHER", "Other"),
|
||||
],
|
||||
db_index=True,
|
||||
default="THEME_PARK",
|
||||
domain="parks",
|
||||
help_text="Type/category of the park",
|
||||
max_length=30,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="status",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="statuses",
|
||||
choices=[
|
||||
("OPERATING", "Operating"),
|
||||
("CLOSED_TEMP", "Temporarily Closed"),
|
||||
("CLOSED_PERM", "Permanently Closed"),
|
||||
("UNDER_CONSTRUCTION", "Under Construction"),
|
||||
("DEMOLISHED", "Demolished"),
|
||||
("RELOCATED", "Relocated"),
|
||||
],
|
||||
default="OPERATING",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="park_type",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="types",
|
||||
choices=[
|
||||
("THEME_PARK", "Theme Park"),
|
||||
("AMUSEMENT_PARK", "Amusement Park"),
|
||||
("WATER_PARK", "Water Park"),
|
||||
("FAMILY_ENTERTAINMENT_CENTER", "Family Entertainment Center"),
|
||||
("CARNIVAL", "Carnival"),
|
||||
("FAIR", "Fair"),
|
||||
("PIER", "Pier"),
|
||||
("BOARDWALK", "Boardwalk"),
|
||||
("SAFARI_PARK", "Safari Park"),
|
||||
("ZOO", "Zoo"),
|
||||
("OTHER", "Other"),
|
||||
],
|
||||
default="THEME_PARK",
|
||||
domain="parks",
|
||||
help_text="Type/category of the park",
|
||||
max_length=30,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="parkevent",
|
||||
name="status",
|
||||
field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="statuses",
|
||||
choices=[
|
||||
("OPERATING", "Operating"),
|
||||
("CLOSED_TEMP", "Temporarily Closed"),
|
||||
("CLOSED_PERM", "Permanently Closed"),
|
||||
("UNDER_CONSTRUCTION", "Under Construction"),
|
||||
("DEMOLISHED", "Demolished"),
|
||||
("RELOCATED", "Relocated"),
|
||||
],
|
||||
default="OPERATING",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,53 +0,0 @@
|
||||
# Generated by Django 5.2.5 on 2025-09-15 18:07
|
||||
|
||||
import apps.core.choices.fields
|
||||
import django.contrib.postgres.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("parks", "0021_alter_park_park_type_alter_park_status_and_more"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="company",
|
||||
name="roles",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="company_roles",
|
||||
choices=[
|
||||
("OPERATOR", "Park Operator"),
|
||||
("PROPERTY_OWNER", "Property Owner"),
|
||||
],
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
),
|
||||
blank=True,
|
||||
default=list,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="companyevent",
|
||||
name="roles",
|
||||
field=django.contrib.postgres.fields.ArrayField(
|
||||
base_field=apps.core.choices.fields.RichChoiceField(
|
||||
allow_deprecated=False,
|
||||
choice_group="company_roles",
|
||||
choices=[
|
||||
("OPERATOR", "Park Operator"),
|
||||
("PROPERTY_OWNER", "Property Owner"),
|
||||
],
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
),
|
||||
blank=True,
|
||||
default=list,
|
||||
size=None,
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,36 +0,0 @@
|
||||
"""
|
||||
Parks app models with clean import interface.
|
||||
|
||||
This module provides a clean import interface for all parks-related models,
|
||||
enabling imports like: from parks.models import Park, Operator
|
||||
|
||||
The Company model is also exported as Operator to make its role as a park
operator explicit; the original Company name remains exported for backward
compatibility.
|
||||
"""
|
||||
|
||||
from .parks import Park
|
||||
from .areas import ParkArea
|
||||
from .location import ParkLocation
|
||||
from .reviews import ParkReview
|
||||
from .companies import Company, CompanyHeadquarters
|
||||
from .media import ParkPhoto
|
||||
|
||||
# Import choices to trigger registration
|
||||
from ..choices import *
|
||||
|
||||
# Alias Company as Operator for clarity
|
||||
Operator = Company
|
||||
|
||||
__all__ = [
|
||||
# Primary models
|
||||
"Park",
|
||||
"ParkArea",
|
||||
"ParkLocation",
|
||||
"ParkReview",
|
||||
"ParkPhoto",
|
||||
# Company models with clear naming
|
||||
"Operator",
|
||||
"CompanyHeadquarters",
|
||||
# Backward compatibility
"Company",  # Original model name; Operator above is an alias of it
|
||||
]
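# Illustrative usage of this import interface (import path assumed from the app
# layout; adjust to how the apps package is installed):
#
#     from apps.parks.models import Park, Operator
#
#     park = Park.objects.select_related("operator").get(slug="some-park")
#     operator: Operator = park.operator  # Operator is the alias of Company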
|
||||
@@ -1,32 +0,0 @@
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
import pghistory
|
||||
|
||||
from apps.core.history import TrackedModel
|
||||
from .parks import Park
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class ParkArea(TrackedModel):
|
||||
# Import managers
|
||||
from ..managers import ParkAreaManager
|
||||
|
||||
objects = ParkAreaManager()
|
||||
id: int # Type hint for Django's automatic id field
|
||||
park = models.ForeignKey(Park, on_delete=models.CASCADE, related_name="areas")
|
||||
name = models.CharField(max_length=255)
|
||||
slug = models.SlugField(max_length=255)
|
||||
description = models.TextField(blank=True)
|
||||
opening_date = models.DateField(null=True, blank=True)
|
||||
closing_date = models.DateField(null=True, blank=True)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
unique_together = ("park", "slug")
|
||||
@@ -1,127 +0,0 @@
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db import models
|
||||
from django.utils.text import slugify
|
||||
from apps.core.models import TrackedModel
|
||||
from apps.core.choices.fields import RichChoiceField
|
||||
import pghistory
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class Company(TrackedModel):
|
||||
# Import managers
|
||||
from ..managers import CompanyManager
|
||||
|
||||
objects = CompanyManager()
|
||||
|
||||
name = models.CharField(max_length=255)
|
||||
slug = models.SlugField(max_length=255, unique=True)
|
||||
roles = ArrayField(
|
||||
RichChoiceField(choice_group="company_roles", domain="parks", max_length=20),
|
||||
default=list,
|
||||
blank=True,
|
||||
)
|
||||
description = models.TextField(blank=True)
|
||||
website = models.URLField(blank=True)
|
||||
|
||||
# Operator-specific fields
|
||||
founded_year = models.PositiveIntegerField(blank=True, null=True)
|
||||
parks_count = models.IntegerField(default=0)
|
||||
rides_count = models.IntegerField(default=0)
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.slug:
|
||||
self.slug = slugify(self.name)
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
app_label = "parks"
|
||||
ordering = ["name"]
|
||||
verbose_name_plural = "Companies"
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class CompanyHeadquarters(models.Model):
|
||||
"""
|
||||
Simple address storage for company headquarters without coordinate tracking.
|
||||
Focus on human-readable location information for display purposes.
|
||||
"""
|
||||
|
||||
# Relationships
|
||||
company = models.OneToOneField(
|
||||
"Company", on_delete=models.CASCADE, related_name="headquarters"
|
||||
)
|
||||
|
||||
# Address Fields (No coordinates needed)
|
||||
street_address = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
help_text="Mailing address if publicly available",
|
||||
)
|
||||
city = models.CharField(
|
||||
max_length=100, db_index=True, help_text="Headquarters city"
|
||||
)
|
||||
state_province = models.CharField(
|
||||
max_length=100,
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="State/Province/Region",
|
||||
)
|
||||
country = models.CharField(
|
||||
max_length=100,
|
||||
default="USA",
|
||||
db_index=True,
|
||||
help_text="Country where headquarters is located",
|
||||
)
|
||||
postal_code = models.CharField(
|
||||
max_length=20, blank=True, help_text="ZIP or postal code"
|
||||
)
|
||||
|
||||
# Optional mailing address if different or more complete
|
||||
mailing_address = models.TextField(
|
||||
blank=True,
|
||||
help_text="Complete mailing address if different from basic address",
|
||||
)
|
||||
|
||||
# Metadata
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
@property
|
||||
def formatted_location(self):
|
||||
"""Returns a formatted address string for display."""
|
||||
components = []
|
||||
if self.street_address:
|
||||
components.append(self.street_address)
|
||||
if self.city:
|
||||
components.append(self.city)
|
||||
if self.state_province:
|
||||
components.append(self.state_province)
|
||||
if self.postal_code:
|
||||
components.append(self.postal_code)
|
||||
if self.country and self.country != "USA":
|
||||
components.append(self.country)
|
||||
return ", ".join(components) if components else f"{self.city}, {self.country}"
|
||||
|
||||
@property
|
||||
def location_display(self):
|
||||
"""Simple city, state/country display for compact views."""
|
||||
parts = [self.city]
|
||||
if self.state_province:
|
||||
parts.append(self.state_province)
|
||||
elif self.country != "USA":
|
||||
parts.append(self.country)
|
||||
return ", ".join(parts) if parts else "Unknown Location"
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.company.name} Headquarters - {self.location_display}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = "Company Headquarters"
|
||||
verbose_name_plural = "Company Headquarters"
|
||||
ordering = ["company__name"]
|
||||
indexes = [
|
||||
models.Index(fields=["city", "country"]),
|
||||
]
|
||||
@@ -1,120 +0,0 @@
|
||||
from django.contrib.gis.db import models
|
||||
from django.contrib.gis.geos import Point
|
||||
import pghistory
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class ParkLocation(models.Model):
|
||||
"""
|
||||
Represents the geographic location and address of a park, with PostGIS support.
|
||||
"""
|
||||
|
||||
park = models.OneToOneField(
|
||||
"parks.Park", on_delete=models.CASCADE, related_name="location"
|
||||
)
|
||||
|
||||
# Spatial Data
|
||||
point = models.PointField(
|
||||
srid=4326,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Geographic coordinates (longitude, latitude)",
|
||||
)
|
||||
|
||||
# Address Fields
|
||||
street_address = models.CharField(max_length=255, blank=True)
|
||||
city = models.CharField(max_length=100, db_index=True)
|
||||
state = models.CharField(max_length=100, db_index=True)
|
||||
country = models.CharField(max_length=100, default="USA")
|
||||
continent = models.CharField(
|
||||
max_length=50,
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Continent where the park is located"
|
||||
)
|
||||
postal_code = models.CharField(max_length=20, blank=True)
|
||||
|
||||
# Road Trip Metadata
|
||||
highway_exit = models.CharField(max_length=100, blank=True)
|
||||
parking_notes = models.TextField(blank=True)
|
||||
best_arrival_time = models.TimeField(null=True, blank=True)
|
||||
seasonal_notes = models.TextField(blank=True)
|
||||
|
||||
# OSM Integration
|
||||
osm_id = models.BigIntegerField(null=True, blank=True)
|
||||
osm_type = models.CharField(
|
||||
max_length=10,
|
||||
blank=True,
|
||||
help_text="Type of OpenStreetMap object (node, way, or relation)",
|
||||
)
|
||||
|
||||
@property
|
||||
def latitude(self):
|
||||
"""Return latitude from point field."""
|
||||
if self.point:
|
||||
return self.point.y
|
||||
return None
|
||||
|
||||
@property
|
||||
def longitude(self):
|
||||
"""Return longitude from point field."""
|
||||
if self.point:
|
||||
return self.point.x
|
||||
return None
|
||||
|
||||
@property
|
||||
def coordinates(self):
|
||||
"""Return (latitude, longitude) tuple."""
|
||||
if self.point:
|
||||
return (self.latitude, self.longitude)
|
||||
return (None, None)
|
||||
|
||||
@property
|
||||
def formatted_address(self):
|
||||
"""Return a nicely formatted address string."""
|
||||
address_parts = [
|
||||
self.street_address,
|
||||
self.city,
|
||||
self.state,
|
||||
self.postal_code,
|
||||
self.country,
|
||||
]
|
||||
return ", ".join(part for part in address_parts if part)
|
||||
|
||||
def set_coordinates(self, latitude, longitude):
|
||||
"""
|
||||
Set the location's point from latitude and longitude coordinates.
|
||||
Validates coordinate ranges.
|
||||
"""
|
||||
if latitude is None or longitude is None:
|
||||
self.point = None
|
||||
return
|
||||
|
||||
if not -90 <= latitude <= 90:
|
||||
raise ValueError("Latitude must be between -90 and 90.")
|
||||
if not -180 <= longitude <= 180:
|
||||
raise ValueError("Longitude must be between -180 and 180.")
|
||||
|
||||
self.point = Point(longitude, latitude, srid=4326)
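        # Illustrative call (hypothetical values, not part of the model): note that
        # GEOS builds Point as (x, y) == (longitude, latitude), the reverse of the
        # argument order used by this helper.
        #
        #     location.set_coordinates(latitude=41.48, longitude=-82.68)
        #     location.point.x  # -82.68 (longitude)
        #     location.point.y  # 41.48  (latitude)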
|
||||
|
||||
    def distance_to(self, other_location):
        """
        Calculate the great-circle distance to another ParkLocation instance.
        Returns distance in kilometers, or None if either location has no point.
        """
        if not self.point or not other_location.point:
            return None
        # GEOS .distance() on SRID 4326 geometries returns degrees, not meters,
        # so compute the haversine (great-circle) distance from lat/lon instead.
        import math

        lat1, lon1 = math.radians(self.latitude), math.radians(self.longitude)
        lat2 = math.radians(other_location.latitude)
        lon2 = math.radians(other_location.longitude)
        a = (
            math.sin((lat2 - lat1) / 2) ** 2
            + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2
        )
        earth_radius_km = 6371.0
        return 2 * earth_radius_km * math.asin(math.sqrt(a))
|
||||
|
||||
def __str__(self):
|
||||
return f"Location for {self.park.name}"
|
||||
|
||||
class Meta:
|
||||
verbose_name = "Park Location"
|
||||
verbose_name_plural = "Park Locations"
|
||||
ordering = ["park__name"]
|
||||
indexes = [
|
||||
models.Index(fields=["city", "state"]),
|
||||
]
|
||||
@@ -1,120 +0,0 @@
|
||||
"""
|
||||
Park-specific media models for ThrillWiki.
|
||||
|
||||
This module contains media models specific to parks domain.
|
||||
"""
|
||||
|
||||
from typing import Any, List, Optional, cast
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
from apps.core.history import TrackedModel
|
||||
from apps.core.services.media_service import MediaService
|
||||
import pghistory
|
||||
|
||||
|
||||
def park_photo_upload_path(instance: models.Model, filename: str) -> str:
|
||||
"""Generate upload path for park photos."""
|
||||
photo = cast("ParkPhoto", instance)
|
||||
park = photo.park
|
||||
|
||||
if park is None:
|
||||
raise ValueError("Park cannot be None")
|
||||
|
||||
return MediaService.generate_upload_path(
|
||||
domain="park", identifier=park.slug, filename=filename
|
||||
)
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class ParkPhoto(TrackedModel):
|
||||
"""Photo model specific to parks."""
|
||||
|
||||
park = models.ForeignKey(
|
||||
"parks.Park", on_delete=models.CASCADE, related_name="photos"
|
||||
)
|
||||
|
||||
image = models.ForeignKey(
|
||||
'django_cloudflareimages_toolkit.CloudflareImage',
|
||||
on_delete=models.CASCADE,
|
||||
help_text="Park photo stored on Cloudflare Images"
|
||||
)
|
||||
|
||||
caption = models.CharField(max_length=255, blank=True)
|
||||
alt_text = models.CharField(max_length=255, blank=True)
|
||||
is_primary = models.BooleanField(default=False)
|
||||
is_approved = models.BooleanField(default=False)
|
||||
|
||||
# Metadata
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
date_taken = models.DateTimeField(null=True, blank=True)
|
||||
|
||||
# User who uploaded the photo
|
||||
uploaded_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
related_name="uploaded_park_photos",
|
||||
)
|
||||
|
||||
class Meta(TrackedModel.Meta):
|
||||
app_label = "parks"
|
||||
ordering = ["-is_primary", "-created_at"]
|
||||
indexes = [
|
||||
models.Index(fields=["park", "is_primary"]),
|
||||
models.Index(fields=["park", "is_approved"]),
|
||||
models.Index(fields=["created_at"]),
|
||||
]
|
||||
constraints = [
|
||||
# Only one primary photo per park
|
||||
models.UniqueConstraint(
|
||||
fields=["park"],
|
||||
condition=models.Q(is_primary=True),
|
||||
name="unique_primary_park_photo",
|
||||
)
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"Photo of {self.park.name} - {self.caption or 'No caption'}"
|
||||
|
||||
def save(self, *args: Any, **kwargs: Any) -> None:
|
||||
# Extract EXIF date if this is a new photo
|
||||
if not self.pk and not self.date_taken and self.image:
|
||||
self.date_taken = MediaService.extract_exif_date(self.image)
|
||||
|
||||
# Set default caption if not provided
|
||||
if not self.caption and self.uploaded_by:
|
||||
self.caption = MediaService.generate_default_caption(
|
||||
self.uploaded_by.username
|
||||
)
|
||||
|
||||
# If this is marked as primary, unmark other primary photos for this park
|
||||
if self.is_primary:
|
||||
ParkPhoto.objects.filter(
|
||||
park=self.park,
|
||||
is_primary=True,
|
||||
).exclude(
|
||||
pk=self.pk
|
||||
).update(is_primary=False)
|
||||
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@property
|
||||
def file_size(self) -> Optional[int]:
|
||||
"""Get file size in bytes."""
|
||||
try:
|
||||
return self.image.size
|
||||
except (ValueError, OSError):
|
||||
return None
|
||||
|
||||
@property
|
||||
def dimensions(self) -> Optional[List[int]]:
|
||||
"""Get image dimensions as [width, height]."""
|
||||
try:
|
||||
return [self.image.width, self.image.height]
|
||||
except (ValueError, OSError):
|
||||
return None
|
||||
|
||||
def get_absolute_url(self) -> str:
|
||||
"""Get absolute URL for this photo."""
|
||||
return f"/parks/{self.park.slug}/photos/{self.pk}/"
|
||||
@@ -1,362 +0,0 @@
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils.text import slugify
|
||||
from django.core.exceptions import ValidationError
|
||||
from config.django import base as settings
|
||||
from typing import Optional, Any, TYPE_CHECKING, List
|
||||
import pghistory
|
||||
from apps.core.history import TrackedModel
|
||||
from apps.core.choices import RichChoiceField
|
||||
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from apps.rides.models import Ride
|
||||
from . import ParkArea
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class Park(TrackedModel):
|
||||
# Import managers
|
||||
from ..managers import ParkManager
|
||||
|
||||
objects = ParkManager()
|
||||
id: int # Type hint for Django's automatic id field
|
||||
|
||||
name = models.CharField(max_length=255)
|
||||
slug = models.SlugField(max_length=255, unique=True)
|
||||
description = models.TextField(blank=True)
|
||||
status = RichChoiceField(
|
||||
choice_group="statuses",
|
||||
domain="parks",
|
||||
max_length=20,
|
||||
default="OPERATING",
|
||||
)
|
||||
|
||||
park_type = RichChoiceField(
|
||||
choice_group="types",
|
||||
domain="parks",
|
||||
max_length=30,
|
||||
default="THEME_PARK",
|
||||
db_index=True,
|
||||
help_text="Type/category of the park"
|
||||
)
|
||||
|
||||
# Location relationship - reverse relation from ParkLocation
|
||||
# location will be available via the 'location' related_name on
|
||||
# ParkLocation
|
||||
|
||||
# Details
|
||||
opening_date = models.DateField(null=True, blank=True)
|
||||
closing_date = models.DateField(null=True, blank=True)
|
||||
operating_season = models.CharField(max_length=255, blank=True)
|
||||
size_acres = models.DecimalField(
|
||||
max_digits=10, decimal_places=2, null=True, blank=True
|
||||
)
|
||||
website = models.URLField(blank=True)
|
||||
|
||||
# Statistics
|
||||
average_rating = models.DecimalField(
|
||||
max_digits=3, decimal_places=2, null=True, blank=True
|
||||
)
|
||||
ride_count = models.IntegerField(null=True, blank=True)
|
||||
coaster_count = models.IntegerField(null=True, blank=True)
|
||||
|
||||
# Image settings - references to existing photos
|
||||
banner_image = models.ForeignKey(
|
||||
"ParkPhoto",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="parks_using_as_banner",
|
||||
help_text="Photo to use as banner image for this park",
|
||||
)
|
||||
card_image = models.ForeignKey(
|
||||
"ParkPhoto",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="parks_using_as_card",
|
||||
help_text="Photo to use as card image for this park",
|
||||
)
|
||||
|
||||
# Relationships
|
||||
operator = models.ForeignKey(
|
||||
"Company",
|
||||
on_delete=models.PROTECT,
|
||||
related_name="operated_parks",
|
||||
help_text="Company that operates this park",
|
||||
limit_choices_to={"roles__contains": ["OPERATOR"]},
|
||||
)
|
||||
property_owner = models.ForeignKey(
|
||||
"Company",
|
||||
on_delete=models.PROTECT,
|
||||
related_name="owned_parks",
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Company that owns the property (if different from operator)",
|
||||
limit_choices_to={"roles__contains": ["PROPERTY_OWNER"]},
|
||||
)
|
||||
areas: models.Manager["ParkArea"] # Type hint for reverse relation
|
||||
# Type hint for reverse relation from rides app
|
||||
rides: models.Manager["Ride"]
|
||||
|
||||
# Metadata
|
||||
created_at = models.DateTimeField(auto_now_add=True, null=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
# Frontend URL
|
||||
url = models.URLField(blank=True, help_text="Frontend URL for this park")
|
||||
|
||||
# Computed fields for hybrid filtering
|
||||
opening_year = models.IntegerField(
|
||||
null=True,
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Year the park opened (computed from opening_date)"
|
||||
)
|
||||
search_text = models.TextField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Searchable text combining name, description, location, and operator"
|
||||
)
|
||||
|
||||
# Timezone for park operations
|
||||
timezone = models.CharField(
|
||||
max_length=50,
|
||||
help_text="Timezone identifier for park operations (e.g., 'America/New_York')"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ["name"]
|
||||
constraints = [
|
||||
# Business rule: Closing date must be after opening date
|
||||
models.CheckConstraint(
|
||||
name="park_closing_after_opening",
|
||||
check=models.Q(closing_date__isnull=True)
|
||||
| models.Q(opening_date__isnull=True)
|
||||
| models.Q(closing_date__gte=models.F("opening_date")),
|
||||
violation_error_message="Closing date must be after opening date",
|
||||
),
|
||||
# Business rule: Size must be positive
|
||||
models.CheckConstraint(
|
||||
name="park_size_positive",
|
||||
check=models.Q(size_acres__isnull=True) | models.Q(size_acres__gt=0),
|
||||
violation_error_message="Park size must be positive",
|
||||
),
|
||||
# Business rule: Rating must be between 1 and 10
|
||||
models.CheckConstraint(
|
||||
name="park_rating_range",
|
||||
check=models.Q(average_rating__isnull=True)
|
||||
| (models.Q(average_rating__gte=1) & models.Q(average_rating__lte=10)),
|
||||
violation_error_message="Average rating must be between 1 and 10",
|
||||
),
|
||||
# Business rule: Counts must be non-negative
|
||||
models.CheckConstraint(
|
||||
name="park_ride_count_non_negative",
|
||||
check=models.Q(ride_count__isnull=True) | models.Q(ride_count__gte=0),
|
||||
violation_error_message="Ride count must be non-negative",
|
||||
),
|
||||
models.CheckConstraint(
|
||||
name="park_coaster_count_non_negative",
|
||||
check=models.Q(coaster_count__isnull=True)
|
||||
| models.Q(coaster_count__gte=0),
|
||||
violation_error_message="Coaster count must be non-negative",
|
||||
),
|
||||
# Business rule: Coaster count cannot exceed ride count
|
||||
models.CheckConstraint(
|
||||
name="park_coaster_count_lte_ride_count",
|
||||
check=models.Q(coaster_count__isnull=True)
|
||||
| models.Q(ride_count__isnull=True)
|
||||
| models.Q(coaster_count__lte=models.F("ride_count")),
|
||||
violation_error_message="Coaster count cannot exceed total ride count",
|
||||
),
|
||||
]
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.name
|
||||
|
||||
def save(self, *args: Any, **kwargs: Any) -> None:
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from apps.core.history import HistoricalSlug
|
||||
|
||||
# Get old instance if it exists
|
||||
if self.pk:
|
||||
try:
|
||||
old_instance = type(self).objects.get(pk=self.pk)
|
||||
old_name = old_instance.name
|
||||
old_slug = old_instance.slug
|
||||
except type(self).DoesNotExist:
|
||||
old_name = None
|
||||
old_slug = None
|
||||
else:
|
||||
old_name = None
|
||||
old_slug = None
|
||||
|
||||
# Generate new slug if name has changed or slug is missing
|
||||
if not self.slug or (old_name and old_name != self.name):
|
||||
self.slug = slugify(self.name)
|
||||
|
||||
# Generate frontend URL
|
||||
frontend_domain = getattr(settings, "FRONTEND_DOMAIN", "https://thrillwiki.com")
|
||||
self.url = f"{frontend_domain}/parks/{self.slug}/"
|
||||
|
||||
# Populate computed fields for hybrid filtering
|
||||
self._populate_computed_fields()
|
||||
|
||||
# Save the model
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
# If slug has changed, save historical record
|
||||
if old_slug and old_slug != self.slug:
|
||||
HistoricalSlug.objects.create(
|
||||
content_type=ContentType.objects.get_for_model(self),
|
||||
object_id=self.pk,
|
||||
slug=old_slug,
|
||||
)
|
||||
|
||||
def _populate_computed_fields(self) -> None:
|
||||
"""Populate computed fields for hybrid filtering"""
|
||||
# Populate opening_year from opening_date
|
||||
if self.opening_date:
|
||||
self.opening_year = self.opening_date.year
|
||||
else:
|
||||
self.opening_year = None
|
||||
|
||||
# Populate search_text for client-side filtering
|
||||
search_parts = [self.name]
|
||||
|
||||
if self.description:
|
||||
search_parts.append(self.description)
|
||||
|
||||
# Add location information if available
|
||||
try:
|
||||
if hasattr(self, 'location') and self.location:
|
||||
if self.location.city:
|
||||
search_parts.append(self.location.city)
|
||||
if self.location.state:
|
||||
search_parts.append(self.location.state)
|
||||
if self.location.country:
|
||||
search_parts.append(self.location.country)
|
||||
except Exception:
|
||||
# Handle case where location relationship doesn't exist yet
|
||||
pass
|
||||
|
||||
# Add operator information
|
||||
if self.operator:
|
||||
search_parts.append(self.operator.name)
|
||||
|
||||
# Add property owner information if different
|
||||
if self.property_owner and self.property_owner != self.operator:
|
||||
search_parts.append(self.property_owner.name)
|
||||
|
||||
# Combine all parts into searchable text
|
||||
self.search_text = ' '.join(filter(None, search_parts)).lower()
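        # Illustrative result (hypothetical data): a park named "Example Park" in
        # Sandusky, Ohio, USA operated by "Example Co", with no description, would
        # end up with search_text == "example park sandusky ohio usa example co".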
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
if self.operator and "OPERATOR" not in self.operator.roles:
|
||||
raise ValidationError({"operator": "Company must have the OPERATOR role."})
|
||||
if self.property_owner and "PROPERTY_OWNER" not in self.property_owner.roles:
|
||||
raise ValidationError(
|
||||
{"property_owner": "Company must have the PROPERTY_OWNER role."}
|
||||
)
|
||||
|
||||
def get_absolute_url(self) -> str:
|
||||
return reverse("parks:park_detail", kwargs={"slug": self.slug})
|
||||
|
||||
def get_status_color(self) -> str:
|
||||
"""Get Tailwind color classes for park status"""
|
||||
status_colors = {
|
||||
"OPERATING": "bg-green-100 text-green-800",
|
||||
"CLOSED_TEMP": "bg-yellow-100 text-yellow-800",
|
||||
"CLOSED_PERM": "bg-red-100 text-red-800",
|
||||
"UNDER_CONSTRUCTION": "bg-blue-100 text-blue-800",
|
||||
"DEMOLISHED": "bg-gray-100 text-gray-800",
|
||||
"RELOCATED": "bg-purple-100 text-purple-800",
|
||||
}
|
||||
if self.status in status_colors:
|
||||
return status_colors[self.status]
|
||||
else:
|
||||
raise ValueError(f"Unknown park status: {self.status}")
|
||||
|
||||
@property
|
||||
def formatted_location(self) -> str:
|
||||
"""Get formatted address from ParkLocation if it exists"""
|
||||
if hasattr(self, "location") and self.location:
|
||||
return self.location.formatted_address
|
||||
return ""
|
||||
|
||||
@property
|
||||
def coordinates(self) -> Optional[List[float]]:
|
||||
"""Returns coordinates as a list [latitude, longitude]"""
|
||||
if hasattr(self, "location") and self.location:
|
||||
coords = self.location.coordinates
|
||||
if coords and isinstance(coords, (tuple, list)):
|
||||
return list(coords)
|
||||
return None
|
||||
|
||||
@classmethod
|
||||
def get_by_slug(cls, slug: str) -> tuple["Park", bool]:
|
||||
"""Get park by current or historical slug"""
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from apps.core.history import HistoricalSlug
|
||||
|
||||
print(f"\nLooking up slug: {slug}")
|
||||
|
||||
try:
|
||||
park = cls.objects.get(slug=slug)
|
||||
print(f"Found current park with slug: {slug}")
|
||||
return park, False
|
||||
except cls.DoesNotExist:
|
||||
print(f"No current park found with slug: {slug}")
|
||||
|
||||
# Try historical slugs in HistoricalSlug model
|
||||
content_type = ContentType.objects.get_for_model(cls)
|
||||
print(f"Searching HistoricalSlug with content_type: {content_type}")
|
||||
historical = (
|
||||
HistoricalSlug.objects.filter(content_type=content_type, slug=slug)
|
||||
.order_by("-created_at")
|
||||
.first()
|
||||
)
|
||||
|
||||
if historical:
|
||||
            print(
                f"Found historical slug record for object_id: {historical.object_id}"
            )
|
||||
try:
|
||||
park = cls.objects.get(pk=historical.object_id)
|
||||
print(f"Found park from historical slug: {park.name}")
|
||||
return park, True
|
||||
except cls.DoesNotExist:
|
||||
print("Park not found for historical slug record")
|
||||
else:
|
||||
print("No historical slug record found")
|
||||
|
||||
# Try pghistory events
|
||||
print("Searching pghistory events")
|
||||
event_model = getattr(cls, "event_model", None)
|
||||
if event_model:
|
||||
historical_event = (
|
||||
event_model.objects.filter(slug=slug)
|
||||
.order_by("-pgh_created_at")
|
||||
.first()
|
||||
)
|
||||
|
||||
if historical_event:
|
||||
                print(
                    f"Found pghistory event for pgh_obj_id: {historical_event.pgh_obj_id}"
                )
|
||||
try:
|
||||
park = cls.objects.get(pk=historical_event.pgh_obj_id)
|
||||
print(f"Found park from pghistory: {park.name}")
|
||||
return park, True
|
||||
except cls.DoesNotExist:
|
||||
print("Park not found for pghistory event")
|
||||
else:
|
||||
print("No pghistory event found")
|
||||
|
||||
raise cls.DoesNotExist("No park found with this slug")
|
||||
@@ -1,76 +0,0 @@
|
||||
from django.db import models
|
||||
from django.db.models import functions
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from apps.core.history import TrackedModel
|
||||
import pghistory
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class ParkReview(TrackedModel):
    """
    A review of a park.
    """

    # Import managers
    from ..managers import ParkReviewManager

    objects = ParkReviewManager()
|
||||
park = models.ForeignKey(
|
||||
"parks.Park", on_delete=models.CASCADE, related_name="reviews"
|
||||
)
|
||||
user = models.ForeignKey(
|
||||
"accounts.User", on_delete=models.CASCADE, related_name="park_reviews"
|
||||
)
|
||||
rating = models.PositiveSmallIntegerField(
|
||||
validators=[MinValueValidator(1), MaxValueValidator(10)]
|
||||
)
|
||||
title = models.CharField(max_length=200)
|
||||
content = models.TextField()
|
||||
visit_date = models.DateField()
|
||||
|
||||
# Metadata
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
# Moderation
|
||||
is_published = models.BooleanField(default=True)
|
||||
moderation_notes = models.TextField(blank=True)
|
||||
moderated_by = models.ForeignKey(
|
||||
"accounts.User",
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name="moderated_park_reviews",
|
||||
)
|
||||
moderated_at = models.DateTimeField(null=True, blank=True)
|
||||
|
||||
class Meta:
|
||||
ordering = ["-created_at"]
|
||||
unique_together = ["park", "user"]
|
||||
constraints = [
|
||||
# Business rule: Rating must be between 1 and 10 (database level
|
||||
# enforcement)
|
||||
models.CheckConstraint(
|
||||
name="park_review_rating_range",
|
||||
check=models.Q(rating__gte=1) & models.Q(rating__lte=10),
|
||||
violation_error_message="Rating must be between 1 and 10",
|
||||
),
|
||||
# Business rule: Visit date cannot be in the future
|
||||
models.CheckConstraint(
|
||||
name="park_review_visit_date_not_future",
|
||||
check=models.Q(visit_date__lte=functions.Now()),
|
||||
violation_error_message="Visit date cannot be in the future",
|
||||
),
|
||||
# Business rule: If moderated, must have moderator and timestamp
|
||||
models.CheckConstraint(
|
||||
name="park_review_moderation_consistency",
|
||||
check=models.Q(moderated_by__isnull=True, moderated_at__isnull=True)
|
||||
| models.Q(moderated_by__isnull=False, moderated_at__isnull=False),
|
||||
violation_error_message=(
|
||||
"Moderated reviews must have both moderator and moderation "
|
||||
"timestamp"
|
||||
),
|
||||
),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"Review of {self.park.name} by {self.user.username}"
|
||||
@@ -1,17 +0,0 @@
|
||||
from django.db.models import QuerySet, Count, Q
|
||||
from .models import Park
|
||||
|
||||
|
||||
def get_base_park_queryset() -> QuerySet[Park]:
|
||||
"""Get base queryset with all needed annotations and prefetches"""
|
||||
return (
|
||||
Park.objects.select_related("operator", "property_owner", "location")
|
||||
.prefetch_related("photos", "rides")
|
||||
.annotate(
|
||||
current_ride_count=Count("rides", distinct=True),
|
||||
current_coaster_count=Count(
|
||||
"rides", filter=Q(rides__category="RC"), distinct=True
|
||||
),
|
||||
)
|
||||
.order_by("name")
|
||||
)
|
||||
@@ -1,248 +0,0 @@
"""
Selectors for park-related data retrieval.
Following Django styleguide pattern for separating data access from business logic.
"""

from typing import Optional, Dict, Any
from django.db.models import QuerySet, Q, Count, Avg, Prefetch
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import Distance
from django.contrib.gis.db.models.functions import Distance as DistanceFunc

from .models import Park, ParkArea, ParkReview
from apps.rides.models import Ride


def park_list_with_stats(*, filters: Optional[Dict[str, Any]] = None) -> QuerySet[Park]:
    """
    Get parks optimized for list display with basic stats.

    Args:
        filters: Optional dictionary of filter parameters

    Returns:
        QuerySet of parks with optimized queries
    """
    queryset = (
        Park.objects.select_related("operator", "property_owner")
        .prefetch_related("location")
        .annotate(
            ride_count_calculated=Count("rides", distinct=True),
            coaster_count_calculated=Count(
                "rides",
                filter=Q(rides__category__in=["RC", "WC"]),
                distinct=True,
            ),
            average_rating_calculated=Avg("reviews__rating"),
        )
    )

    if filters:
        if "status" in filters:
            queryset = queryset.filter(status=filters["status"])
        if "operator" in filters:
            queryset = queryset.filter(operator=filters["operator"])
        if "country" in filters:
            queryset = queryset.filter(location__country=filters["country"])
        if "search" in filters:
            search_term = filters["search"]
            queryset = queryset.filter(
                Q(name__icontains=search_term) | Q(description__icontains=search_term)
            )

    return queryset.order_by("name")


def park_detail_optimized(*, slug: str) -> Park:
    """
    Get a single park with all related data optimized for detail view.

    Args:
        slug: Park slug identifier

    Returns:
        Park instance with optimized prefetches

    Raises:
        Park.DoesNotExist: If park with slug doesn't exist
    """
    return (
        Park.objects.select_related("operator", "property_owner")
        .prefetch_related(
            "location",
            "areas",
            Prefetch(
                "rides",
                queryset=Ride.objects.select_related(
                    "manufacturer", "designer", "ride_model"
                ),
            ),
            Prefetch(
                "reviews",
                queryset=ParkReview.objects.select_related("user").filter(
                    is_published=True
                ),
            ),
            "photos",
        )
        .get(slug=slug)
    )


def parks_near_location(
    *, point: Point, distance_km: float = 50, limit: int = 10
) -> QuerySet[Park]:
    """
    Get parks near a specific geographic location.

    Args:
        point: Geographic point (longitude, latitude)
        distance_km: Maximum distance in kilometers
        limit: Maximum number of results

    Returns:
        QuerySet of nearby parks ordered by distance
    """
    # Filter and order on the related ParkLocation.point geometry; the legacy
    # GeoQuerySet.distance() call was removed in Django 2.0, so the distance is
    # annotated explicitly instead.
    return (
        Park.objects.filter(
            location__point__distance_lte=(
                point,
                Distance(km=distance_km),
            )
        )
        .select_related("operator")
        .prefetch_related("location")
        .annotate(distance=DistanceFunc("location__point", point))
        .order_by("distance")[:limit]
    )


def park_statistics() -> Dict[str, Any]:
    """
    Get overall park statistics for dashboard/analytics.

    Returns:
        Dictionary containing park statistics
    """
    total_parks = Park.objects.count()
    operating_parks = Park.objects.filter(status="OPERATING").count()
    total_rides = Ride.objects.count()
    total_coasters = Ride.objects.filter(category__in=["RC", "WC"]).count()

    return {
        "total_parks": total_parks,
        "operating_parks": operating_parks,
        "closed_parks": total_parks - operating_parks,
        "total_rides": total_rides,
        "total_coasters": total_coasters,
        "average_rides_per_park": (total_rides / total_parks if total_parks > 0 else 0),
    }


def parks_by_operator(*, operator_id: int) -> QuerySet[Park]:
    """
    Get all parks operated by a specific company.

    Args:
        operator_id: Company ID of the operator

    Returns:
        QuerySet of parks operated by the company
    """
    return (
        Park.objects.filter(operator_id=operator_id)
        .select_related("operator")
        .prefetch_related("location")
        .annotate(ride_count_calculated=Count("rides"))
        .order_by("name")
    )


def parks_with_recent_reviews(*, days: int = 30) -> QuerySet[Park]:
    """
    Get parks that have received reviews in the last N days.

    Args:
        days: Number of days to look back for reviews

    Returns:
        QuerySet of parks with recent reviews
    """
    from django.utils import timezone
    from datetime import timedelta

    cutoff_date = timezone.now() - timedelta(days=days)

    return (
        Park.objects.filter(
            reviews__created_at__gte=cutoff_date, reviews__is_published=True
        )
        .select_related("operator")
        .prefetch_related("location")
        .annotate(
            recent_review_count=Count(
                "reviews", filter=Q(reviews__created_at__gte=cutoff_date)
            )
        )
        .order_by("-recent_review_count")
        .distinct()
    )


def park_search_autocomplete(*, query: str, limit: int = 10) -> QuerySet[Park]:
    """
    Get parks matching a search query for autocomplete functionality.

    Args:
        query: Search string
        limit: Maximum number of results

    Returns:
        QuerySet of matching parks for autocomplete
    """
    # ParkLocation stores the region in its "state" field, so that is what the
    # lookup targets here.
    return (
        Park.objects.filter(
            Q(name__icontains=query)
            | Q(location__city__icontains=query)
            | Q(location__state__icontains=query)
        )
        .select_related("operator")
        .prefetch_related("location")
        .order_by("name")[:limit]
    )


def park_areas_for_park(*, park_slug: str) -> QuerySet[ParkArea]:
    """
    Get all areas for a specific park.

    Args:
        park_slug: Slug of the park

    Returns:
        QuerySet of park areas with related data
    """
    return (
        ParkArea.objects.filter(park__slug=park_slug)
        .select_related("park")
        .prefetch_related("rides")
        .annotate(ride_count=Count("rides"))
        .order_by("name")
    )


def park_reviews_for_park(*, park_id: int, limit: int = 20) -> QuerySet[ParkReview]:
    """
    Get reviews for a specific park.

    Args:
        park_id: Park ID
        limit: Maximum number of reviews to return

    Returns:
        QuerySet of park reviews
    """
    return (
        ParkReview.objects.filter(park_id=park_id, is_published=True)
        .select_related("user", "park")
        .order_by("-created_at")[:limit]
    )

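# --- Usage sketch (illustrative, not part of the original file) --------------
# Selectors take keyword-only arguments, so call sites stay explicit. The
# coordinates below are hypothetical (longitude, latitude).
def _example_nearby_parks():
    point = Point(-81.56, 28.38, srid=4326)
    return list(parks_near_location(point=point, distance_km=25, limit=5))
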
@@ -1,229 +0,0 @@
"""
Services for park-related business logic.
Following Django styleguide pattern for business logic encapsulation.
"""

from typing import Optional, Dict, Any
from django.db import transaction
from django.db.models import Q
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractBaseUser

from .models import Park, ParkArea
from .services.location_service import ParkLocationService

# Use AbstractBaseUser for type hinting
UserType = AbstractBaseUser
User = get_user_model()


class ParkService:
    """Service for managing park operations."""

    @staticmethod
    def create_park(
        *,
        name: str,
        description: str = "",
        status: str = "OPERATING",
        operator_id: Optional[int] = None,
        property_owner_id: Optional[int] = None,
        opening_date: Optional[str] = None,
        closing_date: Optional[str] = None,
        operating_season: str = "",
        size_acres: Optional[float] = None,
        website: str = "",
        location_data: Optional[Dict[str, Any]] = None,
        created_by: Optional[UserType] = None,
    ) -> Park:
        """
        Create a new park with validation and location handling.

        Args:
            name: Park name
            description: Park description
            status: Operating status
            operator_id: ID of operating company
            property_owner_id: ID of property owner company
            opening_date: Opening date
            closing_date: Closing date
            operating_season: Operating season description
            size_acres: Park size in acres
            website: Park website URL
            location_data: Dictionary containing location information
            created_by: User creating the park

        Returns:
            Created Park instance

        Raises:
            ValidationError: If park data is invalid
        """
        with transaction.atomic():
            # Create park instance
            park = Park(
                name=name,
                description=description,
                status=status,
                opening_date=opening_date,
                closing_date=closing_date,
                operating_season=operating_season,
                size_acres=size_acres,
                website=website,
            )

            # Set foreign key relationships if provided
            if operator_id:
                from .models import Company

                park.operator = Company.objects.get(id=operator_id)

            if property_owner_id:
                from .models import Company

                park.property_owner = Company.objects.get(id=property_owner_id)

            # Styleguide requirement: call full_clean before save
            park.full_clean()
            park.save()

            # Handle location if provided
            if location_data:
                ParkLocationService.create_park_location(park=park, **location_data)

            return park

    @staticmethod
    def update_park(
        *,
        park_id: int,
        updates: Dict[str, Any],
        updated_by: Optional[UserType] = None,
    ) -> Park:
        """
        Update an existing park with validation.

        Args:
            park_id: ID of park to update
            updates: Dictionary of field updates
            updated_by: User performing the update

        Returns:
            Updated Park instance

        Raises:
            Park.DoesNotExist: If park doesn't exist
            ValidationError: If update data is invalid
        """
        with transaction.atomic():
            park = Park.objects.select_for_update().get(id=park_id)

            # Apply updates
            for field, value in updates.items():
                if hasattr(park, field):
                    setattr(park, field, value)

            # Styleguide requirement: call full_clean before save
            park.full_clean()
            park.save()

            return park

    @staticmethod
    def delete_park(*, park_id: int, deleted_by: Optional[UserType] = None) -> bool:
        """
        Soft delete a park by setting status to DEMOLISHED.

        Args:
            park_id: ID of park to delete
            deleted_by: User performing the deletion

        Returns:
            True if successfully deleted

        Raises:
            Park.DoesNotExist: If park doesn't exist
        """
        with transaction.atomic():
            park = Park.objects.select_for_update().get(id=park_id)
            park.status = "DEMOLISHED"

            # Styleguide requirement: call full_clean before save
            park.full_clean()
            park.save()

            return True

    @staticmethod
    def create_park_area(
        *,
        park_id: int,
        name: str,
        description: str = "",
        created_by: Optional[UserType] = None,
    ) -> ParkArea:
        """
        Create a new area within a park.

        Args:
            park_id: ID of the parent park
            name: Area name
            description: Area description
            created_by: User creating the area

        Returns:
            Created ParkArea instance

        Raises:
            Park.DoesNotExist: If park doesn't exist
            ValidationError: If area data is invalid
        """
        park = Park.objects.get(id=park_id)

        area = ParkArea(park=park, name=name, description=description)

        # Styleguide requirement: call full_clean before save
        area.full_clean()
        area.save()

        return area

    @staticmethod
    def update_park_statistics(*, park_id: int) -> Park:
        """
        Recalculate and update park statistics (ride counts, ratings).

        Args:
            park_id: ID of park to update statistics for

        Returns:
            Updated Park instance with fresh statistics
        """
        from apps.rides.models import Ride
        from .models import ParkReview
        from django.db.models import Count, Avg

        with transaction.atomic():
            park = Park.objects.select_for_update().get(id=park_id)

            # Calculate ride counts
            ride_stats = Ride.objects.filter(park=park).aggregate(
                total_rides=Count("id"),
                coaster_count=Count("id", filter=Q(category__in=["RC", "WC"])),
            )

            # Calculate average rating
            avg_rating = ParkReview.objects.filter(
                park=park, is_published=True
            ).aggregate(avg_rating=Avg("rating"))["avg_rating"]

            # Update park fields
            park.ride_count = ride_stats["total_rides"] or 0
            park.coaster_count = ride_stats["coaster_count"] or 0
            park.average_rating = avg_rating

            # Styleguide requirement: call full_clean before save
            park.full_clean()
            park.save()

            return park

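# --- Usage sketch (illustrative, not part of the original file) --------------
# The service wraps creation in a transaction and runs full_clean(), so invalid
# data raises ValidationError instead of silently persisting. All field values
# below are hypothetical; location_data keys mirror create_park_location().
def _example_create_park():
    return ParkService.create_park(
        name="Example Park",
        description="A hypothetical park used for illustration.",
        status="OPERATING",
        location_data={
            "city": "Orlando",
            "state": "Florida",
            "country": "USA",
            "latitude": 28.38,
            "longitude": -81.56,
        },
    )
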
@@ -1,13 +0,0 @@
from .roadtrip import RoadTripService
from .park_management import ParkService
from .location_service import ParkLocationService
from .filter_service import ParkFilterService
from .media_service import ParkMediaService

__all__ = [
    "RoadTripService",
    "ParkService",
    "ParkLocationService",
    "ParkFilterService",
    "ParkMediaService",
]

@@ -1,304 +0,0 @@
"""
Park Filter Service

Provides filtering functionality, aggregations, and caching for park filters.
This service handles complex filter logic and provides useful filter statistics.
"""

from typing import Dict, List, Any, Optional
from django.db.models import QuerySet, Count, Q
from django.core.cache import cache
from django.conf import settings
from ..models import Park, Company
from ..querysets import get_base_park_queryset


class ParkFilterService:
    """
    Service class for handling park filtering operations, aggregations,
    and providing filter suggestions based on available data.
    """

    CACHE_TIMEOUT = getattr(settings, "PARK_FILTER_CACHE_TIMEOUT", 300)  # 5 minutes

    def __init__(self):
        self.cache_prefix = "park_filter"

    def get_filter_counts(
        self, base_queryset: Optional[QuerySet] = None
    ) -> Dict[str, Any]:
        """
        Get counts for various filter options to show users what's available.

        Args:
            base_queryset: Optional base queryset to use for calculations

        Returns:
            Dictionary containing counts for different filter categories
        """
        cache_key = f"{self.cache_prefix}:filter_counts"
        cached_result = cache.get(cache_key)

        if cached_result is not None:
            return cached_result

        if base_queryset is None:
            base_queryset = get_base_park_queryset()

        # Calculate filter counts
        filter_counts = {
            "total_parks": base_queryset.count(),
            "operating_parks": base_queryset.filter(status="OPERATING").count(),
            "parks_with_coasters": base_queryset.filter(coaster_count__gt=0).count(),
            "big_parks": base_queryset.filter(ride_count__gte=10).count(),
            "highly_rated": base_queryset.filter(average_rating__gte=4.0).count(),
            "park_types": self._get_park_type_counts(base_queryset),
            "top_operators": self._get_top_operators(base_queryset),
            "countries": self._get_country_counts(base_queryset),
        }

        # Cache the result
        cache.set(cache_key, filter_counts, self.CACHE_TIMEOUT)
        return filter_counts

    def _get_park_type_counts(self, queryset: QuerySet) -> Dict[str, int]:
        """Get counts for different park types based on operator names."""
        return {
            "disney": queryset.filter(operator__name__icontains="Disney").count(),
            "universal": queryset.filter(operator__name__icontains="Universal").count(),
            "six_flags": queryset.filter(operator__name__icontains="Six Flags").count(),
            "cedar_fair": queryset.filter(
                Q(operator__name__icontains="Cedar Fair")
                | Q(operator__name__icontains="Cedar Point")
                | Q(operator__name__icontains="Kings Island")
            ).count(),
        }

    def _get_top_operators(
        self, queryset: QuerySet, limit: int = 10
    ) -> List[Dict[str, Any]]:
        """Get the top operators by number of parks."""
        return list(
            queryset.values("operator__name", "operator__id")
            .annotate(park_count=Count("id"))
            .filter(park_count__gt=0)
            .order_by("-park_count")[:limit]
        )

    def _get_country_counts(
        self, queryset: QuerySet, limit: int = 10
    ) -> List[Dict[str, Any]]:
        """Get countries with the most parks."""
        return list(
            queryset.filter(location__country__isnull=False)
            .values("location__country")
            .annotate(park_count=Count("id"))
            .filter(park_count__gt=0)
            .order_by("-park_count")[:limit]
        )

    def get_filter_suggestions(self, query: str) -> Dict[str, List[str]]:
        """
        Get filter suggestions based on a search query.

        Args:
            query: Search query string

        Returns:
            Dictionary with suggestion categories
        """
        cache_key = f"{self.cache_prefix}:suggestions:{query.lower()}"
        cached_result = cache.get(cache_key)

        if cached_result is not None:
            return cached_result

        suggestions = {
            "parks": [],
            "operators": [],
            "locations": [],
        }

        if len(query) >= 2:  # Only search for queries of 2+ characters
            # Park name suggestions
            park_names = Park.objects.filter(name__icontains=query).values_list(
                "name", flat=True
            )[:5]
            suggestions["parks"] = list(park_names)

            # Operator suggestions
            operator_names = Company.objects.filter(
                roles__contains=["OPERATOR"], name__icontains=query
            ).values_list("name", flat=True)[:5]
            suggestions["operators"] = list(operator_names)

            # Location suggestions (cities and countries)
            locations = Park.objects.filter(
                Q(location__city__icontains=query)
                | Q(location__country__icontains=query)
            ).values_list("location__city", "location__country")[:5]

            location_suggestions = []
            for city, country in locations:
                if city and city.lower().startswith(query.lower()):
                    location_suggestions.append(city)
                elif country and country.lower().startswith(query.lower()):
                    location_suggestions.append(country)

            suggestions["locations"] = list(set(location_suggestions))[:5]

        # Cache suggestions for a shorter time
        cache.set(cache_key, suggestions, 60)  # 1 minute cache
        return suggestions

    def get_popular_filters(self) -> Dict[str, Any]:
        """
        Get commonly used filter combinations and popular filter values.

        Returns:
            Dictionary containing popular filter configurations
        """
        cache_key = f"{self.cache_prefix}:popular_filters"
        cached_result = cache.get(cache_key)

        if cached_result is not None:
            return cached_result

        base_qs = get_base_park_queryset()

        popular_filters = {
            "quick_filters": [
                {
                    "label": "Disney Parks",
                    "filters": {"park_type": "disney"},
                    "count": base_qs.filter(operator__name__icontains="Disney").count(),
                },
                {
                    "label": "Parks with Coasters",
                    "filters": {"has_coasters": True},
                    "count": base_qs.filter(coaster_count__gt=0).count(),
                },
                {
                    "label": "Highly Rated",
                    "filters": {"min_rating": "4"},
                    "count": base_qs.filter(average_rating__gte=4.0).count(),
                },
                {
                    "label": "Major Parks",
                    "filters": {"big_parks_only": True},
                    "count": base_qs.filter(ride_count__gte=10).count(),
                },
            ],
            "recommended_sorts": [
                {"value": "-average_rating", "label": "Highest Rated"},
                {"value": "-coaster_count", "label": "Most Coasters"},
                {"value": "name", "label": "A-Z"},
            ],
        }

        # Cache for longer since these don't change often
        cache.set(cache_key, popular_filters, self.CACHE_TIMEOUT * 2)
        return popular_filters

    def clear_filter_cache(self) -> None:
        """Clear all cached filter data."""
        # Simple cache clearing - delete known keys
        cache_keys = [
            f"{self.cache_prefix}:filter_counts",
            f"{self.cache_prefix}:popular_filters",
        ]
        for key in cache_keys:
            cache.delete(key)

    def get_filtered_queryset(self, filters: Dict[str, Any]) -> QuerySet:  # noqa: C901
        """
        Apply filters to get a filtered queryset with optimizations.

        Args:
            filters: Dictionary of filter parameters

        Returns:
            Filtered and optimized QuerySet
        """
        queryset = (
            get_base_park_queryset()
            .select_related("operator", "property_owner", "location")
            .prefetch_related("photos", "rides__manufacturer")
        )

        # Apply status filter
        if filters.get("status"):
            queryset = queryset.filter(status=filters["status"])

        # Apply park type filter
        if filters.get("park_type"):
            queryset = self._apply_park_type_filter(queryset, filters["park_type"])

        # Apply coaster filter
        if filters.get("has_coasters"):
            queryset = queryset.filter(coaster_count__gt=0)

        # Apply rating filter
        if filters.get("min_rating"):
            try:
                min_rating = float(filters["min_rating"])
                queryset = queryset.filter(average_rating__gte=min_rating)
            except (ValueError, TypeError):
                pass

        # Apply big parks filter
        if filters.get("big_parks_only"):
            queryset = queryset.filter(ride_count__gte=10)

        # Apply search
        if filters.get("search"):
            search_query = filters["search"]
            queryset = queryset.filter(
                Q(name__icontains=search_query)
                | Q(description__icontains=search_query)
                | Q(location__city__icontains=search_query)
                | Q(location__country__icontains=search_query)
            )

        # Apply location filters
        if filters.get("country_filter"):
            queryset = queryset.filter(
                location__country__icontains=filters["country_filter"]
            )

        if filters.get("state_filter"):
            queryset = queryset.filter(
                location__state__icontains=filters["state_filter"]
            )

        # Apply ordering
        if filters.get("ordering"):
            queryset = queryset.order_by(filters["ordering"])

        return queryset.distinct()

    def _apply_park_type_filter(self, queryset: QuerySet, park_type: str) -> QuerySet:
        """Apply park type filter logic."""
        type_filters = {
            "disney": Q(operator__name__icontains="Disney"),
            "universal": Q(operator__name__icontains="Universal"),
            "six_flags": Q(operator__name__icontains="Six Flags"),
            "cedar_fair": (
                Q(operator__name__icontains="Cedar Fair")
                | Q(operator__name__icontains="Cedar Point")
                | Q(operator__name__icontains="Kings Island")
                | Q(operator__name__icontains="Canada's Wonderland")
            ),
            "independent": ~(
                Q(operator__name__icontains="Disney")
                | Q(operator__name__icontains="Universal")
                | Q(operator__name__icontains="Six Flags")
                | Q(operator__name__icontains="Cedar Fair")
                | Q(operator__name__icontains="Cedar Point")
            ),
        }

        if park_type in type_filters:
            return queryset.filter(type_filters[park_type])

        return queryset

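# --- Usage sketch (illustrative, not part of the original file) --------------
# Typical view-side usage: one instance per request is fine since all state
# lives in the Django cache. The filter keys match get_filtered_queryset().
def _example_filtered_parks():
    service = ParkFilterService()
    counts = service.get_filter_counts()
    parks = service.get_filtered_queryset(
        {"has_coasters": True, "min_rating": "4", "ordering": "-average_rating"}
    )
    return counts["total_parks"], list(parks[:10])
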
@@ -1,428 +0,0 @@
"""
Smart Park Loader for Hybrid Filtering Strategy

This module provides intelligent data loading capabilities for the hybrid filtering approach,
optimizing database queries and implementing progressive loading strategies.
"""

from typing import Dict, Optional, Any
from django.db import models
from django.core.cache import cache
from django.conf import settings
from apps.parks.models import Park


class SmartParkLoader:
    """
    Intelligent park data loader that optimizes queries based on filtering requirements.
    Implements progressive loading and smart caching strategies.
    """

    # Cache configuration
    CACHE_TIMEOUT = getattr(settings, 'HYBRID_FILTER_CACHE_TIMEOUT', 300)  # 5 minutes
    CACHE_KEY_PREFIX = 'hybrid_parks'

    # Progressive loading thresholds
    INITIAL_LOAD_SIZE = 50
    PROGRESSIVE_LOAD_SIZE = 25
    MAX_CLIENT_SIDE_RECORDS = 200

    def __init__(self):
        self.base_queryset = self._get_optimized_queryset()

    def _get_optimized_queryset(self) -> models.QuerySet:
        """Get optimized base queryset with all necessary prefetches."""
        return Park.objects.select_related(
            'operator',
            'property_owner',
            'banner_image',
            'card_image',
        ).prefetch_related(
            'location',  # ParkLocation relationship
        ).filter(
            # Only include operating and temporarily closed parks by default
            status__in=['OPERATING', 'CLOSED_TEMP']
        ).order_by('name')

    def get_initial_load(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Get initial park data load with smart filtering decisions.

        Args:
            filters: Optional filters to apply

        Returns:
            Dictionary containing parks data and metadata
        """
        cache_key = self._generate_cache_key('initial', filters)
        cached_result = cache.get(cache_key)

        if cached_result:
            return cached_result

        # Apply filters if provided
        queryset = self.base_queryset
        if filters:
            queryset = self._apply_filters(queryset, filters)

        # Get total count for pagination decisions
        total_count = queryset.count()

        # Determine loading strategy
        if total_count <= self.MAX_CLIENT_SIDE_RECORDS:
            # Load all data for client-side filtering
            parks = list(queryset.all())
            strategy = 'client_side'
            has_more = False
        else:
            # Load initial batch for server-side pagination
            parks = list(queryset[:self.INITIAL_LOAD_SIZE])
            strategy = 'server_side'
            has_more = total_count > self.INITIAL_LOAD_SIZE

        result = {
            'parks': parks,
            'total_count': total_count,
            'strategy': strategy,
            'has_more': has_more,
            'next_offset': len(parks) if has_more else None,
            'filter_metadata': self._get_filter_metadata(queryset),
        }

        # Cache the result
        cache.set(cache_key, result, self.CACHE_TIMEOUT)

        return result

    def get_progressive_load(
        self,
        offset: int,
        filters: Optional[Dict[str, Any]] = None
    ) -> Dict[str, Any]:
        """
        Get the next batch of parks for progressive loading.

        Args:
            offset: Starting offset for the batch
            filters: Optional filters to apply

        Returns:
            Dictionary containing parks data and metadata
        """
        cache_key = self._generate_cache_key(f'progressive_{offset}', filters)
        cached_result = cache.get(cache_key)

        if cached_result:
            return cached_result

        # Apply filters if provided
        queryset = self.base_queryset
        if filters:
            queryset = self._apply_filters(queryset, filters)

        # Get the batch
        end_offset = offset + self.PROGRESSIVE_LOAD_SIZE
        parks = list(queryset[offset:end_offset])

        # Check if there are more records
        total_count = queryset.count()
        has_more = end_offset < total_count

        result = {
            'parks': parks,
            'total_count': total_count,
            'has_more': has_more,
            'next_offset': end_offset if has_more else None,
        }

        # Cache the result
        cache.set(cache_key, result, self.CACHE_TIMEOUT)

        return result

    def get_filter_metadata(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
        """
        Get metadata about available filter options.

        Args:
            filters: Current filters to scope the metadata

        Returns:
            Dictionary containing filter metadata
        """
        cache_key = self._generate_cache_key('metadata', filters)
        cached_result = cache.get(cache_key)

        if cached_result:
            return cached_result

        # Apply filters if provided
        queryset = self.base_queryset
        if filters:
            queryset = self._apply_filters(queryset, filters)

        result = self._get_filter_metadata(queryset)

        # Cache the result
        cache.set(cache_key, result, self.CACHE_TIMEOUT)

        return result

    def _apply_filters(self, queryset: models.QuerySet, filters: Dict[str, Any]) -> models.QuerySet:
        """Apply filters to the queryset."""

        # Status filter
        if 'status' in filters and filters['status']:
            if isinstance(filters['status'], list):
                queryset = queryset.filter(status__in=filters['status'])
            else:
                queryset = queryset.filter(status=filters['status'])

        # Park type filter
        if 'park_type' in filters and filters['park_type']:
            if isinstance(filters['park_type'], list):
                queryset = queryset.filter(park_type__in=filters['park_type'])
            else:
                queryset = queryset.filter(park_type=filters['park_type'])

        # Country filter
        if 'country' in filters and filters['country']:
            queryset = queryset.filter(location__country__in=filters['country'])

        # State filter
        if 'state' in filters and filters['state']:
            queryset = queryset.filter(location__state__in=filters['state'])

        # Opening year range
        if 'opening_year_min' in filters and filters['opening_year_min']:
            queryset = queryset.filter(opening_year__gte=filters['opening_year_min'])

        if 'opening_year_max' in filters and filters['opening_year_max']:
            queryset = queryset.filter(opening_year__lte=filters['opening_year_max'])

        # Size range
        if 'size_min' in filters and filters['size_min']:
            queryset = queryset.filter(size_acres__gte=filters['size_min'])

        if 'size_max' in filters and filters['size_max']:
            queryset = queryset.filter(size_acres__lte=filters['size_max'])

        # Rating range
        if 'rating_min' in filters and filters['rating_min']:
            queryset = queryset.filter(average_rating__gte=filters['rating_min'])

        if 'rating_max' in filters and filters['rating_max']:
            queryset = queryset.filter(average_rating__lte=filters['rating_max'])

        # Ride count range
        if 'ride_count_min' in filters and filters['ride_count_min']:
            queryset = queryset.filter(ride_count__gte=filters['ride_count_min'])

        if 'ride_count_max' in filters and filters['ride_count_max']:
            queryset = queryset.filter(ride_count__lte=filters['ride_count_max'])

        # Coaster count range
        if 'coaster_count_min' in filters and filters['coaster_count_min']:
            queryset = queryset.filter(coaster_count__gte=filters['coaster_count_min'])

        if 'coaster_count_max' in filters and filters['coaster_count_max']:
            queryset = queryset.filter(coaster_count__lte=filters['coaster_count_max'])

        # Operator filter
        if 'operator' in filters and filters['operator']:
            if isinstance(filters['operator'], list):
                queryset = queryset.filter(operator__slug__in=filters['operator'])
            else:
                queryset = queryset.filter(operator__slug=filters['operator'])

        # Search query
        if 'search' in filters and filters['search']:
            search_term = filters['search'].lower()
            queryset = queryset.filter(search_text__icontains=search_term)

        return queryset

    def _get_filter_metadata(self, queryset: models.QuerySet) -> Dict[str, Any]:
        """Generate filter metadata from the current queryset."""

        # Get distinct values for categorical filters with counts
        countries_data = list(
            queryset.values('location__country')
            .exclude(location__country__isnull=True)
            .annotate(count=models.Count('id'))
            .order_by('location__country')
        )

        states_data = list(
            queryset.values('location__state')
            .exclude(location__state__isnull=True)
            .annotate(count=models.Count('id'))
            .order_by('location__state')
        )

        park_types_data = list(
            queryset.values('park_type')
            .exclude(park_type__isnull=True)
            .annotate(count=models.Count('id'))
            .order_by('park_type')
        )

        statuses_data = list(
            queryset.values('status')
            .annotate(count=models.Count('id'))
            .order_by('status')
        )

        operators_data = list(
            queryset.select_related('operator')
            .values('operator__id', 'operator__name', 'operator__slug')
            .exclude(operator__isnull=True)
            .annotate(count=models.Count('id'))
            .order_by('operator__name')
        )

        # Convert to frontend-expected format with value/label/count
        countries = [
            {
                'value': item['location__country'],
                'label': item['location__country'],
                'count': item['count']
            }
            for item in countries_data
        ]

        states = [
            {
                'value': item['location__state'],
                'label': item['location__state'],
                'count': item['count']
            }
            for item in states_data
        ]

        park_types = [
            {
                'value': item['park_type'],
                'label': item['park_type'],
                'count': item['count']
            }
            for item in park_types_data
        ]

        statuses = [
            {
                'value': item['status'],
                'label': self._get_status_label(item['status']),
                'count': item['count']
            }
            for item in statuses_data
        ]

        operators = [
            {
                'value': item['operator__slug'],
                'label': item['operator__name'],
                'count': item['count']
            }
            for item in operators_data
        ]

        # Get ranges for numerical filters
        aggregates = queryset.aggregate(
            opening_year_min=models.Min('opening_year'),
            opening_year_max=models.Max('opening_year'),
            size_min=models.Min('size_acres'),
            size_max=models.Max('size_acres'),
            rating_min=models.Min('average_rating'),
            rating_max=models.Max('average_rating'),
            ride_count_min=models.Min('ride_count'),
            ride_count_max=models.Max('ride_count'),
            coaster_count_min=models.Min('coaster_count'),
            coaster_count_max=models.Max('coaster_count'),
        )

        # Use explicit None checks so legitimate zero values are not dropped.
        return {
            'categorical': {
                'countries': countries,
                'states': states,
                'park_types': park_types,
                'statuses': statuses,
                'operators': operators,
            },
            'ranges': {
                'opening_year': {
                    'min': aggregates['opening_year_min'],
                    'max': aggregates['opening_year_max'],
                    'step': 1,
                    'unit': 'year'
                },
                'size_acres': {
                    'min': float(aggregates['size_min']) if aggregates['size_min'] is not None else None,
                    'max': float(aggregates['size_max']) if aggregates['size_max'] is not None else None,
                    'step': 1.0,
                    'unit': 'acres'
                },
                'average_rating': {
                    'min': float(aggregates['rating_min']) if aggregates['rating_min'] is not None else None,
                    'max': float(aggregates['rating_max']) if aggregates['rating_max'] is not None else None,
                    'step': 0.1,
                    'unit': 'stars'
                },
                'ride_count': {
                    'min': aggregates['ride_count_min'],
                    'max': aggregates['ride_count_max'],
                    'step': 1,
                    'unit': 'rides'
                },
                'coaster_count': {
                    'min': aggregates['coaster_count_min'],
                    'max': aggregates['coaster_count_max'],
                    'step': 1,
                    'unit': 'coasters'
                },
            },
            'total_count': queryset.count(),
        }

    def _get_status_label(self, status: str) -> str:
        """Convert a status code to a human-readable label."""
        status_labels = {
            'OPERATING': 'Operating',
            'CLOSED_TEMP': 'Temporarily Closed',
            'CLOSED_PERM': 'Permanently Closed',
            'UNDER_CONSTRUCTION': 'Under Construction',
        }
        if status in status_labels:
            return status_labels[status]
        else:
            raise ValueError(f"Unknown park status: {status}")

    def _generate_cache_key(self, operation: str, filters: Optional[Dict[str, Any]] = None) -> str:
        """Generate a cache key for the given operation and filters."""
        key_parts = [self.CACHE_KEY_PREFIX, operation]

        if filters:
            # Create a consistent string representation of filters
            filter_str = '_'.join(f"{k}:{v}" for k, v in sorted(filters.items()) if v)
            key_parts.append(filter_str)

        return '_'.join(key_parts)

    def invalidate_cache(self, filters: Optional[Dict[str, Any]] = None) -> None:
        """Invalidate cached data for the given filters."""
        # This is a simplified implementation
        # In production, you might want to use cache versioning or tags
        cache_keys = [
            self._generate_cache_key('initial', filters),
            self._generate_cache_key('metadata', filters),
        ]

        # Also invalidate progressive load caches
        for offset in range(0, 1000, self.PROGRESSIVE_LOAD_SIZE):
            cache_keys.append(self._generate_cache_key(f'progressive_{offset}', filters))

        cache.delete_many(cache_keys)


# Singleton instance
smart_park_loader = SmartParkLoader()

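# --- Usage sketch (illustrative, not part of the original file) --------------
# First page plus one progressive page, using the module-level singleton. The
# filter value is hypothetical; 'country' expects a list per _apply_filters().
def _example_progressive_load():
    first = smart_park_loader.get_initial_load({'country': ['USA']})
    if first['has_more']:
        more = smart_park_loader.get_progressive_load(
            first['next_offset'], {'country': ['USA']}
        )
        return first['parks'] + more['parks']
    return first['parks']
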
@@ -1,491 +0,0 @@
"""
Parks-specific location services with OpenStreetMap integration.
Handles geocoding, reverse geocoding, and location search for parks.
"""

import requests
from typing import List, Dict, Any, Optional
from django.core.cache import cache
from django.db import transaction
import logging

from ..models import ParkLocation

logger = logging.getLogger(__name__)


class ParkLocationService:
    """
    Location service specifically for parks using the OpenStreetMap Nominatim API.
    """

    NOMINATIM_BASE_URL = "https://nominatim.openstreetmap.org"
    USER_AGENT = "ThrillWiki/1.0 (https://thrillwiki.com)"

    @classmethod
    def search_locations(cls, query: str, limit: int = 10) -> Dict[str, Any]:
        """
        Search for locations using the OpenStreetMap Nominatim API.
        Optimized for finding theme parks and amusement parks.

        Args:
            query: Search query string
            limit: Maximum number of results (default: 10, max: 25)

        Returns:
            Dictionary with search results
        """
        if not query.strip():
            return {"count": 0, "results": [], "query": query}

        # Limit the number of results
        limit = min(limit, 25)

        # Check cache first
        cache_key = f"park_location_search:{query.lower()}:{limit}"
        cached_result = cache.get(cache_key)
        if cached_result:
            return cached_result

        try:
            params = {
                "q": query,
                "format": "json",
                "limit": limit,
                "addressdetails": 1,
                "extratags": 1,
                "namedetails": 1,
                "accept-language": "en",
                # Prioritize places that might be parks or entertainment venues
                "featuretype": "settlement,leisure,tourism",
            }

            headers = {
                "User-Agent": cls.USER_AGENT,
            }

            response = requests.get(
                f"{cls.NOMINATIM_BASE_URL}/search",
                params=params,
                headers=headers,
                timeout=10,
            )
            response.raise_for_status()

            osm_results = response.json()

            # Transform OSM results to our format
            results = []
            for item in osm_results:
                result = cls._transform_osm_result(item)
                if result:
                    results.append(result)

            result_data = {"count": len(results), "results": results, "query": query}

            # Cache for 1 hour
            cache.set(cache_key, result_data, 3600)

            return result_data

        except requests.RequestException as e:
            logger.error(f"Error searching park locations: {str(e)}")
            return {
                "count": 0,
                "results": [],
                "query": query,
                "error": "Location search service temporarily unavailable",
            }

    @classmethod
    def reverse_geocode(cls, latitude: float, longitude: float) -> Dict[str, Any]:
        """
        Reverse geocode coordinates to get location information using OSM.

        Args:
            latitude: Latitude coordinate
            longitude: Longitude coordinate

        Returns:
            Dictionary with location information
        """
        # Validate coordinates
        if not (-90 <= latitude <= 90) or not (-180 <= longitude <= 180):
            return {"error": "Invalid coordinates"}

        # Check cache first
        cache_key = f"park_reverse_geocode:{latitude:.6f}:{longitude:.6f}"
        cached_result = cache.get(cache_key)
        if cached_result:
            return cached_result

        try:
            params = {
                "lat": latitude,
                "lon": longitude,
                "format": "json",
                "addressdetails": 1,
                "extratags": 1,
                "namedetails": 1,
                "accept-language": "en",
            }

            headers = {
                "User-Agent": cls.USER_AGENT,
            }

            response = requests.get(
                f"{cls.NOMINATIM_BASE_URL}/reverse",
                params=params,
                headers=headers,
                timeout=10,
            )
            response.raise_for_status()

            osm_result = response.json()

            if "error" in osm_result:
                return {"error": "Location not found"}

            result = cls._transform_osm_reverse_result(osm_result)

            # Cache for 24 hours
            cache.set(cache_key, result, 86400)

            return result

        except requests.RequestException as e:
            logger.error(f"Error reverse geocoding park location: {str(e)}")
            return {"error": "Reverse geocoding service temporarily unavailable"}

    @classmethod
    def geocode_address(cls, address: str) -> Dict[str, Any]:
        """
        Geocode an address to get coordinates using OSM.

        Args:
            address: Address string to geocode

        Returns:
            Dictionary with coordinates and location information
        """
        if not address.strip():
            return {"error": "Address is required"}

        # Use search_locations for geocoding
        results = cls.search_locations(address, limit=1)

        if results["count"] > 0:
            return results["results"][0]
        else:
            return {"error": "Address not found"}

    @classmethod
    def create_park_location(
        cls,
        *,
        park,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
        street_address: str = "",
        city: str = "",
        state: str = "",
        country: str = "USA",
        postal_code: str = "",
        highway_exit: str = "",
        parking_notes: str = "",
        seasonal_notes: str = "",
        osm_id: Optional[int] = None,
        osm_type: str = "",
    ) -> ParkLocation:
        """
        Create a location for a park with OSM integration.

        Args:
            park: Park instance
            latitude: Latitude coordinate
            longitude: Longitude coordinate
            street_address: Street address
            city: City name
            state: State/region name
            country: Country name (default: USA)
            postal_code: Postal/ZIP code
            highway_exit: Highway exit information
            parking_notes: Parking information
            seasonal_notes: Seasonal access notes
            osm_id: OpenStreetMap ID
            osm_type: OpenStreetMap type (node, way, relation)

        Returns:
            Created ParkLocation instance
        """
        with transaction.atomic():
            park_location = ParkLocation(
                park=park,
                street_address=street_address,
                city=city,
                state=state,
                country=country,
                postal_code=postal_code,
                highway_exit=highway_exit,
                parking_notes=parking_notes,
                seasonal_notes=seasonal_notes,
                osm_id=osm_id,
                osm_type=osm_type,
            )

            # Set coordinates if provided
            if latitude is not None and longitude is not None:
                park_location.set_coordinates(latitude, longitude)

            park_location.full_clean()
            park_location.save()

            return park_location

    @classmethod
    def update_park_location(
        cls, park_location: ParkLocation, **updates
    ) -> ParkLocation:
        """
        Update park location with validation.

        Args:
            park_location: ParkLocation instance to update
            **updates: Fields to update

        Returns:
            Updated ParkLocation instance
        """
        with transaction.atomic():
            # Handle coordinates separately
            latitude = updates.pop("latitude", None)
            longitude = updates.pop("longitude", None)

            # Update regular fields
            for field, value in updates.items():
                if hasattr(park_location, field):
                    setattr(park_location, field, value)

            # Update coordinates if provided
            if latitude is not None and longitude is not None:
                park_location.set_coordinates(latitude, longitude)

            park_location.full_clean()
            park_location.save()

            return park_location

    @classmethod
    def find_nearby_parks(
        cls, latitude: float, longitude: float, radius_km: float = 50
    ) -> List[ParkLocation]:
        """
        Find parks near given coordinates using PostGIS.

        Args:
            latitude: Center latitude
            longitude: Center longitude
            radius_km: Search radius in kilometers

        Returns:
            List of nearby ParkLocation instances
        """
        from django.contrib.gis.geos import Point
        from django.contrib.gis.measure import Distance
        from django.contrib.gis.db.models.functions import Distance as DistanceFunc

        center_point = Point(longitude, latitude, srid=4326)

        # Order by an explicit distance annotation; "point__distance" is not a
        # valid ordering expression on its own.
        return list(
            ParkLocation.objects.filter(
                point__distance_lte=(center_point, Distance(km=radius_km))
            )
            .select_related("park", "park__operator")
            .annotate(distance=DistanceFunc("point", center_point))
            .order_by("distance")
        )

    @classmethod
    def enrich_location_from_osm(cls, park_location: ParkLocation) -> ParkLocation:
        """
        Enrich park location data using OSM reverse geocoding.

        Args:
            park_location: ParkLocation instance to enrich

        Returns:
            Updated ParkLocation instance
        """
        if not park_location.point:
            return park_location

        # Get detailed location info from OSM
        osm_data = cls.reverse_geocode(park_location.latitude, park_location.longitude)

        if "error" not in osm_data:
            updates = {}

            # Update missing address components
            if not park_location.street_address and osm_data.get("street_address"):
                updates["street_address"] = osm_data["street_address"]
            if not park_location.city and osm_data.get("city"):
                updates["city"] = osm_data["city"]
            if not park_location.state and osm_data.get("state"):
                updates["state"] = osm_data["state"]
            if not park_location.country and osm_data.get("country"):
                updates["country"] = osm_data["country"]
            if not park_location.postal_code and osm_data.get("postal_code"):
                updates["postal_code"] = osm_data["postal_code"]

            # Update OSM metadata
            if osm_data.get("osm_id"):
                updates["osm_id"] = osm_data["osm_id"]
            if osm_data.get("osm_type"):
                updates["osm_type"] = osm_data["osm_type"]

            if updates:
                return cls.update_park_location(park_location, **updates)

        return park_location

    @classmethod
    def _transform_osm_result(
        cls, osm_item: Dict[str, Any]
    ) -> Optional[Dict[str, Any]]:
        """Transform an OSM search result to our standard format."""
        try:
            address = osm_item.get("address", {})

            # Extract address components
            street_number = address.get("house_number", "")
            street_name = address.get("road", "")
            street_address = f"{street_number} {street_name}".strip()

            city = (
                address.get("city")
                or address.get("town")
                or address.get("village")
                or address.get("municipality")
                or ""
            )

            state = (
                address.get("state")
                or address.get("province")
                or address.get("region")
                or ""
            )

            country = address.get("country", "")
            postal_code = address.get("postcode", "")

            # Build formatted address
            address_parts = []
            if street_address:
                address_parts.append(street_address)
            if city:
                address_parts.append(city)
            if state:
                address_parts.append(state)
            if postal_code:
                address_parts.append(postal_code)
            if country:
                address_parts.append(country)

            formatted_address = ", ".join(address_parts)

            # Check if this might be a theme park or entertainment venue
            place_type = osm_item.get("type", "").lower()
            extratags = osm_item.get("extratags", {})

            is_park_related = any(
                [
                    "park" in place_type,
                    "theme" in place_type,
                    "amusement" in place_type,
                    "attraction" in place_type,
                    extratags.get("tourism") == "theme_park",
                    extratags.get("leisure") == "amusement_arcade",
                    extratags.get("amenity") == "amusement_arcade",
                ]
            )

            return {
                "name": osm_item.get("display_name", ""),
                "latitude": float(osm_item["lat"]),
                "longitude": float(osm_item["lon"]),
                "formatted_address": formatted_address,
                "street_address": street_address,
                "city": city,
                "state": state,
                "country": country,
                "postal_code": postal_code,
                "osm_id": osm_item.get("osm_id"),
                "osm_type": osm_item.get("osm_type"),
                "place_type": place_type,
                "importance": osm_item.get("importance", 0),
                "is_park_related": is_park_related,
            }

        except (KeyError, ValueError, TypeError) as e:
            logger.warning(f"Error transforming OSM result: {str(e)}")
            return None

    @classmethod
    def _transform_osm_reverse_result(
        cls, osm_result: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Transform an OSM reverse geocoding result to our standard format."""
        address = osm_result.get("address", {})

        # Extract address components
        street_number = address.get("house_number", "")
        street_name = address.get("road", "")
        street_address = f"{street_number} {street_name}".strip()

        city = (
            address.get("city")
            or address.get("town")
            or address.get("village")
            or address.get("municipality")
            or ""
        )

        state = (
            address.get("state")
            or address.get("province")
            or address.get("region")
            or ""
        )

        country = address.get("country", "")
        postal_code = address.get("postcode", "")

        # Build formatted address
        address_parts = []
        if street_address:
            address_parts.append(street_address)
        if city:
            address_parts.append(city)
        if state:
            address_parts.append(state)
        if postal_code:
            address_parts.append(postal_code)
        if country:
            address_parts.append(country)

        formatted_address = ", ".join(address_parts)

        return {
            "name": osm_result.get("display_name", ""),
            "latitude": float(osm_result["lat"]),
            "longitude": float(osm_result["lon"]),
            "formatted_address": formatted_address,
            "street_address": street_address,
            "city": city,
            "state": state,
            "country": country,
            "postal_code": postal_code,
            "osm_id": osm_result.get("osm_id"),
            "osm_type": osm_result.get("osm_type"),
            "place_type": osm_result.get("type", ""),
        }

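# --- Usage sketch (illustrative, not part of the original file) --------------
# Nominatim responses are cached for an hour, so repeated lookups of the same
# query stay cheap. The query string is arbitrary.
def _example_location_search():
    results = ParkLocationService.search_locations("Cedar Point", limit=3)
    return [(r["name"], r["latitude"], r["longitude"]) for r in results["results"]]
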
@@ -1,241 +0,0 @@
|
||||
"""
|
||||
Park-specific media service for ThrillWiki.
|
||||
|
||||
This module provides media management functionality specific to parks.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Optional, Dict, Any
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.db import transaction
|
||||
from django.contrib.auth import get_user_model
|
||||
from apps.core.services.media_service import MediaService
|
||||
from ..models import Park, ParkPhoto
|
||||
|
||||
User = get_user_model()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ParkMediaService:
|
||||
"""Service for managing park-specific media operations."""
|
||||
|
||||
@staticmethod
|
||||
def upload_photo(
|
||||
park: Park,
|
||||
image_file: UploadedFile,
|
||||
user: User,
|
||||
caption: str = "",
|
||||
alt_text: str = "",
|
||||
is_primary: bool = False,
|
||||
auto_approve: bool = False,
|
||||
) -> ParkPhoto:
|
||||
"""
|
||||
Upload a photo for a park.
|
||||
|
||||
Args:
|
||||
park: Park instance
|
||||
image_file: Uploaded image file
|
||||
user: User uploading the photo
|
||||
caption: Photo caption
|
||||
alt_text: Alt text for accessibility
|
||||
is_primary: Whether this should be the primary photo
|
||||
auto_approve: Whether to auto-approve the photo
|
||||
|
||||
Returns:
|
||||
Created ParkPhoto instance
|
||||
|
||||
Raises:
|
||||
ValueError: If image validation fails
|
||||
"""
|
||||
# Validate image file
|
||||
is_valid, error_message = MediaService.validate_image_file(image_file)
|
||||
if not is_valid:
|
||||
raise ValueError(error_message)
|
||||
|
||||
# Process image
|
||||
processed_image = MediaService.process_image(image_file)
|
||||
|
||||
with transaction.atomic():
|
||||
# Create photo instance
|
||||
photo = ParkPhoto(
|
||||
park=park,
|
||||
image=processed_image,
|
||||
caption=caption or MediaService.generate_default_caption(user.username),
|
||||
alt_text=alt_text,
|
||||
is_primary=is_primary,
|
||||
is_approved=auto_approve,
|
||||
uploaded_by=user,
|
||||
)
|
||||
|
||||
# Extract EXIF date
|
||||
photo.date_taken = MediaService.extract_exif_date(processed_image)
|
||||
|
||||
photo.save()
|
||||
|
||||
logger.info(f"Photo uploaded for park {park.slug} by user {user.username}")
|
||||
return photo
|
||||
|
||||
@staticmethod
|
||||
def get_park_photos(
|
||||
park: Park, approved_only: bool = True, primary_first: bool = True
|
||||
) -> List[ParkPhoto]:
|
||||
"""
|
||||
Get photos for a park.
|
||||
|
||||
Args:
|
||||
park: Park instance
|
||||
approved_only: Whether to only return approved photos
|
||||
primary_first: Whether to order primary photos first
|
||||
|
||||
Returns:
|
||||
List of ParkPhoto instances
|
||||
"""
|
||||
queryset = park.photos.all()
|
||||
|
||||
if approved_only:
|
||||
queryset = queryset.filter(is_approved=True)
|
||||
|
||||
if primary_first:
|
||||
queryset = queryset.order_by("-is_primary", "-created_at")
|
||||
else:
|
||||
queryset = queryset.order_by("-created_at")
|
||||
|
||||
return list(queryset)
|
||||
|
||||
@staticmethod
|
||||
def get_primary_photo(park: Park) -> Optional[ParkPhoto]:
|
||||
"""
|
||||
Get the primary photo for a park.
|
||||
|
||||
Args:
|
||||
park: Park instance
|
||||
|
||||
Returns:
|
||||
Primary ParkPhoto instance or None
|
||||
"""
|
||||
# .first() returns None when there is no match, so no exception handling is needed
return park.photos.filter(is_primary=True, is_approved=True).first()
|
||||
|
||||
@staticmethod
|
||||
def set_primary_photo(park: Park, photo: ParkPhoto) -> bool:
|
||||
"""
|
||||
Set a photo as the primary photo for a park.
|
||||
|
||||
Args:
|
||||
park: Park instance
|
||||
photo: ParkPhoto to set as primary
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
if photo.park != park:
|
||||
return False
|
||||
|
||||
with transaction.atomic():
|
||||
# Unset current primary
|
||||
park.photos.filter(is_primary=True).update(is_primary=False)
|
||||
|
||||
# Set new primary
|
||||
photo.is_primary = True
|
||||
photo.save()
|
||||
|
||||
logger.info(f"Set photo {photo.pk} as primary for park {park.slug}")
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def approve_photo(photo: ParkPhoto, approved_by: User) -> bool:
|
||||
"""
|
||||
Approve a park photo.
|
||||
|
||||
Args:
|
||||
photo: ParkPhoto to approve
|
||||
approved_by: User approving the photo
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
photo.is_approved = True
|
||||
photo.save()
|
||||
|
||||
logger.info(f"Photo {photo.pk} approved by user {approved_by.username}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to approve photo {photo.pk}: {str(e)}")
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def delete_photo(photo: ParkPhoto, deleted_by: User) -> bool:
|
||||
"""
|
||||
Delete a park photo.
|
||||
|
||||
Args:
|
||||
photo: ParkPhoto to delete
|
||||
deleted_by: User deleting the photo
|
||||
|
||||
Returns:
|
||||
True if successful, False otherwise
|
||||
"""
|
||||
try:
|
||||
park_slug = photo.park.slug
|
||||
photo_id = photo.pk
|
||||
|
||||
# Delete the file and database record
|
||||
if photo.image:
|
||||
photo.image.delete(save=False)
|
||||
photo.delete()
|
||||
|
||||
logger.info(
|
||||
f"Photo {photo_id} deleted from park {park_slug} by user {deleted_by.username}"
|
||||
)
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete photo {photo.pk}: {str(e)}")
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_photo_stats(park: Park) -> Dict[str, Any]:
|
||||
"""
|
||||
Get photo statistics for a park.
|
||||
|
||||
Args:
|
||||
park: Park instance
|
||||
|
||||
Returns:
|
||||
Dictionary with photo statistics
|
||||
"""
|
||||
photos = park.photos.all()
|
||||
|
||||
return {
|
||||
"total_photos": photos.count(),
|
||||
"approved_photos": photos.filter(is_approved=True).count(),
|
||||
"pending_photos": photos.filter(is_approved=False).count(),
|
||||
"has_primary": photos.filter(is_primary=True).exists(),
|
||||
"recent_uploads": photos.order_by("-created_at")[:5].count(),
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def bulk_approve_photos(photos: List[ParkPhoto], approved_by: User) -> int:
|
||||
"""
|
||||
Bulk approve multiple photos.
|
||||
|
||||
Args:
|
||||
photos: List of ParkPhoto instances to approve
|
||||
approved_by: User approving the photos
|
||||
|
||||
Returns:
|
||||
Number of photos successfully approved
|
||||
"""
|
||||
approved_count = 0
|
||||
|
||||
with transaction.atomic():
|
||||
for photo in photos:
|
||||
if ParkMediaService.approve_photo(photo, approved_by):
|
||||
approved_count += 1
|
||||
|
||||
logger.info(
|
||||
f"Bulk approved {approved_count} photos by user {approved_by.username}"
|
||||
)
|
||||
return approved_count
|
||||
@@ -1,228 +0,0 @@
|
||||
"""
|
||||
Services for park-related business logic.
|
||||
Following Django styleguide pattern for business logic encapsulation.
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any, TYPE_CHECKING
|
||||
from django.db import transaction
|
||||
from django.db.models import Q
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
|
||||
from ..models import Park, ParkArea
|
||||
from .location_service import ParkLocationService
|
||||
|
||||
|
||||
class ParkService:
|
||||
"""Service for managing park operations."""
|
||||
|
||||
@staticmethod
|
||||
def create_park(
|
||||
*,
|
||||
name: str,
|
||||
description: str = "",
|
||||
status: str = "OPERATING",
|
||||
operator_id: Optional[int] = None,
|
||||
property_owner_id: Optional[int] = None,
|
||||
opening_date: Optional[str] = None,
|
||||
closing_date: Optional[str] = None,
|
||||
operating_season: str = "",
|
||||
size_acres: Optional[float] = None,
|
||||
website: str = "",
|
||||
location_data: Optional[Dict[str, Any]] = None,
|
||||
created_by: Optional["AbstractUser"] = None,
|
||||
) -> Park:
|
||||
"""
|
||||
Create a new park with validation and location handling.
|
||||
|
||||
Args:
|
||||
name: Park name
|
||||
description: Park description
|
||||
status: Operating status
|
||||
operator_id: ID of operating company
|
||||
property_owner_id: ID of property owner company
|
||||
opening_date: Opening date
|
||||
closing_date: Closing date
|
||||
operating_season: Operating season description
|
||||
size_acres: Park size in acres
|
||||
website: Park website URL
|
||||
location_data: Dictionary containing location information
|
||||
created_by: User creating the park
|
||||
|
||||
Returns:
|
||||
Created Park instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If park data is invalid
|
||||
"""
|
||||
with transaction.atomic():
|
||||
# Create park instance
|
||||
park = Park(
|
||||
name=name,
|
||||
description=description,
|
||||
status=status,
|
||||
opening_date=opening_date,
|
||||
closing_date=closing_date,
|
||||
operating_season=operating_season,
|
||||
size_acres=size_acres,
|
||||
website=website,
|
||||
)
|
||||
|
||||
# Set foreign key relationships if provided
|
||||
if operator_id:
|
||||
from apps.parks.models import Company
|
||||
|
||||
park.operator = Company.objects.get(id=operator_id)
|
||||
|
||||
if property_owner_id:
|
||||
from apps.parks.models import Company
|
||||
|
||||
park.property_owner = Company.objects.get(id=property_owner_id)
|
||||
|
||||
# CRITICAL STYLEGUIDE FIX: Call full_clean before save
|
||||
park.full_clean()
|
||||
park.save()
|
||||
|
||||
# Handle location if provided
|
||||
if location_data:
|
||||
ParkLocationService.create_park_location(park=park, **location_data)
|
||||
|
||||
return park
|
||||
|
||||
@staticmethod
|
||||
def update_park(
|
||||
*,
|
||||
park_id: int,
|
||||
updates: Dict[str, Any],
|
||||
updated_by: Optional["AbstractUser"] = None,
|
||||
) -> Park:
|
||||
"""
|
||||
Update an existing park with validation.
|
||||
|
||||
Args:
|
||||
park_id: ID of park to update
|
||||
updates: Dictionary of field updates
|
||||
updated_by: User performing the update
|
||||
|
||||
Returns:
|
||||
Updated Park instance
|
||||
|
||||
Raises:
|
||||
Park.DoesNotExist: If park doesn't exist
|
||||
ValidationError: If update data is invalid
|
||||
"""
|
||||
with transaction.atomic():
|
||||
park = Park.objects.select_for_update().get(id=park_id)
|
||||
|
||||
# Apply updates
|
||||
for field, value in updates.items():
|
||||
if hasattr(park, field):
|
||||
setattr(park, field, value)
|
||||
|
||||
# CRITICAL STYLEGUIDE FIX: Call full_clean before save
|
||||
park.full_clean()
|
||||
park.save()
|
||||
|
||||
return park
|
||||
|
||||
@staticmethod
|
||||
def delete_park(
|
||||
*, park_id: int, deleted_by: Optional["AbstractUser"] = None
|
||||
) -> bool:
|
||||
"""
|
||||
Soft delete a park by setting status to DEMOLISHED.
|
||||
|
||||
Args:
|
||||
park_id: ID of park to delete
|
||||
deleted_by: User performing the deletion
|
||||
|
||||
Returns:
|
||||
True if successfully deleted
|
||||
|
||||
Raises:
|
||||
Park.DoesNotExist: If park doesn't exist
|
||||
"""
|
||||
with transaction.atomic():
|
||||
park = Park.objects.select_for_update().get(id=park_id)
|
||||
park.status = "DEMOLISHED"
|
||||
|
||||
# CRITICAL STYLEGUIDE FIX: Call full_clean before save
|
||||
park.full_clean()
|
||||
park.save()
|
||||
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def create_park_area(
|
||||
*,
|
||||
park_id: int,
|
||||
name: str,
|
||||
description: str = "",
|
||||
created_by: Optional["AbstractUser"] = None,
|
||||
) -> ParkArea:
|
||||
"""
|
||||
Create a new area within a park.
|
||||
|
||||
Args:
|
||||
park_id: ID of the parent park
|
||||
name: Area name
|
||||
description: Area description
|
||||
created_by: User creating the area
|
||||
|
||||
Returns:
|
||||
Created ParkArea instance
|
||||
|
||||
Raises:
|
||||
Park.DoesNotExist: If park doesn't exist
|
||||
ValidationError: If area data is invalid
|
||||
"""
|
||||
park = Park.objects.get(id=park_id)
|
||||
|
||||
area = ParkArea(park=park, name=name, description=description)
|
||||
|
||||
# CRITICAL STYLEGUIDE FIX: Call full_clean before save
|
||||
area.full_clean()
|
||||
area.save()
|
||||
|
||||
return area
|
||||
|
||||
@staticmethod
|
||||
def update_park_statistics(*, park_id: int) -> Park:
|
||||
"""
|
||||
Recalculate and update park statistics (ride counts, ratings).
|
||||
|
||||
Args:
|
||||
park_id: ID of park to update statistics for
|
||||
|
||||
Returns:
|
||||
Updated Park instance with fresh statistics
|
||||
"""
|
||||
from apps.rides.models import Ride
|
||||
from apps.parks.models import ParkReview
|
||||
from django.db.models import Count, Avg
|
||||
|
||||
with transaction.atomic():
|
||||
park = Park.objects.select_for_update().get(id=park_id)
|
||||
|
||||
# Calculate ride counts
|
||||
ride_stats = Ride.objects.filter(park=park).aggregate(
|
||||
total_rides=Count("id"),
|
||||
coaster_count=Count("id", filter=Q(category__in=["RC", "WC"])),
|
||||
)
|
||||
|
||||
# Calculate average rating
|
||||
avg_rating = ParkReview.objects.filter(
|
||||
park=park, is_published=True
|
||||
).aggregate(avg_rating=Avg("rating"))["avg_rating"]
|
||||
|
||||
# Update park fields
|
||||
park.ride_count = ride_stats["total_rides"] or 0
|
||||
park.coaster_count = ride_stats["coaster_count"] or 0
|
||||
park.average_rating = avg_rating
|
||||
|
||||
# CRITICAL STYLEGUIDE FIX: Call full_clean before save
|
||||
park.full_clean()
|
||||
park.save()
|
||||
|
||||
return park
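# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of the keyword-only service API, assuming the import path
# below and that ParkLocationService.create_park_location accepts plain
# address fields; every value here is made up for illustration.
from apps.parks.services.park_service import ParkService  # assumed path

park = ParkService.create_park(
    name="Example Gardens",
    description="Hypothetical park used only for this sketch.",
    status="OPERATING",
    size_acres=120.0,
    website="https://example.com",
    location_data={
        "city": "Springfield",
        "state": "Illinois",
        "country": "United States",
    },
)

# Later edits go through the same service so full_clean() always runs
ParkService.update_park(park_id=park.id, updates={"operating_season": "May-October"})
ParkService.update_park_statistics(park_id=park.id)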
|
||||
@@ -1,698 +0,0 @@
|
||||
"""
|
||||
Road Trip Service for theme park planning using OpenStreetMap APIs.
|
||||
|
||||
This service provides functionality for:
|
||||
- Geocoding addresses using Nominatim
|
||||
- Route calculation using OSRM
|
||||
- Park discovery along routes
|
||||
- Multi-park trip planning
|
||||
- Proper rate limiting and caching
|
||||
"""
|
||||
|
||||
import hashlib
import time
|
||||
import math
|
||||
import logging
|
||||
import requests
|
||||
from typing import Dict, List, Optional, Any
|
||||
from dataclasses import dataclass
|
||||
from itertools import permutations
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.contrib.gis.geos import Point
|
||||
from django.contrib.gis.measure import Distance
|
||||
from apps.parks.models import Park
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class Coordinates:
|
||||
"""Represents latitude and longitude coordinates."""
|
||||
|
||||
latitude: float
|
||||
longitude: float
|
||||
|
||||
def to_list(self) -> List[float]:
|
||||
"""Return as [lat, lon] list."""
|
||||
return [self.latitude, self.longitude]
|
||||
|
||||
def to_point(self) -> Point:
|
||||
"""Convert to Django Point object."""
|
||||
return Point(self.longitude, self.latitude, srid=4326)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RouteInfo:
|
||||
"""Information about a calculated route."""
|
||||
|
||||
distance_km: float
|
||||
duration_minutes: int
|
||||
geometry: Optional[str] = None # Encoded polyline
|
||||
|
||||
@property
|
||||
def formatted_distance(self) -> str:
|
||||
"""Return formatted distance string."""
|
||||
if self.distance_km < 1:
|
||||
return f"{self.distance_km * 1000:.0f}m"
|
||||
return f"{self.distance_km:.1f}km"
|
||||
|
||||
@property
|
||||
def formatted_duration(self) -> str:
|
||||
"""Return formatted duration string."""
|
||||
hours = self.duration_minutes // 60
|
||||
minutes = self.duration_minutes % 60
|
||||
if hours == 0:
|
||||
return f"{minutes}min"
|
||||
elif minutes == 0:
|
||||
return f"{hours}h"
|
||||
else:
|
||||
return f"{hours}h {minutes}min"
|
||||
|
||||
|
||||
@dataclass
|
||||
class TripLeg:
|
||||
"""Represents one leg of a multi-park trip."""
|
||||
|
||||
from_park: "Park"
|
||||
to_park: "Park"
|
||||
route: RouteInfo
|
||||
|
||||
@property
|
||||
def parks_along_route(self) -> List["Park"]:
|
||||
"""Get parks along this route segment."""
|
||||
# This would be populated by find_parks_along_route
|
||||
return []
|
||||
|
||||
|
||||
@dataclass
|
||||
class RoadTrip:
|
||||
"""Complete road trip with multiple parks."""
|
||||
|
||||
parks: List["Park"]
|
||||
legs: List[TripLeg]
|
||||
total_distance_km: float
|
||||
total_duration_minutes: int
|
||||
|
||||
@property
|
||||
def formatted_total_distance(self) -> str:
|
||||
"""Return formatted total distance."""
|
||||
return f"{self.total_distance_km:.1f}km"
|
||||
|
||||
@property
|
||||
def formatted_total_duration(self) -> str:
|
||||
"""Return formatted total duration."""
|
||||
hours = self.total_duration_minutes // 60
|
||||
minutes = self.total_duration_minutes % 60
|
||||
if hours == 0:
|
||||
return f"{minutes}min"
|
||||
elif minutes == 0:
|
||||
return f"{hours}h"
|
||||
else:
|
||||
return f"{hours}h {minutes}min"
|
||||
|
||||
|
||||
class RateLimiter:
|
||||
"""Simple rate limiter for API requests."""
|
||||
|
||||
def __init__(self, max_requests_per_second: float = 1.0):
|
||||
self.max_requests_per_second = max_requests_per_second
|
||||
self.min_interval = 1.0 / max_requests_per_second
|
||||
self.last_request_time = 0.0
|
||||
|
||||
def wait_if_needed(self):
|
||||
"""Wait if necessary to respect rate limits."""
|
||||
current_time = time.time()
|
||||
time_since_last = current_time - self.last_request_time
|
||||
|
||||
if time_since_last < self.min_interval:
|
||||
wait_time = self.min_interval - time_since_last
|
||||
time.sleep(wait_time)
|
||||
|
||||
self.last_request_time = time.time()
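# --- Illustrative check (not part of the original module) ---
# A minimal sketch of the spacing the limiter enforces: at one request per
# second, the second call blocks until roughly a second has elapsed since the
# first.
_limiter = RateLimiter(max_requests_per_second=1.0)
_started = time.time()
_limiter.wait_if_needed()  # first call returns immediately
_limiter.wait_if_needed()  # second call sleeps for the remaining interval
print(f"two calls took {time.time() - _started:.2f}s")  # expected: roughly 1.00s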
|
||||
|
||||
|
||||
class OSMAPIException(Exception):
|
||||
"""Exception for OSM API related errors."""
|
||||
|
||||
|
||||
class RoadTripService:
|
||||
"""
|
||||
Service for planning road trips between theme parks using OpenStreetMap APIs.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.nominatim_base_url = "https://nominatim.openstreetmap.org"
|
||||
self.osrm_base_url = "http://router.project-osrm.org/route/v1/driving"
|
||||
|
||||
# Configuration from Django settings
|
||||
self.cache_timeout = getattr(settings, "ROADTRIP_CACHE_TIMEOUT", 3600 * 24)
|
||||
self.route_cache_timeout = getattr(
|
||||
settings, "ROADTRIP_ROUTE_CACHE_TIMEOUT", 3600 * 6
|
||||
)
|
||||
self.user_agent = getattr(
|
||||
settings, "ROADTRIP_USER_AGENT", "ThrillWiki Road Trip Planner"
|
||||
)
|
||||
self.request_timeout = getattr(settings, "ROADTRIP_REQUEST_TIMEOUT", 10)
|
||||
self.max_retries = getattr(settings, "ROADTRIP_MAX_RETRIES", 3)
|
||||
self.backoff_factor = getattr(settings, "ROADTRIP_BACKOFF_FACTOR", 2)
|
||||
|
||||
# Rate limiter
|
||||
max_rps = getattr(settings, "ROADTRIP_MAX_REQUESTS_PER_SECOND", 1)
|
||||
self.rate_limiter = RateLimiter(max_rps)
|
||||
|
||||
# Request session with proper headers
|
||||
self.session = requests.Session()
|
||||
self.session.headers.update(
|
||||
{
|
||||
"User-Agent": self.user_agent,
|
||||
"Accept": "application/json",
|
||||
}
|
||||
)
|
||||
|
||||
def _make_request(self, url: str, params: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""
|
||||
Make HTTP request with rate limiting, retries, and error handling.
|
||||
"""
|
||||
self.rate_limiter.wait_if_needed()
|
||||
|
||||
for attempt in range(self.max_retries):
|
||||
try:
|
||||
response = self.session.get(
|
||||
url, params=params, timeout=self.request_timeout
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.warning(f"Request attempt {attempt + 1} failed: {e}")
|
||||
|
||||
if attempt < self.max_retries - 1:
|
||||
wait_time = self.backoff_factor**attempt
|
||||
time.sleep(wait_time)
|
||||
else:
|
||||
raise OSMAPIException(
|
||||
f"Failed to make request after {self.max_retries} attempts: {e}"
|
||||
)
|
||||
|
||||
def geocode_address(self, address: str) -> Optional[Coordinates]:
|
||||
"""
|
||||
Convert address to coordinates using Nominatim geocoding service.
|
||||
|
||||
Args:
|
||||
address: Address string to geocode
|
||||
|
||||
Returns:
|
||||
Coordinates object or None if geocoding fails
|
||||
"""
|
||||
if not address or not address.strip():
|
||||
return None
|
||||
|
||||
# Check cache first
|
||||
cache_key = f"roadtrip:geocode:{hash(address.lower().strip())}"
|
||||
cached_result = cache.get(cache_key)
|
||||
if cached_result:
|
||||
return Coordinates(**cached_result)
|
||||
|
||||
try:
|
||||
params = {
|
||||
"q": address.strip(),
|
||||
"format": "json",
|
||||
"limit": 1,
|
||||
"addressdetails": 1,
|
||||
}
|
||||
|
||||
url = f"{self.nominatim_base_url}/search"
|
||||
response = self._make_request(url, params)
|
||||
|
||||
if response and len(response) > 0:
|
||||
result = response[0]
|
||||
coords = Coordinates(
|
||||
latitude=float(result["lat"]),
|
||||
longitude=float(result["lon"]),
|
||||
)
|
||||
|
||||
# Cache the result
|
||||
cache.set(
|
||||
cache_key,
|
||||
{
|
||||
"latitude": coords.latitude,
|
||||
"longitude": coords.longitude,
|
||||
},
|
||||
self.cache_timeout,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Geocoded '{address}' to {coords.latitude}, {coords.longitude}"
|
||||
)
|
||||
return coords
|
||||
else:
|
||||
logger.warning(f"No geocoding results for address: {address}")
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Geocoding failed for '{address}': {e}")
|
||||
return None
|
||||
|
||||
def calculate_route(
|
||||
self, start_coords: Coordinates, end_coords: Coordinates
|
||||
) -> Optional[RouteInfo]:
|
||||
"""
|
||||
Calculate route between two coordinate points using OSRM.
|
||||
|
||||
Args:
|
||||
start_coords: Starting coordinates
|
||||
end_coords: Ending coordinates
|
||||
|
||||
Returns:
|
||||
RouteInfo object or None if routing fails
|
||||
"""
|
||||
if not start_coords or not end_coords:
|
||||
return None
|
||||
|
||||
# Check cache first
|
||||
cache_key = f"roadtrip:route:{start_coords.latitude},{start_coords.longitude}:{
|
||||
end_coords.latitude
|
||||
},{end_coords.longitude}"
|
||||
cached_result = cache.get(cache_key)
|
||||
if cached_result:
|
||||
return RouteInfo(**cached_result)
|
||||
|
||||
try:
|
||||
# Format coordinates for OSRM (lon,lat format)
|
||||
coords_string = f"{start_coords.longitude},{start_coords.latitude};{
|
||||
end_coords.longitude
|
||||
},{end_coords.latitude}"
|
||||
url = f"{self.osrm_base_url}/{coords_string}"
|
||||
|
||||
params = {
|
||||
"overview": "full",
|
||||
"geometries": "polyline",
|
||||
"steps": "false",
|
||||
}
|
||||
|
||||
response = self._make_request(url, params)
|
||||
|
||||
if response.get("code") == "Ok" and response.get("routes"):
|
||||
route_data = response["routes"][0]
|
||||
|
||||
# Distance is in meters, convert to km
|
||||
distance_km = route_data["distance"] / 1000.0
|
||||
# Duration is in seconds, convert to minutes
|
||||
duration_minutes = int(route_data["duration"] / 60)
|
||||
|
||||
route_info = RouteInfo(
|
||||
distance_km=distance_km,
|
||||
duration_minutes=duration_minutes,
|
||||
geometry=route_data.get("geometry"),
|
||||
)
|
||||
|
||||
# Cache the result
|
||||
cache.set(
|
||||
cache_key,
|
||||
{
|
||||
"distance_km": route_info.distance_km,
|
||||
"duration_minutes": route_info.duration_minutes,
|
||||
"geometry": route_info.geometry,
|
||||
},
|
||||
self.route_cache_timeout,
|
||||
)
|
||||
|
||||
logger.info(
    f"Route calculated: {route_info.formatted_distance}, "
    f"{route_info.formatted_duration}"
)
|
||||
return route_info
|
||||
else:
|
||||
# Fallback to straight-line distance calculation
|
||||
logger.warning(
|
||||
"OSRM routing failed, falling back to straight-line distance"
|
||||
)
|
||||
return self._calculate_straight_line_route(start_coords, end_coords)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Route calculation failed: {e}")
|
||||
# Fallback to straight-line distance
|
||||
return self._calculate_straight_line_route(start_coords, end_coords)
|
||||
|
||||
def _calculate_straight_line_route(
|
||||
self, start_coords: Coordinates, end_coords: Coordinates
|
||||
) -> RouteInfo:
|
||||
"""
|
||||
Calculate straight-line distance as fallback when routing fails.
|
||||
"""
|
||||
# Haversine formula for great-circle distance
|
||||
lat1, lon1 = (
|
||||
math.radians(start_coords.latitude),
|
||||
math.radians(start_coords.longitude),
|
||||
)
|
||||
lat2, lon2 = (
|
||||
math.radians(end_coords.latitude),
|
||||
math.radians(end_coords.longitude),
|
||||
)
|
||||
|
||||
dlat = lat2 - lat1
|
||||
dlon = lon2 - lon1
|
||||
|
||||
a = (
|
||||
math.sin(dlat / 2) ** 2
|
||||
+ math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
|
||||
)
|
||||
c = 2 * math.asin(math.sqrt(a))
|
||||
|
||||
# Earth's radius in kilometers
|
||||
earth_radius_km = 6371.0
|
||||
distance_km = earth_radius_km * c
|
||||
|
||||
# Estimate driving time (assume average 80 km/h with 25% extra for
|
||||
# roads)
|
||||
estimated_duration_minutes = int((distance_km * 1.25 / 80.0) * 60)
|
||||
|
||||
return RouteInfo(
|
||||
distance_km=distance_km,
|
||||
duration_minutes=estimated_duration_minutes,
|
||||
geometry=None,
|
||||
)
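# --- Illustrative check (not part of the original module; run after importing it) ---
# Rough numbers for the fallback, assuming Django settings are configured so
# RoadTripService() can be built: one degree of latitude is about 111 km, and
# the time estimate pads that by 25% at 80 km/h, so 111.2 * 1.25 / 80 * 60
# comes out near 104 minutes ("1h 44min").
service = RoadTripService()          # the fallback itself makes no network calls
route = service._calculate_straight_line_route(
    Coordinates(latitude=40.0, longitude=-75.0),
    Coordinates(latitude=41.0, longitude=-75.0),
)
print(route.formatted_distance)      # ~111.2km
print(route.formatted_duration)      # 1h 44min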
|
||||
|
||||
def find_parks_along_route(
|
||||
self, start_park: "Park", end_park: "Park", max_detour_km: float = 50
|
||||
) -> List["Park"]:
|
||||
"""
|
||||
Find parks along a route within specified detour distance.
|
||||
|
||||
Args:
|
||||
start_park: Starting park
|
||||
end_park: Ending park
|
||||
max_detour_km: Maximum detour distance in kilometers
|
||||
|
||||
Returns:
|
||||
List of parks along the route
|
||||
"""
|
||||
from apps.parks.models import Park
|
||||
|
||||
if not hasattr(start_park, "location") or not hasattr(end_park, "location"):
|
||||
return []
|
||||
|
||||
if not start_park.location or not end_park.location:
|
||||
return []
|
||||
|
||||
start_coords = start_park.coordinates
|
||||
end_coords = end_park.coordinates
|
||||
|
||||
if not start_coords or not end_coords:
|
||||
return []
|
||||
|
||||
start_point = Point(start_coords[1], start_coords[0], srid=4326) # lon, lat
|
||||
# end_point is not used in this method - we use coordinates directly
|
||||
|
||||
# Find all parks within a reasonable distance from both start and end
|
||||
max_search_distance = Distance(km=max_detour_km * 2)
|
||||
|
||||
candidate_parks = (
|
||||
Park.objects.filter(
|
||||
location__point__distance_lte=(
|
||||
start_point,
|
||||
max_search_distance,
|
||||
)
|
||||
)
|
||||
.exclude(id__in=[start_park.id, end_park.id])
|
||||
.select_related("location")
|
||||
)
|
||||
|
||||
parks_along_route = []
|
||||
|
||||
for park in candidate_parks:
|
||||
if not park.location or not park.location.point:
|
||||
continue
|
||||
|
||||
park_coords = park.coordinates
|
||||
if not park_coords:
|
||||
continue
|
||||
|
||||
# Calculate detour distance
|
||||
detour_distance = self._calculate_detour_distance(
|
||||
Coordinates(*start_coords),
|
||||
Coordinates(*end_coords),
|
||||
Coordinates(*park_coords),
|
||||
)
|
||||
|
||||
if detour_distance and detour_distance <= max_detour_km:
|
||||
parks_along_route.append(park)
|
||||
|
||||
return parks_along_route
|
||||
|
||||
def _calculate_detour_distance(
|
||||
self, start: Coordinates, end: Coordinates, waypoint: Coordinates
|
||||
) -> Optional[float]:
|
||||
"""
|
||||
Calculate the detour distance when visiting a waypoint.
|
||||
"""
|
||||
try:
|
||||
# Direct route distance
|
||||
direct_route = self.calculate_route(start, end)
|
||||
if not direct_route:
|
||||
return None
|
||||
|
||||
# Route via waypoint
|
||||
route_to_waypoint = self.calculate_route(start, waypoint)
|
||||
route_from_waypoint = self.calculate_route(waypoint, end)
|
||||
|
||||
if not route_to_waypoint or not route_from_waypoint:
|
||||
return None
|
||||
|
||||
detour_distance = (
|
||||
route_to_waypoint.distance_km + route_from_waypoint.distance_km
|
||||
) - direct_route.distance_km
|
||||
return max(0, detour_distance) # Don't return negative detours
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to calculate detour distance: {e}")
|
||||
return None
|
||||
|
||||
def create_multi_park_trip(self, park_list: List["Park"]) -> Optional[RoadTrip]:
|
||||
"""
|
||||
Create optimized multi-park road trip using simple nearest neighbor heuristic.
|
||||
|
||||
Args:
|
||||
park_list: List of parks to visit
|
||||
|
||||
Returns:
|
||||
RoadTrip object with optimized route
|
||||
"""
|
||||
if len(park_list) < 2:
|
||||
return None
|
||||
|
||||
# For small numbers of parks, try all permutations
|
||||
if len(park_list) <= 6:
|
||||
return self._optimize_trip_exhaustive(park_list)
|
||||
else:
|
||||
return self._optimize_trip_nearest_neighbor(park_list)
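# --- Illustrative note (not part of the original module) ---
# Why the cutoff sits at six parks: the exhaustive search keeps the first park
# fixed and orders the rest, so six parks mean 5! = 120 candidate routes, each
# needing cached or live route lookups, while seven parks would mean 6! = 720.
print(math.factorial(5), math.factorial(6))  # 120 720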
|
||||
|
||||
def _optimize_trip_exhaustive(self, park_list: List["Park"]) -> Optional[RoadTrip]:
|
||||
"""
|
||||
Find optimal route by testing all permutations (for small lists).
|
||||
"""
|
||||
best_trip = None
|
||||
best_distance = float("inf")
|
||||
|
||||
# Try all possible orders (excluding the first park as starting point)
|
||||
for perm in permutations(park_list[1:]):
|
||||
ordered_parks = [park_list[0]] + list(perm)
|
||||
trip = self._create_trip_from_order(ordered_parks)
|
||||
|
||||
if trip and trip.total_distance_km < best_distance:
|
||||
best_distance = trip.total_distance_km
|
||||
best_trip = trip
|
||||
|
||||
return best_trip
|
||||
|
||||
def _optimize_trip_nearest_neighbor(
|
||||
self, park_list: List["Park"]
|
||||
) -> Optional[RoadTrip]:
|
||||
"""
|
||||
Optimize trip using nearest neighbor heuristic (for larger lists).
|
||||
"""
|
||||
if not park_list:
|
||||
return None
|
||||
|
||||
# Start with the first park
|
||||
current_park = park_list[0]
|
||||
ordered_parks = [current_park]
|
||||
remaining_parks = park_list[1:]
|
||||
|
||||
while remaining_parks:
|
||||
# Find nearest unvisited park
|
||||
nearest_park = None
|
||||
min_distance = float("inf")
|
||||
|
||||
current_coords = current_park.coordinates
|
||||
if not current_coords:
|
||||
break
|
||||
|
||||
for park in remaining_parks:
|
||||
park_coords = park.coordinates
|
||||
if not park_coords:
|
||||
continue
|
||||
|
||||
route = self.calculate_route(
|
||||
Coordinates(*current_coords), Coordinates(*park_coords)
|
||||
)
|
||||
|
||||
if route and route.distance_km < min_distance:
|
||||
min_distance = route.distance_km
|
||||
nearest_park = park
|
||||
|
||||
if nearest_park:
|
||||
ordered_parks.append(nearest_park)
|
||||
remaining_parks.remove(nearest_park)
|
||||
current_park = nearest_park
|
||||
else:
|
||||
break
|
||||
|
||||
return self._create_trip_from_order(ordered_parks)
|
||||
|
||||
def _create_trip_from_order(
|
||||
self, ordered_parks: List["Park"]
|
||||
) -> Optional[RoadTrip]:
|
||||
"""
|
||||
Create a RoadTrip object from an ordered list of parks.
|
||||
"""
|
||||
if len(ordered_parks) < 2:
|
||||
return None
|
||||
|
||||
legs = []
|
||||
total_distance = 0
|
||||
total_duration = 0
|
||||
|
||||
for i in range(len(ordered_parks) - 1):
|
||||
from_park = ordered_parks[i]
|
||||
to_park = ordered_parks[i + 1]
|
||||
|
||||
from_coords = from_park.coordinates
|
||||
to_coords = to_park.coordinates
|
||||
|
||||
if not from_coords or not to_coords:
|
||||
continue
|
||||
|
||||
route = self.calculate_route(
|
||||
Coordinates(*from_coords), Coordinates(*to_coords)
|
||||
)
|
||||
|
||||
if route:
|
||||
legs.append(TripLeg(from_park=from_park, to_park=to_park, route=route))
|
||||
total_distance += route.distance_km
|
||||
total_duration += route.duration_minutes
|
||||
|
||||
if not legs:
|
||||
return None
|
||||
|
||||
return RoadTrip(
|
||||
parks=ordered_parks,
|
||||
legs=legs,
|
||||
total_distance_km=total_distance,
|
||||
total_duration_minutes=total_duration,
|
||||
)
|
||||
|
||||
def get_park_distances(
|
||||
self, center_park: "Park", radius_km: float = 100
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get all parks within radius of a center park with distances.
|
||||
|
||||
Args:
|
||||
center_park: Center park for search
|
||||
radius_km: Search radius in kilometers
|
||||
|
||||
Returns:
|
||||
List of dictionaries with park and distance information
|
||||
"""
|
||||
from apps.parks.models import Park
|
||||
|
||||
if not hasattr(center_park, "location") or not center_park.location:
|
||||
return []
|
||||
|
||||
center_coords = center_park.coordinates
|
||||
if not center_coords:
|
||||
return []
|
||||
|
||||
center_point = Point(center_coords[1], center_coords[0], srid=4326) # lon, lat
|
||||
search_distance = Distance(km=radius_km)
|
||||
|
||||
nearby_parks = (
|
||||
Park.objects.filter(
|
||||
location__point__distance_lte=(center_point, search_distance)
|
||||
)
|
||||
.exclude(id=center_park.id)
|
||||
.select_related("location")
|
||||
)
|
||||
|
||||
results = []
|
||||
|
||||
for park in nearby_parks:
|
||||
park_coords = park.coordinates
|
||||
if not park_coords:
|
||||
continue
|
||||
|
||||
route = self.calculate_route(
|
||||
Coordinates(*center_coords), Coordinates(*park_coords)
|
||||
)
|
||||
|
||||
if route:
|
||||
results.append(
|
||||
{
|
||||
"park": park,
|
||||
"distance_km": route.distance_km,
|
||||
"duration_minutes": route.duration_minutes,
|
||||
"formatted_distance": route.formatted_distance,
|
||||
"formatted_duration": route.formatted_duration,
|
||||
}
|
||||
)
|
||||
|
||||
# Sort by distance
|
||||
results.sort(key=lambda x: x["distance_km"])
|
||||
|
||||
return results
|
||||
|
||||
def geocode_park_if_needed(self, park: "Park") -> bool:
|
||||
"""
|
||||
Geocode park location if coordinates are missing.
|
||||
|
||||
Args:
|
||||
park: Park to geocode
|
||||
|
||||
Returns:
|
||||
True if geocoding succeeded or wasn't needed, False otherwise
|
||||
"""
|
||||
if not hasattr(park, "location") or not park.location:
|
||||
return False
|
||||
|
||||
location = park.location
|
||||
|
||||
# If we already have coordinates, no need to geocode
|
||||
if location.point:
|
||||
return True
|
||||
|
||||
# Build address string for geocoding
|
||||
address_parts = [
|
||||
park.name,
|
||||
location.street_address,
|
||||
location.city,
|
||||
location.state,
|
||||
location.country,
|
||||
]
|
||||
address = ", ".join(part for part in address_parts if part)
|
||||
|
||||
if not address:
|
||||
return False
|
||||
|
||||
coords = self.geocode_address(address)
|
||||
if coords:
|
||||
location.set_coordinates(coords.latitude, coords.longitude)
|
||||
location.save()
|
||||
logger.info(
|
||||
f"Geocoded park '{park.name}' to {coords.latitude}, {coords.longitude}"
|
||||
)
|
||||
return True
|
||||
|
||||
return False
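# --- Illustrative usage (not part of the original module; run after importing it) ---
# A minimal sketch of routing between two parks that already have coordinates;
# the slugs are hypothetical, and the calls hit the public Nominatim/OSRM
# endpoints, so the rate limiter will pace them.
service = RoadTripService()
start = Park.objects.select_related("location").get(slug="first-park")
finish = Park.objects.select_related("location").get(slug="second-park")

route = service.calculate_route(
    Coordinates(*start.coordinates), Coordinates(*finish.coordinates)
)
if route:
    print(route.formatted_distance, route.formatted_duration)

# Parks worth a detour of up to 40 km along the way
for park in service.find_parks_along_route(start, finish, max_detour_km=40):
    print(park.name)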
|
||||
@@ -1,34 +0,0 @@
|
||||
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver
from django.db.models import Q

from apps.rides.models import Ride
from .models import Park


def update_park_ride_counts(park):
    """Update ride_count and coaster_count for a park"""
    operating_rides = Q(status="OPERATING")

    # Count total operating rides
    ride_count = park.rides.filter(operating_rides).count()

    # Count total operating roller coasters
    coaster_count = park.rides.filter(operating_rides, category="RC").count()

    # Update park counts
    Park.objects.filter(id=park.id).update(
        ride_count=ride_count, coaster_count=coaster_count
    )


@receiver(post_save, sender=Ride)
def ride_saved(sender, instance, **kwargs):
    """Update park counts when a ride is saved"""
    update_park_ride_counts(instance.park)


@receiver(post_delete, sender=Ride)
def ride_deleted(sender, instance, **kwargs):
    """Update park counts when a ride is deleted"""
    update_park_ride_counts(instance.park)
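# --- Illustrative behaviour (not part of the original module) ---
# A minimal sketch of what the receivers above keep in sync; the Ride field
# names and values are assumptions based on how they are queried elsewhere in
# this repository.
park = Park.objects.get(slug="example-park")
Ride.objects.create(
    park=park, name="Example Coaster", category="RC", status="OPERATING"
)

park.refresh_from_db()
print(park.ride_count, park.coaster_count)  # both bumped by the post_save receiver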
|
||||
File diff suppressed because it is too large
@@ -1,550 +0,0 @@
|
||||
/**
|
||||
* Enhanced Parks Search and Filter Management
|
||||
* Provides comprehensive UX improvements for the parks listing page
|
||||
*/
|
||||
|
||||
class ParkSearchManager {
|
||||
constructor() {
|
||||
this.debounceTimers = new Map();
|
||||
this.filterState = new Map();
|
||||
this.requestCount = 0;
|
||||
this.lastRequestTime = 0;
|
||||
|
||||
this.init();
|
||||
}
|
||||
|
||||
init() {
|
||||
this.setupEventListeners();
|
||||
this.initializeLazyLoading();
|
||||
this.setupKeyboardNavigation();
|
||||
this.restoreFilterState();
|
||||
this.setupPerformanceOptimizations();
|
||||
}
|
||||
|
||||
setupEventListeners() {
|
||||
// Enhanced HTMX request handling
|
||||
document.addEventListener('htmx:configRequest', (evt) => this.handleConfigRequest(evt));
|
||||
document.addEventListener('htmx:beforeRequest', (evt) => this.handleBeforeRequest(evt));
|
||||
document.addEventListener('htmx:afterRequest', (evt) => this.handleAfterRequest(evt));
|
||||
document.addEventListener('htmx:responseError', (evt) => this.handleResponseError(evt));
|
||||
document.addEventListener('htmx:historyRestore', (evt) => this.handleHistoryRestore(evt));
|
||||
document.addEventListener('htmx:afterSwap', (evt) => this.handleAfterSwap(evt));
|
||||
|
||||
// Enhanced form interactions
|
||||
document.addEventListener('input', (evt) => this.handleInput(evt));
|
||||
document.addEventListener('change', (evt) => this.handleChange(evt));
|
||||
// focus and blur do not bubble, so listen in the capture phase
document.addEventListener('focus', (evt) => this.handleFocus(evt), true);
document.addEventListener('blur', (evt) => this.handleBlur(evt), true);
|
||||
|
||||
// Search suggestions
|
||||
document.addEventListener('keydown', (evt) => this.handleKeydown(evt));
|
||||
|
||||
// Window events
|
||||
window.addEventListener('beforeunload', () => this.saveFilterState());
|
||||
window.addEventListener('resize', this.debounce(() => this.handleResize(), 250));
|
||||
}
|
||||
|
||||
handleConfigRequest(evt) {
|
||||
// Preserve view mode
|
||||
const parkResults = document.getElementById('park-results');
|
||||
if (parkResults) {
|
||||
const viewMode = parkResults.getAttribute('data-view-mode');
|
||||
if (viewMode) {
|
||||
evt.detail.parameters['view_mode'] = viewMode;
|
||||
}
|
||||
}
|
||||
|
||||
// Preserve search terms
|
||||
const searchInput = document.querySelector('input[name="search"]');
|
||||
if (searchInput && searchInput.value) {
|
||||
evt.detail.parameters['search'] = searchInput.value;
|
||||
}
|
||||
|
||||
// Add request tracking
|
||||
evt.detail.parameters['_req_id'] = ++this.requestCount;
|
||||
this.lastRequestTime = Date.now();
|
||||
}
|
||||
|
||||
handleBeforeRequest(evt) {
|
||||
const target = evt.detail.target;
|
||||
if (target) {
|
||||
target.classList.add('htmx-requesting');
|
||||
this.showLoadingIndicator(target);
|
||||
}
|
||||
|
||||
// Disable form elements during request
|
||||
this.toggleFormElements(false);
|
||||
|
||||
// Track request analytics
|
||||
this.trackFilterUsage(evt);
|
||||
}
|
||||
|
||||
handleAfterRequest(evt) {
|
||||
const target = evt.detail.target;
|
||||
if (target) {
|
||||
target.classList.remove('htmx-requesting');
|
||||
this.hideLoadingIndicator(target);
|
||||
}
|
||||
|
||||
// Re-enable form elements
|
||||
this.toggleFormElements(true);
|
||||
|
||||
// Handle response timing
|
||||
const responseTime = Date.now() - this.lastRequestTime;
|
||||
if (responseTime > 3000) {
|
||||
this.showPerformanceWarning();
|
||||
}
|
||||
}
|
||||
|
||||
handleResponseError(evt) {
|
||||
this.hideLoadingIndicator(evt.detail.target);
|
||||
this.toggleFormElements(true);
|
||||
this.showErrorMessage('Failed to load results. Please try again.');
|
||||
}
|
||||
|
||||
handleAfterSwap(evt) {
|
||||
if (evt.detail.target.id === 'results-container') {
|
||||
this.initializeLazyLoading(evt.detail.target);
|
||||
this.updateResultsInfo(evt.detail.target);
|
||||
this.animateResults(evt.detail.target);
|
||||
}
|
||||
}
|
||||
|
||||
handleHistoryRestore(evt) {
|
||||
const parkResults = document.getElementById('park-results');
|
||||
if (parkResults && evt.detail.path) {
|
||||
const url = new URL(evt.detail.path, window.location.origin);
|
||||
const viewMode = url.searchParams.get('view_mode');
|
||||
if (viewMode) {
|
||||
parkResults.setAttribute('data-view-mode', viewMode);
|
||||
}
|
||||
}
|
||||
|
||||
// Restore filter state from URL
|
||||
this.restoreFiltersFromURL(evt.detail.path);
|
||||
}
|
||||
|
||||
handleInput(evt) {
|
||||
if (evt.target.type === 'search' || evt.target.type === 'text') {
|
||||
this.debounceInput(evt.target);
|
||||
}
|
||||
}
|
||||
|
||||
handleChange(evt) {
|
||||
if (evt.target.closest('#filter-form')) {
|
||||
this.updateFilterState();
|
||||
this.saveFilterState();
|
||||
}
|
||||
}
|
||||
|
||||
handleFocus(evt) {
|
||||
if (evt.target.type === 'search') {
|
||||
this.highlightSearchSuggestions(evt.target);
|
||||
}
|
||||
}
|
||||
|
||||
handleBlur(evt) {
|
||||
if (evt.target.type === 'search') {
|
||||
// Delay hiding suggestions to allow for clicks
|
||||
setTimeout(() => this.hideSearchSuggestions(), 150);
|
||||
}
|
||||
}
|
||||
|
||||
handleKeydown(evt) {
|
||||
if (evt.target.type === 'search') {
|
||||
this.handleSearchKeyboard(evt);
|
||||
}
|
||||
}
|
||||
|
||||
handleResize() {
|
||||
// Responsive adjustments
|
||||
this.adjustLayoutForViewport();
|
||||
}
|
||||
|
||||
debounceInput(input) {
|
||||
const key = input.name || input.id;
|
||||
if (this.debounceTimers.has(key)) {
|
||||
clearTimeout(this.debounceTimers.get(key));
|
||||
}
|
||||
|
||||
const delay = input.type === 'search' ? 300 : 500;
|
||||
const timer = setTimeout(() => {
|
||||
if (input.form) {
|
||||
htmx.trigger(input.form, 'change');
|
||||
}
|
||||
this.debounceTimers.delete(key);
|
||||
}, delay);
|
||||
|
||||
this.debounceTimers.set(key, timer);
|
||||
}
|
||||
|
||||
handleSearchKeyboard(evt) {
|
||||
const suggestions = document.getElementById('search-results');
|
||||
if (!suggestions) return;
|
||||
|
||||
const items = suggestions.querySelectorAll('[role="option"]');
|
||||
let activeIndex = Array.from(items).findIndex(item =>
|
||||
item.classList.contains('active') || item.classList.contains('highlighted')
|
||||
);
|
||||
|
||||
switch (evt.key) {
|
||||
case 'ArrowDown':
|
||||
evt.preventDefault();
|
||||
activeIndex = Math.min(activeIndex + 1, items.length - 1);
|
||||
this.highlightSuggestion(items, activeIndex);
|
||||
break;
|
||||
|
||||
case 'ArrowUp':
|
||||
evt.preventDefault();
|
||||
activeIndex = Math.max(activeIndex - 1, -1);
|
||||
this.highlightSuggestion(items, activeIndex);
|
||||
break;
|
||||
|
||||
case 'Enter':
|
||||
if (activeIndex >= 0 && items[activeIndex]) {
|
||||
evt.preventDefault();
|
||||
items[activeIndex].click();
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Escape':
|
||||
this.hideSearchSuggestions();
|
||||
evt.target.blur();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
highlightSuggestion(items, activeIndex) {
|
||||
items.forEach((item, index) => {
|
||||
item.classList.toggle('active', index === activeIndex);
|
||||
item.classList.toggle('highlighted', index === activeIndex);
|
||||
});
|
||||
}
|
||||
|
||||
highlightSearchSuggestions(input) {
|
||||
const suggestions = document.getElementById('search-results');
|
||||
if (suggestions && input.value) {
|
||||
suggestions.style.display = 'block';
|
||||
}
|
||||
}
|
||||
|
||||
hideSearchSuggestions() {
|
||||
const suggestions = document.getElementById('search-results');
|
||||
if (suggestions) {
|
||||
suggestions.style.display = 'none';
|
||||
}
|
||||
}
|
||||
|
||||
initializeLazyLoading(container = document) {
|
||||
if (!('IntersectionObserver' in window)) return;
|
||||
|
||||
const imageObserver = new IntersectionObserver((entries) => {
|
||||
entries.forEach(entry => {
|
||||
if (entry.isIntersecting) {
|
||||
const img = entry.target;
|
||||
if (img.dataset.src) {
|
||||
img.src = img.dataset.src;
|
||||
img.removeAttribute('data-src');
|
||||
img.classList.add('loaded');
|
||||
imageObserver.unobserve(img);
|
||||
}
|
||||
}
|
||||
});
|
||||
}, {
|
||||
threshold: 0.1,
|
||||
rootMargin: '50px'
|
||||
});
|
||||
|
||||
container.querySelectorAll('img[data-src]').forEach(img => {
|
||||
imageObserver.observe(img);
|
||||
});
|
||||
}
|
||||
|
||||
setupKeyboardNavigation() {
|
||||
// Tab navigation for filter cards
|
||||
document.addEventListener('keydown', (evt) => {
|
||||
if (evt.key === 'Tab' && evt.target.closest('.park-card')) {
|
||||
this.handleCardNavigation(evt);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
setupPerformanceOptimizations() {
|
||||
// Prefetch next page if pagination exists
|
||||
this.setupPrefetching();
|
||||
|
||||
// Optimize scroll performance
|
||||
this.setupScrollOptimization();
|
||||
}
|
||||
|
||||
setupPrefetching() {
|
||||
const nextPageLink = document.querySelector('a[rel="next"]');
|
||||
if (nextPageLink && 'IntersectionObserver' in window) {
|
||||
const observer = new IntersectionObserver((entries) => {
|
||||
entries.forEach(entry => {
|
||||
if (entry.isIntersecting) {
|
||||
this.prefetchPage(nextPageLink.href);
|
||||
observer.unobserve(entry.target);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const trigger = document.querySelector('.pagination');
|
||||
if (trigger) {
|
||||
observer.observe(trigger);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
setupScrollOptimization() {
|
||||
let ticking = false;
|
||||
|
||||
window.addEventListener('scroll', () => {
|
||||
if (!ticking) {
|
||||
requestAnimationFrame(() => {
|
||||
this.handleScroll();
|
||||
ticking = false;
|
||||
});
|
||||
ticking = true;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
handleScroll() {
|
||||
// Show/hide back to top button
|
||||
const backToTop = document.getElementById('back-to-top');
|
||||
if (backToTop) {
|
||||
backToTop.style.display = window.scrollY > 500 ? 'block' : 'none';
|
||||
}
|
||||
}
|
||||
|
||||
prefetchPage(url) {
|
||||
const link = document.createElement('link');
|
||||
link.rel = 'prefetch';
|
||||
link.href = url;
|
||||
document.head.appendChild(link);
|
||||
}
|
||||
|
||||
showLoadingIndicator(target) {
|
||||
// Add subtle loading animation
|
||||
target.style.transition = 'opacity 0.3s ease-in-out';
|
||||
target.style.opacity = '0.7';
|
||||
}
|
||||
|
||||
hideLoadingIndicator(target) {
|
||||
target.style.opacity = '1';
|
||||
}
|
||||
|
||||
toggleFormElements(enabled) {
|
||||
const form = document.getElementById('filter-form');
|
||||
if (form) {
|
||||
const elements = form.querySelectorAll('input, select, button');
|
||||
elements.forEach(el => {
|
||||
el.disabled = !enabled;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
updateFilterState() {
|
||||
const form = document.getElementById('filter-form');
|
||||
if (!form) return;
|
||||
|
||||
const formData = new FormData(form);
|
||||
this.filterState.clear();
|
||||
|
||||
for (const [key, value] of formData.entries()) {
|
||||
if (value && value !== '') {
|
||||
this.filterState.set(key, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
saveFilterState() {
|
||||
try {
|
||||
const state = Object.fromEntries(this.filterState);
|
||||
localStorage.setItem('parkFilters', JSON.stringify(state));
|
||||
} catch (e) {
|
||||
console.warn('Failed to save filter state:', e);
|
||||
}
|
||||
}
|
||||
|
||||
restoreFilterState() {
|
||||
try {
|
||||
const saved = localStorage.getItem('parkFilters');
|
||||
if (saved) {
|
||||
const state = JSON.parse(saved);
|
||||
this.applyFilterState(state);
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn('Failed to restore filter state:', e);
|
||||
}
|
||||
}
|
||||
|
||||
restoreFiltersFromURL(path) {
|
||||
const url = new URL(path, window.location.origin);
|
||||
const params = new URLSearchParams(url.search);
|
||||
|
||||
const form = document.getElementById('filter-form');
|
||||
if (!form) return;
|
||||
|
||||
// Clear existing values
|
||||
form.reset();
|
||||
|
||||
// Apply URL parameters
|
||||
for (const [key, value] of params.entries()) {
|
||||
const input = form.querySelector(`[name="${key}"]`);
|
||||
if (input) {
|
||||
if (input.type === 'checkbox') {
|
||||
input.checked = value === 'on' || value === 'true';
|
||||
} else {
|
||||
input.value = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
applyFilterState(state) {
|
||||
const form = document.getElementById('filter-form');
|
||||
if (!form) return;
|
||||
|
||||
Object.entries(state).forEach(([key, value]) => {
|
||||
const input = form.querySelector(`[name="${key}"]`);
|
||||
if (input) {
|
||||
if (input.type === 'checkbox') {
|
||||
input.checked = value === 'on' || value === 'true';
|
||||
} else {
|
||||
input.value = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
updateResultsInfo(container) {
|
||||
// Update any result count displays
|
||||
const countElements = container.querySelectorAll('[data-result-count]');
|
||||
countElements.forEach(el => {
|
||||
const count = container.querySelectorAll('.park-card').length;
|
||||
el.textContent = count;
|
||||
});
|
||||
}
|
||||
|
||||
animateResults(container) {
|
||||
// Subtle animation for new results
|
||||
const cards = container.querySelectorAll('.park-card');
|
||||
cards.forEach((card, index) => {
|
||||
card.style.opacity = '0';
|
||||
card.style.transform = 'translateY(20px)';
|
||||
|
||||
setTimeout(() => {
|
||||
card.style.transition = 'opacity 0.3s ease-out, transform 0.3s ease-out';
|
||||
card.style.opacity = '1';
|
||||
card.style.transform = 'translateY(0)';
|
||||
}, index * 50);
|
||||
});
|
||||
}
|
||||
|
||||
adjustLayoutForViewport() {
|
||||
const viewport = window.innerWidth;
|
||||
|
||||
// Adjust grid columns based on viewport
|
||||
const grid = document.querySelector('.park-card-grid');
|
||||
if (grid) {
|
||||
if (viewport < 768) {
|
||||
grid.style.gridTemplateColumns = '1fr';
|
||||
} else if (viewport < 1024) {
|
||||
grid.style.gridTemplateColumns = 'repeat(2, 1fr)';
|
||||
} else {
|
||||
grid.style.gridTemplateColumns = 'repeat(3, 1fr)';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trackFilterUsage(evt) {
|
||||
// Track which filters are being used for analytics
|
||||
if (window.gtag) {
|
||||
const formData = new FormData(evt.detail.elt);
|
||||
const activeFilters = [];
|
||||
|
||||
for (const [key, value] of formData.entries()) {
|
||||
if (value && value !== '' && key !== 'csrfmiddlewaretoken') {
|
||||
activeFilters.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
window.gtag('event', 'filter_usage', {
|
||||
'filters_used': activeFilters.join(','),
|
||||
'filter_count': activeFilters.length
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
showPerformanceWarning() {
|
||||
// Show a subtle warning for slow responses
|
||||
const warning = document.createElement('div');
|
||||
warning.className = 'fixed top-4 right-4 bg-yellow-100 border border-yellow-400 text-yellow-700 px-4 py-3 rounded z-50';
|
||||
warning.innerHTML = `
|
||||
<span class="block sm:inline">Search is taking longer than expected...</span>
|
||||
<span class="absolute top-0 bottom-0 right-0 px-4 py-3 cursor-pointer" onclick="this.parentElement.remove()">
|
||||
<svg class="fill-current h-6 w-6 text-yellow-500" role="button" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20">
|
||||
<path d="M14.348 14.849a1.2 1.2 0 0 1-1.697 0L10 11.819l-2.651 3.029a1.2 1.2 0 1 1-1.697-1.697l2.758-3.15-2.759-3.152a1.2 1.2 0 1 1 1.697-1.697L10 8.183l2.651-3.031a1.2 1.2 0 1 1 1.697 1.697l-2.758 3.152 2.758 3.15a1.2 1.2 0 0 1 0 1.698z"/>
|
||||
</svg>
|
||||
</span>
|
||||
`;
|
||||
|
||||
document.body.appendChild(warning);
|
||||
|
||||
setTimeout(() => {
|
||||
if (warning.parentElement) {
|
||||
warning.remove();
|
||||
}
|
||||
}, 5000);
|
||||
}
|
||||
|
||||
showErrorMessage(message) {
|
||||
// Show error message with retry option
|
||||
const errorDiv = document.createElement('div');
|
||||
errorDiv.className = 'fixed top-4 right-4 bg-red-100 border border-red-400 text-red-700 px-4 py-3 rounded z-50';
|
||||
errorDiv.innerHTML = `
|
||||
<span class="block sm:inline">${message}</span>
|
||||
<button class="ml-4 bg-red-500 hover:bg-red-700 text-white font-bold py-1 px-2 rounded text-sm" onclick="location.reload()">
|
||||
Retry
|
||||
</button>
|
||||
<span class="absolute top-0 bottom-0 right-0 px-4 py-3 cursor-pointer" onclick="this.parentElement.remove()">
|
||||
<svg class="fill-current h-6 w-6 text-red-500" role="button" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20">
|
||||
<path d="M14.348 14.849a1.2 1.2 0 0 1-1.697 0L10 11.819l-2.651 3.029a1.2 1.2 0 1 1-1.697-1.697l2.758-3.15-2.759-3.152a1.2 1.2 0 1 1 1.697-1.697L10 8.183l2.651-3.031a1.2 1.2 0 1 1 1.697 1.697l-2.758 3.152 2.758 3.15a1.2 1.2 0 0 1 0 1.698z"/>
|
||||
</svg>
|
||||
</span>
|
||||
`;
|
||||
|
||||
document.body.appendChild(errorDiv);
|
||||
|
||||
setTimeout(() => {
|
||||
if (errorDiv.parentElement) {
|
||||
errorDiv.remove();
|
||||
}
|
||||
}, 10000);
|
||||
}
|
||||
|
||||
// Utility method for debouncing
|
||||
debounce(func, wait) {
|
||||
let timeout;
|
||||
return function executedFunction(...args) {
|
||||
const later = () => {
|
||||
clearTimeout(timeout);
|
||||
func(...args);
|
||||
};
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(later, wait);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize the enhanced search manager
|
||||
document.addEventListener('DOMContentLoaded', () => {
|
||||
window.parkSearchManager = new ParkSearchManager();
|
||||
});
|
||||
|
||||
// Export for potential module usage
|
||||
if (typeof module !== 'undefined' && module.exports) {
|
||||
module.exports = ParkSearchManager;
|
||||
}
|
||||
@@ -1,164 +0,0 @@
|
||||
{% extends "core/search/layouts/filtered_list.html" %}
|
||||
{% load static %}
|
||||
|
||||
{% block title %}Parks - ThrillWiki{% endblock %}
|
||||
|
||||
{% block list_actions %}
|
||||
{# Simple View Toggle #}
|
||||
<div class="flex items-center space-x-4">
|
||||
<div class="flex items-center bg-gray-100 dark:bg-gray-700 rounded-lg p-1">
|
||||
<button hx-get="{% url 'parks:park_list' %}?view_mode=grid{% if request.GET.search %}&search={{ request.GET.search }}{% endif %}"
|
||||
hx-target="#results-container"
|
||||
hx-push-url="true"
|
||||
class="px-3 py-1 rounded-md text-sm font-medium transition-all {% if request.GET.view_mode == 'grid' or not request.GET.view_mode %}bg-white dark:bg-gray-600 shadow-sm text-blue-600 dark:text-blue-400{% else %}text-gray-700 dark:text-gray-300{% endif %}">
|
||||
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path d="M5 3a2 2 0 00-2 2v2a2 2 0 002 2h2a2 2 0 002-2V5a2 2 0 00-2-2H5zM5 11a2 2 0 00-2 2v2a2 2 0 002 2h2a2 2 0 002-2v-2a2 2 0 00-2-2H5zM11 5a2 2 0 012-2h2a2 2 0 012 2v2a2 2 0 01-2 2h-2a2 2 0 01-2-2V5zM11 13a2 2 0 012-2h2a2 2 0 012 2v2a2 2 0 01-2 2h-2a2 2 0 01-2-2v-2z"/>
|
||||
</svg>
|
||||
</button>
|
||||
<button hx-get="{% url 'parks:park_list' %}?view_mode=list{% if request.GET.search %}&search={{ request.GET.search }}{% endif %}"
|
||||
hx-target="#results-container"
|
||||
hx-push-url="true"
|
||||
class="px-3 py-1 rounded-md text-sm font-medium transition-all {% if request.GET.view_mode == 'list' %}bg-white dark:bg-gray-600 shadow-sm text-blue-600 dark:text-blue-400{% else %}text-gray-700 dark:text-gray-300{% endif %}">
|
||||
<svg class="w-4 h-4" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M3 4a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1zm0 4a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1zm0 4a1 1 0 011-1h12a1 1 0 110 2H4a1 1 0 01-1-1z" clip-rule="evenodd"/>
|
||||
</svg>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{% if user.is_authenticated %}
|
||||
<a href="{% url 'parks:park_create' %}"
|
||||
class="inline-flex items-center px-4 py-2 border border-transparent text-sm font-medium rounded-lg text-white bg-blue-600 hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 transition-colors">
|
||||
<svg class="w-4 h-4 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 6v6m0 0v6m0-6h6m-6 0H6"/>
|
||||
</svg>
|
||||
Add Park
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block results_list %}
|
||||
<div id="park-results"
|
||||
class="overflow-hidden transition-all duration-300"
|
||||
data-view-mode="{{ view_mode|default:'grid' }}">
|
||||
|
||||
{# Results Content with Adaptive Grid #}
|
||||
<div class="p-6">
|
||||
{% if parks %}
|
||||
{# Enhanced Responsive Grid Container #}
|
||||
<div class="{% if view_mode == 'list' %}space-y-4{% else %}grid gap-6 grid-cols-1 sm:grid-cols-2 xl:grid-cols-3 2xl:grid-cols-4{% endif %} transition-all duration-300">
|
||||
{% include "parks/partials/park_list_item.html" with parks=parks view_mode=view_mode|default:'grid' %}
|
||||
</div>
|
||||
{% else %}
|
||||
{# Enhanced No Results Found Section #}
|
||||
<div class="text-center py-16 px-4">
|
||||
<div class="mx-auto w-32 h-32 text-gray-300 dark:text-gray-600 mb-8">
|
||||
<svg fill="none" stroke="currentColor" viewBox="0 0 24 24" class="w-full h-full">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1" d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10"></path>
|
||||
</svg>
|
||||
</div>
|
||||
|
||||
<h3 class="text-2xl font-bold text-gray-900 dark:text-white mb-4">No parks found</h3>
|
||||
|
||||
{% if request.GET.search %}
|
||||
<p class="text-gray-600 dark:text-gray-400 mb-8 max-w-md mx-auto">
|
||||
No parks match your search for <span class="font-semibold text-gray-900 dark:text-white">"{{ request.GET.search }}"</span>.
|
||||
<br>Try searching with different keywords or check your spelling.
|
||||
</p>
|
||||
{% elif request.GET.park_type or request.GET.has_coasters or request.GET.min_rating or request.GET.big_parks_only %}
|
||||
<p class="text-gray-600 dark:text-gray-400 mb-8 max-w-md mx-auto">
|
||||
No parks match your current filter criteria.
|
||||
<br>Try adjusting your filters or removing some restrictions.
|
||||
</p>
|
||||
{% else %}
|
||||
<p class="text-gray-600 dark:text-gray-400 mb-8 max-w-md mx-auto">
|
||||
No parks are currently available in the database.
|
||||
</p>
|
||||
{% endif %}
|
||||
|
||||
<div class="flex flex-col sm:flex-row items-center justify-center gap-4">
|
||||
{% if request.GET.search or request.GET.park_type or request.GET.has_coasters or request.GET.min_rating or request.GET.big_parks_only %}
|
||||
<button type="button"
|
||||
onclick="clearAllFilters()"
|
||||
class="inline-flex items-center px-6 py-3 border border-transparent text-sm font-semibold rounded-xl text-white bg-gradient-to-r from-blue-600 to-purple-600 hover:from-blue-700 hover:to-purple-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 transition-all duration-300 transform hover:scale-105">
|
||||
<svg class="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15"></path>
|
||||
</svg>
|
||||
Clear All Filters
|
||||
</button>
|
||||
|
||||
<a href="{% url 'parks:park_list' %}"
|
||||
class="inline-flex items-center px-6 py-3 text-sm font-semibold text-blue-700 dark:text-blue-300 hover:text-blue-800 dark:hover:text-blue-200 transition-colors duration-200">
|
||||
<svg class="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 19l-7-7m0 0l7-7m-7 7h18"></path>
|
||||
</svg>
|
||||
View all parks
|
||||
</a>
|
||||
{% endif %}
|
||||
|
||||
{% if user.is_authenticated %}
|
||||
<a href="{% url 'parks:park_create' %}"
|
||||
class="inline-flex items-center px-6 py-3 text-sm font-semibold text-green-700 dark:text-green-300 hover:text-green-800 dark:hover:text-green-200 transition-colors duration-200">
|
||||
<svg class="w-5 h-5 mr-2" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M12 6v6m0 0v6m0-6h6m-6 0H6"></path>
|
||||
</svg>
|
||||
Add a new park
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<script>
|
||||
// Enhanced clear all filters function
|
||||
function clearAllFilters() {
|
||||
const form = document.getElementById('filter-form');
|
||||
if (!form) return;
|
||||
|
||||
// Clear all form inputs
|
||||
const inputs = form.querySelectorAll('input, select, textarea');
|
||||
inputs.forEach(input => {
|
||||
if (input.type === 'checkbox' || input.type === 'radio') {
|
||||
input.checked = false;
|
||||
} else if (input.type !== 'hidden') {
|
||||
input.value = '';
|
||||
}
|
||||
});
|
||||
|
||||
// Clear search input as well
|
||||
const searchInput = document.querySelector('input[name="search"]');
|
||||
if (searchInput) {
|
||||
searchInput.value = '';
|
||||
}
|
||||
|
||||
// Submit form to reload without filters
|
||||
htmx.trigger(form, 'submit');
|
||||
}
|
||||
|
||||
// Smooth scroll to results after filter changes
|
||||
document.addEventListener('htmx:afterSwap', function(event) {
|
||||
if (event.detail.target.id === 'results-container') {
|
||||
// Add a subtle scroll to results with a small delay for content to settle
|
||||
setTimeout(() => {
|
||||
const resultsElement = document.getElementById('park-results');
|
||||
if (resultsElement) {
|
||||
resultsElement.scrollIntoView({
|
||||
behavior: 'smooth',
|
||||
block: 'start',
|
||||
inline: 'nearest'
|
||||
});
|
||||
}
|
||||
}, 100);
|
||||
|
||||
// Flash effect to indicate content update
|
||||
const resultsContainer = event.detail.target;
|
||||
resultsContainer.style.opacity = '0.8';
|
||||
setTimeout(() => {
|
||||
resultsContainer.style.opacity = '1';
|
||||
}, 150);
|
||||
}
|
||||
});
|
||||
</script>
|
||||
{% endblock %}
|
||||
@@ -1,168 +0,0 @@
|
||||
{% load static %}
|
||||
|
||||
{% if error %}
|
||||
<div class="p-4" data-testid="park-list-error">
|
||||
<div class="inline-flex items-center px-4 py-2 rounded-md bg-red-50 dark:bg-red-900/20 text-red-700 dark:text-red-400 border border-red-200 dark:border-red-800">
|
||||
<svg class="w-5 h-5 mr-2" fill="currentColor" viewBox="0 0 20 20">
|
||||
<path fill-rule="evenodd" d="M10 18a8 8 0 100-16 8 8 0 000 16zM8.707 7.293a1 1 0 00-1.414 1.414L8.586 10l-1.293 1.293a1 1 0 101.414 1.414L10 11.414l1.293 1.293a1 1 0 001.414-1.414L11.414 10l1.293-1.293a1 1 0 00-1.414-1.414L10 8.586 8.707 7.293z" clip-rule="evenodd"/>
|
||||
</svg>
|
||||
{{ error }}
|
||||
</div>
|
||||
</div>
|
||||
{% else %}
|
||||
{% for park in object_list|default:parks %}
|
||||
{% if view_mode == 'list' %}
|
||||
{# Enhanced List View Item #}
|
||||
<article class="group bg-white/80 dark:bg-gray-800/80 backdrop-blur-sm border border-gray-200/50 dark:border-gray-700/50 rounded-xl shadow-lg hover:shadow-xl transition-all duration-300 transform hover:scale-[1.02] overflow-hidden">
|
||||
<div class="p-6">
|
||||
<div class="flex flex-col lg:flex-row lg:items-center lg:justify-between gap-6">
|
||||
{# Main Content Section #}
|
||||
<div class="flex-1 min-w-0">
|
||||
<div class="flex items-start justify-between mb-3">
|
||||
<h2 class="text-xl lg:text-2xl font-bold">
|
||||
<a href="{% url 'parks:park_detail' park.slug %}"
|
||||
class="bg-gradient-to-r from-gray-900 to-gray-700 dark:from-white dark:to-gray-300 bg-clip-text text-transparent hover:from-blue-600 hover:to-purple-600 dark:hover:from-blue-400 dark:hover:to-purple-400 transition-all duration-300">
|
||||
{{ park.name }}
|
||||
</a>
|
||||
</h2>
|
||||
|
||||
{# Status Badge #}
|
||||
<span class="inline-flex items-center px-3 py-1 rounded-full text-xs font-semibold border
|
||||
{% if park.status == 'operating' %}bg-green-50 text-green-700 border-green-200 dark:bg-green-900/20 dark:text-green-400 dark:border-green-800
|
||||
{% elif park.status == 'closed' %}bg-red-50 text-red-700 border-red-200 dark:bg-red-900/20 dark:text-red-400 dark:border-red-800
|
||||
{% elif park.status == 'seasonal' %}bg-blue-50 text-blue-700 border-blue-200 dark:bg-blue-900/20 dark:text-blue-400 dark:border-blue-800
|
||||
{% else %}bg-gray-50 text-gray-700 border-gray-200 dark:bg-gray-700 dark:text-gray-300 dark:border-gray-600{% endif %}">
|
||||
{{ park.get_status_display }}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{% if park.operator %}
|
||||
<div class="text-base font-medium text-gray-600 dark:text-gray-400 mb-3">
|
||||
{{ park.operator.name }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if park.description %}
|
||||
<p class="text-gray-600 dark:text-gray-400 line-clamp-2 mb-4">
|
||||
{{ park.description|truncatewords:30 }}
|
||||
</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# Stats Section #}
|
||||
{% if park.ride_count or park.coaster_count %}
|
||||
<div class="flex items-center space-x-6 text-sm">
|
||||
{% if park.ride_count %}
|
||||
<div class="flex items-center space-x-2 px-4 py-2 bg-gradient-to-r from-blue-50 to-purple-50 dark:from-blue-900/20 dark:to-purple-900/20 rounded-lg border border-blue-200/50 dark:border-blue-800/50">
|
||||
<svg class="w-5 h-5 text-blue-600 dark:text-blue-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10"/>
|
||||
</svg>
|
||||
<span class="font-semibold text-blue-700 dark:text-blue-300">{{ park.ride_count }}</span>
|
||||
<span class="text-blue-600 dark:text-blue-400">rides</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if park.coaster_count %}
|
||||
<div class="flex items-center space-x-2 px-4 py-2 bg-gradient-to-r from-purple-50 to-pink-50 dark:from-purple-900/20 dark:to-pink-900/20 rounded-lg border border-purple-200/50 dark:border-purple-800/50">
|
||||
<svg class="w-5 h-5 text-purple-600 dark:text-purple-400" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 10V3L4 14h7v7l9-11h-7z"/>
|
||||
</svg>
|
||||
<span class="font-semibold text-purple-700 dark:text-purple-300">{{ park.coaster_count }}</span>
|
||||
<span class="text-purple-600 dark:text-purple-400">coasters</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</article>
|
||||
{% else %}
|
||||
{# Enhanced Grid View Item #}
|
||||
<article class="group bg-white/80 dark:bg-gray-800/80 backdrop-blur-sm border border-gray-200/50 dark:border-gray-700/50 rounded-xl shadow-lg hover:shadow-xl transition-all duration-300 transform hover:scale-105 hover:-rotate-1 overflow-hidden">
|
||||
{# Card Header with Gradient #}
|
||||
<div class="h-2 bg-gradient-to-r from-blue-500 via-purple-500 to-pink-500"></div>
|
||||
|
||||
<div class="p-6">
|
||||
<div class="flex items-start justify-between mb-4">
|
||||
<h2 class="text-xl font-bold line-clamp-2 flex-1 mr-3">
|
||||
<a href="{% url 'parks:park_detail' park.slug %}"
|
||||
class="bg-gradient-to-r from-gray-900 to-gray-700 dark:from-white dark:to-gray-300 bg-clip-text text-transparent hover:from-blue-600 hover:to-purple-600 dark:hover:from-blue-400 dark:hover:to-purple-400 transition-all duration-300">
|
||||
{{ park.name }}
|
||||
</a>
|
||||
</h2>
|
||||
|
||||
{# Status Badge #}
|
||||
<span class="inline-flex items-center px-2.5 py-1 rounded-full text-xs font-semibold border shrink-0
|
||||
{% if park.status == 'operating' %}bg-green-50 text-green-700 border-green-200 dark:bg-green-900/20 dark:text-green-400 dark:border-green-800
|
||||
{% elif park.status == 'closed' %}bg-red-50 text-red-700 border-red-200 dark:bg-red-900/20 dark:text-red-400 dark:border-red-800
|
||||
{% elif park.status == 'seasonal' %}bg-blue-50 text-blue-700 border-blue-200 dark:bg-blue-900/20 dark:text-blue-400 dark:border-blue-800
|
||||
{% else %}bg-gray-50 text-gray-700 border-gray-200 dark:bg-gray-700 dark:text-gray-300 dark:border-gray-600{% endif %}">
|
||||
{{ park.get_status_display }}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{% if park.operator %}
|
||||
<div class="text-sm font-medium text-gray-600 dark:text-gray-400 mb-3 truncate">
|
||||
{{ park.operator.name }}
|
||||
</div>
|
||||
{% endif %}
|
||||
|
||||
{% if park.description %}
|
||||
<p class="text-sm text-gray-600 dark:text-gray-400 line-clamp-3 mb-4">
|
||||
{{ park.description|truncatewords:15 }}
|
||||
</p>
|
||||
{% endif %}
|
||||
|
||||
{# Stats Footer #}
|
||||
{% if park.ride_count or park.coaster_count %}
|
||||
<div class="flex items-center justify-between pt-4 border-t border-gray-200/50 dark:border-gray-600/50">
|
||||
<div class="flex items-center space-x-4 text-sm">
|
||||
{% if park.ride_count %}
|
||||
<div class="flex items-center space-x-1 text-blue-600 dark:text-blue-400">
|
||||
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10"/>
|
||||
</svg>
|
||||
<span class="font-semibold">{{ park.ride_count }}</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if park.coaster_count %}
|
||||
<div class="flex items-center space-x-1 text-purple-600 dark:text-purple-400">
|
||||
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M13 10V3L4 14h7v7l9-11h-7z"/>
|
||||
</svg>
|
||||
<span class="font-semibold">{{ park.coaster_count }}</span>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
|
||||
{# View Details Arrow #}
|
||||
<div class="text-gray-400 group-hover:text-blue-600 dark:group-hover:text-blue-400 transition-colors duration-200">
|
||||
<svg class="w-5 h-5 transform group-hover:translate-x-1 transition-transform duration-200" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M9 5l7 7-7 7"/>
|
||||
</svg>
|
||||
</div>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
</article>
|
||||
{% endif %}
|
||||
{% empty %}
|
||||
<div class="{% if view_mode == 'list' %}w-full{% else %}col-span-full{% endif %} p-12 text-center" data-testid="no-parks-found">
|
||||
<div class="mx-auto w-24 h-24 text-gray-300 dark:text-gray-600 mb-6">
|
||||
<svg fill="none" stroke="currentColor" viewBox="0 0 24 24" class="w-full h-full">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="1" d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10"/>
|
||||
</svg>
|
||||
</div>
|
||||
<h3 class="text-xl font-bold text-gray-900 dark:text-white mb-3">No parks found</h3>
|
||||
<div class="text-gray-600 dark:text-gray-400">
|
||||
{% if search_query %}
|
||||
<p class="mb-4">No parks found matching "{{ search_query }}". Try adjusting your search terms.</p>
|
||||
{% else %}
|
||||
<p class="mb-4">No parks found matching your criteria. Try adjusting your filters.</p>
|
||||
{% endif %}
|
||||
{% if user.is_authenticated %}
|
||||
<p>You can also <a href="{% url 'parks:park_create' %}" class="text-blue-600 dark:text-blue-400 hover:text-blue-800 dark:hover:text-blue-300 font-semibold">add a new park</a>.</p>
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
@@ -1,30 +0,0 @@
|
||||
{% load static %}
|
||||
|
||||
{% if parks %}
|
||||
<div class="py-2">
|
||||
{% for park in parks %}
|
||||
<button class="w-full text-left px-4 py-2 hover:bg-gray-100 focus:bg-gray-100 focus:outline-none dark:hover:bg-gray-700 dark:focus:bg-gray-700"
|
||||
role="option"
|
||||
@click="$dispatch('search-selected', '{{ park.name }}')"
|
||||
value="{{ park.id }}">
|
||||
<div class="flex justify-between items-center">
|
||||
<div>
|
||||
<div class="font-medium">{{ park.name }}</div>
|
||||
<div class="text-sm text-gray-500 dark:text-gray-400">
|
||||
{% if park.formatted_location %}
|
||||
{{ park.formatted_location }}
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
<div class="text-sm text-gray-500 dark:text-gray-400">
|
||||
{{ park.get_status_display }}
|
||||
</div>
|
||||
</div>
|
||||
</button>
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="px-4 py-2 text-sm text-gray-500 dark:text-gray-400">
|
||||
No parks found matching "{{ query }}"
|
||||
</div>
|
||||
{% endif %}
|
||||
@@ -1,36 +0,0 @@
|
||||
{% if suggestions %}
|
||||
<div id="search-suggestions-results"
|
||||
class="w-full bg-white rounded-md shadow-lg border border-gray-200 max-h-96 overflow-y-auto"
|
||||
x-show="open"
|
||||
x-cloak
|
||||
@keydown.escape.window="open = false"
|
||||
x-transition:enter="transition ease-out duration-100"
|
||||
x-transition:enter-start="transform opacity-0 scale-95"
|
||||
x-transition:enter-end="transform opacity-100 scale-100"
|
||||
x-transition:leave="transition ease-in duration-75"
|
||||
x-transition:leave-start="transform opacity-100 scale-100"
|
||||
x-transition:leave-end="transform opacity-0 scale-95">
|
||||
{% for park in suggestions %}
|
||||
{% with location=park.location.first %}
|
||||
<button type="button"
|
||||
class="w-full text-left px-4 py-2 text-sm hover:bg-gray-100 cursor-pointer flex items-center justify-between gap-2 transition duration-150"
|
||||
:class="{ 'bg-blue-50': focusedIndex === {{ forloop.counter0 }} }"
|
||||
@mousedown.prevent="query = '{{ park.name }}'; $refs.search.value = '{{ park.name }}'"
|
||||
@mousedown.prevent="query = '{{ park.name }}'; $dispatch('search-selected', '{{ park.name }}'); open = false;"
|
||||
role="option"
|
||||
:aria-selected="focusedIndex === {{ forloop.counter0 }}"
|
||||
tabindex="-1"
|
||||
x-effect="if(focusedIndex === {{ forloop.counter0 }}) $el.scrollIntoView({block: 'nearest'})"
|
||||
aria-label="{{ park.name }}{% if location.city %} in {{ location.city }}{% endif %}{% if location.state %}, {{ location.state }}{% endif %}">
|
||||
<div class="flex items-center gap-2">
|
||||
<span class="font-medium" x-text="focusedIndex === {{ forloop.counter0 }} ? '▶ {{ park.name }}' : '{{ park.name }}'"></span>
|
||||
<span class="text-gray-500">
|
||||
{% if location.city %}{{ location.city }}, {% endif %}
|
||||
{% if location.state %}{{ location.state }}{% endif %}
|
||||
</span>
|
||||
</div>
|
||||
</button>
|
||||
{% endwith %}
|
||||
{% endfor %}
|
||||
</div>
|
||||
{% endif %}
|
||||
@@ -1,11 +0,0 @@
|
||||
from django import template
|
||||
|
||||
register = template.Library()
|
||||
|
||||
|
||||
@register.filter
|
||||
def has_reviewed_park(user, park):
|
||||
"""Check if a user has reviewed a park"""
|
||||
if not user.is_authenticated:
|
||||
return False
|
||||
return park.reviews.filter(user=user).exists()
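# Usage in a template, assuming this tag library has been loaded
# (the {% load ... %} name depends on this module's filename):
#   {% if request.user|has_reviewed_park:park %}You already reviewed this park.{% endif %}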
|
||||
@@ -1,117 +0,0 @@
|
||||
from django.test import TestCase, Client
|
||||
from django.contrib.auth import get_user_model
|
||||
from apps.parks.models import Park, ParkArea, ParkLocation, Company as Operator
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
|
||||
def create_test_location(park: Park) -> ParkLocation:
|
||||
"""Helper function to create a test location"""
|
||||
park_location = ParkLocation.objects.create(
|
||||
park=park,
|
||||
street_address="123 Test St",
|
||||
city="Test City",
|
||||
state="TS",
|
||||
country="Test Country",
|
||||
postal_code="12345",
|
||||
)
|
||||
# Set coordinates using the helper method
|
||||
park_location.set_coordinates(34.0522, -118.2437) # latitude, longitude
|
||||
park_location.save()
|
||||
return park_location
|
||||
|
||||
|
||||
class ParkModelTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls) -> None:
|
||||
# Create test user
|
||||
cls.user = User.objects.create_user(
|
||||
username="testuser",
|
||||
email="test@example.com",
|
||||
password="testpass123",
|
||||
)
|
||||
|
||||
# Create test company
|
||||
cls.operator = Operator.objects.create(
|
||||
name="Test Company", website="http://example.com"
|
||||
)
|
||||
|
||||
# Create test park
|
||||
cls.park = Park.objects.create(
|
||||
name="Test Park",
|
||||
operator=cls.operator,
|
||||
status="OPERATING",
|
||||
website="http://testpark.com",
|
||||
)
|
||||
|
||||
# Create test location
|
||||
cls.location = create_test_location(cls.park)
|
||||
|
||||
def test_park_creation(self) -> None:
|
||||
"""Test park instance creation and field values"""
|
||||
self.assertEqual(self.park.name, "Test Park")
|
||||
self.assertEqual(self.park.operator, self.operator)
|
||||
self.assertEqual(self.park.status, "OPERATING")
|
||||
self.assertEqual(self.park.website, "http://testpark.com")
|
||||
self.assertTrue(self.park.slug)
|
||||
|
||||
def test_park_str_representation(self) -> None:
|
||||
"""Test string representation of park"""
|
||||
self.assertEqual(str(self.park), "Test Park")
|
||||
|
||||
def test_park_coordinates(self) -> None:
|
||||
"""Test park coordinates property"""
|
||||
coords = self.park.coordinates
|
||||
self.assertIsNotNone(coords)
|
||||
if coords:
|
||||
self.assertAlmostEqual(coords[0], 34.0522, places=4) # latitude
|
||||
self.assertAlmostEqual(coords[1], -118.2437, places=4) # longitude
|
||||
|
||||
def test_park_formatted_location(self) -> None:
|
||||
"""Test park formatted_location property"""
|
||||
expected = "123 Test St, Test City, TS, 12345, Test Country"
|
||||
self.assertEqual(self.park.formatted_location, expected)
|
||||
|
||||
|
||||
class ParkAreaTests(TestCase):
|
||||
def setUp(self) -> None:
|
||||
# Create test company
|
||||
self.operator = Operator.objects.create(
|
||||
name="Test Company", website="http://example.com"
|
||||
)
|
||||
|
||||
# Create test park
|
||||
self.park = Park.objects.create(
|
||||
name="Test Park", operator=self.operator, status="OPERATING"
|
||||
)
|
||||
|
||||
# Create test location
|
||||
self.location = create_test_location(self.park)
|
||||
|
||||
# Create test area
|
||||
self.area = ParkArea.objects.create(
|
||||
park=self.park, name="Test Area", description="Test Description"
|
||||
)
|
||||
|
||||
def test_area_creation(self) -> None:
|
||||
"""Test park area creation"""
|
||||
self.assertEqual(self.area.name, "Test Area")
|
||||
self.assertEqual(self.area.park, self.park)
|
||||
self.assertTrue(self.area.slug)
|
||||
|
||||
|
||||
class ParkViewTests(TestCase):
|
||||
def setUp(self) -> None:
|
||||
self.client = Client()
|
||||
self.user = User.objects.create_user(
|
||||
username="testuser",
|
||||
email="test@example.com",
|
||||
password="testpass123",
|
||||
)
|
||||
self.operator = Operator.objects.create(
|
||||
name="Test Company", website="http://example.com"
|
||||
)
|
||||
self.park = Park.objects.create(
|
||||
name="Test Park", operator=self.operator, status="OPERATING"
|
||||
)
|
||||
self.location = create_test_location(self.park)
|
||||
@@ -1,127 +0,0 @@
|
||||
# Park Search Tests
|
||||
|
||||
## Overview
|
||||
|
||||
Test suite for the park search functionality including:
|
||||
- Autocomplete widget integration
|
||||
- Search form validation
|
||||
- Filter integration
|
||||
- HTMX interaction
|
||||
- View mode persistence
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
# Run all park tests
|
||||
uv run pytest parks/tests/
|
||||
|
||||
# Run specific search tests
|
||||
uv run pytest parks/tests/test_search.py
|
||||
|
||||
# Run with coverage
|
||||
uv run pytest --cov=parks parks/tests/
|
||||
```
|
||||
|
||||
## Test Coverage
|
||||
|
||||
### Search API Tests
|
||||
- `test_search_json_format`: Validates API response structure
|
||||
- `test_empty_search_json`: Tests empty search handling
|
||||
- `test_search_format_validation`: Verifies all required fields and types
|
||||
- `test_suggestion_limit`: Confirms 8-item result limit
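For reference, a sketch of the response shape these tests assert (field names come from the assertions in `test_search_json_format`; the concrete values below are illustrative only):

```python
# Illustrative shape only; keys match the five fields the API tests require.
expected_response = {
    "results": [
        {
            "id": "1",                  # primary key, serialized as a string
            "name": "Test Park",
            "status": "Operating",      # human-readable status display
            "location": "Test City, Test State",  # park.formatted_location
            "url": "/parks/test-park/",           # parks:park_detail URL
        }
    ]
}
```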
|
||||
|
||||
### Search Functionality Tests
|
||||
- `test_autocomplete_results`: Validates real-time suggestion filtering
|
||||
- `test_search_with_filters`: Tests filter integration with search
|
||||
- `test_partial_match_search`: Verifies partial text matching works
|
||||
|
||||
### UI Integration Tests
|
||||
- `test_view_mode_persistence`: Ensures view mode is maintained
|
||||
- `test_empty_search`: Tests default state behavior
|
||||
- `test_htmx_request_handling`: Validates HTMX interactions
|
||||
|
||||
### Data Format Tests
|
||||
- Field types validation
|
||||
- Location formatting
|
||||
- Status display formatting
|
||||
- URL generation
|
||||
- Response structure
|
||||
|
||||
### Frontend Integration
|
||||
- HTMX partial updates
|
||||
- Alpine.js state management
|
||||
- Loading indicators
|
||||
- View mode persistence
|
||||
- Keyboard navigation
|
||||
|
||||
### Coverage Areas
|
||||
1. Search Functionality:
|
||||
- Suggestion generation
|
||||
- Result filtering
|
||||
- Partial matching
|
||||
- Empty state handling
|
||||
|
||||
2. UI Integration:
|
||||
- HTMX requests
|
||||
- View mode switching
|
||||
- Loading states
|
||||
- Error handling
|
||||
|
||||
3. Performance:
|
||||
- Result limiting
|
||||
- Debouncing
|
||||
- Query optimization
|
||||
|
||||
4. Accessibility:
|
||||
- ARIA attributes
|
||||
- Keyboard controls
|
||||
- Screen reader support
|
||||
|
||||
## Configuration
|
||||
|
||||
Tests use pytest-django and require:
|
||||
- PostgreSQL database
|
||||
- HTMX middleware
|
||||
- Autocomplete app configuration
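A minimal settings sketch for those requirements (the package and app names here are assumptions for illustration, not taken from this repository):

```python
# Hedged sketch: adjust names to the project's actual settings module.
DATABASES = {
    "default": {
        # PostGIS-backed PostgreSQL is needed for the geographic fields.
        "ENGINE": "django.contrib.gis.db.backends.postgis",
        "NAME": "thrillwiki_test",
    }
}

INSTALLED_APPS = [
    # ...
    "django.contrib.gis",
    "django_htmx",   # assumption: provides the HTMX middleware
    "autocomplete",  # assumption: the autocomplete app label
]

MIDDLEWARE = [
    # ...
    "django_htmx.middleware.HtmxMiddleware",  # exposes request.htmx to views
]
```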
|
||||
|
||||
## Fixtures
|
||||
|
||||
The test suite uses standard Django test fixtures. No additional fixtures required.
|
||||
|
||||
## Common Issues
|
||||
|
||||
1. Database Errors
|
||||
- Ensure PostGIS extensions are installed
|
||||
- Verify database permissions
|
||||
|
||||
2. HTMX Tests
|
||||
- Use `HTTP_HX_REQUEST` header for HTMX requests
|
||||
- Check response content for HTMX attributes
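For example, the header can be passed through Django's test client like this (mirroring `test_htmx_request_handling` later in this suite):

```python
import pytest
from django.test import Client
from django.urls import reverse

from apps.parks.models import Park


@pytest.mark.django_db
def test_suggest_parks_handles_htmx_header(client: Client):
    """HTMX sends an HX-Request header; the test client spells it HTTP_HX_REQUEST."""
    Park.objects.create(name="Test Park")
    response = client.get(
        reverse("parks:suggest_parks"),
        {"search": "Test"},
        HTTP_HX_REQUEST="true",
    )
    assert response.status_code == 200
    assert "Test Park" in response.content.decode()
```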
|
||||
|
||||
## Adding New Tests
|
||||
|
||||
When adding tests, ensure:
|
||||
1. Database isolation using `@pytest.mark.django_db`
|
||||
2. Proper test naming following `test_*` convention
|
||||
3. Clear test descriptions in docstrings
|
||||
4. Coverage for both success and failure cases
|
||||
5. HTMX interaction testing where applicable (see the sketch below)
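A minimal skeleton following the checklist above might look like this (the park names and the chosen filter are illustrative; adapt them to the feature under test):

```python
import pytest
from django.test import Client
from django.urls import reverse

from apps.parks.models import Park


@pytest.mark.django_db  # 1. database isolation
class TestParkListStatusFilter:
    def test_status_filter_excludes_non_matching_parks(self, client: Client):  # 2. naming
        """Only operating parks appear when filtering by OPERATING status."""  # 3. docstring
        shown = Park.objects.create(name="Open Park", status="OPERATING")
        Park.objects.create(name="Shut Park", status="CLOSED_PERM")

        response = client.get(
            reverse("parks:park_list"),
            {"status": "OPERATING"},
            HTTP_HX_REQUEST="true",  # 5. exercise the HTMX partial as well
        )

        assert response.status_code == 200
        content = response.content.decode()
        assert shown.name in content       # 4. success case
        assert "Shut Park" not in content  # 4. negative case
```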
|
||||
|
||||
## Future Improvements
|
||||
|
||||
- Add performance benchmarks
|
||||
- Include accessibility tests
|
||||
- Add Playwright e2e tests
|
||||
- Implement geographic search tests
|
||||
@@ -1 +0,0 @@
|
||||
# Parks app test suite
|
||||
@@ -1,223 +0,0 @@
|
||||
"""
|
||||
Tests for park filtering functionality including search, status filtering,
|
||||
date ranges, and numeric validations.
|
||||
"""
|
||||
|
||||
from django.test import TestCase
|
||||
from datetime import date
|
||||
|
||||
from apps.parks.models import Park, ParkLocation, Company
|
||||
from apps.parks.filters import ParkFilter
|
||||
|
||||
# NOTE: These tests need to be updated to work with the new ParkLocation model
|
||||
# instead of the generic Location model
|
||||
|
||||
|
||||
class ParkFilterTests(TestCase):
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
"""Set up test data for all filter tests"""
|
||||
# Create operators
|
||||
cls.operator1 = Company.objects.create(
|
||||
name="Thrilling Adventures Inc", slug="thrilling-adventures"
|
||||
)
|
||||
cls.operator2 = Company.objects.create(
|
||||
name="Family Fun Corp", slug="family-fun"
|
||||
)
|
||||
|
||||
# Create parks with various attributes for testing all filters
|
||||
cls.park1 = Park.objects.create(
|
||||
name="Thrilling Adventures Park",
|
||||
description="A thrilling park with lots of roller coasters",
|
||||
status="OPERATING",
|
||||
operator=cls.operator1,
|
||||
opening_date=date(2020, 1, 1),
|
||||
size_acres=100,
|
||||
ride_count=20,
|
||||
coaster_count=5,
|
||||
average_rating=4.5,
|
||||
)
|
||||
ParkLocation.objects.create(
|
||||
park=cls.park1,
|
||||
street_address="123 Thrill St",
|
||||
city="Thrill City",
|
||||
state="Thrill State",
|
||||
country="USA",
|
||||
postal_code="12345",
|
||||
)
|
||||
|
||||
cls.park2 = Park.objects.create(
|
||||
name="Family Fun Park",
|
||||
description="Family-friendly entertainment and attractions",
|
||||
status="CLOSED_TEMP",
|
||||
operator=cls.operator2,
|
||||
opening_date=date(2015, 6, 15),
|
||||
size_acres=50,
|
||||
ride_count=15,
|
||||
coaster_count=2,
|
||||
average_rating=4.0,
|
||||
)
|
||||
ParkLocation.objects.create(
|
||||
park=cls.park2,
|
||||
street_address="456 Fun St",
|
||||
city="Fun City",
|
||||
state="Fun State",
|
||||
country="Canada",
|
||||
postal_code="54321",
|
||||
)
|
||||
|
||||
# Park with minimal data for edge case testing
|
||||
cls.park3 = Park.objects.create(
|
||||
name="Incomplete Park",
|
||||
status="UNDER_CONSTRUCTION",
|
||||
operator=cls.operator1,
|
||||
)
|
||||
|
||||
def test_text_search(self):
|
||||
"""Test search functionality across different fields"""
|
||||
# Test name search
|
||||
queryset = ParkFilter(data={"search": "Thrilling"}).qs
|
||||
self.assertEqual(queryset.count(), 1)
|
||||
self.assertIn(self.park1, queryset)
|
||||
|
||||
# Test description search
|
||||
queryset = ParkFilter(data={"search": "family-friendly"}).qs
|
||||
self.assertEqual(queryset.count(), 1)
|
||||
self.assertIn(self.park2, queryset)
|
||||
|
||||
# Test location search
|
||||
queryset = ParkFilter(data={"search": "Thrill City"}).qs
|
||||
self.assertEqual(queryset.count(), 1)
|
||||
self.assertIn(self.park1, queryset)
|
||||
|
||||
# Test combined field search
|
||||
queryset = ParkFilter(data={"search": "Park"}).qs
|
||||
self.assertEqual(queryset.count(), 3)
|
||||
|
||||
# Test empty search
|
||||
queryset = ParkFilter(data={}).qs
|
||||
self.assertEqual(queryset.count(), 3)
|
||||
|
||||
def test_status_filtering(self):
|
||||
"""Test status filter with various values"""
|
||||
# Test each status
|
||||
status_tests = {
|
||||
"OPERATING": [self.park1],
|
||||
"CLOSED_TEMP": [self.park2],
|
||||
"UNDER_CONSTRUCTION": [self.park3],
|
||||
}
|
||||
|
||||
for status, expected_parks in status_tests.items():
|
||||
queryset = ParkFilter(data={"status": status}).qs
|
||||
self.assertEqual(queryset.count(), len(expected_parks))
|
||||
for park in expected_parks:
|
||||
self.assertIn(park, queryset)
|
||||
|
||||
# Test empty status (should return all)
|
||||
queryset = ParkFilter(data={}).qs
|
||||
self.assertEqual(queryset.count(), 3)
|
||||
|
||||
# Test empty string status (should return all)
|
||||
queryset = ParkFilter(data={"status": ""}).qs
|
||||
self.assertEqual(queryset.count(), 3)
|
||||
|
||||
# Test invalid status (should return no results)
|
||||
queryset = ParkFilter(data={"status": "INVALID"}).qs
|
||||
self.assertEqual(queryset.count(), 0)
|
||||
|
||||
def test_date_range_filtering(self):
|
||||
"""Test date range filter functionality"""
|
||||
# Test various date range scenarios
|
||||
test_cases = [
|
||||
# Start date only
|
||||
({"opening_date_after": "2019-01-01"}, [self.park1]),
|
||||
# End date only
|
||||
({"opening_date_before": "2016-01-01"}, [self.park2]),
|
||||
# Date range including one park
|
||||
(
|
||||
{
|
||||
"opening_date_after": "2014-01-01",
|
||||
"opening_date_before": "2016-01-01",
|
||||
},
|
||||
[self.park2],
|
||||
),
|
||||
# Date range including multiple parks
|
||||
(
|
||||
{
|
||||
"opening_date_after": "2014-01-01",
|
||||
"opening_date_before": "2022-01-01",
|
||||
},
|
||||
[self.park1, self.park2],
|
||||
),
|
||||
# Empty filter (should return all)
|
||||
({}, [self.park1, self.park2, self.park3]),
|
||||
# Future date (should return none)
|
||||
({"opening_date_after": "2030-01-01"}, []),
|
||||
]
|
||||
|
||||
for filter_data, expected_parks in test_cases:
|
||||
queryset = ParkFilter(data=filter_data).qs
|
||||
self.assertEqual(
|
||||
set(queryset),
|
||||
set(expected_parks),
|
||||
f"Failed for filter: {filter_data}",
|
||||
)
|
||||
|
||||
# Test invalid date formats
|
||||
invalid_dates = [
|
||||
{"opening_date_after": "invalid-date"},
|
||||
{"opening_date_before": "2023-13-01"}, # Invalid month
|
||||
{"opening_date_after": "2023-01-32"}, # Invalid day
|
||||
{"opening_date_before": "not-a-date"},
|
||||
]
|
||||
|
||||
for invalid_data in invalid_dates:
|
||||
filter_instance = ParkFilter(data=invalid_data)
|
||||
self.assertFalse(
|
||||
filter_instance.is_valid(),
|
||||
f"Filter should be invalid for data: {invalid_data}",
|
||||
)
|
||||
|
||||
def test_numeric_filtering(self):
|
||||
"""Test numeric filters with validation"""
|
||||
# Test minimum rides filter
|
||||
test_cases = [
|
||||
({"min_rides": "18"}, [self.park1]), # Only park1 has >= 18 rides
|
||||
(
|
||||
{"min_rides": "10"},
|
||||
[self.park1, self.park2],
|
||||
), # Both park1 and park2 have >= 10 rides
|
||||
(
|
||||
{"min_rides": "0"},
|
||||
[self.park1, self.park2, self.park3],
|
||||
), # All parks have >= 0 rides
|
||||
# No filter should return all
|
||||
({}, [self.park1, self.park2, self.park3]),
|
||||
]
|
||||
|
||||
for filter_data, expected_parks in test_cases:
|
||||
queryset = ParkFilter(data=filter_data).qs
|
||||
self.assertEqual(
|
||||
set(queryset),
|
||||
set(expected_parks),
|
||||
f"Failed for filter: {filter_data}",
|
||||
)
|
||||
|
||||
# Test coaster count filter
|
||||
queryset = ParkFilter(data={"min_coasters": "3"}).qs
|
||||
self.assertEqual(queryset.count(), 1)
|
||||
self.assertIn(self.park1, queryset)
|
||||
|
||||
# Test size filter
|
||||
queryset = ParkFilter(data={"min_size": "75"}).qs
|
||||
self.assertEqual(queryset.count(), 1)
|
||||
self.assertIn(self.park1, queryset)
|
||||
|
||||
# Test validation
|
||||
invalid_values = ["-1", "invalid", "0.5"]
|
||||
for value in invalid_values:
|
||||
filter_instance = ParkFilter(data={"min_rides": value})
|
||||
self.assertFalse(
|
||||
filter_instance.is_valid(),
|
||||
f"Filter should be invalid for value: {value}",
|
||||
)
|
||||
@@ -1,175 +0,0 @@
|
||||
"""
|
||||
Tests for park models functionality including CRUD operations,
|
||||
slug handling, status management, and location integration.
|
||||
"""
|
||||
|
||||
from django.test import TestCase
|
||||
from django.db import IntegrityError
|
||||
|
||||
from apps.parks.models import Park, ParkArea, ParkLocation, Company
|
||||
|
||||
# NOTE: These tests need to be updated to work with the new ParkLocation model
|
||||
# instead of the generic Location model
|
||||
|
||||
|
||||
class ParkModelTests(TestCase):
|
||||
def setUp(self):
|
||||
"""Set up test data"""
|
||||
self.operator = Company.objects.create(name="Test Company", slug="test-company")
|
||||
|
||||
# Create a basic park
|
||||
self.park = Park.objects.create(
|
||||
name="Test Park",
|
||||
description="A test park",
|
||||
status="OPERATING",
|
||||
operator=self.operator,
|
||||
)
|
||||
|
||||
# Create location for the park
|
||||
self.location = ParkLocation.objects.create(
|
||||
park=self.park,
|
||||
street_address="123 Test St",
|
||||
city="Test City",
|
||||
state="Test State",
|
||||
country="Test Country",
|
||||
postal_code="12345",
|
||||
)
|
||||
self.location.set_coordinates(40.7128, -74.0060)
|
||||
self.location.save()
|
||||
|
||||
def test_park_creation(self):
|
||||
"""Test basic park creation and fields"""
|
||||
self.assertEqual(self.park.name, "Test Park")
|
||||
self.assertEqual(self.park.slug, "test-park")
|
||||
self.assertEqual(self.park.status, "OPERATING")
|
||||
self.assertEqual(self.park.operator, self.operator)
|
||||
|
||||
def test_slug_generation(self):
|
||||
"""Test automatic slug generation"""
|
||||
park = Park.objects.create(
|
||||
name="Another Test Park",
|
||||
status="OPERATING",
|
||||
operator=self.operator,
|
||||
)
|
||||
self.assertEqual(park.slug, "another-test-park")
|
||||
|
||||
def test_historical_slug_lookup(self):
|
||||
"""Test finding park by historical slug"""
|
||||
from django.db import transaction
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from core.history import HistoricalSlug
|
||||
|
||||
with transaction.atomic():
|
||||
# Create initial park with a specific name/slug
|
||||
park = Park.objects.create(
|
||||
name="Original Park Name",
|
||||
description="Test description",
|
||||
status="OPERATING",
|
||||
operator=self.operator,
|
||||
)
|
||||
original_slug = park.slug
|
||||
print(f"\nInitial park created with slug: {original_slug}")
|
||||
|
||||
# Ensure we have a save to trigger history
|
||||
park.save()
|
||||
|
||||
# Modify name to trigger slug change
|
||||
park.name = "Updated Park Name"
|
||||
park.save()
|
||||
new_slug = park.slug
|
||||
print(f"Park updated with new slug: {new_slug}")
|
||||
|
||||
# Check HistoricalSlug records
|
||||
historical_slugs = HistoricalSlug.objects.filter(
|
||||
content_type=ContentType.objects.get_for_model(Park),
|
||||
object_id=park.id,
|
||||
)
|
||||
print(f"Historical slug records: {[h.slug for h in historical_slugs]}")
|
||||
|
||||
# Check pghistory records
|
||||
event_model = getattr(Park, "event_model", None)
|
||||
if event_model:
|
||||
historical_records = event_model.objects.filter(
|
||||
pgh_obj_id=park.id
|
||||
).order_by("-pgh_created_at")
|
||||
print("\nPG History records:")
|
||||
for record in historical_records:
|
||||
print(f"- Event ID: {record.pgh_id}")
|
||||
print(f" Name: {record.name}")
|
||||
print(f" Slug: {record.slug}")
|
||||
print(f" Created At: {record.pgh_created_at}")
|
||||
else:
|
||||
print("\nNo pghistory event model available")
|
||||
|
||||
# Try to find by old slug
|
||||
found_park, is_historical = Park.get_by_slug(original_slug)
|
||||
self.assertEqual(found_park.id, park.id)
|
||||
print(
|
||||
f"Found park by old slug: {found_park.slug}, is_historical: {is_historical}"
|
||||
)
|
||||
self.assertTrue(is_historical)
|
||||
|
||||
# Try current slug
|
||||
found_park, is_historical = Park.get_by_slug(new_slug)
|
||||
self.assertEqual(found_park.id, park.id)
|
||||
print(
|
||||
f"Found park by new slug: {found_park.slug}, is_historical: {is_historical}"
|
||||
)
|
||||
self.assertFalse(is_historical)
|
||||
|
||||
def test_status_color_mapping(self):
|
||||
"""Test status color class mapping"""
|
||||
status_tests = {
|
||||
"OPERATING": "bg-green-100 text-green-800",
|
||||
"CLOSED_TEMP": "bg-yellow-100 text-yellow-800",
|
||||
"CLOSED_PERM": "bg-red-100 text-red-800",
|
||||
"UNDER_CONSTRUCTION": "bg-blue-100 text-blue-800",
|
||||
"DEMOLISHED": "bg-gray-100 text-gray-800",
|
||||
"RELOCATED": "bg-purple-100 text-purple-800",
|
||||
}
|
||||
|
||||
for status, expected_color in status_tests.items():
|
||||
self.park.status = status
|
||||
self.assertEqual(self.park.get_status_color(), expected_color)
|
||||
|
||||
def test_absolute_url(self):
|
||||
"""Test get_absolute_url method"""
|
||||
expected_url = f"/parks/{self.park.slug}/"
|
||||
self.assertEqual(self.park.get_absolute_url(), expected_url)
|
||||
|
||||
|
||||
class ParkAreaModelTests(TestCase):
|
||||
def setUp(self):
|
||||
"""Set up test data"""
|
||||
self.operator = Company.objects.create(
|
||||
name="Test Company 2", slug="test-company-2"
|
||||
)
|
||||
self.park = Park.objects.create(
|
||||
name="Test Park", status="OPERATING", operator=self.operator
|
||||
)
|
||||
self.area = ParkArea.objects.create(
|
||||
park=self.park, name="Test Area", description="A test area"
|
||||
)
|
||||
|
||||
def test_area_creation(self):
|
||||
"""Test basic area creation and fields"""
|
||||
self.assertEqual(self.area.name, "Test Area")
|
||||
self.assertEqual(self.area.slug, "test-area")
|
||||
self.assertEqual(self.area.park, self.park)
|
||||
|
||||
def test_unique_together_constraint(self):
|
||||
"""Test unique_together constraint for park and slug"""
|
||||
from django.db import transaction
|
||||
|
||||
# Try to create area with same slug in same park
|
||||
with transaction.atomic():
|
||||
with self.assertRaises(IntegrityError):
|
||||
ParkArea.objects.create(
|
||||
park=self.park,
|
||||
name="Test Area", # Will generate same slug
|
||||
)
|
||||
|
||||
# Should be able to use same name in different park
|
||||
other_park = Park.objects.create(name="Other Park", operator=self.operator)
|
||||
area = ParkArea.objects.create(park=other_park, name="Test Area")
|
||||
self.assertEqual(area.slug, "test-area")
|
||||
@@ -1,173 +0,0 @@
|
||||
import pytest
|
||||
from django.urls import reverse
|
||||
from django.test import Client
|
||||
|
||||
from apps.parks.models import Park
|
||||
from apps.parks.forms import ParkAutocomplete, ParkSearchForm
|
||||
|
||||
|
||||
@pytest.mark.django_db
|
||||
class TestParkSearch:
|
||||
def test_autocomplete_results(self, client: Client):
|
||||
"""Test that autocomplete returns correct results"""
|
||||
# Create test parks
|
||||
park1 = Park.objects.create(name="Test Park")
|
||||
park2 = Park.objects.create(name="Another Park")
|
||||
park3 = Park.objects.create(name="Test Garden")
|
||||
|
||||
# Get autocomplete results
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": "Test"})
|
||||
|
||||
# Check response
|
||||
assert response.status_code == 200
|
||||
content = response.content.decode()
|
||||
assert park1.name in content
|
||||
assert park3.name in content
|
||||
assert park2.name not in content
|
||||
|
||||
def test_search_form_valid(self):
|
||||
"""Test ParkSearchForm validation"""
|
||||
form = ParkSearchForm(data={})
|
||||
assert form.is_valid()
|
||||
|
||||
def test_autocomplete_class(self):
|
||||
"""Test ParkAutocomplete configuration"""
|
||||
ac = ParkAutocomplete()
|
||||
assert ac.model == Park
|
||||
assert "name" in ac.search_attrs
|
||||
|
||||
def test_search_with_filters(self, client: Client):
|
||||
"""Test search works with filters"""
|
||||
park = Park.objects.create(name="Test Park", status="OPERATING")
|
||||
|
||||
# Search with status filter
|
||||
url = reverse("parks:park_list")
|
||||
response = client.get(url, {"park": str(park.pk), "status": "OPERATING"})
|
||||
|
||||
assert response.status_code == 200
|
||||
assert park.name in response.content.decode()
|
||||
|
||||
def test_empty_search(self, client: Client):
|
||||
"""Test empty search returns all parks"""
|
||||
Park.objects.create(name="Test Park")
|
||||
Park.objects.create(name="Another Park")
|
||||
|
||||
url = reverse("parks:park_list")
|
||||
response = client.get(url)
|
||||
|
||||
assert response.status_code == 200
|
||||
content = response.content.decode()
|
||||
assert "Test Park" in content
|
||||
assert "Another Park" in content
|
||||
|
||||
def test_partial_match_search(self, client: Client):
|
||||
"""Test partial matching in search"""
|
||||
Park.objects.create(name="Adventure World")
|
||||
Park.objects.create(name="Water Adventure")
|
||||
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": "Adv"})
|
||||
|
||||
assert response.status_code == 200
|
||||
content = response.content.decode()
|
||||
assert "Adventure World" in content
|
||||
assert "Water Adventure" in content
|
||||
|
||||
def test_htmx_request_handling(self, client: Client):
|
||||
"""Test HTMX-specific request handling"""
|
||||
Park.objects.create(name="Test Park")
|
||||
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": "Test"}, HTTP_HX_REQUEST="true")
|
||||
|
||||
assert response.status_code == 200
|
||||
assert "Test Park" in response.content.decode()
|
||||
|
||||
def test_view_mode_persistence(self, client: Client):
|
||||
"""Test view mode is maintained during search"""
|
||||
Park.objects.create(name="Test Park")
|
||||
|
||||
url = reverse("parks:park_list")
|
||||
response = client.get(url, {"park": "Test", "view_mode": "list"})
|
||||
|
||||
assert response.status_code == 200
|
||||
assert 'data-view-mode="list"' in response.content.decode()
|
||||
|
||||
def test_suggestion_limit(self, client: Client):
|
||||
"""Test that suggestions are limited to 8 items"""
|
||||
# Create 10 parks
|
||||
for i in range(10):
|
||||
Park.objects.create(name=f"Test Park {i}")
|
||||
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": "Test"})
|
||||
|
||||
content = response.content.decode()
|
||||
result_count = content.count("Test Park")
|
||||
assert result_count == 8 # Verify limit is enforced
|
||||
|
||||
def test_search_json_format(self, client: Client):
|
||||
"""Test that search returns properly formatted JSON"""
|
||||
park = Park.objects.create(
|
||||
name="Test Park",
|
||||
status="OPERATING",
|
||||
city="Test City",
|
||||
state="Test State",
|
||||
)
|
||||
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": "Test"})
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "results" in data
|
||||
assert len(data["results"]) == 1
|
||||
|
||||
result = data["results"][0]
|
||||
assert result["id"] == str(park.pk)
|
||||
assert result["name"] == "Test Park"
|
||||
assert result["status"] == "Operating"
|
||||
assert result["location"] == park.formatted_location
|
||||
assert result["url"] == reverse("parks:park_detail", kwargs={"slug": park.slug})
|
||||
|
||||
def test_empty_search_json(self, client: Client):
|
||||
"""Test empty search returns empty results array"""
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": ""})
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "results" in data
|
||||
assert len(data["results"]) == 0
|
||||
|
||||
def test_search_format_validation(self, client: Client):
|
||||
"""Test that all fields are properly formatted in search results"""
|
||||
Park.objects.create(
|
||||
name="Test Park",
|
||||
status="OPERATING",
|
||||
city="Test City",
|
||||
state="Test State",
|
||||
country="Test Country",
|
||||
)
|
||||
|
||||
expected_fields = {"id", "name", "status", "location", "url"}
|
||||
|
||||
url = reverse("parks:suggest_parks")
|
||||
response = client.get(url, {"search": "Test"})
|
||||
data = response.json()
|
||||
result = data["results"][0]
|
||||
|
||||
# Check all expected fields are present
|
||||
assert set(result.keys()) == expected_fields
|
||||
|
||||
# Check field types
|
||||
assert isinstance(result["id"], str)
|
||||
assert isinstance(result["name"], str)
|
||||
assert isinstance(result["status"], str)
|
||||
assert isinstance(result["location"], str)
|
||||
assert isinstance(result["url"], str)
|
||||
|
||||
# Check formatted location includes city and state
|
||||
assert "Test City" in result["location"]
|
||||
assert "Test State" in result["location"]
|
||||
@@ -1,109 +0,0 @@
|
||||
from django.urls import path, include
|
||||
from . import views, views_search
|
||||
from apps.rides.views import ParkSingleCategoryListView
|
||||
from .views_roadtrip import (
|
||||
RoadTripPlannerView,
|
||||
CreateTripView,
|
||||
TripDetailView,
|
||||
FindParksAlongRouteView,
|
||||
GeocodeAddressView,
|
||||
ParkDistanceCalculatorView,
|
||||
)
|
||||
|
||||
app_name = "parks"
|
||||
|
||||
urlpatterns = [
|
||||
# Park views with autocomplete search
|
||||
path("", views.ParkListView.as_view(), name="park_list"),
|
||||
path("operators/", views.OperatorListView.as_view(), name="operator_list"),
|
||||
path("create/", views.ParkCreateView.as_view(), name="park_create"),
|
||||
# Add park button endpoint (moved before park detail pattern)
|
||||
path("add-park-button/", views.add_park_button, name="add_park_button"),
|
||||
# Location search endpoints
|
||||
path("search/location/", views.location_search, name="location_search"),
|
||||
path(
|
||||
"search/reverse-geocode/",
|
||||
views.reverse_geocode,
|
||||
name="reverse_geocode",
|
||||
),
|
||||
# Areas and search endpoints for HTMX
|
||||
path("areas/", views.get_park_areas, name="get_park_areas"),
|
||||
path("suggest_parks/", views_search.suggest_parks, name="suggest_parks"),
|
||||
path("search/", views.search_parks, name="search_parks"),
|
||||
# Road trip planning URLs
|
||||
path("roadtrip/", RoadTripPlannerView.as_view(), name="roadtrip_planner"),
|
||||
path("roadtrip/create/", CreateTripView.as_view(), name="roadtrip_create"),
|
||||
path(
|
||||
"roadtrip/<str:trip_id>/",
|
||||
TripDetailView.as_view(),
|
||||
name="roadtrip_detail",
|
||||
),
|
||||
# Road trip HTMX endpoints
|
||||
path(
|
||||
"roadtrip/htmx/parks-along-route/",
|
||||
FindParksAlongRouteView.as_view(),
|
||||
name="roadtrip_htmx_parks_along_route",
|
||||
),
|
||||
path(
|
||||
"roadtrip/htmx/geocode/",
|
||||
GeocodeAddressView.as_view(),
|
||||
name="roadtrip_htmx_geocode",
|
||||
),
|
||||
path(
|
||||
"roadtrip/htmx/distance/",
|
||||
ParkDistanceCalculatorView.as_view(),
|
||||
name="roadtrip_htmx_distance",
|
||||
),
|
||||
# Park detail and related views
|
||||
path("<slug:slug>/", views.ParkDetailView.as_view(), name="park_detail"),
|
||||
path("<slug:slug>/edit/", views.ParkUpdateView.as_view(), name="park_update"),
|
||||
path("<slug:slug>/actions/", views.park_actions, name="park_actions"),
|
||||
# Area views
|
||||
path(
|
||||
"<slug:park_slug>/areas/<slug:area_slug>/",
|
||||
views.ParkAreaDetailView.as_view(),
|
||||
name="area_detail",
|
||||
),
|
||||
# Park-specific category URLs
|
||||
path(
|
||||
"<slug:park_slug>/roller-coasters/",
|
||||
ParkSingleCategoryListView.as_view(),
|
||||
{"category": "RC"},
|
||||
name="park_roller_coasters",
|
||||
),
|
||||
path(
|
||||
"<slug:park_slug>/dark-rides/",
|
||||
ParkSingleCategoryListView.as_view(),
|
||||
{"category": "DR"},
|
||||
name="park_dark_rides",
|
||||
),
|
||||
path(
|
||||
"<slug:park_slug>/flat-rides/",
|
||||
ParkSingleCategoryListView.as_view(),
|
||||
{"category": "FR"},
|
||||
name="park_flat_rides",
|
||||
),
|
||||
path(
|
||||
"<slug:park_slug>/water-rides/",
|
||||
ParkSingleCategoryListView.as_view(),
|
||||
{"category": "WR"},
|
||||
name="park_water_rides",
|
||||
),
|
||||
path(
|
||||
"<slug:park_slug>/transports/",
|
||||
ParkSingleCategoryListView.as_view(),
|
||||
{"category": "TR"},
|
||||
name="park_transports",
|
||||
),
|
||||
path(
|
||||
"<slug:park_slug>/others/",
|
||||
ParkSingleCategoryListView.as_view(),
|
||||
{"category": "OT"},
|
||||
name="park_others",
|
||||
),
|
||||
# Include park-specific rides URLs
|
||||
path(
|
||||
"<slug:park_slug>/rides/",
|
||||
include("apps.rides.park_urls", namespace="rides"),
|
||||
),
|
||||
]
|
||||
@@ -1,876 +0,0 @@
|
||||
from .querysets import get_base_park_queryset
|
||||
from apps.core.mixins import HTMXFilterableMixin
|
||||
from .models.location import ParkLocation
|
||||
from .models.media import ParkPhoto
|
||||
from apps.moderation.services import ModerationService
|
||||
from apps.moderation.mixins import (
|
||||
EditSubmissionMixin,
|
||||
PhotoSubmissionMixin,
|
||||
HistoryMixin,
|
||||
)
|
||||
from apps.core.views.views import SlugRedirectMixin
|
||||
from .filters import ParkFilter
|
||||
from .forms import ParkForm
|
||||
from .models import Park, ParkArea, ParkReview as Review
|
||||
from .services import ParkFilterService
|
||||
from django.http import (
|
||||
HttpResponseRedirect,
|
||||
HttpResponse,
|
||||
HttpRequest,
|
||||
JsonResponse,
|
||||
)
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.contrib import messages
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.db.models import QuerySet
|
||||
from django.urls import reverse
|
||||
from django.shortcuts import get_object_or_404, render
|
||||
from decimal import InvalidOperation
|
||||
from django.views.generic import DetailView, ListView, CreateView, UpdateView
|
||||
import requests
|
||||
from decimal import Decimal, ROUND_DOWN
|
||||
from typing import Any, Optional, cast, Literal, Dict
|
||||
|
||||
# Constants
|
||||
PARK_DETAIL_URL = "parks:park_detail"
|
||||
PARK_LIST_ITEM_TEMPLATE = "parks/partials/park_list_item.html"
|
||||
REQUIRED_FIELDS_ERROR = (
|
||||
"Please correct the errors below. Required fields are marked with an asterisk (*)."
|
||||
)
|
||||
ALLOWED_ROLES = ["MODERATOR", "ADMIN", "SUPERUSER"]
|
||||
|
||||
|
||||
ViewMode = Literal["grid", "list"]
|
||||
|
||||
|
||||
def normalize_osm_result(result: dict) -> dict:
|
||||
"""Normalize OpenStreetMap result to a consistent format with enhanced address details""" # noqa: E501
|
||||
from .location_utils import get_english_name, normalize_coordinate
|
||||
|
||||
# Get address details
|
||||
address = result.get("address", {})
|
||||
|
||||
# Normalize coordinates
|
||||
lat = normalize_coordinate(float(result.get("lat")), 9, 6)
|
||||
lon = normalize_coordinate(float(result.get("lon")), 10, 6)
|
||||
|
||||
# Get English names where possible
|
||||
name = ""
|
||||
if "namedetails" in result:
|
||||
name = get_english_name(result["namedetails"])
|
||||
|
||||
# Build street address from available components
|
||||
street_parts = []
|
||||
if address.get("house_number"):
|
||||
street_parts.append(address["house_number"])
|
||||
if address.get("road") or address.get("street"):
|
||||
street_parts.append(address.get("road") or address.get("street"))
|
||||
elif address.get("pedestrian"):
|
||||
street_parts.append(address["pedestrian"])
|
||||
elif address.get("footway"):
|
||||
street_parts.append(address["footway"])
|
||||
|
||||
# Handle additional address components
|
||||
suburb = address.get("suburb", "")
|
||||
district = address.get("district", "")
|
||||
neighborhood = address.get("neighbourhood", "")
|
||||
|
||||
# Build city from available components
|
||||
city = (
|
||||
address.get("city")
|
||||
or address.get("town")
|
||||
or address.get("village")
|
||||
or address.get("municipality")
|
||||
or ""
|
||||
)
|
||||
|
||||
# Get detailed state/region information
|
||||
state = (
|
||||
address.get("state") or address.get("province") or address.get("region") or ""
|
||||
)
|
||||
|
||||
# Get postal code with fallbacks
|
||||
postal_code = address.get("postcode") or address.get("postal_code") or ""
|
||||
|
||||
return {
|
||||
"display_name": name or result.get("display_name", ""),
|
||||
"lat": lat,
|
||||
"lon": lon,
|
||||
"street": " ".join(street_parts).strip(),
|
||||
"suburb": suburb,
|
||||
"district": district,
|
||||
"neighborhood": neighborhood,
|
||||
"city": city,
|
||||
"state": state,
|
||||
"country": address.get("country", ""),
|
||||
"postal_code": postal_code,
|
||||
}
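# Example (illustrative values only): a typical Nominatim entry such as
#   {"lat": "34.0522", "lon": "-118.2437",
#    "address": {"house_number": "123", "road": "Main St", "city": "Los Angeles",
#                "state": "California", "country": "USA", "postcode": "90001"}}
# is flattened to:
#   {"display_name": "...", "lat": <normalized>, "lon": <normalized>,
#    "street": "123 Main St", "suburb": "", "district": "", "neighborhood": "",
#    "city": "Los Angeles", "state": "California", "country": "USA",
#    "postal_code": "90001"}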
|
||||
|
||||
|
||||
def get_view_mode(request: HttpRequest) -> ViewMode:
|
||||
"""Get the current view mode from request, defaulting to grid"""
|
||||
view_mode = request.GET.get("view_mode", "grid")
|
||||
return cast(ViewMode, "list" if view_mode == "list" else "grid")
|
||||
|
||||
|
||||
def add_park_button(request: HttpRequest) -> HttpResponse:
|
||||
"""Return the add park button partial template"""
|
||||
return render(request, "parks/partials/add_park_button.html")
|
||||
|
||||
|
||||
def park_actions(request: HttpRequest, slug: str) -> HttpResponse:
|
||||
"""Return the park actions partial template"""
|
||||
park = get_object_or_404(Park, slug=slug)
|
||||
return render(request, "parks/partials/park_actions.html", {"park": park})
|
||||
|
||||
|
||||
def get_park_areas(request: HttpRequest) -> HttpResponse:
|
||||
"""Return park areas as options for a select element"""
|
||||
park_id = request.GET.get("park")
|
||||
if not park_id:
|
||||
return HttpResponse('<option value="">Select a park first</option>')
|
||||
|
||||
try:
|
||||
park = Park.objects.get(id=park_id)
|
||||
areas = park.areas.all()
|
||||
options = ['<option value="">No specific area</option>']
|
||||
options.extend(
|
||||
[f'<option value="{area.id}">{area.name}</option>' for area in areas]
|
||||
)
|
||||
return HttpResponse("\n".join(options))
|
||||
except Park.DoesNotExist:
|
||||
return HttpResponse('<option value="">Invalid park selected</option>')
|
||||
|
||||
|
||||
def location_search(request: HttpRequest) -> JsonResponse:
|
||||
"""Search for locations using OpenStreetMap Nominatim API"""
|
||||
query = request.GET.get("q", "")
|
||||
if not query:
|
||||
return JsonResponse({"results": []})
|
||||
|
||||
response = requests.get(
|
||||
"https://nominatim.openstreetmap.org/search",
|
||||
params={
|
||||
"q": query,
|
||||
"format": "json",
|
||||
"addressdetails": 1,
|
||||
"namedetails": 1,
|
||||
"accept-language": "en",
|
||||
"limit": 10,
|
||||
},
|
||||
headers={"User-Agent": "ThrillWiki/1.0"},
|
||||
timeout=60,
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
results = response.json()
|
||||
normalized_results = [normalize_osm_result(result) for result in results]
|
||||
valid_results = [
|
||||
r
|
||||
for r in normalized_results
|
||||
if r["lat"] is not None and r["lon"] is not None
|
||||
]
|
||||
return JsonResponse({"results": valid_results})
|
||||
|
||||
return JsonResponse({"results": []})
|
||||
|
||||
|
||||
def reverse_geocode(request: HttpRequest) -> JsonResponse:
|
||||
"""Reverse geocode coordinates using OpenStreetMap Nominatim API"""
|
||||
try:
|
||||
lat = Decimal(request.GET.get("lat", ""))
|
||||
lon = Decimal(request.GET.get("lon", ""))
|
||||
except (TypeError, ValueError, InvalidOperation):
|
||||
return JsonResponse({"error": "Invalid coordinates"}, status=400)
|
||||
|
||||
# Note: missing or empty lat/lon parameters are already rejected above, since
# Decimal("") raises InvalidOperation; 0 is a valid coordinate and must not be dropped.
|
||||
|
||||
lat = lat.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
|
||||
lon = lon.quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
|
||||
|
||||
if lat < -90 or lat > 90:
|
||||
return JsonResponse(
|
||||
{"error": "Latitude must be between -90 and 90"}, status=400
|
||||
)
|
||||
if lon < -180 or lon > 180:
|
||||
return JsonResponse(
|
||||
{"error": "Longitude must be between -180 and 180"}, status=400
|
||||
)
|
||||
|
||||
response = requests.get(
|
||||
"https://nominatim.openstreetmap.org/reverse",
|
||||
params={
|
||||
"lat": str(lat),
|
||||
"lon": str(lon),
|
||||
"format": "json",
|
||||
"addressdetails": 1,
|
||||
"namedetails": 1,
|
||||
"accept-language": "en",
|
||||
},
|
||||
headers={"User-Agent": "ThrillWiki/1.0"},
|
||||
timeout=60,
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
result = response.json()
|
||||
normalized_result = normalize_osm_result(result)
|
||||
if normalized_result["lat"] is None or normalized_result["lon"] is None:
|
||||
return JsonResponse({"error": "Invalid coordinates"}, status=400)
|
||||
return JsonResponse(normalized_result)
|
||||
|
||||
return JsonResponse({"error": "Geocoding failed"}, status=500)
|
||||
|
||||
|
||||
class ParkListView(HTMXFilterableMixin, ListView):
|
||||
model = Park
|
||||
template_name = "parks/park_list.html"
|
||||
context_object_name = "parks"
|
||||
filter_class = ParkFilter
|
||||
paginate_by = 20
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(**kwargs)
|
||||
self.filter_service = ParkFilterService()
|
||||
|
||||
def get_template_names(self) -> list[str]:
|
||||
"""Return park_list.html for HTMX requests"""
|
||||
if self.request.htmx:
|
||||
return ["parks/partials/park_list.html"]
|
||||
return [self.template_name]
|
||||
|
||||
def get_view_mode(self) -> ViewMode:
|
||||
"""Get the current view mode (grid or list)"""
|
||||
return get_view_mode(self.request)
|
||||
|
||||
def get_queryset(self) -> QuerySet[Park]:
|
||||
"""Get optimized queryset with filter service"""
|
||||
try:
|
||||
# Use filter service for optimized filtering
|
||||
filter_params = dict(self.request.GET.items())
|
||||
queryset = self.filter_service.get_filtered_queryset(filter_params)
|
||||
|
||||
# Also create filterset for form rendering
|
||||
self.filterset = self.filter_class(self.request.GET, queryset=queryset)
|
||||
return self.filterset.qs
|
||||
except Exception as e:
|
||||
messages.error(self.request, f"Error loading parks: {str(e)}")
|
||||
queryset = self.model.objects.none()
|
||||
self.filterset = self.filter_class(self.request.GET, queryset=queryset)
|
||||
return queryset
|
||||
|
||||
def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
|
||||
"""Add enhanced context with filter stats and suggestions"""
|
||||
try:
|
||||
# Initialize filterset if not exists
|
||||
if not hasattr(self, "filterset"):
|
||||
self.filterset = self.filter_class(
|
||||
self.request.GET, queryset=self.model.objects.none()
|
||||
)
|
||||
|
||||
context = super().get_context_data(**kwargs)
|
||||
|
||||
# Add filter service data
|
||||
filter_counts = self.filter_service.get_filter_counts()
|
||||
popular_filters = self.filter_service.get_popular_filters()
|
||||
|
||||
context.update(
|
||||
{
|
||||
"view_mode": self.get_view_mode(),
|
||||
"is_search": bool(self.request.GET.get("search")),
|
||||
"search_query": self.request.GET.get("search", ""),
|
||||
"filter_counts": filter_counts,
|
||||
"popular_filters": popular_filters,
|
||||
"total_results": (
|
||||
context.get("paginator").count
|
||||
if context.get("paginator")
|
||||
else 0
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
# Add filter suggestions for search queries
|
||||
search_query = self.request.GET.get("search", "")
|
||||
if search_query:
|
||||
context["filter_suggestions"] = (
|
||||
self.filter_service.get_filter_suggestions(search_query)
|
||||
)
|
||||
|
||||
return context
|
||||
|
||||
except Exception as e:
|
||||
messages.error(self.request, f"Error applying filters: {str(e)}")
|
||||
# Ensure filterset exists in error case
|
||||
if not hasattr(self, "filterset"):
|
||||
self.filterset = self.filter_class(
|
||||
self.request.GET, queryset=self.model.objects.none()
|
||||
)
|
||||
return {
|
||||
"filter": self.filterset,
|
||||
"error": "Unable to apply filters. Please try adjusting your criteria.",
|
||||
"view_mode": self.get_view_mode(),
|
||||
"is_search": bool(self.request.GET.get("search")),
|
||||
"search_query": self.request.GET.get("search", ""),
|
||||
}
|
||||
|
||||
def _get_clean_filter_params(self) -> Dict[str, Any]:
|
||||
"""Extract and clean filter parameters from request."""
|
||||
filter_params = {}
|
||||
|
||||
# Define valid filter fields
|
||||
valid_filters = {
|
||||
"status",
|
||||
"operator",
|
||||
"park_type",
|
||||
"has_coasters",
|
||||
"min_rating",
|
||||
"big_parks_only",
|
||||
"ordering",
|
||||
"search",
|
||||
}
|
||||
|
||||
for param, value in self.request.GET.items():
|
||||
if param in valid_filters and value:
|
||||
# Skip pagination parameter
|
||||
if param == "page":
|
||||
continue
|
||||
|
||||
# Clean and validate the value
|
||||
filter_params[param] = self._clean_filter_value(param, value)
|
||||
|
||||
return {k: v for k, v in filter_params.items() if v is not None}
|
||||
|
||||
def _clean_filter_value(self, param: str, value: str) -> Optional[Any]:
|
||||
"""Clean and validate a single filter value."""
|
||||
if param in ("has_coasters", "big_parks_only"):
|
||||
# Boolean filters
|
||||
return value.lower() in ("true", "1", "yes", "on")
|
||||
elif param == "min_rating":
|
||||
# Numeric filter
|
||||
try:
|
||||
rating = float(value)
|
||||
if 0 <= rating <= 5:
|
||||
return str(rating)
|
||||
except (ValueError, TypeError):
|
||||
pass # Skip invalid ratings
|
||||
return None
|
||||
elif param == "search":
|
||||
# Search filter
|
||||
clean_search = value.strip()
|
||||
return clean_search if clean_search else None
|
||||
else:
|
||||
# String filters
|
||||
return value.strip()
|
||||
|
||||
def _build_filter_query_string(self, filter_params: Dict[str, Any]) -> str:
|
||||
"""Build query string from filter parameters."""
|
||||
from urllib.parse import urlencode
|
||||
|
||||
# Convert boolean values to strings for URL
|
||||
url_params = {}
|
||||
for key, value in filter_params.items():
|
||||
if isinstance(value, bool):
|
||||
url_params[key] = "true" if value else "false"
|
||||
else:
|
||||
url_params[key] = str(value)
|
||||
|
||||
return urlencode(url_params)
|
||||
|
||||
def _get_pagination_urls(
|
||||
self, page_obj, filter_params: Dict[str, Any]
|
||||
) -> Dict[str, str]:
|
||||
"""Generate pagination URLs that preserve filter state."""
|
||||
|
||||
base_query = self._build_filter_query_string(filter_params)
|
||||
pagination_urls = {}
|
||||
|
||||
if page_obj.has_previous():
|
||||
prev_params = (
|
||||
f"{base_query}&page={page_obj.previous_page_number()}"
|
||||
if base_query
|
||||
else f"page={page_obj.previous_page_number()}"
|
||||
)
|
||||
pagination_urls["previous_url"] = f"?{prev_params}"
|
||||
|
||||
if page_obj.has_next():
|
||||
next_params = (
|
||||
f"{base_query}&page={page_obj.next_page_number()}"
|
||||
if base_query
|
||||
else f"page={page_obj.next_page_number()}"
|
||||
)
|
||||
pagination_urls["next_url"] = f"?{next_params}"
|
||||
|
||||
# First and last page URLs
|
||||
if page_obj.number > 1:
|
||||
first_params = f"{base_query}&page=1" if base_query else "page=1"
|
||||
pagination_urls["first_url"] = f"?{first_params}"
|
||||
|
||||
if page_obj.number < page_obj.paginator.num_pages:
|
||||
last_params = (
|
||||
f"{base_query}&page={page_obj.paginator.num_pages}"
|
||||
if base_query
|
||||
else f"page={page_obj.paginator.num_pages}"
|
||||
)
|
||||
pagination_urls["last_url"] = f"?{last_params}"
|
||||
|
||||
return pagination_urls
|
||||
|
||||
|
||||
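# Note (illustrative, not part of the original view): _get_clean_filter_params and
# _get_pagination_urls are helpers defined above but never called in this file.
# A minimal sketch of wiring them into get_context_data, assuming the standard
# Django "page_obj" context entry, could look like:
#
#     params = self._get_clean_filter_params()
#     if context.get("page_obj"):
#         context.update(self._get_pagination_urls(context["page_obj"], params))

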
def search_parks(request: HttpRequest) -> HttpResponse:
    """Search parks and return results using park_list_item.html"""
    try:
        search_query = request.GET.get("search", "").strip()
        if not search_query:
            return HttpResponse("")

        # Get current view mode from request
        current_view_mode = request.GET.get("view_mode", "grid")
        park_filter = ParkFilter(
            {"search": search_query}, queryset=get_base_park_queryset()
        )

        parks = park_filter.qs
        if request.GET.get("quick_search"):
            parks = parks[:8]  # Limit quick search results

        response = render(
            request,
            PARK_LIST_ITEM_TEMPLATE,
            {
                "parks": parks,
                "view_mode": current_view_mode,
                "search_query": search_query,
                "is_search": True,
            },
        )
        response["HX-Trigger"] = "searchComplete"
        return response

    except Exception as e:
        response = render(
            request,
            PARK_LIST_ITEM_TEMPLATE,
            {
                "parks": [],
                "error": f"Error performing search: {str(e)}",
                "is_search": True,
            },
        )
        response["HX-Trigger"] = "searchError"
        return response


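# Example usage (illustrative, not from the original templates): an HTMX search
# box could target this view with markup along the lines of
#   <input name="search" hx-get="/parks/search/"
#          hx-trigger="keyup changed delay:300ms" hx-target="#park-results">
# The "HX-Trigger: searchComplete" header set above lets the page react once the
# results are swapped in. The URL path and target id are assumptions; the actual
# wiring lives in the project's urls.py and templates.

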
class ParkCreateView(LoginRequiredMixin, CreateView):
    model = Park
    form_class = ParkForm
    template_name = "parks/park_form.html"

    def prepare_changes_data(self, cleaned_data: dict[str, Any]) -> dict[str, Any]:
        data = cleaned_data.copy()
        if data.get("owner"):
            data["owner"] = data["owner"].id
        if data.get("opening_date"):
            data["opening_date"] = data["opening_date"].isoformat()
        if data.get("closing_date"):
            data["closing_date"] = data["closing_date"].isoformat()
        decimal_fields = [
            "latitude",
            "longitude",
            "size_acres",
            "average_rating",
        ]
        for field in decimal_fields:
            if data.get(field):
                data[field] = str(data[field])
        return data

    def normalize_coordinates(self, form: ParkForm) -> None:
        if form.cleaned_data.get("latitude"):
            lat = Decimal(str(form.cleaned_data["latitude"]))
            form.cleaned_data["latitude"] = lat.quantize(
                Decimal("0.000001"), rounding=ROUND_DOWN
            )
        if form.cleaned_data.get("longitude"):
            lon = Decimal(str(form.cleaned_data["longitude"]))
            form.cleaned_data["longitude"] = lon.quantize(
                Decimal("0.000001"), rounding=ROUND_DOWN
            )

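    # Worked example (illustrative): quantizing with ROUND_DOWN truncates toward
    # zero rather than rounding to nearest, so
    #   Decimal("28.4742899").quantize(Decimal("0.000001"), rounding=ROUND_DOWN)
    # yields Decimal("28.474289"), not Decimal("28.474290").
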
    def form_valid(self, form: ParkForm) -> HttpResponse:
        self.normalize_coordinates(form)
        changes = self.prepare_changes_data(form.cleaned_data)

        # Use the new queue routing service
        result = ModerationService.create_edit_submission_with_queue(
            content_object=None,  # None for CREATE
            changes=changes,
            submitter=self.request.user,
            submission_type="CREATE",
            reason=self.request.POST.get("reason", ""),
            source=self.request.POST.get("source", ""),
        )

        if result["status"] == "auto_approved":
            # Moderator submission was auto-approved
            self.object = result["created_object"]

            if form.cleaned_data.get("latitude") and form.cleaned_data.get("longitude"):
                # Create or update ParkLocation
                park_location, created = ParkLocation.objects.get_or_create(
                    park=self.object,
                    defaults={
                        "street_address": form.cleaned_data.get("street_address", ""),
                        "city": form.cleaned_data.get("city", ""),
                        "state": form.cleaned_data.get("state", ""),
                        "country": form.cleaned_data.get("country", "USA"),
                        "postal_code": form.cleaned_data.get("postal_code", ""),
                    },
                )
                park_location.set_coordinates(
                    form.cleaned_data["latitude"],
                    form.cleaned_data["longitude"],
                )
                park_location.save()

            photos = self.request.FILES.getlist("photos")
            uploaded_count = 0
            for photo_file in photos:
                try:
                    ParkPhoto.objects.create(
                        image=photo_file,
                        uploaded_by=self.request.user,
                        park=self.object,
                    )
                    uploaded_count += 1
                except Exception as e:
                    messages.error(
                        self.request,
                        f"Error uploading photo {photo_file.name}: {str(e)}",
                    )

            messages.success(
                self.request,
                f"Successfully created {self.object.name}. "
                f"Added {uploaded_count} photo(s).",
            )
            return HttpResponseRedirect(self.get_success_url())

        elif result["status"] == "queued":
            # Regular user submission was queued
            messages.success(
                self.request,
                "Your park submission has been sent for review. "
                "You will be notified when it is approved.",
            )
            # Redirect to parks list since we don't have an object yet
            return HttpResponseRedirect(reverse("parks:park_list"))

        elif result["status"] == "failed":
            # Auto-approval failed
            messages.error(
                self.request,
                f"Error creating park: {result['message']}. Please check your input and try again.",
            )
            return self.form_invalid(form)

        # Fallback error case
        messages.error(
            self.request,
            "An unexpected error occurred. Please try again.",
        )
        return self.form_invalid(form)

    def get_success_url(self) -> str:
        return reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug})


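# Worked example (illustrative): prepare_changes_data() turns form data into a
# JSON-serializable dict for the moderation queue, e.g. an owner Company instance
# becomes its id, dates become ISO strings, and Decimals become strings:
#   {"name": "Example Park", "owner": 42, "opening_date": "1995-05-01",
#    "latitude": "28.474289", "longitude": "-81.467800"}
# (field values here are made up for illustration).

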
class ParkUpdateView(LoginRequiredMixin, UpdateView):
    model = Park
    form_class = ParkForm
    template_name = "parks/park_form.html"

    def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
        context = super().get_context_data(**kwargs)
        context["is_edit"] = True
        return context

    def prepare_changes_data(self, cleaned_data: dict[str, Any]) -> dict[str, Any]:
        data = cleaned_data.copy()
        if data.get("owner"):
            data["owner"] = data["owner"].id
        if data.get("opening_date"):
            data["opening_date"] = data["opening_date"].isoformat()
        if data.get("closing_date"):
            data["closing_date"] = data["closing_date"].isoformat()
        decimal_fields = [
            "latitude",
            "longitude",
            "size_acres",
            "average_rating",
        ]
        for field in decimal_fields:
            if data.get(field):
                data[field] = str(data[field])
        return data

    def normalize_coordinates(self, form: ParkForm) -> None:
        if form.cleaned_data.get("latitude"):
            lat = Decimal(str(form.cleaned_data["latitude"]))
            form.cleaned_data["latitude"] = lat.quantize(
                Decimal("0.000001"), rounding=ROUND_DOWN
            )
        if form.cleaned_data.get("longitude"):
            lon = Decimal(str(form.cleaned_data["longitude"]))
            form.cleaned_data["longitude"] = lon.quantize(
                Decimal("0.000001"), rounding=ROUND_DOWN
            )

    def form_valid(self, form: ParkForm) -> HttpResponse:  # noqa: C901
        self.normalize_coordinates(form)
        changes = self.prepare_changes_data(form.cleaned_data)

        # Use the new queue routing service
        result = ModerationService.create_edit_submission_with_queue(
            content_object=self.object,
            changes=changes,
            submitter=self.request.user,
            submission_type="EDIT",
            reason=self.request.POST.get("reason", ""),
            source=self.request.POST.get("source", ""),
        )

        if result["status"] == "auto_approved":
            # Moderator submission was auto-approved
            # The object was already updated by the service
            self.object = result["created_object"]

            location_data = {
                "name": self.object.name,
                "location_type": "park",
                "latitude": form.cleaned_data.get("latitude"),
                "longitude": form.cleaned_data.get("longitude"),
                "street_address": form.cleaned_data.get("street_address", ""),
                "city": form.cleaned_data.get("city", ""),
                "state": form.cleaned_data.get("state", ""),
                "country": form.cleaned_data.get("country", ""),
                "postal_code": form.cleaned_data.get("postal_code", ""),
            }

            # Create or update ParkLocation
            try:
                park_location = self.object.location
                # Update existing location
                for key, value in location_data.items():
                    if key in ["latitude", "longitude"] and value:
                        continue  # Handle coordinates separately
                    if hasattr(park_location, key):
                        setattr(park_location, key, value)

                # Handle coordinates if provided
                if "latitude" in location_data and "longitude" in location_data:
                    if location_data["latitude"] and location_data["longitude"]:
                        park_location.set_coordinates(
                            float(location_data["latitude"]),
                            float(location_data["longitude"]),
                        )
                park_location.save()
            except ParkLocation.DoesNotExist:
                # Create new ParkLocation
                coordinates_data = {}
                if "latitude" in location_data and "longitude" in location_data:
                    if location_data["latitude"] and location_data["longitude"]:
                        coordinates_data = {
                            "latitude": float(location_data["latitude"]),
                            "longitude": float(location_data["longitude"]),
                        }

                # Remove coordinate fields from location_data for creation
                creation_data = {
                    k: v
                    for k, v in location_data.items()
                    if k not in ["latitude", "longitude"]
                }
                creation_data.setdefault("country", "USA")

                park_location = ParkLocation.objects.create(
                    park=self.object, **creation_data
                )

                if coordinates_data:
                    park_location.set_coordinates(
                        coordinates_data["latitude"],
                        coordinates_data["longitude"],
                    )
                    park_location.save()

            photos = self.request.FILES.getlist("photos")
            uploaded_count = 0
            for photo_file in photos:
                try:
                    ParkPhoto.objects.create(
                        image=photo_file,
                        uploaded_by=self.request.user,
                        content_type=ContentType.objects.get_for_model(Park),
                        object_id=self.object.id,
                    )
                    uploaded_count += 1
                except Exception as e:
                    messages.error(
                        self.request,
                        f"Error uploading photo {photo_file.name}: {str(e)}",
                    )

            messages.success(
                self.request,
                f"Successfully updated {self.object.name}. "
                f"Added {uploaded_count} new photo(s).",
            )
            return HttpResponseRedirect(self.get_success_url())

        elif result["status"] == "queued":
            # Regular user submission was queued
            messages.success(
                self.request,
                f"Your changes to {self.object.name} have been sent for review. "
                "You will be notified when they are approved.",
            )
            return HttpResponseRedirect(
                reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug})
            )

        elif result["status"] == "failed":
            # Auto-approval failed
            messages.error(
                self.request,
                f"Error updating park: {result['message']}. Please check your input and try again.",
            )
            return self.form_invalid(form)

        # Fallback error case
        messages.error(
            self.request,
            "An unexpected error occurred. Please try again.",
        )
        return self.form_invalid(form)

    def form_invalid(self, form: ParkForm) -> HttpResponse:
        messages.error(self.request, REQUIRED_FIELDS_ERROR)
        for field, errors in form.errors.items():
            for error in errors:
                messages.error(self.request, f"{field}: {error}")
        return super().form_invalid(form)

    def get_success_url(self) -> str:
        return reverse(PARK_DETAIL_URL, kwargs={"slug": self.object.slug})


class ParkDetailView(
    SlugRedirectMixin,
    EditSubmissionMixin,
    PhotoSubmissionMixin,
    HistoryMixin,
    DetailView,
):
    model = Park
    template_name = "parks/park_detail.html"
    context_object_name = "park"

    def get_object(self, queryset: Optional[QuerySet[Park]] = None) -> Park:
        if queryset is None:
            queryset = self.get_queryset()
        slug = self.kwargs.get(self.slug_url_kwarg)
        if slug is None:
            raise ObjectDoesNotExist("No slug provided")
        park, _ = Park.get_by_slug(slug)
        return park

    def get_queryset(self) -> QuerySet[Park]:
        return cast(
            QuerySet[Park],
            super()
            .get_queryset()
            .prefetch_related(
                "rides", "rides__manufacturer", "photos", "areas", "location"
            ),
        )

    def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
        context = super().get_context_data(**kwargs)
        park = cast(Park, self.object)
        context["areas"] = park.areas.all()
        context["rides"] = park.rides.all().order_by("-status", "name")

        if self.request.user.is_authenticated:
            context["has_reviewed"] = Review.objects.filter(
                user=self.request.user,
                park=park,
            ).exists()
        else:
            context["has_reviewed"] = False

        return context

    def get_redirect_url_pattern(self) -> str:
        return PARK_DETAIL_URL


class ParkAreaDetailView(
    SlugRedirectMixin,
    EditSubmissionMixin,
    PhotoSubmissionMixin,
    HistoryMixin,
    DetailView,
):
    model = ParkArea
    template_name = "parks/area_detail.html"
    context_object_name = "area"
    slug_url_kwarg = "area_slug"

    def get_object(self, queryset: Optional[QuerySet[ParkArea]] = None) -> ParkArea:
        if queryset is None:
            queryset = self.get_queryset()
        park_slug = self.kwargs.get("park_slug")
        area_slug = self.kwargs.get("area_slug")
        if park_slug is None or area_slug is None:
            raise ObjectDoesNotExist("Missing slug")
        area, _ = ParkArea.get_by_slug(area_slug)
        if area.park.slug != park_slug:
            raise ObjectDoesNotExist("Park slug doesn't match")
        return area

    def get_context_data(self, **kwargs: Any) -> dict[str, Any]:
        context = super().get_context_data(**kwargs)
        return context

    def get_redirect_url_pattern(self) -> str:
        return PARK_DETAIL_URL

    def get_redirect_url_kwargs(self) -> dict[str, str]:
        area = cast(ParkArea, self.object)
        return {"park_slug": area.park.slug, "area_slug": area.slug}


class OperatorListView(ListView):
    """View for displaying a list of park operators"""

    template_name = "operators/operator_list.html"
    context_object_name = "operators"
    paginate_by = 24

    def get_queryset(self):
        """Get companies that are operators"""
        from .models.companies import Company
        from django.db.models import Count

        return (
            Company.objects.filter(roles__contains=["OPERATOR"])
            .annotate(park_count=Count("operated_parks"))
            .order_by("name")
        )

    def get_context_data(self, **kwargs):
        """Add context data"""
        context = super().get_context_data(**kwargs)
        context["total_operators"] = self.get_queryset().count()
        return context


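# Note (illustrative): roles__contains=["OPERATOR"] assumes Company.roles is a
# PostgreSQL ArrayField (or JSONField); under that assumption the queryset above
# matches any company whose roles include "OPERATOR", e.g.
#   Company.objects.filter(roles__contains=["OPERATOR"]).values_list("name", flat=True)

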
@@ -1,480 +0,0 @@
"""
Road trip planning views for theme parks.
Provides interfaces for creating and managing multi-park road trips.
"""

import json
import math
from typing import Dict, Any, List

from django.shortcuts import render
from django.http import JsonResponse, HttpRequest, HttpResponse
from django.views.generic import TemplateView, View
from django.urls import reverse

from .models import Park
from .services.roadtrip import RoadTripService
from apps.core.services.map_service import unified_map_service
from apps.core.services.data_structures import LocationType

JSON_DECODE_ERROR_MSG = "Invalid JSON data"
PARKS_ALONG_ROUTE_HTML = "parks/partials/parks_along_route.html"


class RoadTripViewMixin:
    """Mixin providing common functionality for road trip views."""

    def __init__(self):
        super().__init__()
        self.roadtrip_service = RoadTripService()

    def get_roadtrip_context(self) -> Dict[str, Any]:
        """Get common context data for road trip views."""
        return {
            "roadtrip_api_urls": {
                "create_trip": "/roadtrip/create/",
                "find_parks_along_route": "/roadtrip/htmx/parks-along-route/",
                "geocode": "/roadtrip/htmx/geocode/",
            },
            "max_parks_per_trip": 10,
            "default_detour_km": 50,
            "enable_osm_integration": True,
        }


class RoadTripPlannerView(RoadTripViewMixin, TemplateView):
    """
    Main road trip planning interface.

    URL: /roadtrip/
    """

    template_name = "parks/roadtrip_planner.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # get_roadtrip_context() takes no request argument
        context.update(self.get_roadtrip_context())

        # Get popular parks for suggestions
        popular_parks = (
            Park.objects.filter(status="OPERATING", location__isnull=False)
            .select_related("location", "operator")
            .order_by("-ride_count")[:20]
        )

        context.update(
            {
                "page_title": "Road Trip Planner",
                "popular_parks": popular_parks,
                "countries_with_parks": self._get_countries_with_parks(),
                "enable_route_optimization": True,
                "show_distance_estimates": True,
            }
        )

        return context

    def _get_countries_with_parks(self) -> List[str]:
        """Get list of countries that have theme parks."""
        countries = (
            Park.objects.filter(status="OPERATING", location__country__isnull=False)
            .values_list("location__country", flat=True)
            .distinct()
            .order_by("location__country")
        )
        return list(countries)


class CreateTripView(RoadTripViewMixin, View):
    """
    Generate optimized road trip routes.

    URL: /roadtrip/create/
    """

    def post(self, request: HttpRequest) -> HttpResponse:
        """Create a new road trip with optimized routing."""
        try:
            data = json.loads(request.body)

            # Parse park IDs
            park_ids = data.get("park_ids", [])
            if not park_ids or len(park_ids) < 2:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "At least 2 parks are required for a road trip",
                    },
                    status=400,
                )

            if len(park_ids) > 10:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "Maximum 10 parks allowed per trip",
                    },
                    status=400,
                )

            # Get parks
            parks = list(
                Park.objects.filter(
                    id__in=park_ids, location__isnull=False
                ).select_related("location", "operator")
            )

            if len(parks) != len(park_ids):
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "Some parks could not be found or do not have location data",
                    },
                    status=400,
                )

            # Create optimized trip
            trip = self.roadtrip_service.create_multi_park_trip(parks)

            if not trip:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "Could not create optimized route for the selected parks",
                    },
                    status=400,
                )

            # Convert trip to dict for JSON response
            trip_data = {
                "parks": [self._park_to_dict(park) for park in trip.parks],
                "legs": [self._leg_to_dict(leg) for leg in trip.legs],
                "total_distance_km": trip.total_distance_km,
                "total_duration_minutes": trip.total_duration_minutes,
                "formatted_total_distance": trip.formatted_total_distance,
                "formatted_total_duration": trip.formatted_total_duration,
            }

            return JsonResponse(
                {
                    "status": "success",
                    "data": trip_data,
                    "trip_url": reverse(
                        "parks:roadtrip_detail", kwargs={"trip_id": "temp"}
                    ),
                }
            )

        except json.JSONDecodeError:
            return JsonResponse(
                {"status": "error", "message": JSON_DECODE_ERROR_MSG},
                status=400,
            )
        except Exception as e:
            return JsonResponse(
                {
                    "status": "error",
                    "message": f"Failed to create trip: {str(e)}",
                },
                status=500,
            )

    def _park_to_dict(self, park: Park) -> Dict[str, Any]:
        """Convert park instance to dictionary."""
        return {
            "id": park.id,
            "name": park.name,
            "slug": park.slug,
            "formatted_location": getattr(park, "formatted_location", ""),
            "coordinates": park.coordinates,
            "operator": park.operator.name if park.operator else None,
            "ride_count": getattr(park, "ride_count", 0),
            "url": reverse("parks:park_detail", kwargs={"slug": park.slug}),
        }

    def _leg_to_dict(self, leg) -> Dict[str, Any]:
        """Convert trip leg to dictionary."""
        return {
            "from_park": self._park_to_dict(leg.from_park),
            "to_park": self._park_to_dict(leg.to_park),
            "distance_km": leg.route.distance_km,
            "duration_minutes": leg.route.duration_minutes,
            "formatted_distance": leg.route.formatted_distance,
            "formatted_duration": leg.route.formatted_duration,
            "geometry": leg.route.geometry,
        }


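# Example request body for CreateTripView (illustrative; the IDs are made up):
#   POST /roadtrip/create/
#   {"park_ids": [3, 17, 42]}
# A successful response carries "status": "success" and a "data" object with the
# "parks", "legs", and total distance/duration fields assembled above.

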
class TripDetailView(RoadTripViewMixin, TemplateView):
    """
    Show trip details and map.

    URL: /roadtrip/<trip_id>/
    """

    template_name = "parks/trip_detail.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # get_roadtrip_context() takes no request argument
        context.update(self.get_roadtrip_context())

        # For now, this is a placeholder since we don't persist trips
        # In a full implementation, you would retrieve the trip from database
        trip_id = kwargs.get("trip_id")

        context.update(
            {
                "page_title": f"Road Trip #{trip_id}",
                "trip_id": trip_id,
                "message": "Trip details would be loaded here. Currently trips are not persisted.",
            }
        )

        return context


class FindParksAlongRouteView(RoadTripViewMixin, View):
    """
    HTMX endpoint for route-based park discovery.

    URL: /roadtrip/htmx/parks-along-route/
    """

    def post(self, request: HttpRequest) -> HttpResponse:
        """Find parks along a route between two points."""
        try:
            data = json.loads(request.body)

            start_park_id = data.get("start_park_id")
            end_park_id = data.get("end_park_id")
            max_detour_km = min(100, max(10, float(data.get("max_detour_km", 50))))

            if not start_park_id or not end_park_id:
                return render(
                    request,
                    PARKS_ALONG_ROUTE_HTML,
                    {"error": "Start and end parks are required"},
                )

            # Get start and end parks
            try:
                start_park = Park.objects.select_related("location").get(
                    id=start_park_id, location__isnull=False
                )
                end_park = Park.objects.select_related("location").get(
                    id=end_park_id, location__isnull=False
                )
            except Park.DoesNotExist:
                return render(
                    request,
                    PARKS_ALONG_ROUTE_HTML,
                    {"error": "One or both parks could not be found"},
                )

            # Find parks along route
            parks_along_route = self.roadtrip_service.find_parks_along_route(
                start_park, end_park, max_detour_km
            )

            return render(
                request,
                PARKS_ALONG_ROUTE_HTML,
                {
                    "parks": parks_along_route,
                    "start_park": start_park,
                    "end_park": end_park,
                    "max_detour_km": max_detour_km,
                    "count": len(parks_along_route),
                },
            )

        except json.JSONDecodeError:
            return render(
                request,
                PARKS_ALONG_ROUTE_HTML,
                {"error": JSON_DECODE_ERROR_MSG},
            )
        except Exception as e:
            return render(request, PARKS_ALONG_ROUTE_HTML, {"error": str(e)})


class GeocodeAddressView(RoadTripViewMixin, View):
    """
    HTMX endpoint for geocoding addresses.

    URL: /roadtrip/htmx/geocode/
    """

    def post(self, request: HttpRequest) -> HttpResponse:
        """Geocode an address and find nearby parks."""
        try:
            data = json.loads(request.body)
            address = data.get("address", "").strip()

            if not address:
                return JsonResponse(
                    {"status": "error", "message": "Address is required"},
                    status=400,
                )

            # Geocode the address
            coordinates = self.roadtrip_service.geocode_address(address)

            if not coordinates:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "Could not geocode the provided address",
                    },
                    status=400,
                )

            # Find nearby parks
            radius_km = min(200, max(10, float(data.get("radius_km", 100))))

            # Use map service to find parks near coordinates
            from apps.core.services.data_structures import GeoBounds

            # Create a bounding box around the coordinates
            lat_delta = radius_km / 111.0  # Rough conversion: 1 degree ≈ 111 km
            # Longitude degrees shrink with latitude; scale by cos(latitude) and
            # clamp to avoid dividing by zero near the poles.
            lng_delta = radius_km / (
                111.0 * max(math.cos(math.radians(coordinates.latitude)), 0.01)
            )

            bounds = GeoBounds(
                north=coordinates.latitude + lat_delta,
                south=coordinates.latitude - lat_delta,
                east=coordinates.longitude + lng_delta,
                west=coordinates.longitude - lng_delta,
            )

            map_response = unified_map_service.get_locations_by_bounds(
                north=bounds.north,
                south=bounds.south,
                east=bounds.east,
                west=bounds.west,
                location_types={LocationType.PARK},
            )

            return JsonResponse(
                {
                    "status": "success",
                    "data": {
                        "coordinates": {
                            "latitude": coordinates.latitude,
                            "longitude": coordinates.longitude,
                        },
                        "address": address,
                        "nearby_parks": [
                            loc.to_dict() for loc in map_response.locations[:20]
                        ],
                        "radius_km": radius_km,
                    },
                }
            )

        except json.JSONDecodeError:
            return JsonResponse(
                {"status": "error", "message": JSON_DECODE_ERROR_MSG},
                status=400,
            )
        except Exception as e:
            return JsonResponse({"status": "error", "message": str(e)}, status=500)


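# Worked example (illustrative): with radius_km = 50 the latitude delta is
# 50 / 111.0 ≈ 0.45 degrees; at latitude 45° the longitude delta is
# 50 / (111.0 * cos(45°)) ≈ 50 / 78.5 ≈ 0.64 degrees, so the bounding box is
# wider in longitude than in latitude, as expected away from the equator.

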
class ParkDistanceCalculatorView(RoadTripViewMixin, View):
    """
    HTMX endpoint for calculating distances between parks.

    URL: /roadtrip/htmx/distance/
    """

    def post(self, request: HttpRequest) -> HttpResponse:
        """Calculate distance and duration between two parks."""
        try:
            data = json.loads(request.body)

            park1_id = data.get("park1_id")
            park2_id = data.get("park2_id")

            if not park1_id or not park2_id:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "Both park IDs are required",
                    },
                    status=400,
                )

            # Get parks
            try:
                park1 = Park.objects.select_related("location").get(
                    id=park1_id, location__isnull=False
                )
                park2 = Park.objects.select_related("location").get(
                    id=park2_id, location__isnull=False
                )
            except Park.DoesNotExist:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "One or both parks could not be found",
                    },
                    status=400,
                )

            # Calculate route
            coords1 = park1.coordinates
            coords2 = park2.coordinates

            if not coords1 or not coords2:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "One or both parks do not have coordinate data",
                    },
                    status=400,
                )

            from .services.roadtrip import Coordinates

            route = self.roadtrip_service.calculate_route(
                Coordinates(*coords1), Coordinates(*coords2)
            )

            if not route:
                return JsonResponse(
                    {
                        "status": "error",
                        "message": "Could not calculate route between parks",
                    },
                    status=400,
                )

            return JsonResponse(
                {
                    "status": "success",
                    "data": {
                        "distance_km": route.distance_km,
                        "duration_minutes": route.duration_minutes,
                        "formatted_distance": route.formatted_distance,
                        "formatted_duration": route.formatted_duration,
                        "park1": {
                            "name": park1.name,
                            "formatted_location": getattr(
                                park1, "formatted_location", ""
                            ),
                        },
                        "park2": {
                            "name": park2.name,
                            "formatted_location": getattr(
                                park2, "formatted_location", ""
                            ),
                        },
                    },
                }
            )

        except json.JSONDecodeError:
            return JsonResponse(
                {"status": "error", "message": JSON_DECODE_ERROR_MSG},
                status=400,
            )
        except Exception as e:
            return JsonResponse({"status": "error", "message": str(e)}, status=500)

@@ -1,59 +0,0 @@
from django.http import HttpRequest, JsonResponse
from django.views.generic import TemplateView
from django.urls import reverse

from .filters import ParkFilter
from .forms import ParkSearchForm
from .querysets import get_base_park_queryset


class ParkSearchView(TemplateView):
    """View for handling park search with autocomplete."""

    template_name = "parks/park_list.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["search_form"] = ParkSearchForm(self.request.GET)

        # Initialize filter with current querystring
        queryset = get_base_park_queryset()
        filter_instance = ParkFilter(self.request.GET, queryset=queryset)
        context["filter"] = filter_instance

        # Apply search if park ID selected via autocomplete
        park_id = self.request.GET.get("park")
        if park_id:
            queryset = filter_instance.qs.filter(id=park_id)
        else:
            queryset = filter_instance.qs

        # Handle view mode
        context["view_mode"] = self.request.GET.get("view_mode", "grid")
        context["parks"] = queryset

        return context


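# Example (illustrative): a request like /parks/search/?park=42&view_mode=list
# would render the list template narrowed to the park picked from autocomplete,
# while /parks/search/?search=cedar falls through to the normal ParkFilter
# queryset. The URL paths are assumptions; the real routes live in urls.py.

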
def suggest_parks(request: HttpRequest) -> JsonResponse:
    """Return park search suggestions as JSON."""
    query = request.GET.get("search", "").strip()
    if not query:
        return JsonResponse({"results": []})

    queryset = get_base_park_queryset()
    filter_instance = ParkFilter({"search": query}, queryset=queryset)
    parks = filter_instance.qs[:8]  # Limit to 8 suggestions

    results = [
        {
            "id": str(park.pk),
            "name": park.name,
            "status": park.get_status_display(),
            "location": park.formatted_location or "",
            "url": reverse("parks:park_detail", kwargs={"slug": park.slug}),
        }
        for park in parks
    ]

    return JsonResponse({"results": results})

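# Example response shape from suggest_parks (illustrative values only):
#   {"results": [{"id": "7", "name": "Cedar Point", "status": "Operating",
#                 "location": "Sandusky, Ohio, USA", "url": "/parks/cedar-point/"}]}
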
@@ -1,16 +0,0 @@
def prepare_changes_data(self, cleaned_data):
    data = cleaned_data.copy()
    # Convert model instances to IDs for JSON serialization
    if data.get("owner"):
        data["owner"] = data["owner"].id
    # Convert dates to ISO format strings
    if data.get("opening_date"):
        data["opening_date"] = data["opening_date"].isoformat()
    if data.get("closing_date"):
        data["closing_date"] = data["closing_date"].isoformat()
    # Convert Decimal fields to strings
    decimal_fields = ["latitude", "longitude", "size_acres", "average_rating"]
    for field in decimal_fields:
        if data.get(field):
            data[field] = str(data[field])
    return data