Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2025-12-20 08:51:09 -05:00)
remove backend
12
apps/core/__init__.py
Normal file
@@ -0,0 +1,12 @@
"""
Core Django App

This app handles core system functionality including health checks,
system status, and other foundational features.
"""

# Import core choices to ensure they are registered with the global registry
from .choices import core_choices

# Ensure choices are registered on app startup
__all__ = ['core_choices']
30
apps/core/admin.py
Normal file
@@ -0,0 +1,30 @@
from django.contrib import admin
from django.utils.html import format_html
from .models import SlugHistory


@admin.register(SlugHistory)
class SlugHistoryAdmin(admin.ModelAdmin):
    list_display = ["content_object_link", "old_slug", "created_at"]
    list_filter = ["content_type", "created_at"]
    search_fields = ["old_slug", "object_id"]
    readonly_fields = ["content_type", "object_id", "old_slug", "created_at"]
    date_hierarchy = "created_at"
    ordering = ["-created_at"]

    @admin.display(description="Object")
    def content_object_link(self, obj):
        """Create a link to the related object's admin page"""
        try:
            url = obj.content_object.get_absolute_url()
            return format_html('<a href="{}">{}</a>', url, str(obj.content_object))
        except (AttributeError, ValueError):
            return str(obj.content_object)

    def has_add_permission(self, request):
        """Disable manual creation of slug history records"""
        return False

    def has_change_permission(self, request, obj=None):
        """Disable editing of slug history records"""
        return False
125
apps/core/analytics.py
Normal file
@@ -0,0 +1,125 @@
from django.db import models
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from django.db.models import Count
from datetime import timedelta
import pghistory


@pghistory.track()
class PageView(models.Model):
    content_type = models.ForeignKey(
        ContentType, on_delete=models.CASCADE, related_name="page_views"
    )
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey("content_type", "object_id")

    timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
    ip_address = models.GenericIPAddressField()
    user_agent = models.CharField(max_length=512, blank=True)

    class Meta:
        indexes = [
            models.Index(fields=["timestamp"]),
            models.Index(fields=["content_type", "object_id"]),
        ]

    @classmethod
    def get_trending_items(cls, model_class, hours=168, limit=10):
        """Get trending items of a specific model class based on views in the last X hours.

        Args:
            model_class: The model class to get trending items for (e.g., Park, Ride)
            hours (int): Number of hours to look back for views (default: 168 = 7 days)
            limit (int): Maximum number of items to return (default: 10)

        Returns:
            QuerySet: The trending items ordered by view count
        """
        content_type = ContentType.objects.get_for_model(model_class)
        cutoff = timezone.now() - timedelta(hours=hours)

        # Query through the ContentType relationship
        item_ids = (
            cls.objects.filter(content_type=content_type, timestamp__gte=cutoff)
            .values("object_id")
            .annotate(view_count=Count("id"))
            .filter(view_count__gt=0)
            .order_by("-view_count")
            .values_list("object_id", flat=True)[:limit]
        )

        # Get the actual items in the correct order
        if item_ids:
            # Materialize the ids so the view-count ordering can be preserved below
            id_list = list(item_ids)
            # Use Case/When to preserve the ordering
            from django.db.models import Case, When

            preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(id_list)])
            return model_class.objects.filter(pk__in=id_list).order_by(preserved)

        return model_class.objects.none()

    @classmethod
    def get_views_growth(
        cls, content_type, object_id, current_period_hours, previous_period_hours
    ):
        """Get view growth statistics between two time periods.

        Args:
            content_type: ContentType instance for the model
            object_id: ID of the specific object
            current_period_hours: Hours for current period (e.g., 24)
            previous_period_hours: Hours for previous period (e.g., 48)

        Returns:
            tuple: (current_views, previous_views, growth_percentage)
        """
        now = timezone.now()

        # Current period: last X hours
        current_start = now - timedelta(hours=current_period_hours)
        current_views = cls.objects.filter(
            content_type=content_type, object_id=object_id, timestamp__gte=current_start
        ).count()

        # Previous period: X hours before the current period
        previous_start = now - timedelta(hours=previous_period_hours)
        previous_end = current_start
        previous_views = cls.objects.filter(
            content_type=content_type,
            object_id=object_id,
            timestamp__gte=previous_start,
            timestamp__lt=previous_end,
        ).count()

        # Calculate growth percentage
        if previous_views == 0:
            growth_percentage = current_views * 100 if current_views > 0 else 0
        else:
            growth_percentage = (
                (current_views - previous_views) / previous_views
            ) * 100

        return current_views, previous_views, growth_percentage

    @classmethod
    def get_total_views_count(cls, content_type, object_id, hours=168):
        """Get total view count for an object within specified hours.

        Args:
            content_type: ContentType instance for the model
            object_id: ID of the specific object
            hours: Number of hours to look back (default: 168 = 7 days)

        Returns:
            int: Total view count
        """
        cutoff = timezone.now() - timedelta(hours=hours)
        return cls.objects.filter(
            content_type=content_type, object_id=object_id, timestamp__gte=cutoff
        ).count()
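A minimal usage sketch for the PageView helpers above; Park is a hypothetical model standing in for any ContentType-registered model in this project:

from django.contrib.contenttypes.models import ContentType
from apps.core.analytics import PageView
from apps.parks.models import Park  # assumed model, for illustration only

# Top five parks by page views over the last 24 hours
trending = PageView.get_trending_items(Park, hours=24, limit=5)

# Day-over-day growth for the first trending park
park = trending.first()
if park is not None:
    ct = ContentType.objects.get_for_model(Park)
    current, previous, growth = PageView.get_views_growth(
        ct, park.pk, current_period_hours=24, previous_period_hours=48
    )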
1
apps/core/api/__init__.py
Normal file
@@ -0,0 +1 @@
# Core API infrastructure for ThrillWiki
242
apps/core/api/exceptions.py
Normal file
@@ -0,0 +1,242 @@
"""
Custom exception handling for ThrillWiki API.
Provides standardized error responses following Django styleguide patterns.
"""

from typing import Any, Dict, Optional

from django.http import Http404
from django.core.exceptions import (
    PermissionDenied,
    ValidationError as DjangoValidationError,
)
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler
from rest_framework.exceptions import (
    ValidationError as DRFValidationError,
    NotFound,
    PermissionDenied as DRFPermissionDenied,
)

from ..exceptions import ThrillWikiException
from ..logging import get_logger, log_exception

logger = get_logger(__name__)


def custom_exception_handler(
    exc: Exception, context: Dict[str, Any]
) -> Optional[Response]:
    """
    Custom exception handler for DRF that provides standardized error responses.

    Returns:
        Response with a standardized error format, or None to fall back to the default handler
    """
    # Call REST framework's default exception handler first
    response = exception_handler(exc, context)

    if response is not None:
        # Standardize the error response format
        custom_response_data = {
            "status": "error",
            "error": {
                "code": _get_error_code(exc),
                "message": _get_error_message(exc, response.data),
                "details": _get_error_details(exc, response.data),
            },
            "data": None,
        }

        # Add request context for debugging
        if hasattr(context.get("request"), "user"):
            custom_response_data["error"]["request_user"] = str(context["request"].user)

        # Log the error for monitoring
        log_exception(
            logger,
            exc,
            context={"response_status": response.status_code},
            request=context.get("request"),
        )

        response.data = custom_response_data

    # Handle ThrillWiki custom exceptions
    elif isinstance(exc, ThrillWikiException):
        custom_response_data = {
            "status": "error",
            "error": exc.to_dict(),
            "data": None,
        }

        log_exception(
            logger,
            exc,
            context={"response_status": exc.status_code},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=exc.status_code)

    # Handle specific Django exceptions that DRF doesn't catch
    elif isinstance(exc, DjangoValidationError):
        custom_response_data = {
            "status": "error",
            "error": {
                "code": "VALIDATION_ERROR",
                "message": "Validation failed",
                "details": _format_django_validation_errors(exc),
            },
            "data": None,
        }

        log_exception(
            logger,
            exc,
            context={"response_status": status.HTTP_400_BAD_REQUEST},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=status.HTTP_400_BAD_REQUEST)

    elif isinstance(exc, Http404):
        custom_response_data = {
            "status": "error",
            "error": {
                "code": "NOT_FOUND",
                "message": "Resource not found",
                "details": str(exc) if str(exc) else None,
            },
            "data": None,
        }

        log_exception(
            logger,
            exc,
            context={"response_status": status.HTTP_404_NOT_FOUND},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=status.HTTP_404_NOT_FOUND)

    elif isinstance(exc, PermissionDenied):
        custom_response_data = {
            "status": "error",
            "error": {
                "code": "PERMISSION_DENIED",
                "message": "Permission denied",
                "details": str(exc) if str(exc) else None,
            },
            "data": None,
        }

        log_exception(
            logger,
            exc,
            context={"response_status": status.HTTP_403_FORBIDDEN},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=status.HTTP_403_FORBIDDEN)

    # Catch-all for any other exceptions that might slip through
    # This ensures we ALWAYS return JSON for API endpoints
    else:
        # Check if this is an API request by looking at the URL path
        request = context.get("request")
        if request and hasattr(request, "path") and "/api/" in request.path:
            # This is an API request, so we must return JSON
            custom_response_data = {
                "status": "error",
                "error": {
                    "code": exc.__class__.__name__.upper(),
                    "message": str(exc) if str(exc) else "An unexpected error occurred",
                    "details": None,
                },
                "data": None,
            }

            # Add request context for debugging
            if hasattr(request, "user"):
                custom_response_data["error"]["request_user"] = str(request.user)

            # Log the error for monitoring
            log_exception(
                logger,
                exc,
                context={"response_status": status.HTTP_500_INTERNAL_SERVER_ERROR},
                request=request,
            )

            response = Response(
                custom_response_data, status=status.HTTP_500_INTERNAL_SERVER_ERROR
            )

    return response


def _get_error_code(exc: Exception) -> str:
    """Extract or determine error code from exception."""
    # Use getattr + isinstance to avoid static type checker errors
    default_code = getattr(exc, "default_code", None)
    if isinstance(default_code, str):
        return default_code.upper()

    if isinstance(exc, DRFValidationError):
        return "VALIDATION_ERROR"
    elif isinstance(exc, NotFound):
        return "NOT_FOUND"
    elif isinstance(exc, DRFPermissionDenied):
        return "PERMISSION_DENIED"

    return exc.__class__.__name__.upper()


def _get_error_message(exc: Exception, response_data: Any) -> str:
    """Extract user-friendly error message."""
    if isinstance(response_data, dict):
        # Handle DRF validation errors
        if "detail" in response_data:
            return str(response_data["detail"])
        elif "non_field_errors" in response_data:
            errors = response_data["non_field_errors"]
            return errors[0] if isinstance(errors, list) and errors else str(errors)
        elif len(response_data) == 1:
            key, value = next(iter(response_data.items()))
            if isinstance(value, list) and value:
                return f"{key}: {value[0]}"
            return f"{key}: {value}"

    # Fall back to the exception message
    return str(exc) if str(exc) else "An error occurred"


def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str, Any]]:
    """Extract detailed error information for debugging."""
    if isinstance(response_data, dict) and len(response_data) > 1:
        return response_data

    # Use getattr to avoid static type-checker errors when Exception doesn't define `detail`
    detail = getattr(exc, "detail", None)
    if isinstance(detail, dict):
        return detail

    return None


def _format_django_validation_errors(
    exc: DjangoValidationError,
) -> Dict[str, Any]:
    """Format Django ValidationError for API response."""
    if hasattr(exc, "error_dict"):
        # Field-specific errors
        return {
            field: [str(error) for error in errors]
            for field, errors in exc.error_dict.items()
        }
    elif hasattr(exc, "error_list"):
        # Non-field errors
        return {"non_field_errors": [str(error) for error in exc.error_list]}

    return {"non_field_errors": [str(exc)]}


# Removed _log_api_error - using centralized logging instead
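For this handler to take effect, DRF must be pointed at it in settings; a minimal sketch, assuming the module path shown in this diff:

# settings.py
REST_FRAMEWORK = {
    "EXCEPTION_HANDLER": "apps.core.api.exceptions.custom_exception_handler",
}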
297
apps/core/api/mixins.py
Normal file
@@ -0,0 +1,297 @@
"""
Common mixins for API views following Django styleguide patterns.
"""

from typing import Dict, Any, Optional, Type
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework import status

# Constants for error messages
_MISSING_INPUT_SERIALIZER_MSG = "Subclasses must set input_serializer class attribute"
_MISSING_OUTPUT_SERIALIZER_MSG = "Subclasses must set output_serializer class attribute"
_MISSING_GET_OBJECT_MSG = "Subclasses must implement get_object using selectors"


class ApiMixin:
    """
    Base mixin for API views providing standardized response formatting.
    """

    # Expose expected attributes so static type checkers know they exist on subclasses.
    # Subclasses or other bases (e.g. DRF GenericAPIView) will actually provide these.
    input_serializer: Optional[Type[Any]] = None
    output_serializer: Optional[Type[Any]] = None

    def create_response(
        self,
        *,
        data: Any = None,
        message: Optional[str] = None,
        status_code: int = status.HTTP_200_OK,
        pagination: Optional[Dict[str, Any]] = None,
        metadata: Optional[Dict[str, Any]] = None,
    ) -> Response:
        """
        Create standardized API response.

        Args:
            data: Response data
            message: Optional success message
            status_code: HTTP status code
            pagination: Pagination information
            metadata: Additional metadata

        Returns:
            Standardized Response object
        """
        response_data = {
            "status": "success" if status_code < 400 else "error",
            "data": data,
        }

        if message:
            response_data["message"] = message

        if pagination:
            response_data["pagination"] = pagination

        if metadata:
            response_data["metadata"] = metadata

        return Response(response_data, status=status_code)

    def create_error_response(
        self,
        *,
        message: str,
        status_code: int = status.HTTP_400_BAD_REQUEST,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None,
    ) -> Response:
        """
        Create standardized error response.

        Args:
            message: Error message
            status_code: HTTP status code
            error_code: Optional error code
            details: Additional error details

        Returns:
            Standardized error Response object
        """
        # explicitly allow any-shaped values in the error_data dict
        error_data: Dict[str, Any] = {
            "code": error_code or "GENERIC_ERROR",
            "message": message,
        }

        if details:
            error_data["details"] = details

        response_data = {
            "status": "error",
            "error": error_data,
            "data": None,
        }

        return Response(response_data, status=status_code)

    # Provide lightweight stubs for methods commonly supplied by other bases (DRF GenericAPIView, etc.)
    # These will raise if not implemented; they also inform static analyzers about their existence.
    def paginate_queryset(self, queryset):
        """Override / implement in subclass or provided base if pagination is needed."""
        raise NotImplementedError(
            "Subclasses must implement paginate_queryset to enable pagination"
        )

    def get_paginated_response(self, data):
        """Override / implement in subclass or provided base to return paginated responses."""
        raise NotImplementedError(
            "Subclasses must implement get_paginated_response to enable pagination"
        )

    def get_object(self):
        """Default placeholder; subclasses should implement this."""
        raise NotImplementedError(_MISSING_GET_OBJECT_MSG)


class CreateApiMixin(ApiMixin):
    """
    Mixin for create API endpoints with standardized input/output handling.
    """

    def create(self, _request: Request, *_args, **_kwargs) -> Response:
        """Handle POST requests for creating resources."""
        serializer = self.get_input_serializer(data=_request.data)
        serializer.is_valid(raise_exception=True)

        # Create the object using the service layer
        obj = self.perform_create(**serializer.validated_data)

        # Serialize the output
        output_serializer = self.get_output_serializer(obj)

        return self.create_response(
            data=output_serializer.data,
            status_code=status.HTTP_201_CREATED,
            message="Resource created successfully",
        )

    def perform_create(self, **validated_data):
        """
        Override this method to implement object creation logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_create")

    def get_input_serializer(self, *args, **kwargs):
        """Get the input serializer for validation."""
        if self.input_serializer is None:
            raise NotImplementedError(_MISSING_INPUT_SERIALIZER_MSG)
        return self.input_serializer(*args, **kwargs)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        if self.output_serializer is None:
            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
        return self.output_serializer(*args, **kwargs)


class UpdateApiMixin(ApiMixin):
    """
    Mixin for update API endpoints with standardized input/output handling.
    """

    def update(self, _request: Request, *_args, **_kwargs) -> Response:
        """Handle PUT/PATCH requests for updating resources."""
        instance = self.get_object()
        serializer = self.get_input_serializer(
            data=_request.data, partial=_kwargs.get("partial", False)
        )
        serializer.is_valid(raise_exception=True)

        # Update the object using the service layer
        updated_obj = self.perform_update(instance, **serializer.validated_data)

        # Serialize the output
        output_serializer = self.get_output_serializer(updated_obj)

        return self.create_response(
            data=output_serializer.data,
            message="Resource updated successfully",
        )

    def perform_update(self, instance, **validated_data):
        """
        Override this method to implement object update logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_update")

    def get_input_serializer(self, *args, **kwargs):
        """Get the input serializer for validation."""
        if self.input_serializer is None:
            raise NotImplementedError(_MISSING_INPUT_SERIALIZER_MSG)
        return self.input_serializer(*args, **kwargs)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        if self.output_serializer is None:
            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
        return self.output_serializer(*args, **kwargs)


class ListApiMixin(ApiMixin):
    """
    Mixin for list API endpoints with pagination and filtering.
    """

    def list(self, _request: Request, *_args, **_kwargs) -> Response:
        """Handle GET requests for listing resources."""
        # Use selector to get filtered queryset
        queryset = self.get_queryset()

        # Apply pagination
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_output_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)

        # No pagination
        serializer = self.get_output_serializer(queryset, many=True)
        return self.create_response(data=serializer.data)

    def get_queryset(self):
        """
        Override this method to use selector patterns.
        Should call selector functions, not access model managers directly.
        """
        raise NotImplementedError(
            "Subclasses must implement get_queryset using selectors"
        )

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        if self.output_serializer is None:
            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
        return self.output_serializer(*args, **kwargs)


class RetrieveApiMixin(ApiMixin):
    """
    Mixin for retrieve API endpoints.
    """

    def retrieve(self, _request: Request, *_args, **_kwargs) -> Response:
        """Handle GET requests for retrieving a single resource."""
        instance = self.get_object()
        serializer = self.get_output_serializer(instance)

        return self.create_response(data=serializer.data)

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError(_MISSING_GET_OBJECT_MSG)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        if self.output_serializer is None:
            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
        return self.output_serializer(*args, **kwargs)


class DestroyApiMixin(ApiMixin):
    """
    Mixin for delete API endpoints.
    """

    def destroy(self, _request: Request, *_args, **_kwargs) -> Response:
        """Handle DELETE requests for destroying resources."""
        instance = self.get_object()

        # Delete using service layer
        self.perform_destroy(instance)

        return self.create_response(
            status_code=status.HTTP_204_NO_CONTENT,
            message="Resource deleted successfully",
        )

    def perform_destroy(self, instance):
        """
        Override this method to implement object deletion logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_destroy")

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError(_MISSING_GET_OBJECT_MSG)
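A sketch of how these mixins compose with a plain DRF APIView. The serializers are illustrative stand-ins, and park_create is a hypothetical service-layer function that is not part of this diff:

from rest_framework import serializers
from rest_framework.views import APIView
from apps.core.api.mixins import CreateApiMixin


class ParkInputSerializer(serializers.Serializer):  # illustrative input contract
    name = serializers.CharField()


class ParkOutputSerializer(serializers.Serializer):  # illustrative output contract
    id = serializers.IntegerField()
    name = serializers.CharField()


class ParkCreateApi(CreateApiMixin, APIView):
    input_serializer = ParkInputSerializer
    output_serializer = ParkOutputSerializer

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)

    def perform_create(self, **validated_data):
        # Per the styleguide, delegate to a service function;
        # park_create is a hypothetical example.
        from apps.parks.services import park_create
        return park_create(**validated_data)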
6
apps/core/apps.py
Normal file
@@ -0,0 +1,6 @@
from django.apps import AppConfig


class CoreConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "apps.core"
32
apps/core/choices/__init__.py
Normal file
@@ -0,0 +1,32 @@
"""
Rich Choice Objects System

This module provides a comprehensive system for managing choice fields throughout
the ThrillWiki application. It replaces simple tuple-based choices with rich
dataclass objects that support metadata, descriptions, categories, and deprecation.

Key Components:
- RichChoice: Base dataclass for choice objects
- ChoiceRegistry: Centralized management of all choice definitions
- RichChoiceField: Django model field for rich choices
- RichChoiceSerializer: DRF serializer for API responses
"""

from .base import RichChoice, ChoiceCategory, ChoiceGroup
from .registry import ChoiceRegistry, register_choices
from .fields import RichChoiceField
from .serializers import RichChoiceSerializer, RichChoiceOptionSerializer
from .utils import validate_choice_value, get_choice_display

__all__ = [
    'RichChoice',
    'ChoiceCategory',
    'ChoiceGroup',
    'ChoiceRegistry',
    'register_choices',
    'RichChoiceField',
    'RichChoiceSerializer',
    'RichChoiceOptionSerializer',
    'validate_choice_value',
    'get_choice_display',
]
154
apps/core/choices/base.py
Normal file
@@ -0,0 +1,154 @@
"""
Base Rich Choice Objects

This module defines the core dataclass structures for rich choice objects.
"""

from dataclasses import dataclass, field
from typing import Dict, Any, Optional
from enum import Enum


class ChoiceCategory(Enum):
    """Categories for organizing choice types"""
    STATUS = "status"
    TYPE = "type"
    CLASSIFICATION = "classification"
    PREFERENCE = "preference"
    PERMISSION = "permission"
    PRIORITY = "priority"
    ACTION = "action"
    NOTIFICATION = "notification"
    MODERATION = "moderation"
    TECHNICAL = "technical"
    BUSINESS = "business"
    SECURITY = "security"
    OTHER = "other"


@dataclass(frozen=True)
class RichChoice:
    """
    Rich choice object with metadata support.

    This replaces simple tuple choices with a comprehensive object that can
    carry additional information like descriptions, colors, icons, and custom metadata.

    Attributes:
        value: The stored value (equivalent to first element of tuple choice)
        label: Human-readable display name (equivalent to second element of tuple choice)
        description: Detailed description of what this choice means
        metadata: Dictionary of additional properties (colors, icons, etc.)
        deprecated: Whether this choice is deprecated and should not be used for new entries
        category: Category for organizing related choices
    """
    value: str
    label: str
    description: str = ""
    metadata: Dict[str, Any] = field(default_factory=dict)
    deprecated: bool = False
    category: ChoiceCategory = ChoiceCategory.OTHER

    def __post_init__(self):
        """Validate the choice object after initialization"""
        if not self.value:
            raise ValueError("Choice value cannot be empty")
        if not self.label:
            raise ValueError("Choice label cannot be empty")

    @property
    def color(self) -> Optional[str]:
        """Get the color from metadata if available"""
        return self.metadata.get('color')

    @property
    def icon(self) -> Optional[str]:
        """Get the icon from metadata if available"""
        return self.metadata.get('icon')

    @property
    def css_class(self) -> Optional[str]:
        """Get the CSS class from metadata if available"""
        return self.metadata.get('css_class')

    @property
    def sort_order(self) -> int:
        """Get the sort order from metadata, defaulting to 0"""
        return self.metadata.get('sort_order', 0)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary representation for API serialization"""
        return {
            'value': self.value,
            'label': self.label,
            'description': self.description,
            'metadata': self.metadata,
            'deprecated': self.deprecated,
            'category': self.category.value,
            'color': self.color,
            'icon': self.icon,
            'css_class': self.css_class,
            'sort_order': self.sort_order,
        }

    def __str__(self) -> str:
        return self.label

    def __repr__(self) -> str:
        return f"RichChoice(value='{self.value}', label='{self.label}')"


@dataclass
class ChoiceGroup:
    """
    A group of related choices with shared metadata.

    This allows for organizing choices into logical groups with
    common properties and behaviors.
    """
    name: str
    choices: list[RichChoice]
    description: str = ""
    metadata: Dict[str, Any] = field(default_factory=dict)

    def __post_init__(self):
        """Validate the choice group after initialization"""
        if not self.name:
            raise ValueError("Choice group name cannot be empty")
        if not self.choices:
            raise ValueError("Choice group must contain at least one choice")

        # Validate that all choice values are unique within the group
        values = [choice.value for choice in self.choices]
        if len(values) != len(set(values)):
            raise ValueError("All choice values within a group must be unique")

    def get_choice(self, value: str) -> Optional[RichChoice]:
        """Get a choice by its value"""
        for choice in self.choices:
            if choice.value == value:
                return choice
        return None

    def get_choices_by_category(self, category: ChoiceCategory) -> list[RichChoice]:
        """Get all choices in a specific category"""
        return [choice for choice in self.choices if choice.category == category]

    def get_active_choices(self) -> list[RichChoice]:
        """Get all non-deprecated choices"""
        return [choice for choice in self.choices if not choice.deprecated]

    def to_tuple_choices(self) -> list[tuple[str, str]]:
        """Convert to legacy tuple choices format"""
        return [(choice.value, choice.label) for choice in self.choices]

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary representation for API serialization"""
        return {
            'name': self.name,
            'description': self.description,
            'metadata': self.metadata,
            'choices': [choice.to_dict() for choice in self.choices]
        }
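A quick sketch of the dataclasses above in isolation; the values here are invented for illustration:

from apps.core.choices.base import RichChoice, ChoiceGroup, ChoiceCategory

draft = RichChoice(
    value="draft",
    label="Draft",
    description="Not yet published",
    metadata={"color": "gray", "sort_order": 1},
    category=ChoiceCategory.STATUS,
)
published = RichChoice(value="published", label="Published", category=ChoiceCategory.STATUS)

group = ChoiceGroup(name="demo.publication_states", choices=[draft, published])
assert group.get_choice("draft") is draft
assert group.to_tuple_choices() == [("draft", "Draft"), ("published", "Published")]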
158
apps/core/choices/core_choices.py
Normal file
@@ -0,0 +1,158 @@
"""
Core System Rich Choice Objects

This module defines all choice objects for core system functionality,
including health checks, API statuses, and other system-level choices.
"""

from .base import RichChoice, ChoiceCategory
from .registry import register_choices


# Health Check Status Choices
HEALTH_STATUSES = [
    RichChoice(
        value="healthy",
        label="Healthy",
        description="System is operating normally with no issues detected",
        metadata={
            'color': 'green',
            'icon': 'check-circle',
            'css_class': 'bg-green-100 text-green-800',
            'sort_order': 1,
            'http_status': 200
        },
        category=ChoiceCategory.STATUS
    ),
    RichChoice(
        value="unhealthy",
        label="Unhealthy",
        description="System has detected issues that may affect functionality",
        metadata={
            'color': 'red',
            'icon': 'x-circle',
            'css_class': 'bg-red-100 text-red-800',
            'sort_order': 2,
            'http_status': 503
        },
        category=ChoiceCategory.STATUS
    ),
]

# Simple Health Check Status Choices
SIMPLE_HEALTH_STATUSES = [
    RichChoice(
        value="ok",
        label="OK",
        description="Basic health check passed",
        metadata={
            'color': 'green',
            'icon': 'check',
            'css_class': 'bg-green-100 text-green-800',
            'sort_order': 1,
            'http_status': 200
        },
        category=ChoiceCategory.STATUS
    ),
    RichChoice(
        value="error",
        label="Error",
        description="Basic health check failed",
        metadata={
            'color': 'red',
            'icon': 'x',
            'css_class': 'bg-red-100 text-red-800',
            'sort_order': 2,
            'http_status': 500
        },
        category=ChoiceCategory.STATUS
    ),
]

# Entity Type Choices for Search
ENTITY_TYPES = [
    RichChoice(
        value="park",
        label="Park",
        description="Theme parks and amusement parks",
        metadata={
            'color': 'green',
            'icon': 'map-pin',
            'css_class': 'bg-green-100 text-green-800',
            'sort_order': 1,
            'search_weight': 1.0
        },
        category=ChoiceCategory.CLASSIFICATION
    ),
    RichChoice(
        value="ride",
        label="Ride",
        description="Individual rides and attractions",
        metadata={
            'color': 'blue',
            'icon': 'activity',
            'css_class': 'bg-blue-100 text-blue-800',
            'sort_order': 2,
            'search_weight': 1.0
        },
        category=ChoiceCategory.CLASSIFICATION
    ),
    RichChoice(
        value="company",
        label="Company",
        description="Manufacturers, operators, and designers",
        metadata={
            'color': 'purple',
            'icon': 'building',
            'css_class': 'bg-purple-100 text-purple-800',
            'sort_order': 3,
            'search_weight': 0.8
        },
        category=ChoiceCategory.CLASSIFICATION
    ),
    RichChoice(
        value="user",
        label="User",
        description="User profiles and accounts",
        metadata={
            'color': 'orange',
            'icon': 'user',
            'css_class': 'bg-orange-100 text-orange-800',
            'sort_order': 4,
            'search_weight': 0.5
        },
        category=ChoiceCategory.CLASSIFICATION
    ),
]


def register_core_choices():
    """Register all core system choices with the global registry"""

    register_choices(
        name="health_statuses",
        choices=HEALTH_STATUSES,
        domain="core",
        description="Health check status options",
        metadata={'domain': 'core', 'type': 'health_status'}
    )

    register_choices(
        name="simple_health_statuses",
        choices=SIMPLE_HEALTH_STATUSES,
        domain="core",
        description="Simple health check status options",
        metadata={'domain': 'core', 'type': 'simple_health_status'}
    )

    register_choices(
        name="entity_types",
        choices=ENTITY_TYPES,
        domain="core",
        description="Entity type classifications for search functionality",
        metadata={'domain': 'core', 'type': 'entity_type'}
    )


# Auto-register choices when module is imported
register_core_choices()
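One way the http_status metadata above could be consumed, sketched as a hypothetical health-check view that is not part of this diff:

from django.http import JsonResponse
from apps.core.choices.registry import get_choice


def health_check(request):
    status_value = "healthy"  # would come from real system checks
    choice = get_choice("health_statuses", status_value, domain="core")
    http_status = choice.metadata.get("http_status", 200) if choice else 500
    return JsonResponse({"status": status_value}, status=http_status)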
198
apps/core/choices/fields.py
Normal file
@@ -0,0 +1,198 @@
"""
Django Model Fields for Rich Choices

This module provides Django model field implementations for rich choice objects.
"""

from typing import Any, Optional
from django.db import models
from django.core.exceptions import ValidationError
from django.forms import ChoiceField
from .base import RichChoice
from .registry import registry


class RichChoiceField(models.CharField):
    """
    Django model field for rich choice objects.

    This field stores the choice value as a CharField but provides
    rich choice functionality through the registry system.
    """

    def __init__(
        self,
        choice_group: str,
        domain: str = "core",
        max_length: int = 50,
        allow_deprecated: bool = False,
        **kwargs
    ):
        """
        Initialize the RichChoiceField.

        Args:
            choice_group: Name of the choice group in the registry
            domain: Domain namespace for the choice group
            max_length: Maximum length for the stored value
            allow_deprecated: Whether to allow deprecated choices
            **kwargs: Additional arguments passed to CharField
        """
        self.choice_group = choice_group
        self.domain = domain
        self.allow_deprecated = allow_deprecated

        # Set choices from registry for Django admin and forms
        if self.allow_deprecated:
            choices_list = registry.get_choices(choice_group, domain)
        else:
            choices_list = registry.get_active_choices(choice_group, domain)

        choices = [(choice.value, choice.label) for choice in choices_list]

        kwargs['choices'] = choices
        kwargs['max_length'] = max_length

        super().__init__(**kwargs)

    def validate(self, value: Any, model_instance: Any) -> None:
        """Validate the choice value"""
        super().validate(value, model_instance)

        if value is None or value == '':
            return

        # Check if choice exists in registry
        choice = registry.get_choice(self.choice_group, value, self.domain)
        if choice is None:
            raise ValidationError(
                f"'{value}' is not a valid choice for {self.choice_group}"
            )

        # Check if deprecated choices are allowed
        if choice.deprecated and not self.allow_deprecated:
            raise ValidationError(
                f"'{value}' is deprecated and cannot be used for new entries"
            )

    def get_rich_choice(self, value: str) -> Optional[RichChoice]:
        """Get the RichChoice object for a value"""
        return registry.get_choice(self.choice_group, value, self.domain)

    def get_choice_display(self, value: str) -> str:
        """Get the display label for a choice value"""
        return registry.get_choice_display(self.choice_group, value, self.domain)

    def contribute_to_class(self, cls: Any, name: str, private_only: bool = False, **kwargs: Any) -> None:
        """Add helper methods to the model class (signature compatible with Django Field)"""
        super().contribute_to_class(cls, name, private_only=private_only, **kwargs)

        # Add get_FOO_rich_choice method
        def get_rich_choice_method(instance):
            value = getattr(instance, name)
            return self.get_rich_choice(value) if value else None

        setattr(cls, f'get_{name}_rich_choice', get_rich_choice_method)

        # Add get_FOO_display method (Django provides this, but we enhance it)
        def get_display_method(instance):
            value = getattr(instance, name)
            return self.get_choice_display(value) if value else ''

        setattr(cls, f'get_{name}_display', get_display_method)

    def deconstruct(self):
        """Support for Django migrations"""
        name, path, args, kwargs = super().deconstruct()
        kwargs['choice_group'] = self.choice_group
        kwargs['domain'] = self.domain
        kwargs['allow_deprecated'] = self.allow_deprecated
        return name, path, args, kwargs


class RichChoiceFormField(ChoiceField):
    """
    Form field for rich choices with enhanced functionality.
    """

    def __init__(
        self,
        choice_group: str,
        domain: str = "core",
        allow_deprecated: bool = False,
        show_descriptions: bool = False,
        **kwargs
    ):
        """
        Initialize the form field.

        Args:
            choice_group: Name of the choice group in the registry
            domain: Domain namespace for the choice group
            allow_deprecated: Whether to allow deprecated choices
            show_descriptions: Whether to show descriptions in choice labels
            **kwargs: Additional arguments passed to ChoiceField
        """
        self.choice_group = choice_group
        self.domain = domain
        self.allow_deprecated = allow_deprecated
        self.show_descriptions = show_descriptions

        # Get choices from registry
        if allow_deprecated:
            choices_list = registry.get_choices(choice_group, domain)
        else:
            choices_list = registry.get_active_choices(choice_group, domain)

        # Format choices for display
        choices = []
        for choice in choices_list:
            label = choice.label
            if show_descriptions and choice.description:
                label = f"{choice.label} - {choice.description}"
            choices.append((choice.value, label))

        kwargs['choices'] = choices
        super().__init__(**kwargs)

    def validate(self, value: Any) -> None:
        """Validate the choice value"""
        super().validate(value)

        if value is None or value == '':
            return

        # Check if choice exists in registry
        choice = registry.get_choice(self.choice_group, value, self.domain)
        if choice is None:
            raise ValidationError(
                f"'{value}' is not a valid choice for {self.choice_group}"
            )

        # Check if deprecated choices are allowed
        if choice.deprecated and not self.allow_deprecated:
            raise ValidationError(
                f"'{value}' is deprecated and cannot be used"
            )


def create_rich_choice_field(
    choice_group: str,
    domain: str = "core",
    max_length: int = 50,
    allow_deprecated: bool = False,
    **kwargs
) -> RichChoiceField:
    """
    Factory function to create a RichChoiceField.

    This is useful for creating fields with consistent settings
    across multiple models.
    """
    return RichChoiceField(
        choice_group=choice_group,
        domain=domain,
        max_length=max_length,
        allow_deprecated=allow_deprecated,
        **kwargs
    )
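A sketch of RichChoiceField on a model. SearchEntry is hypothetical, and the choice group must already be registered at import time (apps/core/__init__.py imports core_choices for exactly this reason):

from django.db import models
from apps.core.choices.fields import RichChoiceField


class SearchEntry(models.Model):  # hypothetical model
    entity_type = RichChoiceField(choice_group="entity_types", domain="core")

# contribute_to_class then exposes, per instance:
#   entry.get_entity_type_rich_choice()  -> RichChoice for the stored value
#   entry.get_entity_type_display()      -> its label, resolved via the registry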
197
apps/core/choices/registry.py
Normal file
@@ -0,0 +1,197 @@
"""
Choice Registry

Centralized registry for managing all choice definitions across the application.
"""

from typing import Dict, List, Optional, Any
from django.core.exceptions import ImproperlyConfigured
from .base import RichChoice, ChoiceGroup


class ChoiceRegistry:
    """
    Centralized registry for managing all choice definitions.

    This provides a single source of truth for all choice objects
    throughout the application, with support for namespacing by domain.
    """

    def __init__(self):
        self._choices: Dict[str, ChoiceGroup] = {}
        self._domains: Dict[str, List[str]] = {}

    def register(
        self,
        name: str,
        choices: List[RichChoice],
        domain: str = "core",
        description: str = "",
        metadata: Optional[Dict[str, Any]] = None
    ) -> ChoiceGroup:
        """
        Register a group of choices.

        Args:
            name: Unique name for the choice group
            choices: List of RichChoice objects
            domain: Domain namespace (e.g., 'rides', 'parks', 'accounts')
            description: Description of the choice group
            metadata: Additional metadata for the group

        Returns:
            The registered ChoiceGroup

        Raises:
            ImproperlyConfigured: If name is already registered with different choices
        """
        full_name = f"{domain}.{name}"

        if full_name in self._choices:
            # Check if the existing registration is identical
            existing_group = self._choices[full_name]
            existing_values = [choice.value for choice in existing_group.choices]
            new_values = [choice.value for choice in choices]

            if existing_values == new_values:
                # Same choices, return existing group (allow duplicate registration)
                return existing_group
            else:
                # Different choices, this is an error
                raise ImproperlyConfigured(
                    f"Choice group '{full_name}' is already registered with different choices. "
                    f"Existing: {existing_values}, New: {new_values}"
                )

        choice_group = ChoiceGroup(
            name=full_name,
            choices=choices,
            description=description,
            metadata=metadata or {}
        )

        self._choices[full_name] = choice_group

        # Track domain
        if domain not in self._domains:
            self._domains[domain] = []
        self._domains[domain].append(name)

        return choice_group

    def get(self, name: str, domain: str = "core") -> Optional[ChoiceGroup]:
        """Get a choice group by name and domain"""
        full_name = f"{domain}.{name}"
        return self._choices.get(full_name)

    def get_choice(self, group_name: str, value: str, domain: str = "core") -> Optional[RichChoice]:
        """Get a specific choice by group name, value, and domain"""
        choice_group = self.get(group_name, domain)
        if choice_group:
            return choice_group.get_choice(value)
        return None

    def get_choices(self, name: str, domain: str = "core") -> List[RichChoice]:
        """Get all choices in a group"""
        choice_group = self.get(name, domain)
        return choice_group.choices if choice_group else []

    def get_active_choices(self, name: str, domain: str = "core") -> List[RichChoice]:
        """Get all non-deprecated choices in a group"""
        choice_group = self.get(name, domain)
        return choice_group.get_active_choices() if choice_group else []

    def get_domains(self) -> List[str]:
        """Get all registered domains"""
        return list(self._domains.keys())

    def get_domain_choices(self, domain: str) -> Dict[str, ChoiceGroup]:
        """Get all choice groups for a specific domain"""
        if domain not in self._domains:
            return {}

        return {
            name: self._choices[f"{domain}.{name}"]
            for name in self._domains[domain]
        }

    def list_all(self) -> Dict[str, ChoiceGroup]:
        """Get all registered choice groups"""
        return self._choices.copy()

    def validate_choice(self, group_name: str, value: str, domain: str = "core") -> bool:
        """Validate that a choice value exists in a group"""
        choice = self.get_choice(group_name, value, domain)
        return choice is not None and not choice.deprecated

    def get_choice_display(self, group_name: str, value: str, domain: str = "core") -> str:
        """Get the display label for a choice value"""
        choice = self.get_choice(group_name, value, domain)
        if choice:
            return choice.label
        else:
            raise ValueError(f"Choice value '{value}' not found in group '{group_name}' for domain '{domain}'")

    def clear_domain(self, domain: str) -> None:
        """Clear all choices for a specific domain (useful for testing)"""
        if domain in self._domains:
            for name in self._domains[domain]:
                full_name = f"{domain}.{name}"
                if full_name in self._choices:
                    del self._choices[full_name]
            del self._domains[domain]

    def clear_all(self) -> None:
        """Clear all registered choices (useful for testing)"""
        self._choices.clear()
        self._domains.clear()


# Global registry instance
registry = ChoiceRegistry()


def register_choices(
    name: str,
    choices: List[RichChoice],
    domain: str = "core",
    description: str = "",
    metadata: Optional[Dict[str, Any]] = None
) -> ChoiceGroup:
    """
    Convenience function to register choices with the global registry.

    Args:
        name: Unique name for the choice group
        choices: List of RichChoice objects
        domain: Domain namespace
        description: Description of the choice group
        metadata: Additional metadata for the group

    Returns:
        The registered ChoiceGroup
    """
    return registry.register(name, choices, domain, description, metadata)


def get_choices(name: str, domain: str = "core") -> List[RichChoice]:
    """Get choices from the global registry"""
    return registry.get_choices(name, domain)


def get_choice(group_name: str, value: str, domain: str = "core") -> Optional[RichChoice]:
    """Get a specific choice from the global registry"""
    return registry.get_choice(group_name, value, domain)


def validate_choice(group_name: str, value: str, domain: str = "core") -> bool:
    """Validate a choice value using the global registry"""
    return registry.validate_choice(group_name, value, domain)


def get_choice_display(group_name: str, value: str, domain: str = "core") -> str:
    """Get choice display label using the global registry"""
    return registry.get_choice_display(group_name, value, domain)
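A registration/lookup round trip with the module-level helpers above, using an invented "demo" domain to stay clear of real registrations:

from apps.core.choices.base import RichChoice, ChoiceCategory
from apps.core.choices.registry import register_choices, get_choice, validate_choice

register_choices(
    name="moods",
    choices=[RichChoice(value="happy", label="Happy", category=ChoiceCategory.OTHER)],
    domain="demo",  # hypothetical domain, kept separate from "core"
)
assert validate_choice("moods", "happy", domain="demo") is True
assert get_choice("moods", "happy", domain="demo").label == "Happy"

Registering the same name and domain twice with identical values is a no-op by design; different values raise ImproperlyConfigured.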
275
apps/core/choices/serializers.py
Normal file
@@ -0,0 +1,275 @@
"""
DRF Serializers for Rich Choices

This module provides Django REST Framework serializer implementations
for rich choice objects.
"""

from typing import Any, Dict, List
from rest_framework import serializers
from .base import RichChoice, ChoiceGroup
from .registry import registry


class RichChoiceSerializer(serializers.Serializer):
    """
    Serializer for individual RichChoice objects.

    This provides a consistent API representation for choice objects
    with all their metadata.
    """
    value = serializers.CharField()
    label = serializers.CharField()
    description = serializers.CharField()
    metadata = serializers.DictField()
    deprecated = serializers.BooleanField()
    category = serializers.CharField()
    color = serializers.CharField(allow_null=True)
    icon = serializers.CharField(allow_null=True)
    css_class = serializers.CharField(allow_null=True)
    sort_order = serializers.IntegerField()

    def to_representation(self, instance: RichChoice) -> Dict[str, Any]:
        """Convert RichChoice to dictionary representation"""
        return instance.to_dict()


class RichChoiceOptionSerializer(serializers.Serializer):
    """
    Serializer for choice options in filter endpoints.

    This replaces the legacy FilterOptionSerializer with rich choice support.
    """
    value = serializers.CharField()
    label = serializers.CharField()
    description = serializers.CharField(allow_blank=True)
    count = serializers.IntegerField(required=False, allow_null=True)
    selected = serializers.BooleanField(default=False)
    deprecated = serializers.BooleanField(default=False)
    color = serializers.CharField(allow_null=True, required=False)
    icon = serializers.CharField(allow_null=True, required=False)
    css_class = serializers.CharField(allow_null=True, required=False)
    metadata = serializers.DictField(required=False)

    def to_representation(self, instance) -> Dict[str, Any]:
        """Convert choice option to dictionary representation"""
        if isinstance(instance, RichChoice):
            # Convert RichChoice to option format
            return {
                'value': instance.value,
                'label': instance.label,
                'description': instance.description,
                'count': None,
                'selected': False,
                'deprecated': instance.deprecated,
                'color': instance.color,
                'icon': instance.icon,
                'css_class': instance.css_class,
                'metadata': instance.metadata,
            }
        elif isinstance(instance, dict):
            # Handle dictionary input (for backwards compatibility)
            return {
                'value': instance.get('value', ''),
                'label': instance.get('label', ''),
                'description': instance.get('description', ''),
                'count': instance.get('count'),
                'selected': instance.get('selected', False),
                'deprecated': instance.get('deprecated', False),
                'color': instance.get('color'),
                'icon': instance.get('icon'),
                'css_class': instance.get('css_class'),
                'metadata': instance.get('metadata', {}),
            }
        else:
            return super().to_representation(instance)


class ChoiceGroupSerializer(serializers.Serializer):
    """
    Serializer for ChoiceGroup objects.

    This provides API representation for entire choice groups
    with all their choices and metadata.
    """
    name = serializers.CharField()
    description = serializers.CharField()
    metadata = serializers.DictField()
    choices = RichChoiceSerializer(many=True)

    def to_representation(self, instance: ChoiceGroup) -> Dict[str, Any]:
        """Convert ChoiceGroup to dictionary representation"""
        return instance.to_dict()


class RichChoiceFieldSerializer(serializers.CharField):
    """
    Serializer field for rich choice values.

    This field serializes the choice value but can optionally
    include rich choice metadata in the response.
    """

    def __init__(
        self,
        choice_group: str,
        domain: str = "core",
        include_metadata: bool = False,
        **kwargs
    ):
        """
        Initialize the serializer field.

        Args:
            choice_group: Name of the choice group in the registry
            domain: Domain namespace for the choice group
            include_metadata: Whether to include rich choice metadata
            **kwargs: Additional arguments passed to CharField
        """
        self.choice_group = choice_group
        self.domain = domain
        self.include_metadata = include_metadata
        super().__init__(**kwargs)

    def to_representation(self, value: str) -> Any:
        """Convert choice value to representation"""
        if not value:
            return value

        if self.include_metadata:
            # Return rich choice object
            choice = registry.get_choice(self.choice_group, value, self.domain)
            if choice:
                return RichChoiceSerializer(choice).data
            else:
                # Fallback for unknown values
                return {
                    'value': value,
                    'label': value,
                    'description': '',
                    'metadata': {},
                    'deprecated': False,
                    'category': 'other',
                    'color': None,
                    'icon': None,
                    'css_class': None,
                    'sort_order': 0,
                }
        else:
            # Return just the value
            return value

    def to_internal_value(self, data: Any) -> str:
        """Convert input data to choice value"""
        if isinstance(data, dict) and 'value' in data:
            # Handle rich choice object input
            return data['value']
        else:
            # Handle string input
            return super().to_internal_value(data)


def create_choice_options_serializer(
    choice_group: str,
    domain: str = "core",
    include_counts: bool = False,
    queryset=None,
    count_field: str = 'id'
) -> List[Dict[str, Any]]:
    """
    Create choice options for filter endpoints.

    This function generates choice options with optional counts
    for use in filter metadata endpoints.

    Args:
        choice_group: Name of the choice group in the registry
        domain: Domain namespace for the choice group
        include_counts: Whether to include counts for each option
        queryset: QuerySet to count against (required if include_counts=True)
        count_field: Field to filter on for counting (default: 'id')

    Returns:
        List of choice option dictionaries
    """
    choices = registry.get_active_choices(choice_group, domain)
    options = []

    for choice in choices:
        option_data = {
            'value': choice.value,
            'label': choice.label,
            'description': choice.description,
            'selected': False,
            'deprecated': choice.deprecated,
            'color': choice.color,
            'icon': choice.icon,
            'css_class': choice.css_class,
            'metadata': choice.metadata,
        }

        if include_counts and queryset is not None:
            # Count items for this choice
            try:
                count = queryset.filter(**{count_field: choice.value}).count()
                option_data['count'] = count
            except Exception:
                # If counting fails, set count to None
                option_data['count'] = None
        else:
            option_data['count'] = None

        options.append(option_data)

    # Sort by sort_order, then by label
    options.sort(key=lambda x: (
        (lambda c: c.sort_order if (c is not None and hasattr(c, 'sort_order')) else 0)(
            registry.get_choice(choice_group, x['value'], domain)
        ),
        x['label']
    ))

    return options


def serialize_choice_value(
    value: str,
    choice_group: str,
    domain: str = "core",
    include_metadata: bool = False
) -> Any:
    """
    Serialize a single choice value.

    Args:
        value: The choice value to serialize
        choice_group: Name of the choice group in the registry
        domain: Domain namespace for the choice group
        include_metadata: Whether to include rich choice metadata

    Returns:
        Serialized choice value (string or rich object)
    """
    if not value:
        return value

    if include_metadata:
        choice = registry.get_choice(choice_group, value, domain)
        if choice:
            return RichChoiceSerializer(choice).data
        else:
            # Fallback for unknown values
            return {
                'value': value,
                'label': value,
                'description': '',
                'metadata': {},
                'deprecated': False,
|
||||
'category': 'other',
|
||||
'color': None,
|
||||
'icon': None,
|
||||
'css_class': None,
|
||||
'sort_order': 0,
|
||||
}
|
||||
else:
|
||||
return value
|
||||
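
For orientation, a filter endpoint could consume these helpers roughly as follows. This is a minimal sketch, not part of this commit; the "park_status" group name, the "parks" domain, and the Park model with a status field are assumptions for illustration.

# Hypothetical usage sketch (not in this commit); names below are assumed.
from apps.core.choices.serializers import create_choice_options_serializer
from apps.parks.models import Park  # assumed model path


def park_status_filter_options():
    # Build filter options with a result count per status value.
    return create_choice_options_serializer(
        choice_group="park_status",  # assumed group name
        domain="parks",              # assumed domain
        include_counts=True,
        queryset=Park.objects.all(),
        count_field="status",
    )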
318
apps/core/choices/utils.py
Normal file
@@ -0,0 +1,318 @@
"""
Utility Functions for Rich Choices

This module provides utility functions for working with rich choice objects.
"""

from typing import Any, Dict, List, Optional, Tuple
from .base import RichChoice, ChoiceCategory
from .registry import registry


def validate_choice_value(
    value: str,
    choice_group: str,
    domain: str = "core",
    allow_deprecated: bool = False
) -> bool:
    """
    Validate that a choice value is valid for a given choice group.

    Args:
        value: The choice value to validate
        choice_group: Name of the choice group in the registry
        domain: Domain namespace for the choice group
        allow_deprecated: Whether to allow deprecated choices

    Returns:
        True if valid, False otherwise
    """
    if not value:
        return True  # Allow empty values (handled by field's null/blank settings)

    choice = registry.get_choice(choice_group, value, domain)
    if choice is None:
        return False

    if choice.deprecated and not allow_deprecated:
        return False

    return True


def get_choice_display(
    value: str,
    choice_group: str,
    domain: str = "core"
) -> str:
    """
    Get the display label for a choice value.

    Args:
        value: The choice value
        choice_group: Name of the choice group in the registry
        domain: Domain namespace for the choice group

    Returns:
        Display label for the choice

    Raises:
        ValueError: If the choice value is not found in the registry
    """
    if not value:
        return ""

    choice = registry.get_choice(choice_group, value, domain)
    if choice:
        return choice.label
    else:
        raise ValueError(
            f"Choice value '{value}' not found in group '{choice_group}' "
            f"for domain '{domain}'"
        )


def create_status_choices(
    statuses: Dict[str, Dict[str, Any]],
    category: ChoiceCategory = ChoiceCategory.STATUS
) -> List[RichChoice]:
    """
    Create status choices with consistent color coding.

    Args:
        statuses: Dictionary mapping status value to config dict
        category: Choice category (defaults to STATUS)

    Returns:
        List of RichChoice objects for statuses
    """
    choices = []

    for value, config in statuses.items():
        metadata = config.get('metadata', {})

        # Add default status colors if not specified
        if 'color' not in metadata:
            if 'operating' in value.lower() or 'active' in value.lower():
                metadata['color'] = 'green'
            elif 'closed' in value.lower() or 'inactive' in value.lower():
                metadata['color'] = 'red'
            elif 'temp' in value.lower() or 'pending' in value.lower():
                metadata['color'] = 'yellow'
            elif 'construction' in value.lower():
                metadata['color'] = 'blue'
            else:
                metadata['color'] = 'gray'

        choice = RichChoice(
            value=value,
            label=config['label'],
            description=config.get('description', ''),
            metadata=metadata,
            deprecated=config.get('deprecated', False),
            category=category
        )
        choices.append(choice)

    return choices


def create_type_choices(
    types: Dict[str, Dict[str, Any]],
    category: ChoiceCategory = ChoiceCategory.TYPE
) -> List[RichChoice]:
    """
    Create type/classification choices.

    Args:
        types: Dictionary mapping type value to config dict
        category: Choice category (defaults to TYPE)

    Returns:
        List of RichChoice objects for types
    """
    choices = []

    for value, config in types.items():
        choice = RichChoice(
            value=value,
            label=config['label'],
            description=config.get('description', ''),
            metadata=config.get('metadata', {}),
            deprecated=config.get('deprecated', False),
            category=category
        )
        choices.append(choice)

    return choices


def merge_choice_metadata(
    base_metadata: Dict[str, Any],
    override_metadata: Dict[str, Any]
) -> Dict[str, Any]:
    """
    Merge choice metadata dictionaries.

    Args:
        base_metadata: Base metadata dictionary
        override_metadata: Override metadata dictionary

    Returns:
        Merged metadata dictionary
    """
    merged = base_metadata.copy()
    merged.update(override_metadata)
    return merged


def filter_choices_by_category(
    choices: List[RichChoice],
    category: ChoiceCategory
) -> List[RichChoice]:
    """
    Filter choices by category.

    Args:
        choices: List of RichChoice objects
        category: Category to filter by

    Returns:
        Filtered list of choices
    """
    return [choice for choice in choices if choice.category == category]


def sort_choices(
    choices: List[RichChoice],
    sort_by: str = "sort_order"
) -> List[RichChoice]:
    """
    Sort choices by specified criteria.

    Args:
        choices: List of RichChoice objects
        sort_by: Sort criteria ("sort_order", "label", "value")

    Returns:
        Sorted list of choices
    """
    if sort_by == "sort_order":
        return sorted(choices, key=lambda x: (x.sort_order, x.label))
    elif sort_by == "label":
        return sorted(choices, key=lambda x: x.label)
    elif sort_by == "value":
        return sorted(choices, key=lambda x: x.value)
    else:
        return choices


def get_choice_colors(
    choice_group: str,
    domain: str = "core"
) -> Dict[str, str]:
    """
    Get a mapping of choice values to their colors.

    Args:
        choice_group: Name of the choice group in the registry
        domain: Domain namespace for the choice group

    Returns:
        Dictionary mapping choice values to colors
    """
    choices = registry.get_choices(choice_group, domain)
    return {
        choice.value: choice.color
        for choice in choices
        if choice.color
    }


def validate_choice_group_data(
    name: str,
    choices: List[RichChoice],
    domain: str = "core"
) -> List[str]:
    """
    Validate choice group data and return list of errors.

    Args:
        name: Choice group name
        choices: List of RichChoice objects
        domain: Domain namespace

    Returns:
        List of validation error messages
    """
    errors = []

    if not name:
        errors.append("Choice group name cannot be empty")

    if not choices:
        errors.append("Choice group must contain at least one choice")
        return errors

    # Check for duplicate values
    values = [choice.value for choice in choices]
    if len(values) != len(set(values)):
        duplicates = [v for v in values if values.count(v) > 1]
        errors.append(f"Duplicate choice values found: {', '.join(set(duplicates))}")

    # Validate individual choices
    for i, choice in enumerate(choices):
        try:
            # This will trigger __post_init__ validation
            RichChoice(
                value=choice.value,
                label=choice.label,
                description=choice.description,
                metadata=choice.metadata,
                deprecated=choice.deprecated,
                category=choice.category
            )
        except ValueError as e:
            errors.append(f"Choice {i}: {str(e)}")

    return errors


def create_choice_from_config(config: Dict[str, Any]) -> RichChoice:
    """
    Create a RichChoice from a configuration dictionary.

    Args:
        config: Configuration dictionary with choice data

    Returns:
        RichChoice object
    """
    return RichChoice(
        value=config['value'],
        label=config['label'],
        description=config.get('description', ''),
        metadata=config.get('metadata', {}),
        deprecated=config.get('deprecated', False),
        category=ChoiceCategory(config.get('category', 'other'))
    )


def export_choices_to_dict(
    choice_group: str,
    domain: str = "core"
) -> Dict[str, Any]:
    """
    Export a choice group to a dictionary format.

    Args:
        choice_group: Name of the choice group in the registry
        domain: Domain namespace for the choice group

    Returns:
        Dictionary representation of the choice group
    """
    group = registry.get(choice_group, domain)
    if not group:
        return {}

    return group.to_dict()
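
As a quick illustration, validating a stored value and resolving its display label go together. A minimal sketch, not part of this commit; the "park_status" group and "parks" domain are assumed names.

# Hypothetical usage sketch (not in this commit); group and domain names are assumed.
from apps.core.choices.utils import validate_choice_value, get_choice_display

if validate_choice_value("operating", "park_status", domain="parks"):
    label = get_choice_display("operating", "park_status", domain="parks")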
1
apps/core/decorators/__init__.py
Normal file
@@ -0,0 +1 @@
# Decorators module
421
apps/core/decorators/cache_decorators.py
Normal file
@@ -0,0 +1,421 @@
"""
Advanced caching decorators for API views and functions.
"""

import hashlib
import json
import time
from functools import wraps
from typing import Optional, List, Callable, Any, Dict
from django.http import HttpRequest, HttpResponseBase
from django.utils.decorators import method_decorator
from django.views.decorators.vary import vary_on_headers
from django.views import View
from apps.core.services.enhanced_cache_service import EnhancedCacheService
import logging

logger = logging.getLogger(__name__)


def cache_api_response(
    timeout: int = 1800,
    vary_on: Optional[List[str]] = None,
    key_prefix: str = "api",
    cache_backend: str = "api",
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """
    Advanced decorator for caching API responses with flexible configuration.

    Args:
        timeout: Cache timeout in seconds
        vary_on: List of request attributes to vary cache on
        key_prefix: Prefix for cache keys
        cache_backend: Cache backend to use
    """

    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            # Only cache GET requests
            if request.method != "GET":
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix,
                view_func.__name__,
                (
                    str(getattr(request.user, "id", "anonymous"))
                    if request.user.is_authenticated
                    else "anonymous"
                ),
                str(hash(frozenset(request.GET.items()))),
            ]

            # Add URL parameters to cache key
            if args:
                cache_key_parts.append(str(hash(args)))
            if kwargs:
                cache_key_parts.append(str(hash(frozenset(kwargs.items()))))

            # Add custom vary_on fields
            if vary_on:
                for field in vary_on:
                    value = getattr(request, field, "")
                    cache_key_parts.append(str(value))

            cache_key = ":".join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_response:
                logger.debug(
                    f"Cache hit for API view {view_func.__name__}",
                    extra={
                        "cache_key": cache_key,
                        "view": view_func.__name__,
                        "cache_hit": True,
                    },
                )
                return cached_response

            # Execute view and cache result
            start_time = time.time()
            response = view_func(self, request, *args, **kwargs)
            execution_time = time.time() - start_time

            # Only cache successful responses
            if hasattr(response, "status_code") and response.status_code == 200:
                getattr(cache_service, cache_backend + "_cache").set(
                    cache_key, response, timeout
                )
                logger.debug(
                    f"Cached API response for view {view_func.__name__}",
                    extra={
                        "cache_key": cache_key,
                        "view": view_func.__name__,
                        "execution_time": execution_time,
                        "cache_timeout": timeout,
                        "cache_miss": True,
                    },
                )
            else:
                logger.debug(
                    f"Not caching response for view {view_func.__name__} "
                    f"(status: {getattr(response, 'status_code', 'unknown')})"
                )

            return response

        return wrapper

    return decorator


def cache_queryset_result(
    cache_key_template: str, timeout: int = 3600, cache_backend: str = "default"
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """
    Decorator for caching expensive queryset operations.

    Args:
        cache_key_template: Template for cache key (can use format placeholders)
        timeout: Cache timeout in seconds
        cache_backend: Cache backend to use
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key from template and arguments
            try:
                cache_key = cache_key_template.format(*args, **kwargs)
            except (KeyError, IndexError):
                # Fallback to simpler key generation
                cache_key = f"{cache_key_template}:{hash(str(args) + str(kwargs))}"

            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_result is not None:
                logger.debug(f"Cache hit for queryset operation: {func.__name__}")
                return cached_result

            # Execute function and cache result
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            getattr(cache_service, cache_backend + "_cache").set(
                cache_key, result, timeout
            )
            logger.debug(
                f"Cached queryset result for {func.__name__}",
                extra={
                    "cache_key": cache_key,
                    "function": func.__name__,
                    "execution_time": execution_time,
                    "cache_timeout": timeout,
                },
            )

            return result

        return wrapper

    return decorator


def invalidate_cache_on_save(
    model_name: str, cache_patterns: Optional[List[str]] = None
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """
    Decorator to invalidate cache when model instances are saved.

    Args:
        model_name: Name of the model
        cache_patterns: List of cache key patterns to invalidate
    """

    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)

            # Invalidate related cache entries
            cache_service = EnhancedCacheService()

            # Standard model cache invalidation
            instance_id = getattr(self, "id", None)
            cache_service.invalidate_model_cache(model_name, instance_id)

            # Custom pattern invalidation
            if cache_patterns:
                for pattern in cache_patterns:
                    if instance_id:
                        pattern = pattern.format(model=model_name, id=instance_id)
                    cache_service.invalidate_pattern(pattern)

            logger.info(
                f"Invalidated cache for {model_name} after save",
                extra={
                    "model": model_name,
                    "instance_id": instance_id,
                    "patterns": cache_patterns,
                },
            )

            return result

        return wrapper

    return decorator


class CachedAPIViewMixin(View):
    """Mixin to add caching capabilities to API views"""

    cache_timeout = 1800  # 30 minutes default
    cache_vary_on = ["version"]
    cache_key_prefix = "api"
    cache_backend = "api"

    @method_decorator(vary_on_headers("User-Agent", "Accept-Language"))
    def dispatch(
        self, request: HttpRequest, *args: Any, **kwargs: Any
    ) -> HttpResponseBase:
        """Add caching to the dispatch method"""
        if request.method == "GET" and getattr(self, "enable_caching", True):
            return self._cached_dispatch(request, *args, **kwargs)
        return super().dispatch(request, *args, **kwargs)

    def _cached_dispatch(
        self, request: HttpRequest, *args: Any, **kwargs: Any
    ) -> HttpResponseBase:
        """Handle cached dispatch for GET requests"""
        cache_key = self._generate_cache_key(request, *args, **kwargs)

        cache_service = EnhancedCacheService()
        cached_response = getattr(cache_service, self.cache_backend + "_cache").get(
            cache_key
        )

        if cached_response:
            logger.debug(f"Cache hit for view {self.__class__.__name__}")
            return cached_response

        # Execute view
        response = super().dispatch(request, *args, **kwargs)

        # Cache successful responses
        if hasattr(response, "status_code") and response.status_code == 200:
            getattr(cache_service, self.cache_backend + "_cache").set(
                cache_key, response, self.cache_timeout
            )
            logger.debug(f"Cached response for view {self.__class__.__name__}")

        return response

    def _generate_cache_key(
        self, request: HttpRequest, *args: Any, **kwargs: Any
    ) -> str:
        """Generate cache key for the request"""
        key_parts = [
            self.cache_key_prefix,
            self.__class__.__name__,
            request.method,
            (
                str(getattr(request.user, "id", "anonymous"))
                if request.user.is_authenticated
                else "anonymous"
            ),
            str(hash(frozenset(request.GET.items()))),
        ]

        if args:
            key_parts.append(str(hash(args)))
        if kwargs:
            key_parts.append(str(hash(frozenset(kwargs.items()))))

        # Add vary_on fields
        for field in self.cache_vary_on:
            value = getattr(request, field, "")
            key_parts.append(str(value))

        return ":".join(key_parts)


def smart_cache(
    timeout: int = 3600,
    key_func: Optional[Callable[..., str]] = None,
    invalidate_on: Optional[List[str]] = None,
    cache_backend: str = "default",
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """
    Smart caching decorator that adapts to function arguments.

    Args:
        timeout: Cache timeout in seconds
        key_func: Custom function to generate cache key
        invalidate_on: List of signals to invalidate cache on
        cache_backend: Cache backend to use
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key
            if key_func:
                cache_key = key_func(*args, **kwargs)
            else:
                # Default key generation
                key_data = {
                    "func": f"{func.__module__}.{func.__name__}",
                    "args": str(args),
                    "kwargs": json.dumps(kwargs, sort_keys=True, default=str),
                }
                key_string = json.dumps(key_data, sort_keys=True)
                cache_key = (
                    f"smart_cache:{hashlib.md5(key_string.encode()).hexdigest()}"
                )

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_result is not None:
                logger.debug(f"Smart cache hit for {func.__name__}")
                return cached_result

            # Execute function
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            # Cache result
            getattr(cache_service, cache_backend + "_cache").set(
                cache_key, result, timeout
            )

            logger.debug(
                f"Smart cached result for {func.__name__}",
                extra={
                    "cache_key": cache_key,
                    "execution_time": execution_time,
                    "function": func.__name__,
                },
            )

            return result

        # Add cache invalidation if specified
        if invalidate_on:
            setattr(wrapper, "_cache_invalidate_on", invalidate_on)
            setattr(wrapper, "_cache_backend", cache_backend)

        return wrapper

    return decorator


def conditional_cache(
    condition_func: Callable[..., bool], **cache_kwargs: Any
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """
    Cache decorator that only caches when condition is met.

    Args:
        condition_func: Function that returns True if caching should be applied
        **cache_kwargs: Arguments passed to smart_cache
    """

    def decorator(func):
        cached_func = smart_cache(**cache_kwargs)(func)

        @wraps(func)
        def wrapper(*args, **kwargs):
            if condition_func(*args, **kwargs):
                return cached_func(*args, **kwargs)
            else:
                return func(*args, **kwargs)

        return wrapper

    return decorator


# Utility functions for cache key generation
def generate_user_cache_key(user: Any, suffix: str = "") -> str:
    """Generate cache key based on user"""
    user_id = user.id if user.is_authenticated else "anonymous"
    return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}"


def generate_model_cache_key(model_instance: Any, suffix: str = "") -> str:
    """Generate cache key based on model instance"""
    model_name = model_instance._meta.model_name
    instance_id = model_instance.id
    return (
        f"{model_name}:{instance_id}:{suffix}"
        if suffix
        else f"{model_name}:{instance_id}"
    )


def generate_queryset_cache_key(
    queryset: Any, params: Optional[Dict[str, Any]] = None
) -> str:
    """Generate cache key for queryset with parameters"""
    model_name = queryset.model._meta.model_name
    params_str = json.dumps(params or {}, sort_keys=True, default=str)
    params_hash = hashlib.md5(params_str.encode()).hexdigest()
    return f"queryset:{model_name}:{params_hash}"
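
As a usage illustration, smart_cache wraps any function whose results are worth memoizing. A minimal sketch, not part of this commit, assuming the default cache backend is configured on EnhancedCacheService.

# Hypothetical usage sketch (not in this commit): caching an expensive computation.
from apps.core.decorators.cache_decorators import smart_cache


@smart_cache(timeout=900)
def expensive_park_stats(park_id: int) -> dict:
    # Stand-in for a costly aggregation; the body here is illustrative only.
    return {"park_id": park_id, "visit_count": 0}


stats = expensive_park_stats(42)  # computed and cached
stats = expensive_park_stats(42)  # served from the cache on repeat calls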
224
apps/core/exceptions.py
Normal file
@@ -0,0 +1,224 @@
"""
Custom exception classes for ThrillWiki.
Provides domain-specific exceptions with proper error codes and messages.
"""

from typing import Optional, Dict, Any


class ThrillWikiException(Exception):
    """Base exception for all ThrillWiki-specific errors."""

    default_message = "An error occurred"
    error_code = "THRILLWIKI_ERROR"
    status_code = 500

    def __init__(
        self,
        message: Optional[str] = None,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None,
    ):
        self.message = message or self.default_message
        self.error_code = error_code or self.error_code
        self.details = details or {}
        super().__init__(self.message)

    def to_dict(self) -> Dict[str, Any]:
        """Convert exception to dictionary for API responses."""
        return {
            "error_code": self.error_code,
            "message": self.message,
            "details": self.details,
        }


class ValidationException(ThrillWikiException):
    """Raised when data validation fails."""

    default_message = "Validation failed"
    error_code = "VALIDATION_ERROR"
    status_code = 400


class NotFoundError(ThrillWikiException):
    """Raised when a requested resource is not found."""

    default_message = "Resource not found"
    error_code = "NOT_FOUND"
    status_code = 404


class PermissionDeniedError(ThrillWikiException):
    """Raised when user lacks permission for an operation."""

    default_message = "Permission denied"
    error_code = "PERMISSION_DENIED"
    status_code = 403


class BusinessLogicError(ThrillWikiException):
    """Raised when business logic constraints are violated."""

    default_message = "Business logic violation"
    error_code = "BUSINESS_LOGIC_ERROR"
    status_code = 400


class ExternalServiceError(ThrillWikiException):
    """Raised when external service calls fail."""

    default_message = "External service error"
    error_code = "EXTERNAL_SERVICE_ERROR"
    status_code = 502


# Domain-specific exceptions


class ParkError(ThrillWikiException):
    """Base exception for park-related errors."""

    error_code = "PARK_ERROR"


class ParkNotFoundError(NotFoundError):
    """Raised when a park is not found."""

    default_message = "Park not found"
    error_code = "PARK_NOT_FOUND"

    def __init__(self, park_slug: Optional[str] = None, **kwargs):
        if park_slug:
            kwargs["details"] = {"park_slug": park_slug}
            kwargs["message"] = f"Park with slug '{park_slug}' not found"
        super().__init__(**kwargs)


class ParkOperationError(BusinessLogicError):
    """Raised when park operation constraints are violated."""

    default_message = "Invalid park operation"
    error_code = "PARK_OPERATION_ERROR"


class RideError(ThrillWikiException):
    """Base exception for ride-related errors."""

    error_code = "RIDE_ERROR"


class RideNotFoundError(NotFoundError):
    """Raised when a ride is not found."""

    default_message = "Ride not found"
    error_code = "RIDE_NOT_FOUND"

    def __init__(self, ride_slug: Optional[str] = None, **kwargs):
        if ride_slug:
            kwargs["details"] = {"ride_slug": ride_slug}
            kwargs["message"] = f"Ride with slug '{ride_slug}' not found"
        super().__init__(**kwargs)


class RideOperationError(BusinessLogicError):
    """Raised when ride operation constraints are violated."""

    default_message = "Invalid ride operation"
    error_code = "RIDE_OPERATION_ERROR"


class LocationError(ThrillWikiException):
    """Base exception for location-related errors."""

    error_code = "LOCATION_ERROR"


class InvalidCoordinatesError(ValidationException):
    """Raised when geographic coordinates are invalid."""

    default_message = "Invalid geographic coordinates"
    error_code = "INVALID_COORDINATES"

    def __init__(
        self,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
        **kwargs,
    ):
        if latitude is not None or longitude is not None:
            kwargs["details"] = {"latitude": latitude, "longitude": longitude}
        super().__init__(**kwargs)


class GeolocationError(ExternalServiceError):
    """Raised when geolocation services fail."""

    default_message = "Geolocation service unavailable"
    error_code = "GEOLOCATION_ERROR"


class ReviewError(ThrillWikiException):
    """Base exception for review-related errors."""

    error_code = "REVIEW_ERROR"


class ReviewModerationError(BusinessLogicError):
    """Raised when review moderation constraints are violated."""

    default_message = "Review moderation error"
    error_code = "REVIEW_MODERATION_ERROR"


class DuplicateReviewError(BusinessLogicError):
    """Raised when user tries to create duplicate reviews."""

    default_message = "User has already reviewed this item"
    error_code = "DUPLICATE_REVIEW"


class AccountError(ThrillWikiException):
    """Base exception for account-related errors."""

    error_code = "ACCOUNT_ERROR"


class InsufficientPermissionsError(PermissionDeniedError):
    """Raised when user lacks required permissions."""

    default_message = "Insufficient permissions"
    error_code = "INSUFFICIENT_PERMISSIONS"

    def __init__(self, required_permission: Optional[str] = None, **kwargs):
        if required_permission:
            kwargs["details"] = {"required_permission": required_permission}
            kwargs["message"] = f"Permission '{required_permission}' required"
        super().__init__(**kwargs)


class EmailError(ExternalServiceError):
    """Raised when email operations fail."""

    default_message = "Email service error"
    error_code = "EMAIL_ERROR"


class CacheError(ThrillWikiException):
    """Raised when cache operations fail."""

    default_message = "Cache operation failed"
    error_code = "CACHE_ERROR"
    status_code = 500


class RoadTripError(ExternalServiceError):
    """Raised when road trip planning fails."""

    default_message = "Road trip planning error"
    error_code = "ROADTRIP_ERROR"

    def __init__(self, service_name: Optional[str] = None, **kwargs):
        if service_name:
            kwargs["details"] = {"service": service_name}
        super().__init__(**kwargs)
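
Raising and serializing one of these exceptions looks like the sketch below. This is illustrative only, not part of this commit; the "cedar-point" slug is a made-up value.

# Hypothetical usage sketch (not in this commit).
from apps.core.exceptions import ParkNotFoundError

try:
    raise ParkNotFoundError(park_slug="cedar-point")
except ParkNotFoundError as exc:
    payload = exc.to_dict()
    # payload == {"error_code": "PARK_NOT_FOUND",
    #             "message": "Park with slug 'cedar-point' not found",
    #             "details": {"park_slug": "cedar-point"}}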
43
apps/core/forms.py
Normal file
@@ -0,0 +1,43 @@
"""Core forms and form components."""

from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.utils.translation import gettext_lazy as _

from autocomplete import Autocomplete


class BaseAutocomplete(Autocomplete):
    """Base autocomplete class for consistent autocomplete behavior across the project.

    This class extends django-htmx-autocomplete's base Autocomplete class to provide:
    - Project-wide defaults for autocomplete behavior
    - Translation strings
    - Authentication enforcement
    - Sensible search configuration
    """

    # Search configuration
    minimum_search_length = 2  # More responsive than default 3
    max_results = 10  # Reasonable limit for performance

    # UI text configuration using gettext for i18n
    no_result_text = _("No matches found")
    narrow_search_text = _(
        "Showing %(page_size)s of %(total)s matches. Please refine your search."
    )
    type_at_least_n_characters = _("Type at least %(n)s characters...")

    # Project-wide component settings
    placeholder = _("Search...")

    @classmethod
    def auth_check(cls, request):
        """Enforce authentication by default.

        This can be overridden in subclasses if public access is needed.
        Configure AUTOCOMPLETE_BLOCK_UNAUTHENTICATED in settings to disable.
        """
        block_unauth = getattr(settings, "AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", True)
        if block_unauth and not request.user.is_authenticated:
            raise PermissionDenied(_("Authentication required"))
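
Subclasses opt out of the default authentication enforcement by overriding auth_check, as the docstring above suggests. A minimal sketch, not part of this commit; the subclass name and settings are assumed for illustration.

# Hypothetical subclass sketch (not in this commit): a public autocomplete widget.
from apps.core.forms import BaseAutocomplete


class PublicParkAutocomplete(BaseAutocomplete):
    minimum_search_length = 3  # stricter than the project default of 2

    @classmethod
    def auth_check(cls, request):
        # Intentionally allow anonymous users for this widget.
        return None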
0
apps/core/forms/__init__.py
Normal file
172
apps/core/forms/search.py
Normal file
@@ -0,0 +1,172 @@
from django import forms
from django.utils.translation import gettext_lazy as _


class LocationSearchForm(forms.Form):
    """
    A comprehensive search form that includes text search, location-based
    search, and content type filtering for a unified search experience.
    """

    # Text search query
    q = forms.CharField(
        required=False,
        label=_("Search Query"),
        widget=forms.TextInput(
            attrs={
                "placeholder": _("Search parks, rides, companies..."),
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    # Location-based search
    location = forms.CharField(
        required=False,
        label=_("Near Location"),
        widget=forms.TextInput(
            attrs={
                "placeholder": _("City, address, or coordinates..."),
                "id": "location-input",
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    # Hidden fields for coordinates
    lat = forms.FloatField(
        required=False, widget=forms.HiddenInput(attrs={"id": "lat-input"})
    )
    lng = forms.FloatField(
        required=False, widget=forms.HiddenInput(attrs={"id": "lng-input"})
    )

    # Search radius
    radius_km = forms.ChoiceField(
        required=False,
        label=_("Search Radius"),
        choices=[
            ("", _("Any distance")),
            ("5", _("5 km")),
            ("10", _("10 km")),
            ("25", _("25 km")),
            ("50", _("50 km")),
            ("100", _("100 km")),
            ("200", _("200 km")),
        ],
        widget=forms.Select(
            attrs={
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                )
            }
        ),
    )

    # Content type filters
    search_parks = forms.BooleanField(
        required=False,
        initial=True,
        label=_("Search Parks"),
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    search_rides = forms.BooleanField(
        required=False,
        label=_("Search Rides"),
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    search_companies = forms.BooleanField(
        required=False,
        label=_("Search Companies"),
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )

    # Geographic filters
    country = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={
                "placeholder": _("Country"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    state = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={
                "placeholder": _("State/Region"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    city = forms.CharField(
        required=False,
        widget=forms.TextInput(
            attrs={
                "placeholder": _("City"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    def clean(self):
        cleaned_data = super().clean()

        # Handle case where super().clean() returns None due to validation errors
        if cleaned_data is None:
            return None

        # If lat/lng are provided, ensure location field is populated for display
        lat = cleaned_data.get("lat")
        lng = cleaned_data.get("lng")
        location = cleaned_data.get("location")

        # Explicit None checks so valid zero coordinates (equator/prime
        # meridian) are not treated as missing
        if lat is not None and lng is not None and not location:
            cleaned_data["location"] = f"{lat}, {lng}"

        return cleaned_data
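
A view consumes this form in the usual Django way. A minimal sketch, not part of this commit; the view name and JSON response shape are assumptions.

# Hypothetical usage sketch (not in this commit).
from django.http import JsonResponse
from apps.core.forms.search import LocationSearchForm


def search_view(request):
    form = LocationSearchForm(request.GET or None)
    if not form.is_valid():
        return JsonResponse({"errors": form.errors}, status=400)
    data = form.cleaned_data
    # When only coordinates were submitted, clean() backfills the display string.
    return JsonResponse(
        {"query": data.get("q", ""), "location": data.get("location", "")}
    )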
1
apps/core/health_checks/__init__.py
Normal file
@@ -0,0 +1 @@
# Health checks module
319
apps/core/health_checks/custom_checks.py
Normal file
@@ -0,0 +1,319 @@
"""
Custom health checks for ThrillWiki application.
"""

import time
import logging
from pathlib import Path
from django.core.cache import cache
from django.db import connection
from health_check.backends import BaseHealthCheckBackend

logger = logging.getLogger(__name__)


class CacheHealthCheck(BaseHealthCheckBackend):
    """Check Redis cache connectivity and performance"""

    critical_service = True

    def check_status(self):
        try:
            # Test cache write/read performance
            test_key = "health_check_test"
            test_value = "test_value_" + str(int(time.time()))

            start_time = time.time()
            cache.set(test_key, test_value, timeout=30)
            cached_value = cache.get(test_key)
            cache_time = time.time() - start_time

            if cached_value != test_value:
                self.add_error("Cache read/write test failed - values don't match")
                return

            # Check cache performance
            if cache_time > 0.1:  # Warn if cache operations take more than 100ms
                self.add_error(
                    f"Cache performance degraded: {cache_time:.3f}s "
                    f"for read/write operation"
                )
                return

            # Clean up test key
            cache.delete(test_key)

            # Additional Redis-specific checks if using django-redis
            try:
                from django_redis import get_redis_connection

                redis_client = get_redis_connection("default")
                info = redis_client.info()

                # Check memory usage
                used_memory = info.get("used_memory", 0)
                max_memory = info.get("maxmemory", 0)

                if max_memory > 0:
                    memory_usage_percent = (used_memory / max_memory) * 100
                    if memory_usage_percent > 90:
                        self.add_error(
                            f"Redis memory usage critical: {memory_usage_percent:.1f}%"
                        )
                    elif memory_usage_percent > 80:
                        logger.warning(
                            f"Redis memory usage high: {memory_usage_percent:.1f}%"
                        )

            except ImportError:
                # django-redis not available, skip additional checks
                pass
            except Exception as e:
                logger.warning(f"Could not get Redis info: {e}")

        except Exception as e:
            self.add_error(f"Cache service unavailable: {e}")


class DatabasePerformanceCheck(BaseHealthCheckBackend):
    """Check database performance and connectivity"""

    critical_service = False

    def check_status(self):
        try:
            start_time = time.time()

            # Test basic connectivity
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                result = cursor.fetchone()

                if result[0] != 1:
                    self.add_error("Database connectivity test failed")
                    return

            basic_query_time = time.time() - start_time

            # Test a more complex query (if it takes too long, there might be
            # performance issues)
            start_time = time.time()
            with connection.cursor() as cursor:
                cursor.execute("SELECT COUNT(*) FROM django_content_type")
                cursor.fetchone()

            complex_query_time = time.time() - start_time

            # Performance thresholds
            if basic_query_time > 1.0:
                self.add_error(
                    f"Database responding slowly: basic query took "
                    f"{basic_query_time:.2f}s"
                )
            elif basic_query_time > 0.5:
                logger.warning(
                    f"Database performance degraded: basic query took "
                    f"{basic_query_time:.2f}s"
                )

            if complex_query_time > 2.0:
                self.add_error(
                    f"Database performance critical: complex query took "
                    f"{complex_query_time:.2f}s"
                )
            elif complex_query_time > 1.0:
                logger.warning(
                    f"Database performance slow: complex query took "
                    f"{complex_query_time:.2f}s"
                )

            # Check database version and settings if possible
            try:
                with connection.cursor() as cursor:
                    cursor.execute("SELECT version()")
                    version = cursor.fetchone()[0]
                    logger.debug(f"Database version: {version}")
            except Exception as e:
                logger.debug(f"Could not get database version: {e}")

        except Exception as e:
            self.add_error(f"Database performance check failed: {e}")


class ApplicationHealthCheck(BaseHealthCheckBackend):
    """Check application-specific health indicators"""

    critical_service = False

    def check_status(self):
        try:
            # Check if we can import critical modules
            critical_modules = [
                "parks.models",
                "rides.models",
                "accounts.models",
                "core.services",
            ]

            for module_name in critical_modules:
                try:
                    __import__(module_name)
                except ImportError as e:
                    self.add_error(
                        f"Critical module import failed: {module_name} - {e}"
                    )

            # Check if we can access critical models
            try:
                from parks.models import Park
                from apps.rides.models import Ride
                from django.contrib.auth import get_user_model

                User = get_user_model()

                # Test that we can query these models (just count, don't load
                # data)
                park_count = Park.objects.count()
                ride_count = Ride.objects.count()
                user_count = User.objects.count()

                logger.debug(
                    f"Model counts - Parks: {park_count}, Rides: {ride_count}, "
                    f"Users: {user_count}"
                )

            except Exception as e:
                self.add_error(f"Model access check failed: {e}")

            # Check media and static file configuration
            from django.conf import settings
            import os

            if not os.path.exists(settings.MEDIA_ROOT):
                self.add_error(f"Media directory does not exist: {settings.MEDIA_ROOT}")

            if not os.path.exists(settings.STATIC_ROOT) and not settings.DEBUG:
                self.add_error(
                    f"Static directory does not exist: {settings.STATIC_ROOT}"
                )

        except Exception as e:
            self.add_error(f"Application health check failed: {e}")


class ExternalServiceHealthCheck(BaseHealthCheckBackend):
    """Check external services and dependencies"""

    critical_service = False

    def check_status(self):
        # Check email service if configured
        try:
            from django.core.mail import get_connection
            from django.conf import settings

            if (
                hasattr(settings, "EMAIL_BACKEND")
                and "console" not in settings.EMAIL_BACKEND
            ):
                # Only check if not using console backend
                connection = get_connection()
                if hasattr(connection, "open"):
                    try:
                        connection.open()
                        connection.close()
                    except Exception as e:
                        logger.warning(f"Email service check failed: {e}")
                        # Don't fail the health check for email issues in
                        # development

        except Exception as e:
            logger.debug(f"Email service check error: {e}")

        # Check if Sentry is configured and working
        try:
            import sentry_sdk

            if sentry_sdk.Hub.current.client:
                # Sentry is configured
                try:
                    # Verify the SDK is operational without actually sending
                    # a message to Sentry
                    with sentry_sdk.push_scope() as scope:
                        scope.set_tag("health_check", True)
                        logger.debug("Sentry SDK is operational")
                except Exception as e:
                    logger.warning(f"Sentry SDK check failed: {e}")

        except ImportError:
            logger.debug("Sentry SDK not installed")
        except Exception as e:
            logger.debug(f"Sentry check error: {e}")

        # Check Redis connection if configured
        try:
            from django.core.cache import caches
            from django.conf import settings

            cache_config = settings.CACHES.get("default", {})
            if "redis" in cache_config.get("BACKEND", "").lower():
                # Redis is configured, test basic connectivity
                redis_cache = caches["default"]
                redis_cache.set("health_check_redis", "test", 10)
                value = redis_cache.get("health_check_redis")
                if value != "test":
                    self.add_error("Redis cache connectivity test failed")
                else:
                    redis_cache.delete("health_check_redis")

        except Exception as e:
            logger.warning(f"Redis connectivity check failed: {e}")


class DiskSpaceHealthCheck(BaseHealthCheckBackend):
    """Check available disk space"""

    critical_service = False

    def check_status(self):
        try:
            import shutil
            from django.conf import settings

            # Check disk space for media directory
            media_usage = shutil.disk_usage(settings.MEDIA_ROOT)
            media_free_percent = (media_usage.free / media_usage.total) * 100

            # Check disk space for logs directory if it exists
            logs_dir = Path(getattr(settings, "BASE_DIR", "/tmp")) / "logs"
            if logs_dir.exists():
                logs_usage = shutil.disk_usage(logs_dir)
                logs_free_percent = (logs_usage.free / logs_usage.total) * 100
            else:
                logs_free_percent = media_free_percent  # Use same as media

            # Alert thresholds
            if media_free_percent < 10:
                self.add_error(
                    f"Critical disk space: {media_free_percent:.1f}% free "
                    f"in media directory"
                )
            elif media_free_percent < 20:
                logger.warning(
                    f"Low disk space: {media_free_percent:.1f}% free in media directory"
                )

            if logs_free_percent < 10:
                self.add_error(
                    f"Critical disk space: {logs_free_percent:.1f}% free "
                    f"in logs directory"
                )
            elif logs_free_percent < 20:
                logger.warning(
                    f"Low disk space: {logs_free_percent:.1f}% free in logs directory"
                )

        except Exception as e:
            logger.warning(f"Disk space check failed: {e}")
            # Don't fail health check for disk space issues in development
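
django-health-check backends like these are typically registered with the plugin directory in an AppConfig.ready() hook. A minimal sketch, not part of this commit; the AppConfig placement is an assumption about where the project wires this up.

# Hypothetical registration sketch (not in this commit).
from django.apps import AppConfig


class CoreConfig(AppConfig):
    name = "apps.core"

    def ready(self):
        from health_check.plugins import plugin_dir
        from apps.core.health_checks.custom_checks import (
            CacheHealthCheck,
            DatabasePerformanceCheck,
        )
        plugin_dir.register(CacheHealthCheck)
        plugin_dir.register(DatabasePerformanceCheck)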
129
apps/core/history.py
Normal file
@@ -0,0 +1,129 @@
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.conf import settings
from typing import Any, Dict, Optional, TYPE_CHECKING
from django.db.models import QuerySet

if TYPE_CHECKING:
    pass


class DiffMixin:
    """Mixin to add diffing capabilities to models with pghistory"""

    def get_prev_record(self) -> Optional[Any]:
        """Get the previous record for this instance"""
        try:
            # Use getattr to safely access objects manager and pghistory fields
            manager = getattr(type(self), "objects", None)
            if manager is None:
                return None

            pgh_created_at = getattr(self, "pgh_created_at", None)
            pgh_obj_id = getattr(self, "pgh_obj_id", None)

            if pgh_created_at is None or pgh_obj_id is None:
                return None

            return (
                manager.filter(
                    pgh_created_at__lt=pgh_created_at,
                    pgh_obj_id=pgh_obj_id,
                )
                .order_by("-pgh_created_at")
                .first()
            )
        except (AttributeError, TypeError):
            return None

    def diff_against_previous(self) -> Dict:
        """Compare this record against the previous one"""
        prev_record = self.get_prev_record()
        if not prev_record:
            return {}

        skip_fields = {
            "pgh_id",
            "pgh_created_at",
            "pgh_label",
            "pgh_obj_id",
            "pgh_context_id",
            "_state",
            "created_at",
            "updated_at",
        }

        changes = {}
        for field, value in self.__dict__.items():
            # Skip internal fields and those we don't want to track
            if field.startswith("_") or field in skip_fields or field.endswith("_id"):
                continue

            try:
                old_value = getattr(prev_record, field)
                new_value = value
                if old_value != new_value:
                    changes[field] = {
                        "old": (str(old_value) if old_value is not None else "None"),
                        "new": (str(new_value) if new_value is not None else "None"),
                    }
            except AttributeError:
                continue

        return changes


class TrackedModel(models.Model):
    """Abstract base class for models that need history tracking"""

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        abstract = True

    def get_history(self) -> QuerySet:
        """Get all history records for this instance, most recent first"""
        try:
            # Use getattr to safely access pghistory events
            events = getattr(self, "events", None)
            if events is None:
                return self.__class__.objects.none()

            event_model = getattr(events, "model", None)
            if event_model:
                return event_model.objects.filter(pgh_obj_id=self.pk).order_by(
                    "-pgh_created_at"
                )
        except (AttributeError, TypeError):
            pass
        return self.__class__.objects.none()


class HistoricalSlug(models.Model):
    """Track historical slugs for models"""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey("content_type", "object_id")
    slug = models.SlugField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name="historical_slugs",
    )

    class Meta:
        app_label = "core"
        unique_together = ("content_type", "slug")
        indexes = [
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["slug"]),
        ]

    def __str__(self) -> str:
        return f"{self.content_type} - {self.object_id} - {self.slug}"
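
DiffMixin is meant to be mixed into the pghistory event model so that each event row can be compared with its predecessor. A minimal sketch of the intended wiring, not part of this commit; the base_model keyword is an assumption about the installed pghistory version.

# Hypothetical wiring sketch (not in this commit); base_model kwarg is assumed.
import pghistory
import pghistory.models
from django.db import models
from apps.core.history import DiffMixin, TrackedModel


class DiffableEvent(DiffMixin, pghistory.models.Event):
    # Abstract event base so generated event models gain the diffing helpers.
    class Meta:
        abstract = True


@pghistory.track(base_model=DiffableEvent)  # base_model is assumed here
class Article(TrackedModel):
    title = models.CharField(max_length=255)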
261
apps/core/logging.py
Normal file
@@ -0,0 +1,261 @@
"""
Centralized logging configuration for ThrillWiki.
Provides structured logging with proper formatting and context.
"""

import logging
import sys
from typing import Dict, Any, Optional
from django.conf import settings
from django.utils import timezone


class ThrillWikiFormatter(logging.Formatter):
    """Custom formatter for ThrillWiki logs with structured output."""

    def format(self, record):
        # Add timestamp if not present
        if not hasattr(record, "timestamp"):
            record.timestamp = timezone.now().isoformat()

        # Add request context if available
        if hasattr(record, "request"):
            record.request_id = getattr(record.request, "id", "unknown")
            record.user_id = (
                getattr(record.request.user, "id", "anonymous")
                if hasattr(record.request, "user")
                else "unknown"
            )
            record.path = getattr(record.request, "path", "unknown")
            record.method = getattr(record.request, "method", "unknown")

        # Structure the log message
        if hasattr(record, "extra_data"):
            record.structured_data = record.extra_data

        return super().format(record)


def get_logger(name: str) -> logging.Logger:
    """
    Get a configured logger for ThrillWiki components.

    Args:
        name: Logger name (usually __name__)

    Returns:
        Configured logger instance
    """
    logger = logging.getLogger(name)

    # Only configure if not already configured
    if not logger.handlers:
        handler = logging.StreamHandler(sys.stdout)
        formatter = ThrillWikiFormatter(
            fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.INFO if settings.DEBUG else logging.WARNING)

    return logger


def log_exception(
    logger: logging.Logger,
    exception: Exception,
    *,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.ERROR,
) -> None:
    """
    Log an exception with structured context.

    Args:
        logger: Logger instance
        exception: Exception to log
        context: Additional context data
        request: Django request object
        level: Log level
    """
    log_data = {
        "exception_type": exception.__class__.__name__,
        "exception_message": str(exception),
        "context": context or {},
    }

    if request:
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
            }
        )

    logger.log(
        level,
        f"Exception occurred: {exception}",
        extra={"extra_data": log_data},
        exc_info=True,
    )


def log_business_event(
    logger: logging.Logger,
    event_type: str,
    *,
    message: str,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.INFO,
) -> None:
    """
    Log a business event with structured context.

    Args:
        logger: Logger instance
        event_type: Type of business event
        message: Event message
        context: Additional context data
        request: Django request object
        level: Log level
    """
    log_data = {"event_type": event_type, "context": context or {}}

    if request:
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
            }
        )

    logger.log(level, message, extra={"extra_data": log_data})


def log_performance_metric(
    logger: logging.Logger,
    operation: str,
    *,
    duration_ms: float,
    context: Optional[Dict[str, Any]] = None,
    level: int = logging.INFO,
) -> None:
    """
    Log a performance metric.

    Args:
        logger: Logger instance
        operation: Operation name
        duration_ms: Duration in milliseconds
        context: Additional context data
        level: Log level
    """
    log_data = {
        "metric_type": "performance",
        "operation": operation,
        "duration_ms": duration_ms,
        "context": context or {},
    }

    message = f"Performance: {operation} took {duration_ms:.2f}ms"
    logger.log(level, message, extra={"extra_data": log_data})


def log_api_request(
    logger: logging.Logger,
    request,
    *,
    response_status: Optional[int] = None,
    duration_ms: Optional[float] = None,
    level: int = logging.INFO,
) -> None:
    """
    Log an API request with context.

    Args:
        logger: Logger instance
        request: Django request object
        response_status: HTTP response status code
        duration_ms: Request duration in milliseconds
        level: Log level
    """
    log_data = {
        "request_type": "api",
        "path": getattr(request, "path", "unknown"),
        "method": getattr(request, "method", "unknown"),
        "user_id": (
            getattr(request.user, "id", "anonymous")
            if hasattr(request, "user")
            else "unknown"
        ),
        "response_status": response_status,
        "duration_ms": duration_ms,
    }

    message = f"API Request: {request.method} {request.path}"
    if response_status:
        message += f" -> {response_status}"
    if duration_ms:
        message += f" ({duration_ms:.2f}ms)"

    logger.log(level, message, extra={"extra_data": log_data})


def log_security_event(
    logger: logging.Logger,
    event_type: str,
    *,
    message: str,
    severity: str = "medium",
    context: Optional[Dict[str, Any]] = None,
    request=None,
) -> None:
    """
    Log a security-related event.

    Args:
        logger: Logger instance
        event_type: Type of security event
        message: Event message
        severity: Event severity (low, medium, high, critical)
        context: Additional context data
        request: Django request object
    """
    log_data = {
        "security_event": True,
        "event_type": event_type,
        "severity": severity,
        "context": context or {},
    }

    if request:
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
                "remote_addr": request.META.get("REMOTE_ADDR", "unknown"),
                "user_agent": request.META.get("HTTP_USER_AGENT", "unknown"),
            }
        )

    # Use ERROR for high/critical severity, WARNING otherwise
    level = logging.ERROR if severity in ["high", "critical"] else logging.WARNING

    logger.log(level, f"SECURITY: {message}", extra={"extra_data": log_data})
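A short usage sketch for the helpers above (illustrative only; fetch_park is a hypothetical call, not part of this commit):

from apps.core.logging import get_logger, log_exception, log_performance_metric

logger = get_logger(__name__)

try:
    park = fetch_park("cedar-point")  # hypothetical application call
except Exception as exc:
    # Attaches exception type/message plus request context when given
    log_exception(logger, exc, context={"slug": "cedar-point"})

# Emits "Performance: park_lookup took 12.50ms" with structured extra data
log_performance_metric(logger, "park_lookup", duration_ms=12.5)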
1
apps/core/management/__init__.py
Normal file
@@ -0,0 +1 @@

1
apps/core/management/commands/__init__.py
Normal file
@@ -0,0 +1 @@

217
apps/core/management/commands/calculate_new_content.py
Normal file
@@ -0,0 +1,217 @@
"""
Django management command to calculate new content.

This replaces the Celery task for calculating new content.
Run with: uv run manage.py calculate_new_content
"""

import logging
from datetime import datetime, timedelta
from typing import Dict, List, Any
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from django.core.cache import cache
from django.db.models import Q

from apps.parks.models import Park
from apps.rides.models import Ride

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    help = "Calculate new content and cache results"

    def add_arguments(self, parser):
        parser.add_argument(
            "--content-type",
            type=str,
            default="all",
            choices=["all", "parks", "rides"],
            help="Type of content to calculate (default: all)",
        )
        parser.add_argument(
            "--days-back",
            type=int,
            default=30,
            help="Number of days to look back for new content (default: 30)",
        )
        parser.add_argument(
            "--limit",
            type=int,
            default=50,
            help="Maximum number of results to calculate (default: 50)",
        )
        parser.add_argument(
            "--verbose", action="store_true", help="Enable verbose output"
        )

    def handle(self, *args, **options):
        content_type = options["content_type"]
        days_back = options["days_back"]
        limit = options["limit"]
        verbose = options["verbose"]

        if verbose:
            self.stdout.write(f"Starting new content calculation for {content_type}")

        try:
            cutoff_date = timezone.now() - timedelta(days=days_back)
            new_items = []

            if content_type in ["all", "parks"]:
                parks = self._get_new_parks(
                    cutoff_date, limit if content_type == "parks" else limit * 2
                )
                new_items.extend(parks)
                if verbose:
                    self.stdout.write(f"Found {len(parks)} new parks")

            if content_type in ["all", "rides"]:
                rides = self._get_new_rides(
                    cutoff_date, limit if content_type == "rides" else limit * 2
                )
                new_items.extend(rides)
                if verbose:
                    self.stdout.write(f"Found {len(rides)} new rides")

            # Sort by date added (most recent first) and apply limit
            new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
            new_items = new_items[:limit]

            # Format results for API consumption
            formatted_results = self._format_new_content_results(new_items)

            # Cache results
            cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
            cache.set(cache_key, formatted_results, 1800)  # Cache for 30 minutes

            self.stdout.write(
                self.style.SUCCESS(
                    f"Successfully calculated {len(formatted_results)} new items for {content_type}"
                )
            )

            if verbose:
                for item in formatted_results[:5]:  # Show first 5 items
                    self.stdout.write(
                        f"  {item['name']} ({item['park']}) - opened: {item['date_opened']}"
                    )

        except Exception as e:
            logger.error(f"Error calculating new content: {e}", exc_info=True)
            raise CommandError(f"Failed to calculate new content: {e}")

    def _get_new_parks(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
        """Get recently added parks using real data."""
        new_parks = (
            Park.objects.filter(
                Q(created_at__gte=cutoff_date)
                | Q(opening_date__gte=cutoff_date.date()),
                status="OPERATING",
            )
            .select_related("location", "operator")
            .order_by("-created_at", "-opening_date")[:limit]
        )

        results = []
        for park in new_parks:
            date_added = park.opening_date or park.created_at
            if date_added:
                if isinstance(date_added, datetime):
                    date_added = date_added.date()

            opening_date = getattr(park, "opening_date", None)
            if opening_date and isinstance(opening_date, datetime):
                opening_date = opening_date.date()

            results.append(
                {
                    "content_object": park,
                    "content_type": "park",
                    "id": park.pk,
                    "name": park.name,
                    "slug": park.slug,
                    "park": park.name,  # For parks, park field is the park name itself
                    "category": "park",
                    "date_added": date_added.isoformat() if date_added else "",
                    "date_opened": opening_date.isoformat() if opening_date else "",
                    "url": park.url,
                }
            )

        return results

    def _get_new_rides(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
        """Get recently added rides using real data."""
        new_rides = (
            Ride.objects.filter(
                Q(created_at__gte=cutoff_date)
                | Q(opening_date__gte=cutoff_date.date()),
                status="OPERATING",
            )
            .select_related("park", "park__location")
            .order_by("-created_at", "-opening_date")[:limit]
        )

        results = []
        for ride in new_rides:
            date_added = getattr(ride, "opening_date", None) or getattr(
                ride, "created_at", None
            )
            if date_added:
                if isinstance(date_added, datetime):
                    date_added = date_added.date()

            opening_date = getattr(ride, "opening_date", None)
            if opening_date and isinstance(opening_date, datetime):
                opening_date = opening_date.date()

            results.append(
                {
                    "content_object": ride,
                    "content_type": "ride",
                    "id": ride.pk,
                    "name": ride.name,
                    "slug": ride.slug,
                    "park": ride.park.name if ride.park else "",
                    "category": "ride",
                    "date_added": date_added.isoformat() if date_added else "",
                    "date_opened": opening_date.isoformat() if opening_date else "",
                    "url": ride.url,
                    "park_url": ride.park.url if ride.park else "",
                }
            )

        return results

    def _format_new_content_results(
        self, new_items: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Format new content results for frontend consumption."""
        formatted_results = []

        for item in new_items:
            try:
                # Format exactly as frontend expects
                formatted_item = {
                    "id": item["id"],
                    "name": item["name"],
                    "park": item["park"],
                    "category": item["category"],
                    "date_added": item["date_added"],
                    "date_opened": item["date_opened"],
                    "slug": item["slug"],
                    "url": item["url"],
                }

                # Add park_url for rides
                if item.get("park_url"):
                    formatted_item["park_url"] = item["park_url"]

                formatted_results.append(formatted_item)

            except Exception as e:
                logger.warning(f"Error formatting new content item: {e}")

        return formatted_results
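Consumers can read the cached payload back with the same key format the command writes; the key parts are content type, days back, and limit (a sketch, not part of this commit):

from django.core.cache import cache

# Matches cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
items = cache.get("new_content:calculated:all:30:50", [])
for item in items[:3]:
    print(item["name"], item["date_opened"])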
391
apps/core/management/commands/calculate_trending.py
Normal file
@@ -0,0 +1,391 @@
"""
Django management command to calculate trending content.

This replaces the Celery task for calculating trending content.
Run with: uv run manage.py calculate_trending
"""

import logging
from typing import Dict, List, Any
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from django.core.cache import cache
from django.contrib.contenttypes.models import ContentType

from apps.core.analytics import PageView
from apps.parks.models import Park
from apps.rides.models import Ride

logger = logging.getLogger(__name__)


class Command(BaseCommand):
    help = "Calculate trending content and cache results"

    def add_arguments(self, parser):
        parser.add_argument(
            "--content-type",
            type=str,
            default="all",
            choices=["all", "parks", "rides"],
            help="Type of content to calculate (default: all)",
        )
        parser.add_argument(
            "--limit",
            type=int,
            default=50,
            help="Maximum number of results to calculate (default: 50)",
        )
        parser.add_argument(
            "--verbose", action="store_true", help="Enable verbose output"
        )

    def handle(self, *args, **options):
        content_type = options["content_type"]
        limit = options["limit"]
        verbose = options["verbose"]

        if verbose:
            self.stdout.write(f"Starting trending calculation for {content_type}")

        try:
            # Time windows for calculations
            current_period_hours = 168  # 7 days
            # 14 days (for previous 7-day window comparison)
            previous_period_hours = 336

            trending_items = []

            if content_type in ["all", "parks"]:
                park_items = self._calculate_trending_parks(
                    current_period_hours,
                    previous_period_hours,
                    limit if content_type == "parks" else limit * 2,
                )
                trending_items.extend(park_items)
                if verbose:
                    self.stdout.write(f"Calculated {len(park_items)} trending parks")

            if content_type in ["all", "rides"]:
                ride_items = self._calculate_trending_rides(
                    current_period_hours,
                    previous_period_hours,
                    limit if content_type == "rides" else limit * 2,
                )
                trending_items.extend(ride_items)
                if verbose:
                    self.stdout.write(f"Calculated {len(ride_items)} trending rides")

            # Sort by trending score and apply limit
            trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
            trending_items = trending_items[:limit]

            # Format results for API consumption
            formatted_results = self._format_trending_results(
                trending_items, current_period_hours, previous_period_hours
            )

            # Cache results
            cache_key = f"trending:calculated:{content_type}:{limit}"
            cache.set(cache_key, formatted_results, 3600)  # Cache for 1 hour

            self.stdout.write(
                self.style.SUCCESS(
                    f"Successfully calculated {len(formatted_results)} trending items for {content_type}"
                )
            )

            if verbose:
                for item in formatted_results[:5]:  # Show first 5 items
                    self.stdout.write(
                        f"  {item['name']} (score: {item.get('views_change', 'N/A')})"
                    )

        except Exception as e:
            logger.error(f"Error calculating trending content: {e}", exc_info=True)
            raise CommandError(f"Failed to calculate trending content: {e}")

    def _calculate_trending_parks(
        self, current_period_hours: int, previous_period_hours: int, limit: int
    ) -> List[Dict[str, Any]]:
        """Calculate trending scores for parks using real data."""
        parks = Park.objects.filter(status="OPERATING").select_related(
            "location", "operator"
        )

        trending_parks = []

        for park in parks:
            try:
                score = self._calculate_content_score(
                    park, "park", current_period_hours, previous_period_hours
                )
                if score > 0:  # Only include items with positive trending scores
                    trending_parks.append(
                        {
                            "content_object": park,
                            "content_type": "park",
                            "trending_score": score,
                            "id": park.id,
                            "name": park.name,
                            "slug": park.slug,
                            "park": park.name,  # For parks, park field is the park name itself
                            "category": "park",
                            "rating": (
                                float(park.average_rating)
                                if park.average_rating
                                else 0.0
                            ),
                            "date_opened": (
                                park.opening_date.isoformat()
                                if park.opening_date
                                else ""
                            ),
                            "url": park.url,
                        }
                    )
            except Exception as e:
                logger.warning(f"Error calculating score for park {park.id}: {e}")

        return trending_parks

    def _calculate_trending_rides(
        self, current_period_hours: int, previous_period_hours: int, limit: int
    ) -> List[Dict[str, Any]]:
        """Calculate trending scores for rides using real data."""
        rides = Ride.objects.filter(status="OPERATING").select_related(
            "park", "park__location"
        )

        trending_rides = []

        for ride in rides:
            try:
                score = self._calculate_content_score(
                    ride, "ride", current_period_hours, previous_period_hours
                )
                if score > 0:  # Only include items with positive trending scores
                    trending_rides.append(
                        {
                            "content_object": ride,
                            "content_type": "ride",
                            "trending_score": score,
                            "id": ride.pk,
                            "name": ride.name,
                            "slug": ride.slug,
                            "park": ride.park.name if ride.park else "",
                            "category": "ride",
                            "rating": (
                                float(ride.average_rating)
                                if ride.average_rating
                                else 0.0
                            ),
                            "date_opened": (
                                ride.opening_date.isoformat()
                                if ride.opening_date
                                else ""
                            ),
                            "url": ride.url,
                            "park_url": ride.park.url if ride.park else "",
                        }
                    )
            except Exception as e:
                logger.warning(f"Error calculating score for ride {ride.pk}: {e}")

        return trending_rides

    def _calculate_content_score(
        self,
        content_obj: Any,
        content_type: str,
        current_period_hours: int,
        previous_period_hours: int,
    ) -> float:
        """Calculate weighted trending score for content object using real analytics data."""
        try:
            # Get content type for PageView queries
            ct = ContentType.objects.get_for_model(content_obj)

            # 1. View Growth Score (40% weight)
            view_growth_score = self._calculate_view_growth_score(
                ct, content_obj.id, current_period_hours, previous_period_hours
            )

            # 2. Rating Score (30% weight)
            rating_score = self._calculate_rating_score(content_obj)

            # 3. Recency Score (20% weight)
            recency_score = self._calculate_recency_score(content_obj)

            # 4. Popularity Score (10% weight)
            popularity_score = self._calculate_popularity_score(
                ct, content_obj.id, current_period_hours
            )

            # Calculate weighted final score
            final_score = (
                view_growth_score * 0.4
                + rating_score * 0.3
                + recency_score * 0.2
                + popularity_score * 0.1
            )

            return final_score

        except Exception as e:
            logger.error(
                f"Error calculating score for {content_type} {content_obj.id}: {e}"
            )
            return 0.0

    def _calculate_view_growth_score(
        self,
        content_type: ContentType,
        object_id: int,
        current_period_hours: int,
        previous_period_hours: int,
    ) -> float:
        """Calculate normalized view growth score using real PageView data."""
        try:
            current_views, previous_views, growth_percentage = (
                PageView.get_views_growth(
                    content_type,
                    object_id,
                    current_period_hours,
                    previous_period_hours,
                )
            )

            if previous_views == 0:
                # New content with views gets boost
                return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0

            # Normalize growth percentage to 0-1 scale
            normalized_growth = (
                min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
            )
            return max(normalized_growth, 0.0)

        except Exception as e:
            logger.warning(f"Error calculating view growth: {e}")
            return 0.0

    def _calculate_rating_score(self, content_obj: Any) -> float:
        """Calculate normalized rating score."""
        try:
            rating = getattr(content_obj, "average_rating", None)
            if rating is None or rating == 0:
                return 0.3  # Neutral score for unrated content

            # Normalize rating from 1-10 scale to 0-1 scale
            return min(max((float(rating) - 1) / 9.0, 0.0), 1.0)

        except Exception as e:
            logger.warning(f"Error calculating rating score: {e}")
            return 0.3

    def _calculate_recency_score(self, content_obj: Any) -> float:
        """Calculate recency score based on when content was added/updated."""
        try:
            # Use opening_date for parks/rides, or created_at as fallback
            date_added = getattr(content_obj, "opening_date", None)
            if not date_added:
                date_added = getattr(content_obj, "created_at", None)
                if not date_added:
                    return 0.5  # Neutral score for unknown dates

            # Handle both date and datetime objects
            if hasattr(date_added, "date"):
                date_added = date_added.date()

            # Calculate days since added
            today = timezone.now().date()
            days_since_added = (today - date_added).days

            # Recency score: newer content gets higher scores
            if days_since_added <= 0:
                return 1.0
            elif days_since_added <= 30:
                return 1.0 - (days_since_added / 30.0) * 0.2  # 1.0 to 0.8
            elif days_since_added <= 365:
                return 0.8 - ((days_since_added - 30) / (365 - 30)) * 0.7  # 0.8 to 0.1
            else:
                return 0.0

        except Exception as e:
            logger.warning(f"Error calculating recency score: {e}")
            return 0.5

    def _calculate_popularity_score(
        self, content_type: ContentType, object_id: int, hours: int
    ) -> float:
        """Calculate popularity score based on total view count."""
        try:
            total_views = PageView.get_total_views_count(
                content_type, object_id, hours=hours
            )

            # Normalize views to 0-1 scale
            if total_views == 0:
                return 0.0
            elif total_views <= 100:
                return total_views / 200.0  # 0.0 to 0.5
            else:
                return min(0.5 + (total_views - 100) / 1800.0, 1.0)  # 0.5 to 1.0

        except Exception as e:
            logger.warning(f"Error calculating popularity score: {e}")
            return 0.0

    def _format_trending_results(
        self,
        trending_items: List[Dict[str, Any]],
        current_period_hours: int,
        previous_period_hours: int,
    ) -> List[Dict[str, Any]]:
        """Format trending results for frontend consumption."""
        formatted_results = []

        for rank, item in enumerate(trending_items, 1):
            try:
                # Get view change for display
                content_obj = item["content_object"]
                ct = ContentType.objects.get_for_model(content_obj)
                current_views, previous_views, growth_percentage = (
                    PageView.get_views_growth(
                        ct,
                        content_obj.id,
                        current_period_hours,
                        previous_period_hours,
                    )
                )

                # Format exactly as frontend expects
                formatted_item = {
                    "id": item["id"],
                    "name": item["name"],
                    "park": item["park"],
                    "category": item["category"],
                    "rating": item["rating"],
                    "rank": rank,
                    "views": current_views,
                    "views_change": (
                        f"+{growth_percentage:.1f}%"
                        if growth_percentage > 0
                        else f"{growth_percentage:.1f}%"
                    ),
                    "slug": item["slug"],
                    "date_opened": item["date_opened"],
                    "url": item["url"],
                }

                # Add park_url for rides
                if item.get("park_url"):
                    formatted_item["park_url"] = item["park_url"]

                formatted_results.append(formatted_item)

            except Exception as e:
                logger.warning(f"Error formatting trending item: {e}")

        return formatted_results
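A worked example of the 40/30/20/10 weighting in _calculate_content_score (the component values below are made up for illustration):

view_growth_score = 0.6   # e.g. ~300% growth, normalized against the 500% cap
rating_score = 0.9        # e.g. average_rating 9.1 mapped from the 1-10 scale
recency_score = 0.8       # e.g. opened about 30 days ago
popularity_score = 0.4    # e.g. ~80 views in the current window

final_score = (
    view_growth_score * 0.4
    + rating_score * 0.3
    + recency_score * 0.2
    + popularity_score * 0.1
)  # 0.24 + 0.27 + 0.16 + 0.04 = 0.71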
472
apps/core/management/commands/clear_cache.py
Normal file
@@ -0,0 +1,472 @@
"""
Django management command to clear all types of cache data.

This command provides comprehensive cache clearing functionality including:
- Django cache framework (all configured backends)
- Python __pycache__ directories and .pyc files
- Static files cache
- Session cache
- Template cache
- Tailwind CSS build cache
- OPcache (if available)
"""

import shutil
import subprocess
from pathlib import Path

from django.core.cache import cache, caches
from django.core.management.base import BaseCommand
from django.conf import settings


class Command(BaseCommand):
    help = (
        "Clear all types of cache data including Django cache, "
        "__pycache__, and build caches"
    )

    def add_arguments(self, parser):
        parser.add_argument(
            "--django-cache",
            action="store_true",
            help="Clear Django cache framework cache only",
        )
        parser.add_argument(
            "--pycache",
            action="store_true",
            help="Clear Python __pycache__ directories and .pyc files only",
        )
        parser.add_argument(
            "--static",
            action="store_true",
            help="Clear static files cache only",
        )
        parser.add_argument(
            "--sessions",
            action="store_true",
            help="Clear session cache only",
        )
        parser.add_argument(
            "--templates",
            action="store_true",
            help="Clear template cache only",
        )
        parser.add_argument(
            "--tailwind",
            action="store_true",
            help="Clear Tailwind CSS build cache only",
        )
        parser.add_argument(
            "--opcache",
            action="store_true",
            help="Clear PHP OPcache if available",
        )
        parser.add_argument(
            "--dry-run",
            action="store_true",
            help="Show what would be cleared without actually clearing",
        )
        parser.add_argument(
            "--verbose",
            action="store_true",
            help="Show detailed output of clearing operations",
        )

    def handle(self, *args, **options):
        """Clear cache data based on provided options."""
        self.dry_run = options["dry_run"]
        self.verbose = options["verbose"]

        # If no specific cache type is specified, clear all
        clear_all = not any(
            [
                options["django_cache"],
                options["pycache"],
                options["static"],
                options["sessions"],
                options["templates"],
                options["tailwind"],
                options["opcache"],
            ]
        )

        if self.dry_run:
            self.stdout.write(
                self.style.WARNING("🔍 DRY RUN MODE - No files will be deleted")
            )
            self.stdout.write("")

        self.stdout.write(self.style.SUCCESS("🧹 ThrillWiki Cache Clearing Utility"))
        self.stdout.write("")

        # Clear Django cache framework
        if clear_all or options["django_cache"]:
            self.clear_django_cache()

        # Clear Python __pycache__
        if clear_all or options["pycache"]:
            self.clear_pycache()

        # Clear static files cache
        if clear_all or options["static"]:
            self.clear_static_cache()

        # Clear sessions cache
        if clear_all or options["sessions"]:
            self.clear_sessions_cache()

        # Clear template cache
        if clear_all or options["templates"]:
            self.clear_template_cache()

        # Clear Tailwind cache
        if clear_all or options["tailwind"]:
            self.clear_tailwind_cache()

        # Clear OPcache
        if clear_all or options["opcache"]:
            self.clear_opcache()

        self.stdout.write("")
        self.stdout.write(
            self.style.SUCCESS("✅ Cache clearing completed successfully!")
        )

    def clear_django_cache(self):
        """Clear Django cache framework cache."""
        self.stdout.write("🗄️ Clearing Django cache framework...")

        try:
            # Clear default cache
            if not self.dry_run:
                cache.clear()

            cache_info = f"Default cache ({cache.__class__.__name__})"
            self.stdout.write(self.style.SUCCESS(f"  ✅ Cleared {cache_info}"))

            # Clear all configured caches
            cache_aliases = getattr(settings, "CACHES", {}).keys()
            for alias in cache_aliases:
                if alias != "default":  # Already cleared above
                    try:
                        cache_backend = caches[alias]
                        if not self.dry_run:
                            cache_backend.clear()

                        cache_info = (
                            f"{alias} cache ({cache_backend.__class__.__name__})"
                        )
                        self.stdout.write(
                            self.style.SUCCESS(f"  ✅ Cleared {cache_info}")
                        )
                    except Exception as e:
                        self.stdout.write(
                            self.style.WARNING(
                                f"  ⚠️ Could not clear {alias} cache: {e}"
                            )
                        )

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"  ❌ Error clearing Django cache: {e}")
            )

    def clear_pycache(self):
        """Clear Python __pycache__ directories and .pyc files."""
        self.stdout.write("🐍 Clearing Python __pycache__ and .pyc files...")

        removed_count = 0
        removed_size = 0

        try:
            # Start from project root
            project_root = Path(settings.BASE_DIR)

            # Find and remove __pycache__ directories
            for pycache_dir in project_root.rglob("__pycache__"):
                if pycache_dir.is_dir():
                    try:
                        # Calculate size before removal
                        dir_size = sum(
                            f.stat().st_size
                            for f in pycache_dir.rglob("*")
                            if f.is_file()
                        )
                        removed_size += dir_size

                        if self.verbose:
                            self.stdout.write(f"  🗑️ Removing: {pycache_dir}")

                        if not self.dry_run:
                            shutil.rmtree(pycache_dir)

                        removed_count += 1
                    except Exception as e:
                        self.stdout.write(
                            self.style.WARNING(
                                f"  ⚠️ Could not remove {pycache_dir}: {e}"
                            )
                        )

            # Find and remove .pyc files
            for pyc_file in project_root.rglob("*.pyc"):
                try:
                    file_size = pyc_file.stat().st_size
                    removed_size += file_size

                    if self.verbose:
                        self.stdout.write(f"  🗑️ Removing: {pyc_file}")

                    if not self.dry_run:
                        pyc_file.unlink()

                    removed_count += 1
                except Exception as e:
                    self.stdout.write(
                        self.style.WARNING(f"  ⚠️ Could not remove {pyc_file}: {e}")
                    )

            # Format file size
            size_mb = removed_size / (1024 * 1024)
            self.stdout.write(
                self.style.SUCCESS(
                    f"  ✅ Removed {removed_count} Python cache items ({size_mb:.2f} MB)"
                )
            )

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"  ❌ Error clearing Python cache: {e}")
            )

    def clear_static_cache(self):
        """Clear static files cache."""
        self.stdout.write("📦 Clearing static files cache...")

        try:
            static_root = getattr(settings, "STATIC_ROOT", None)

            if static_root and Path(static_root).exists():
                static_path = Path(static_root)

                # Calculate size
                total_size = sum(
                    f.stat().st_size for f in static_path.rglob("*") if f.is_file()
                )
                size_mb = total_size / (1024 * 1024)

                if self.verbose:
                    self.stdout.write(f"  🗑️ Removing: {static_path}")

                if not self.dry_run:
                    shutil.rmtree(static_path)
                    static_path.mkdir(parents=True, exist_ok=True)

                self.stdout.write(
                    self.style.SUCCESS(
                        f"  ✅ Cleared static files cache ({size_mb:.2f} MB)"
                    )
                )
            else:
                self.stdout.write(
                    self.style.WARNING(
                        "  ⚠️ No STATIC_ROOT configured or directory doesn't exist"
                    )
                )

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"  ❌ Error clearing static cache: {e}")
            )

    def clear_sessions_cache(self):
        """Clear session cache if using cache-based sessions."""
        self.stdout.write("🔐 Clearing session cache...")

        try:
            session_engine = getattr(settings, "SESSION_ENGINE", "")

            if "cache" in session_engine:
                # Using cache-based sessions
                session_cache_alias = getattr(
                    settings, "SESSION_CACHE_ALIAS", "default"
                )
                session_cache = caches[session_cache_alias]

                if not self.dry_run:
                    # Clear session keys (this is a simplified approach)
                    # In production, you might want more sophisticated session clearing
                    session_cache.clear()

                self.stdout.write(
                    self.style.SUCCESS(
                        f"  ✅ Cleared cache-based sessions ({session_cache_alias})"
                    )
                )
            else:
                self.stdout.write(
                    self.style.WARNING("  ⚠️ Not using cache-based sessions")
                )

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"  ❌ Error clearing session cache: {e}")
            )

    def clear_template_cache(self):
        """Clear template cache."""
        self.stdout.write("📄 Clearing template cache...")

        try:
            # Clear template cache if using cached template loader
            from django.template import engines
            from django.template.loaders.cached import Loader as CachedLoader

            cleared_engines = 0
            for engine in engines.all():
                try:
                    # Check for DjangoTemplates engine with cached loaders
                    engine_backend = getattr(engine, "backend", "")
                    if "DjangoTemplates" in engine_backend:
                        # Get engine instance safely
                        engine_instance = getattr(engine, "engine", None)
                        if engine_instance:
                            template_loaders = getattr(
                                engine_instance, "template_loaders", []
                            )
                            for loader in template_loaders:
                                if isinstance(loader, CachedLoader):
                                    if not self.dry_run:
                                        loader.reset()
                                    cleared_engines += 1
                                    if self.verbose:
                                        self.stdout.write(
                                            f"  🗑️ Cleared cached loader: {loader}"
                                        )

                    # Check for Jinja2 engines (if present)
                    elif "Jinja2" in engine_backend and hasattr(engine, "env"):
                        env = getattr(engine, "env", None)
                        if env and hasattr(env, "cache"):
                            if not self.dry_run:
                                env.cache.clear()
                            cleared_engines += 1
                            if self.verbose:
                                self.stdout.write(
                                    f"  🗑️ Cleared Jinja2 cache: {engine}"
                                )

                except Exception as e:
                    if self.verbose:
                        self.stdout.write(
                            self.style.WARNING(
                                f"  ⚠️ Could not clear cache for engine {engine}: {e}"
                            )
                        )

            if cleared_engines > 0:
                self.stdout.write(
                    self.style.SUCCESS(
                        f"  ✅ Cleared template cache for "
                        f"{cleared_engines} loaders/engines"
                    )
                )
            else:
                self.stdout.write(
                    self.style.WARNING("  ⚠️ No cached template loaders found")
                )

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"  ❌ Error clearing template cache: {e}")
            )

    def clear_tailwind_cache(self):
        """Clear Tailwind CSS build cache."""
        self.stdout.write("🎨 Clearing Tailwind CSS cache...")

        try:
            # Look for common Tailwind cache directories
            project_root = Path(settings.BASE_DIR)
            cache_paths = [
                project_root / "node_modules" / ".cache",
                project_root / ".tailwindcss-cache",
                project_root / "static" / "css" / ".cache",
            ]

            cleared_count = 0
            for cache_path in cache_paths:
                if cache_path.exists():
                    try:
                        if self.verbose:
                            self.stdout.write(f"  🗑️ Removing: {cache_path}")

                        if not self.dry_run:
                            if cache_path.is_file():
                                cache_path.unlink()
                            else:
                                shutil.rmtree(cache_path)

                        cleared_count += 1
                    except Exception as e:
                        self.stdout.write(
                            self.style.WARNING(
                                f"  ⚠️ Could not remove {cache_path}: {e}"
                            )
                        )

            if cleared_count > 0:
                self.stdout.write(
                    self.style.SUCCESS(
                        f"  ✅ Cleared {cleared_count} Tailwind cache directories"
                    )
                )
            else:
                self.stdout.write(
                    self.style.WARNING("  ⚠️ No Tailwind cache directories found")
                )

        except Exception as e:
            self.stdout.write(
                self.style.ERROR(f"  ❌ Error clearing Tailwind cache: {e}")
            )

    def clear_opcache(self):
        """Clear PHP OPcache if available."""
        self.stdout.write("⚡ Clearing OPcache...")

        try:
            # This is mainly for mixed environments
            php_code = (
                "if (function_exists('opcache_reset')) { "
                "opcache_reset(); echo 'cleared'; } "
                "else { echo 'not_available'; }"
            )
            result = subprocess.run(
                ["php", "-r", php_code],
                capture_output=True,
                text=True,
                timeout=10,
            )

            if result.returncode == 0:
                if "cleared" in result.stdout:
                    self.stdout.write(
                        self.style.SUCCESS("  ✅ OPcache cleared successfully")
                    )
                else:
                    self.stdout.write(self.style.WARNING("  ⚠️ OPcache not available"))
            else:
                self.stdout.write(
                    self.style.WARNING(
                        "  ⚠️ PHP not available or OPcache not accessible"
                    )
                )

        except (subprocess.TimeoutExpired, FileNotFoundError):
            self.stdout.write(
                self.style.WARNING("  ⚠️ PHP not found or not accessible")
            )
        except Exception as e:
            self.stdout.write(self.style.ERROR(f"  ❌ Error clearing OPcache: {e}"))
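The command can also be invoked programmatically, for example from tests (a sketch using Django's call_command; the keyword names are the argparse dests defined above):

from django.core.management import call_command

call_command("clear_cache", dry_run=True, verbose=True)  # preview everything
call_command("clear_cache", pycache=True, tailwind=True)  # only specific targets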
99
apps/core/management/commands/rundev.py
Normal file
@@ -0,0 +1,99 @@
"""
Django management command to run the development server.

This command automatically sets up the development environment and starts
the server, replacing the need for the dev_server.sh script.
"""

from django.core.management.base import BaseCommand
from django.core.management import execute_from_command_line


class Command(BaseCommand):
    help = "Run the development server with automatic setup"

    def add_arguments(self, parser):
        parser.add_argument(
            "--port",
            type=str,
            default="8000",
            help="Port to run the server on (default: 8000)",
        )
        parser.add_argument(
            "--host",
            type=str,
            default="0.0.0.0",
            help="Host to bind the server to (default: 0.0.0.0)",
        )
        parser.add_argument(
            "--skip-setup",
            action="store_true",
            help="Skip the development setup and go straight to running the server",
        )
        parser.add_argument(
            "--use-runserver-plus",
            action="store_true",
            help="Use runserver_plus if available (from django-extensions)",
        )

    def handle(self, *args, **options):
        """Run the development setup and start the server."""
        if not options["skip_setup"]:
            self.stdout.write(
                self.style.SUCCESS(
                    "🚀 Setting up and starting ThrillWiki Development Server..."
                )
            )

            # Run the setup_dev command first
            execute_from_command_line(["manage.py", "setup_dev"])

        else:
            self.stdout.write(
                self.style.SUCCESS("🚀 Starting ThrillWiki Development Server...")
            )

        # Determine which server command to use
        server_command = self.get_server_command(options)

        # Start the server
        self.stdout.write("")
        self.stdout.write(
            self.style.SUCCESS(
                f"🌟 Starting Django development server on http://{options['host']}:{options['port']}"
            )
        )
        self.stdout.write("Press Ctrl+C to stop the server")
        self.stdout.write("")

        try:
            execute_from_command_line(
                ["manage.py", server_command, f"{options['host']}:{options['port']}"]
            )
        except KeyboardInterrupt:
            self.stdout.write("")
            self.stdout.write(self.style.SUCCESS("👋 Development server stopped"))

    def get_server_command(self, options):
        """Determine which server command to use."""
        if options["use_runserver_plus"] or self.has_runserver_plus():
            return "runserver_plus"
        return "runserver"

    def has_runserver_plus(self):
        """Check if runserver_plus is available (django-extensions)."""
        try:
            import django_extensions  # noqa: F401

            return True
        except ImportError:
            return False
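Typical invocation, following the repo's "uv run manage.py ..." convention (a sketch; the programmatic form uses Django's call_command):

from django.core.management import call_command

# Equivalent to: uv run manage.py rundev --port 8001 --skip-setup
call_command("rundev", port="8001", skip_setup=True)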
224
apps/core/management/commands/setup_dev.py
Normal file
@@ -0,0 +1,224 @@
"""
Django management command to set up the development environment.

This command performs all the setup tasks that the dev_server.sh script does,
allowing the project to run without requiring the shell script.
"""

import subprocess
from pathlib import Path

from django.core.management.base import BaseCommand
from django.conf import settings


class Command(BaseCommand):
    help = "Set up the development environment"

    def add_arguments(self, parser):
        parser.add_argument(
            "--skip-migrations",
            action="store_true",
            help="Skip running database migrations",
        )
        parser.add_argument(
            "--skip-static",
            action="store_true",
            help="Skip collecting static files",
        )
        parser.add_argument(
            "--skip-tailwind",
            action="store_true",
            help="Skip building Tailwind CSS",
        )
        parser.add_argument(
            "--skip-superuser",
            action="store_true",
            help="Skip creating development superuser",
        )

    def handle(self, *args, **options):
        """Run the development setup process."""
        self.stdout.write(
            self.style.SUCCESS("🚀 Setting up ThrillWiki Development Environment...")
        )

        # Create necessary directories
        self.create_directories()

        # Run database migrations if needed
        if not options["skip_migrations"]:
            self.run_migrations()

        # Seed sample data
        self.seed_sample_data()

        # Create superuser if it doesn't exist
        if not options["skip_superuser"]:
            self.create_superuser()

        # Collect static files
        if not options["skip_static"]:
            self.collect_static()

        # Build Tailwind CSS
        if not options["skip_tailwind"]:
            self.build_tailwind()

        # Run system checks
        self.run_system_checks()

        # Display environment info
        self.display_environment_info()

        self.stdout.write(
            self.style.SUCCESS("✅ Development environment setup complete!")
        )

    def create_directories(self):
        """Create necessary directories."""
        self.stdout.write("📁 Creating necessary directories...")
        directories = ["logs", "profiles", "media", "staticfiles", "static/css"]

        for directory in directories:
            dir_path = Path(settings.BASE_DIR) / directory
            dir_path.mkdir(parents=True, exist_ok=True)

        self.stdout.write(self.style.SUCCESS("✅ Directories created"))

    def run_migrations(self):
        """Run database migrations if needed."""
        self.stdout.write("🗄️ Checking database migrations...")

        try:
            # Check if migrations are up to date
            result = subprocess.run(
                ["uv", "run", "manage.py", "migrate", "--check"],
                capture_output=True,
                text=True,
            )

            if result.returncode == 0:
                self.stdout.write(
                    self.style.SUCCESS("✅ Database migrations are up to date")
                )
            else:
                self.stdout.write("🔄 Running database migrations...")
                subprocess.run(
                    ["uv", "run", "manage.py", "migrate", "--noinput"], check=True
                )
                self.stdout.write(
                    self.style.SUCCESS("✅ Database migrations completed")
                )

        except subprocess.CalledProcessError as e:
            self.stdout.write(
                self.style.WARNING(f"⚠️ Migration error (continuing): {e}")
            )

    def seed_sample_data(self):
        """Seed sample data into the database."""
        self.stdout.write("🌱 Seeding sample data...")

        try:
            subprocess.run(
                ["uv", "run", "manage.py", "seed_sample_data"], check=True
            )
            self.stdout.write(self.style.SUCCESS("✅ Sample data seeded"))
        except subprocess.CalledProcessError:
            self.stdout.write(
                self.style.WARNING("⚠️ Could not seed sample data (continuing)")
            )

    def create_superuser(self):
        """Create development superuser if it doesn't exist."""
        self.stdout.write("👤 Checking for superuser...")

        try:
            from django.contrib.auth import get_user_model

            User = get_user_model()

            if User.objects.filter(is_superuser=True).exists():
                self.stdout.write(self.style.SUCCESS("✅ Superuser already exists"))
            else:
                self.stdout.write("👤 Creating development superuser (admin/admin)...")
                if not User.objects.filter(username="admin").exists():
                    User.objects.create_superuser("admin", "admin@example.com", "admin")
                    self.stdout.write(
                        self.style.SUCCESS("✅ Created superuser: admin/admin")
                    )
                else:
                    self.stdout.write(
                        self.style.SUCCESS("✅ Admin user already exists")
                    )

        except Exception as e:
            self.stdout.write(self.style.WARNING(f"⚠️ Could not create superuser: {e}"))

    def collect_static(self):
        """Collect static files for development."""
        self.stdout.write("📦 Collecting static files...")

        try:
            subprocess.run(
                ["uv", "run", "manage.py", "collectstatic", "--noinput", "--clear"],
                check=True,
            )
            self.stdout.write(self.style.SUCCESS("✅ Static files collected"))
        except subprocess.CalledProcessError as e:
            self.stdout.write(
                self.style.WARNING(f"⚠️ Could not collect static files: {e}")
            )

    def build_tailwind(self):
        """Build Tailwind CSS if npm is available."""
        self.stdout.write("🎨 Building Tailwind CSS...")

        try:
            # Check if npm is available
            subprocess.run(["npm", "--version"], capture_output=True, check=True)

            # Build Tailwind CSS
            subprocess.run(
                ["uv", "run", "manage.py", "tailwind", "build"], check=True
            )
            self.stdout.write(self.style.SUCCESS("✅ Tailwind CSS built"))

        except (subprocess.CalledProcessError, FileNotFoundError):
            self.stdout.write(
                self.style.WARNING(
                    "⚠️ npm not found or Tailwind build failed, skipping"
                )
            )

    def run_system_checks(self):
        """Run Django system checks."""
        self.stdout.write("🔍 Running system checks...")

        try:
            subprocess.run(["uv", "run", "manage.py", "check"], check=True)
            self.stdout.write(self.style.SUCCESS("✅ System checks passed"))
        except subprocess.CalledProcessError:
            self.stdout.write(
                self.style.WARNING("❌ System checks failed, but continuing...")
            )

    def display_environment_info(self):
        """Display development environment information."""
        self.stdout.write("")
        self.stdout.write(self.style.SUCCESS("🌍 Development Environment:"))
        self.stdout.write(f"  - Settings Module: {settings.SETTINGS_MODULE}")
        self.stdout.write(f"  - Debug Mode: {settings.DEBUG}")
        self.stdout.write("  - Database: PostgreSQL with PostGIS")
        self.stdout.write("  - Cache: Local memory cache")
        self.stdout.write("  - Admin URL: http://localhost:8000/admin/")
        self.stdout.write("  - Admin User: admin / admin")
        self.stdout.write("  - Silk Profiler: http://localhost:8000/silk/")
        self.stdout.write("  - Debug Toolbar: Available on debug pages")
        self.stdout.write("  - API Documentation: http://localhost:8000/api/docs/")
        self.stdout.write("")
        self.stdout.write("🌟 Ready to start development server with:")
        self.stdout.write("  uv run manage.py runserver_plus")
        self.stdout.write("")
309
apps/core/management/commands/test_trending.py
Normal file
@@ -0,0 +1,309 @@
from django.core.management.base import BaseCommand
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from apps.parks.models.parks import Park
from apps.rides.models.rides import Ride
from apps.parks.models.companies import Company
from apps.core.analytics import PageView
from apps.core.services.trending_service import trending_service
from datetime import datetime, timedelta
import random


class Command(BaseCommand):
    help = "Test the trending algorithm with sample data"

    def add_arguments(self, parser):
        parser.add_argument(
            "--clean",
            action="store_true",
            help="Clean existing test data before creating new data",
        )
        parser.add_argument(
            "--verbose",
            action="store_true",
            help="Show detailed output",
        )

    def handle(self, *args, **options):
        self.verbose = options["verbose"]

        if options["clean"]:
            self.clean_test_data()

        self.create_test_data()
        self.test_trending_algorithm()
        self.test_api_format()

        self.stdout.write(
            self.style.SUCCESS("✓ Trending system test completed successfully!")
        )

    def clean_test_data(self):
        """Clean existing test data."""
        self.stdout.write("Cleaning existing test data...")

        # Delete test PageViews
        PageView.objects.filter(
            content_type__in=[
                ContentType.objects.get_for_model(Park),
                ContentType.objects.get_for_model(Ride),
            ]
        ).delete()

        self.stdout.write("✓ Test data cleaned")

    def create_test_data(self):
        """Create sample parks, rides, and page views for testing."""
        self.stdout.write("Creating test data...")

        # Create or get default operator company
        operator, created = Company.objects.get_or_create(
            name="Default Theme Park Operator",
            defaults={
                "roles": ["OPERATOR"],
                "description": "Default operator for test parks",
            },
        )
        if created and self.verbose:
            self.stdout.write(f"  Created operator company: {operator.name}")

        # Get or create test parks and rides
        parks_data = [
            {
                "name": "Cedar Point",
                "slug": "cedar-point",
                "description": "America's Roller Coast featuring world-class roller coasters",
                "average_rating": 9.2,
                "opening_date": datetime(1870, 1, 1).date(),
                "operator": operator,
            },
            {
                "name": "Magic Kingdom",
                "slug": "magic-kingdom",
                "description": "Walt Disney World's most magical theme park",
                "average_rating": 9.5,
                "opening_date": datetime(1971, 10, 1).date(),
                "operator": operator,
            },
            {
                "name": "Six Flags Great Adventure",
                "slug": "six-flags-great-adventure",
                "description": "Home to Kingda Ka and incredible thrills",
                "average_rating": 8.8,
                "opening_date": datetime(1974, 7, 1).date(),
                "operator": operator,
            },
        ]

        # Create parks
        parks = []
        for park_data in parks_data:
            park, created = Park.objects.get_or_create(
                name=park_data["name"], defaults=park_data
            )
            parks.append(park)
            if created and self.verbose:
                self.stdout.write(f"  Created park: {park.name}")

        # Now create rides - they need park references
        rides_data = [
            {
                "name": "Steel Vengeance",
                "slug": "steel-vengeance",
                "description": "Hybrid roller coaster at Cedar Point",
                "park": next(p for p in parks if p.name == "Cedar Point"),
                "category": "RC",  # Roller Coaster
                "average_rating": 9.8,
                "opening_date": datetime(2018, 5, 5).date(),
            },
            {
                "name": "Space Mountain",
                "slug": "space-mountain",
                "description": "Indoor space-themed roller coaster",
                "park": next(p for p in parks if p.name == "Magic Kingdom"),
                "category": "RC",  # Roller Coaster
                "average_rating": 8.5,
                "opening_date": datetime(1975, 1, 15).date(),
            },
            {
                "name": "Kingda Ka",
                "slug": "kingda-ka",
                "description": "World's tallest roller coaster",
                "park": next(p for p in parks if p.name == "Six Flags Great Adventure"),
                "category": "RC",  # Roller Coaster
                "average_rating": 9.0,
                "opening_date": datetime(2005, 5, 21).date(),
            },
            {
                "name": "Millennium Force",
                "slug": "millennium-force",
                "description": "Legendary steel roller coaster",
                "park": next(p for p in parks if p.name == "Cedar Point"),
                "category": "RC",  # Roller Coaster
                "average_rating": 9.4,
                "opening_date": datetime(2000, 5, 13).date(),
            },
        ]

        # Create rides
        rides = []
        for ride_data in rides_data:
            ride, created = Ride.objects.get_or_create(
                name=ride_data["name"], defaults=ride_data
            )
            rides.append(ride)
            if created and self.verbose:
                self.stdout.write(f"  Created ride: {ride.name}")

        # Create PageViews with different patterns to test trending
        self.create_page_views(parks, rides)

        self.stdout.write("✓ Test data created")

    def create_page_views(self, parks, rides):
        """Create PageViews with different trending patterns."""
        now = timezone.now()

        # Pattern 1: Recently trending item (Steel Vengeance)
        steel_vengeance = next(r for r in rides if r.name == "Steel Vengeance")
        self.create_views_for_content(
            steel_vengeance, recent_views=50, older_views=10, base_time=now
        )

        # Pattern 2: Consistently popular item (Space Mountain)
        space_mountain = next(r for r in rides if r.name == "Space Mountain")
        self.create_views_for_content(
            space_mountain, recent_views=30, older_views=25, base_time=now
        )

        # Pattern 3: Declining popularity (Kingda Ka)
        kingda_ka = next(r for r in rides if r.name == "Kingda Ka")
        self.create_views_for_content(
            kingda_ka, recent_views=5, older_views=40, base_time=now
        )

        # Pattern 4: New but growing (Millennium Force)
        millennium_force = next(r for r in rides if r.name == "Millennium Force")
        self.create_views_for_content(
            millennium_force, recent_views=25, older_views=5, base_time=now
        )

        # Create some park views too
        cedar_point = next(p for p in parks if p.name == "Cedar Point")
        self.create_views_for_content(
            cedar_point, recent_views=35, older_views=20, base_time=now
        )

        if self.verbose:
            self.stdout.write("  Created PageView data for trending analysis")

    def create_views_for_content(
        self, content_object, recent_views, older_views, base_time
    ):
        """Create PageViews for a content object with specified patterns."""
        content_type = ContentType.objects.get_for_model(type(content_object))

        # Create recent views (last 2 hours)
        for i in range(recent_views):
            view_time = base_time - timedelta(
                minutes=random.randint(0, 120)  # Last 2 hours
            )
            page_view = PageView.objects.create(
                content_type=content_type,
                object_id=content_object.id,
                ip_address=f"192.168.1.{random.randint(1, 255)}",
                user_agent="Test Agent",
            )
            # PageView.timestamp uses auto_now_add, so a value passed to
            # create() would be overwritten; backdate it with an UPDATE.
            PageView.objects.filter(pk=page_view.pk).update(timestamp=view_time)

        # Create older views (2-24 hours ago)
        for i in range(older_views):
            view_time = base_time - timedelta(hours=random.randint(2, 24))
            page_view = PageView.objects.create(
                content_type=content_type,
                object_id=content_object.id,
                ip_address=f"10.0.0.{random.randint(1, 255)}",
                user_agent="Test Agent",
            )
            PageView.objects.filter(pk=page_view.pk).update(timestamp=view_time)

    def test_trending_algorithm(self):
        """Test the trending algorithm functionality."""
        self.stdout.write("Testing trending algorithm...")

        # Test trending content for different content types
        trending_parks = trending_service.get_trending_content(
            content_type="parks", limit=3
        )
        trending_rides = trending_service.get_trending_content(
            content_type="rides", limit=3
        )
        trending_all = trending_service.get_trending_content(
            content_type="all", limit=5
        )

        # Test new content
        new_parks = trending_service.get_new_content(content_type="parks", limit=3)
        new_rides = trending_service.get_new_content(content_type="rides", limit=3)
|
||||
new_all = trending_service.get_new_content(content_type="all", limit=5)
|
||||
|
||||
if self.verbose:
|
||||
self.stdout.write(f" Trending parks: {len(trending_parks)} results")
|
||||
self.stdout.write(f" Trending rides: {len(trending_rides)} results")
|
||||
self.stdout.write(f" Trending all: {len(trending_all)} results")
|
||||
self.stdout.write(f" New parks: {len(new_parks)} results")
|
||||
self.stdout.write(f" New rides: {len(new_rides)} results")
|
||||
self.stdout.write(f" New all: {len(new_all)} results")
|
||||
|
||||
self.stdout.write("✓ Trending algorithm working correctly")
|
||||
|
||||
def test_api_format(self):
|
||||
"""Test that API responses match expected frontend format."""
|
||||
self.stdout.write("Testing API response format...")
|
||||
|
||||
# Test trending content format
|
||||
trending_parks = trending_service.get_trending_content(
|
||||
content_type="parks", limit=3
|
||||
)
|
||||
trending_service.get_trending_content(
|
||||
content_type="rides", limit=3
|
||||
)
|
||||
|
||||
# Test new content format
|
||||
new_parks = trending_service.get_new_content(content_type="parks", limit=3)
|
||||
trending_service.get_new_content(content_type="rides", limit=3)
|
||||
|
||||
# Verify trending data structure
|
||||
if trending_parks:
|
||||
item = trending_parks[0]
|
||||
required_trending_fields = [
|
||||
"id",
|
||||
"name",
|
||||
"slug",
|
||||
"views",
|
||||
"views_change",
|
||||
"rank",
|
||||
]
|
||||
for field in required_trending_fields:
|
||||
if field not in item:
|
||||
raise ValueError(f"Missing required trending field: {field}")
|
||||
|
||||
# Verify new content data structure
|
||||
if new_parks:
|
||||
item = new_parks[0]
|
||||
required_new_fields = ["id", "name", "slug"]
|
||||
for field in required_new_fields:
|
||||
if field not in item:
|
||||
raise ValueError(f"Missing required new content field: {field}")
|
||||
|
||||
if self.verbose:
|
||||
self.stdout.write(" Sample trending park data:")
|
||||
if trending_parks:
|
||||
self.stdout.write(f" {trending_parks[0]}")
|
||||
|
||||
self.stdout.write(" Sample new park data:")
|
||||
if new_parks:
|
||||
self.stdout.write(f" {new_parks[0]}")
|
||||
|
||||
self.stdout.write("✓ API format validation passed")
|
||||
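A minimal invocation sketch (not part of the diff), assuming this file is registered as the test_trending command and wires a --verbose flag to self.verbose:

from django.core.management import call_command

# Seeds the sample parks/rides, builds the PageView patterns, then runs the
# trending and API-format checks in one pass.
call_command("test_trending", verbose=True)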
36
apps/core/management/commands/update_trending.py
Normal file
@@ -0,0 +1,36 @@
from django.core.management.base import BaseCommand
from django.core.cache import cache
from apps.parks.models import Park
from apps.rides.models import Ride
from apps.core.analytics import PageView


class Command(BaseCommand):
    help = "Updates trending parks and rides cache based on views in the last 7 days"

    def handle(self, *args, **kwargs):
        """
        Updates the trending parks and rides in the cache.

        This command is designed to be run once daily via cron to keep the trending
        items up to date. It looks at page views from the last 7 days and caches
        the top 10 most viewed parks and rides.

        The cached data is used by the home page to display trending items without
        having to query the database on every request.
        """
        # Get top 10 trending parks and rides from the last 7 days (168 hours)
        trending_parks = PageView.get_trending_items(Park, hours=168, limit=10)
        trending_rides = PageView.get_trending_items(Ride, hours=168, limit=10)

        # Cache the results for 24 hours (daily refresh)
        cache.set("trending_parks", trending_parks, 86400)  # 86400 seconds = 24 hours
        cache.set("trending_rides", trending_rides, 86400)

        self.stdout.write(
            self.style.SUCCESS(
                "Successfully updated trending parks and rides. "
                "Cached 10 items each for parks and rides based on views "
                "in the last 7 days."
            )
        )
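A minimal consumer sketch (not part of the diff): a home view reading the keys this command populates. The template name is hypothetical.

from django.core.cache import cache
from django.shortcuts import render


def home(request):
    # Fall back to empty lists until the daily cron has primed the cache
    context = {
        "trending_parks": cache.get("trending_parks", []),
        "trending_rides": cache.get("trending_rides", []),
    }
    return render(request, "home.html", context)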
273
apps/core/managers.py
Normal file
@@ -0,0 +1,273 @@
"""
Custom managers and QuerySets for optimized database patterns.
Following Django styleguide best practices for database access.
"""

from typing import Optional, List, Union
from django.db import models
from django.db.models import Q, Count, Avg, Max
# from django.contrib.gis.geos import Point  # Disabled temporarily for setup
# from django.contrib.gis.measure import Distance  # Disabled temporarily for setup
from django.utils import timezone
from datetime import timedelta


class BaseQuerySet(models.QuerySet):
    """Base QuerySet with common optimizations and patterns."""

    def active(self):
        """Filter for active/enabled records."""
        if hasattr(self.model, "is_active"):
            return self.filter(is_active=True)
        return self

    def published(self):
        """Filter for published records."""
        if hasattr(self.model, "is_published"):
            return self.filter(is_published=True)
        return self

    def recent(self, *, days: int = 30):
        """Filter for recently created records."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(created_at__gte=cutoff_date)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        """
        Full-text search across specified fields.

        Args:
            query: Search query string
            fields: List of field names to search (defaults to name, description)
        """
        if not query:
            return self

        if fields is None:
            fields = ["name", "description"] if hasattr(self.model, "name") else []

        q_objects = Q()
        for field in fields:
            if hasattr(self.model, field):
                q_objects |= Q(**{f"{field}__icontains": query})

        return self.filter(q_objects) if q_objects else self

    def with_stats(self):
        """Add basic statistics annotations (placeholder; subclasses add real annotations)."""
        return self

    def optimized_for_list(self):
        """Optimize queryset for list display."""
        return self.select_related().prefetch_related()

    def optimized_for_detail(self):
        """Optimize queryset for detail display."""
        return self.select_related().prefetch_related()


class BaseManager(models.Manager):
    """Base manager with common patterns."""

    def get_queryset(self):
        return BaseQuerySet(self.model, using=self._db)

    def active(self):
        return self.get_queryset().active()

    def published(self):
        return self.get_queryset().published()

    def recent(self, *, days: int = 30):
        return self.get_queryset().recent(days=days)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        return self.get_queryset().search(query=query, fields=fields)


class LocationQuerySet(BaseQuerySet):
    """QuerySet for location-based models with geographic functionality."""

    def near_point(self, *, point, distance_km: float = 50):  # Point type disabled for setup
        """Filter locations near a geographic point.

        Note: this path depends on the GeoDjango imports above being re-enabled;
        while they are commented out, calling it raises a NameError for Distance.
        """
        if hasattr(self.model, "point"):
            return (
                self.filter(point__distance_lte=(point, Distance(km=distance_km)))
                .distance(point)
                .order_by("distance")
            )
        return self

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        """Filter locations within geographic bounds."""
        if hasattr(self.model, "point"):
            return self.filter(
                point__latitude__gte=south,
                point__latitude__lte=north,
                point__longitude__gte=west,
                point__longitude__lte=east,
            )
        return self

    def by_country(self, *, country: str):
        """Filter by country."""
        if hasattr(self.model, "country"):
            return self.filter(country__iexact=country)
        return self

    def by_region(self, *, state: str):
        """Filter by state/region."""
        if hasattr(self.model, "state"):
            return self.filter(state__iexact=state)
        return self

    def by_city(self, *, city: str):
        """Filter by city."""
        if hasattr(self.model, "city"):
            return self.filter(city__iexact=city)
        return self


class LocationManager(BaseManager):
    """Manager for location-based models."""

    def get_queryset(self):
        return LocationQuerySet(self.model, using=self._db)

    def near_point(self, *, point, distance_km: float = 50):  # Point type disabled for setup
        return self.get_queryset().near_point(point=point, distance_km=distance_km)

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        return self.get_queryset().within_bounds(
            north=north, south=south, east=east, west=west
        )


class ReviewableQuerySet(BaseQuerySet):
    """QuerySet for models that can be reviewed."""

    def with_review_stats(self):
        """Add review statistics annotations."""
        return self.annotate(
            review_count=Count("reviews", filter=Q(reviews__is_published=True)),
            average_rating=Avg("reviews__rating", filter=Q(reviews__is_published=True)),
            latest_review_date=Max(
                "reviews__created_at", filter=Q(reviews__is_published=True)
            ),
        )

    def highly_rated(self, *, min_rating: float = 8.0):
        """Filter for highly rated items."""
        return self.with_review_stats().filter(average_rating__gte=min_rating)

    def recently_reviewed(self, *, days: int = 30):
        """Filter for items with recent reviews."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(
            reviews__created_at__gte=cutoff_date, reviews__is_published=True
        ).distinct()


class ReviewableManager(BaseManager):
    """Manager for reviewable models."""

    def get_queryset(self):
        return ReviewableQuerySet(self.model, using=self._db)

    def with_review_stats(self):
        return self.get_queryset().with_review_stats()

    def highly_rated(self, *, min_rating: float = 8.0):
        return self.get_queryset().highly_rated(min_rating=min_rating)


class HierarchicalQuerySet(BaseQuerySet):
    """QuerySet for hierarchical models (with parent/child relationships)."""

    def root_level(self):
        """Filter for root-level items (no parent)."""
        if hasattr(self.model, "parent"):
            return self.filter(parent__isnull=True)
        return self

    def children_of(self, *, parent_id: int):
        """Get children of a specific parent."""
        if hasattr(self.model, "parent"):
            return self.filter(parent_id=parent_id)
        return self

    def with_children_count(self):
        """Add count of children."""
        if hasattr(self.model, "children"):
            return self.annotate(children_count=Count("children"))
        return self


class HierarchicalManager(BaseManager):
    """Manager for hierarchical models."""

    def get_queryset(self):
        return HierarchicalQuerySet(self.model, using=self._db)

    def root_level(self):
        return self.get_queryset().root_level()


class TimestampedQuerySet(BaseQuerySet):
    """QuerySet for models with created_at/updated_at timestamps."""

    def created_between(self, *, start_date, end_date):
        """Filter by creation date range."""
        return self.filter(created_at__date__range=[start_date, end_date])

    def updated_since(self, *, since_date):
        """Filter for records updated since a date."""
        return self.filter(updated_at__gte=since_date)

    def by_creation_date(self, *, descending: bool = True):
        """Order by creation date."""
        order = "-created_at" if descending else "created_at"
        return self.order_by(order)


class TimestampedManager(BaseManager):
    """Manager for timestamped models."""

    def get_queryset(self):
        return TimestampedQuerySet(self.model, using=self._db)

    def created_between(self, *, start_date, end_date):
        return self.get_queryset().created_between(
            start_date=start_date, end_date=end_date
        )


class StatusQuerySet(BaseQuerySet):
    """QuerySet for models with status fields."""

    def with_status(self, *, status: Union[str, List[str]]):
        """Filter by status."""
        if isinstance(status, list):
            return self.filter(status__in=status)
        return self.filter(status=status)

    def operating(self):
        """Filter for operating/active status."""
        return self.filter(status="OPERATING")

    def closed(self):
        """Filter for closed status."""
        return self.filter(status__in=["CLOSED_TEMP", "CLOSED_PERM"])


class StatusManager(BaseManager):
    """Manager for status-based models."""

    def get_queryset(self):
        return StatusQuerySet(self.model, using=self._db)

    def operating(self):
        return self.get_queryset().operating()

    def closed(self):
        return self.get_queryset().closed()
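A minimal usage sketch (not part of the diff) showing how these managers attach to a model; the Article model is hypothetical and only illustrates the hasattr-guarded chaining:

from django.db import models

from apps.core.managers import BaseManager


class Article(models.Model):
    name = models.CharField(max_length=200)
    description = models.TextField(blank=True)
    is_active = models.BooleanField(default=True)
    created_at = models.DateTimeField(auto_now_add=True)

    objects = BaseManager()


# Each helper degrades to a no-op filter when the model lacks the field,
# so the chain below is safe on any model using BaseManager.
results = Article.objects.search(query="coaster").active().recent(days=7)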
17
apps/core/middleware/__init__.py
Normal file
@@ -0,0 +1,17 @@
"""
Core middleware package.

This package contains middleware components for the Django application,
including view tracking and other core functionality.
"""

from .view_tracking import ViewTrackingMiddleware, get_view_stats_for_content
from .analytics import PgHistoryContextMiddleware
from .nextjs import APIResponseMiddleware

__all__ = [
    "ViewTrackingMiddleware",
    "get_view_stats_for_content",
    "PgHistoryContextMiddleware",
    "APIResponseMiddleware",
]
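A settings sketch (not part of the diff) registering the exported middleware; the exact ordering relative to Django's stock middleware is an assumption:

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    # Core middleware from this package; PgHistoryContextMiddleware sits after
    # auth/session so user and session_key are available to the context provider.
    "apps.core.middleware.PgHistoryContextMiddleware",
    "apps.core.middleware.APIResponseMiddleware",
    "apps.core.middleware.ViewTrackingMiddleware",
]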
45
apps/core/middleware/analytics.py
Normal file
@@ -0,0 +1,45 @@
"""
Analytics and tracking middleware for Django application.
"""

import pghistory
from django.contrib.auth.models import AnonymousUser
from django.core.handlers.wsgi import WSGIRequest


class RequestContextProvider(pghistory.context):
    """Custom context provider for pghistory that extracts information from the request."""

    def __call__(self, request: WSGIRequest) -> dict:
        return {
            "user": (
                str(request.user)
                if request.user and not isinstance(request.user, AnonymousUser)
                else None
            ),
            "ip": request.META.get("REMOTE_ADDR"),
            "user_agent": request.META.get("HTTP_USER_AGENT"),
            "session_key": (
                request.session.session_key if hasattr(request, "session") else None
            ),
        }


# Initialize the context provider
request_context = RequestContextProvider()


class PgHistoryContextMiddleware:
    """
    Middleware that ensures request object is available to pghistory context.
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        # Set the pghistory context with request information
        context_data = request_context(request)
        with pghistory.context(**context_data):
            response = self.get_response(request)
        return response
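A shell sketch (not part of the diff) of what the middleware does per request, reproduced manually; any writes to @pghistory.track()-ed models inside the block carry this metadata via their event rows' pgh_context:

import pghistory

with pghistory.context(user="editor@example.com", ip="203.0.113.7"):
    ...  # writes to tracked models here are stamped with this metadata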
48
apps/core/middleware/nextjs.py
Normal file
@@ -0,0 +1,48 @@
# backend/apps/core/middleware.py

import re

from django.utils.deprecation import MiddlewareMixin


class APIResponseMiddleware(MiddlewareMixin):
    """
    Middleware to ensure consistent API responses for Next.js
    """

    def process_response(self, request, response):
        # Only process API requests
        if not request.path.startswith("/api/"):
            return response

        # Ensure CORS headers are set
        if not response.has_header("Access-Control-Allow-Origin"):
            origin = request.META.get("HTTP_ORIGIN")

            # Allow localhost/127.0.0.1 (any port) and IPv6 loopback for development
            if origin:
                # support http or https, IPv4 and IPv6 loopback, any port
                localhost_pattern = r"^https?://(localhost|127\.0\.0\.1|\[::1\]):\d+"

                if re.match(localhost_pattern, origin):
                    response["Access-Control-Allow-Origin"] = origin
                    # Ensure caches vary by Origin
                    existing_vary = response.get("Vary")
                    if existing_vary:
                        response["Vary"] = f"{existing_vary}, Origin"
                    else:
                        response["Vary"] = "Origin"

                    # Helpful dev CORS headers (adjust for your frontend requests)
                    response["Access-Control-Allow-Methods"] = (
                        "GET, POST, PUT, PATCH, DELETE, OPTIONS"
                    )
                    response["Access-Control-Allow-Headers"] = (
                        "Authorization, Content-Type, X-Requested-With"
                    )
                    # Uncomment if your dev frontend needs to send cookies/auth credentials
                    # response['Access-Control-Allow-Credentials'] = 'true'
                else:
                    response["Access-Control-Allow-Origin"] = "null"

        return response
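A quick verification sketch (not part of the diff) with Django's test client; the /api/v1/parks/ route is an assumption based on the URL patterns tracked elsewhere in this app:

from django.test import Client

client = Client()
response = client.get("/api/v1/parks/", HTTP_ORIGIN="http://localhost:3000")
assert response["Access-Control-Allow-Origin"] == "http://localhost:3000"
assert "Origin" in response["Vary"]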
308
apps/core/middleware/performance_middleware.py
Normal file
@@ -0,0 +1,308 @@
"""
Performance monitoring middleware for tracking request metrics.
"""

import time
import logging
from django.db import connection
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings

performance_logger = logging.getLogger("performance")
logger = logging.getLogger(__name__)


class PerformanceMiddleware(MiddlewareMixin):
    """Middleware to collect performance metrics for each request"""

    def process_request(self, request):
        """Initialize performance tracking for the request"""
        request._performance_start_time = time.time()
        request._performance_initial_queries = (
            len(connection.queries) if hasattr(connection, "queries") else 0
        )

    def process_response(self, request, response):
        """Log performance metrics after response is ready"""
        # Skip performance tracking for certain paths
        skip_paths = [
            "/health/",
            "/admin/jsi18n/",
            "/static/",
            "/media/",
            "/__debug__/",
        ]
        if any(request.path.startswith(path) for path in skip_paths):
            return response

        # Calculate metrics
        end_time = time.time()
        start_time = getattr(request, "_performance_start_time", end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, "_performance_initial_queries", 0)
        total_queries = (
            len(connection.queries) - initial_queries
            if hasattr(connection, "queries")
            else 0
        )

        # Get content length
        content_length = 0
        if hasattr(response, "content"):
            content_length = len(response.content)
        elif hasattr(response, "streaming_content"):
            # For streaming responses, we can't easily measure content length
            content_length = -1

        # Build performance data
        performance_data = {
            "path": request.path,
            "method": request.method,
            "status_code": response.status_code,
            "duration_ms": round(duration * 1000, 2),
            "duration_seconds": round(duration, 3),
            "query_count": total_queries,
            "content_length_bytes": content_length,
            "user_id": (
                getattr(request.user, "id", None)
                if hasattr(request, "user") and request.user.is_authenticated
                else None
            ),
            "user_agent": request.META.get("HTTP_USER_AGENT", "")[:100],  # Truncate user agent
            "remote_addr": self._get_client_ip(request),
        }

        # Add query details in debug mode
        if settings.DEBUG and hasattr(connection, "queries") and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            performance_data["queries"] = [
                {
                    "sql": (
                        query["sql"][:200] + "..."
                        if len(query["sql"]) > 200
                        else query["sql"]
                    ),
                    "time": float(query["time"]),
                }
                for query in recent_queries[-10:]  # Last 10 queries only
            ]

            # Identify slow queries
            slow_queries = [q for q in recent_queries if float(q["time"]) > 0.1]
            if slow_queries:
                performance_data["slow_query_count"] = len(slow_queries)
                performance_data["slowest_query_time"] = max(
                    float(q["time"]) for q in slow_queries
                )

        # Determine log level based on performance
        log_level = self._get_log_level(duration, total_queries, response.status_code)

        # Log the performance data
        performance_logger.log(
            log_level,
            f"Request performance: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {response.status_code}",
            extra=performance_data,
        )

        # Add performance headers for debugging (only in debug mode)
        if settings.DEBUG:
            response["X-Response-Time"] = f"{duration * 1000:.2f}ms"
            response["X-Query-Count"] = str(total_queries)
            if total_queries > 0 and hasattr(connection, "queries"):
                total_query_time = sum(
                    float(q["time"]) for q in connection.queries[-total_queries:]
                )
                response["X-Query-Time"] = f"{total_query_time * 1000:.2f}ms"

        return response

    def process_exception(self, request, exception):
        """Log performance data even when an exception occurs"""
        end_time = time.time()
        start_time = getattr(request, "_performance_start_time", end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, "_performance_initial_queries", 0)
        total_queries = (
            len(connection.queries) - initial_queries
            if hasattr(connection, "queries")
            else 0
        )

        performance_data = {
            "path": request.path,
            "method": request.method,
            "status_code": 500,  # Exception occurred
            "duration_ms": round(duration * 1000, 2),
            "query_count": total_queries,
            "exception": str(exception),
            "exception_type": type(exception).__name__,
            "user_id": (
                getattr(request.user, "id", None)
                if hasattr(request, "user") and request.user.is_authenticated
                else None
            ),
        }

        performance_logger.error(
            f"Request exception: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {type(exception).__name__}: {exception}",
            extra=performance_data,
        )

        # Don't return anything - let the exception propagate normally

    def _get_client_ip(self, request):
        """Extract client IP address from request"""
        x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
        if x_forwarded_for:
            ip = x_forwarded_for.split(",")[0].strip()
        else:
            ip = request.META.get("REMOTE_ADDR", "")
        return ip

    def _get_log_level(self, duration, query_count, status_code):
        """Determine appropriate log level based on performance metrics"""
        # Error responses
        if status_code >= 500:
            return logging.ERROR
        elif status_code >= 400:
            return logging.WARNING

        # Performance-based log levels
        if duration > 5.0:  # Very slow requests
            return logging.ERROR
        elif duration > 2.0 or query_count > 20:  # Slow requests or high query count
            return logging.WARNING
        elif duration > 1.0 or query_count > 10:  # Moderately slow
            return logging.INFO
        else:
            return logging.DEBUG


class QueryCountMiddleware(MiddlewareMixin):
    """Middleware to track and limit query counts per request"""

    def __init__(self, get_response):
        self.get_response = get_response
        self.query_limit = getattr(settings, "MAX_QUERIES_PER_REQUEST", 50)
        super().__init__(get_response)

    def process_request(self, request):
        """Initialize query tracking"""
        request._query_count_start = (
            len(connection.queries) if hasattr(connection, "queries") else 0
        )

    def process_response(self, request, response):
        """Check query count and warn if excessive"""
        if not hasattr(connection, "queries"):
            return response

        start_count = getattr(request, "_query_count_start", 0)
        current_count = len(connection.queries)
        request_query_count = current_count - start_count

        if request_query_count > self.query_limit:
            logger.warning(
                f"Excessive query count: {request.path} executed {request_query_count} queries "
                f"(limit: {self.query_limit})",
                extra={
                    "path": request.path,
                    "method": request.method,
                    "query_count": request_query_count,
                    "query_limit": self.query_limit,
                    "excessive_queries": True,
                },
            )

        return response


class DatabaseConnectionMiddleware(MiddlewareMixin):
    """Middleware to monitor database connection health"""

    def process_request(self, request):
        """Check database connection at start of request"""
        try:
            # Simple connection test
            from django.db import connection

            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
        except Exception as e:
            logger.error(
                f"Database connection failed at request start: {e}",
                extra={
                    "path": request.path,
                    "method": request.method,
                    "database_error": str(e),
                },
            )
            # Don't block the request, let Django handle the database error

    def process_response(self, request, response):
        """Close database connections properly"""
        try:
            from django.db import connection

            connection.close()
        except Exception as e:
            logger.warning(f"Error closing database connection: {e}")

        return response


class CachePerformanceMiddleware(MiddlewareMixin):
    """Middleware to monitor cache performance"""

    def process_request(self, request):
        """Initialize cache performance tracking"""
        request._cache_hits = 0
        request._cache_misses = 0
        request._cache_start_time = time.time()

    def process_response(self, request, response):
        """Log cache performance metrics"""
        cache_duration = time.time() - getattr(
            request, "_cache_start_time", time.time()
        )
        cache_hits = getattr(request, "_cache_hits", 0)
        cache_misses = getattr(request, "_cache_misses", 0)

        if cache_hits + cache_misses > 0:
            hit_rate = (cache_hits / (cache_hits + cache_misses)) * 100

            cache_data = {
                "path": request.path,
                "cache_hits": cache_hits,
                "cache_misses": cache_misses,
                "cache_hit_rate": round(hit_rate, 2),
                "cache_operations": cache_hits + cache_misses,
                "cache_duration": round(cache_duration * 1000, 2),  # milliseconds
            }

            # Log cache performance
            if hit_rate < 50 and cache_hits + cache_misses > 5:
                logger.warning(
                    f"Low cache hit rate for {request.path}: {hit_rate:.1f}%",
                    extra=cache_data,
                )
            else:
                logger.debug(
                    f"Cache performance for {request.path}: {hit_rate:.1f}% hit rate",
                    extra=cache_data,
                )

        return response
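A settings sketch (not part of the diff) wiring these classes up; the module path follows the file location above, and the query limit shows the getattr() default being overridden:

MIDDLEWARE += [
    "apps.core.middleware.performance_middleware.PerformanceMiddleware",
    "apps.core.middleware.performance_middleware.QueryCountMiddleware",
]

MAX_QUERIES_PER_REQUEST = 50  # read by QueryCountMiddleware via getattr()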
138
apps/core/middleware/request_logging.py
Normal file
@@ -0,0 +1,138 @@
"""
Request logging middleware for comprehensive request/response logging.
Logs all HTTP requests with detailed data for debugging and monitoring.
"""

import logging
import time
import json
from django.utils.deprecation import MiddlewareMixin

logger = logging.getLogger('request_logging')


class RequestLoggingMiddleware(MiddlewareMixin):
    """
    Middleware to log all HTTP requests with method, path, and response code.
    Includes detailed request/response data logging for all requests.
    """

    # Paths to exclude from detailed logging (e.g., static files, health checks)
    EXCLUDE_DETAILED_LOGGING_PATHS = [
        '/static/',
        '/media/',
        '/favicon.ico',
        '/health/',
        '/admin/jsi18n/',
    ]

    def _should_log_detailed(self, request):
        """Determine if detailed logging should be enabled for this request."""
        return not any(
            path in request.path for path in self.EXCLUDE_DETAILED_LOGGING_PATHS
        )

    def process_request(self, request):
        """Store request start time and capture request data for detailed logging."""
        request._start_time = time.time()

        # Enable detailed logging for all requests except excluded paths
        should_log_detailed = self._should_log_detailed(request)
        request._log_request_data = should_log_detailed

        if should_log_detailed:
            try:
                # Log request data
                request_data = {}
                if hasattr(request, 'data') and request.data:
                    request_data = dict(request.data)
                elif request.body:
                    try:
                        request_data = json.loads(request.body.decode('utf-8'))
                    except (json.JSONDecodeError, UnicodeDecodeError):
                        body = str(request.body)
                        request_data = {'body': body[:200] + '...' if len(body) > 200 else body}

                # Log query parameters
                query_params = dict(request.GET) if request.GET else {}

                logger.info(f"REQUEST DATA for {request.method} {request.path}:")
                if request_data:
                    logger.info(f"  Body: {self._safe_log_data(request_data)}")
                if query_params:
                    logger.info(f"  Query: {query_params}")
                if hasattr(request, 'user') and request.user.is_authenticated:
                    logger.info(f"  User: {request.user.username} (ID: {request.user.id})")

            except Exception as e:
                logger.warning(f"Failed to log request data: {e}")

        return None

    def process_response(self, request, response):
        """Log request details after response is generated."""
        try:
            # Calculate request duration
            duration = 0
            if hasattr(request, '_start_time'):
                duration = time.time() - request._start_time

            # Basic request logging
            logger.info(
                f"{request.method} {request.get_full_path()} -> {response.status_code} "
                f"({duration:.3f}s)"
            )

            # Detailed response logging for specific endpoints
            if getattr(request, '_log_request_data', False):
                try:
                    # Log response data
                    if hasattr(response, 'data'):
                        logger.info(f"RESPONSE DATA for {request.method} {request.path}:")
                        logger.info(f"  Status: {response.status_code}")
                        logger.info(f"  Data: {self._safe_log_data(response.data)}")
                    elif hasattr(response, 'content'):
                        try:
                            content = json.loads(response.content.decode('utf-8'))
                            logger.info(f"RESPONSE DATA for {request.method} {request.path}:")
                            logger.info(f"  Status: {response.status_code}")
                            logger.info(f"  Content: {self._safe_log_data(content)}")
                        except (json.JSONDecodeError, UnicodeDecodeError):
                            logger.info(f"RESPONSE DATA for {request.method} {request.path}:")
                            logger.info(f"  Status: {response.status_code}")
                            logger.info(f"  Content: {str(response.content)[:200]}...")

                except Exception as e:
                    logger.warning(f"Failed to log response data: {e}")

        except Exception:
            # Don't let logging errors break the request
            pass

        return response

    def _safe_log_data(self, data):
        """Safely log data, truncating if too large and masking sensitive fields."""
        try:
            # Convert to string representation
            if isinstance(data, dict):
                # Mask sensitive fields
                safe_data = {}
                for key, value in data.items():
                    if any(sensitive in key.lower() for sensitive in ['password', 'token', 'secret', 'key']):
                        safe_data[key] = '***MASKED***'
                    else:
                        safe_data[key] = value
                data_str = json.dumps(safe_data, indent=2, default=str)
            else:
                data_str = json.dumps(data, indent=2, default=str)

            # Truncate if too long
            if len(data_str) > 1000:
                return data_str[:1000] + '...[TRUNCATED]'
            return data_str
        except Exception:
            return str(data)[:500] + '...[ERROR_LOGGING]'
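A LOGGING sketch (not part of the diff) so the 'performance' and 'request_logging' loggers used by these middlewares actually emit somewhere:

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "console": {"class": "logging.StreamHandler"},
    },
    "loggers": {
        # PerformanceMiddleware logs anywhere from DEBUG to ERROR by severity
        "performance": {"handlers": ["console"], "level": "DEBUG"},
        "request_logging": {"handlers": ["console"], "level": "INFO"},
    },
}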
329
apps/core/middleware/view_tracking.py
Normal file
@@ -0,0 +1,329 @@
"""
View Tracking Middleware for automatic PageView recording.

This middleware automatically tracks page views for park and ride pages,
implementing IP-based deduplication to prevent spam and provide accurate
analytics for the trending algorithm.
"""

import logging
import re
from datetime import timedelta
from typing import Optional, Union
from django.http import HttpRequest, HttpResponse
from django.utils import timezone
from django.contrib.contenttypes.models import ContentType
from django.core.cache import cache
from django.conf import settings

from apps.core.analytics import PageView
from apps.parks.models import Park
from apps.rides.models import Ride

# Type alias for content objects
ContentObject = Union[Park, Ride]

logger = logging.getLogger(__name__)


class ViewTrackingMiddleware:
    """
    Middleware for tracking page views with IP deduplication.

    Automatically creates PageView records when users visit park or ride pages.
    Implements 24-hour IP deduplication window to prevent view inflation.
    """

    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")

        # URL patterns for tracking - matches park and ride detail pages
        self.tracked_patterns = [
            (r"^/parks/(?P<slug>[\w-]+)/$", "park"),
            (r"^/rides/(?P<slug>[\w-]+)/$", "ride"),
            # Add API patterns if needed
            (r"^/api/v1/parks/(?P<slug>[\w-]+)/$", "park"),
            (r"^/api/v1/rides/(?P<slug>[\w-]+)/$", "ride"),
        ]

        # Compile patterns for performance
        self.compiled_patterns = [
            (re.compile(pattern), content_type)
            for pattern, content_type in self.tracked_patterns
        ]

        # Cache configuration
        self.cache_timeout = 60 * 15  # 15 minutes
        self.dedup_window_hours = 24

    def __call__(self, request: HttpRequest) -> HttpResponse:
        """Process the request and track views if applicable."""
        response = self.get_response(request)

        # Only track successful GET requests
        if (
            request.method == "GET"
            and 200 <= response.status_code < 300
            and not self._should_skip_tracking(request)
        ):
            try:
                self._track_view_if_applicable(request)
            except Exception as e:
                # Log error but don't break the request
                self.logger.error(f"Error tracking view: {e}", exc_info=True)

        return response

    def _should_skip_tracking(self, request: HttpRequest) -> bool:
        """Check if this request should be skipped for tracking."""
        # Skip if disabled in settings
        if not getattr(settings, "ENABLE_VIEW_TRACKING", True):
            return True

        # Skip requests from bots/crawlers
        user_agent = request.META.get("HTTP_USER_AGENT", "").lower()
        bot_indicators = [
            "bot",
            "crawler",
            "spider",
            "scraper",
            "facebook",
            "twitter",
            "linkedin",
            "google",
            "bing",
            "yahoo",
            "duckduckgo",
            "slurp",
        ]
        if any(indicator in user_agent for indicator in bot_indicators):
            return True

        # Skip requests without real IP
        if not self._get_client_ip(request):
            return True

        # Skip AJAX requests (optional - depending on requirements)
        if request.META.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest":
            return True

        return False

    def _track_view_if_applicable(self, request: HttpRequest) -> None:
        """Track view if the URL matches tracked patterns."""
        path = request.path

        for pattern, content_type in self.compiled_patterns:
            match = pattern.match(path)
            if match:
                slug = match.group("slug")
                self._record_page_view(request, content_type, slug)
                break

    def _record_page_view(
        self, request: HttpRequest, content_type: str, slug: str
    ) -> None:
        """Record a page view for the specified content."""
        client_ip = self._get_client_ip(request)
        if not client_ip:
            return

        try:
            # Get the content object
            content_obj = self._get_content_object(content_type, slug)
            if not content_obj:
                self.logger.warning(
                    f"Content not found: {content_type} with slug '{slug}'"
                )
                return

            # Check deduplication
            if self._is_duplicate_view(content_obj, client_ip):
                self.logger.debug(
                    f"Duplicate view skipped for {content_type} {slug} from {client_ip}"
                )
                return

            # Create PageView record
            self._create_page_view(content_obj, client_ip, request)

            self.logger.debug(
                f"Recorded view for {content_type} {slug} from {client_ip}"
            )

        except Exception as e:
            self.logger.error(
                f"Failed to record page view for {content_type} {slug}: {e}"
            )

    def _get_content_object(
        self, content_type: str, slug: str
    ) -> Optional[ContentObject]:
        """Get the content object by type and slug."""
        try:
            if content_type == "park":
                # Use get_by_slug method to handle historical slugs
                park, _ = Park.get_by_slug(slug)
                return park
            elif content_type == "ride":
                # For rides, we need to search by slug within parks
                return Ride.objects.filter(slug=slug).first()
            else:
                self.logger.warning(f"Unknown content type: {content_type}")
                return None

        except Park.DoesNotExist:
            return None
        except Exception as e:
            self.logger.error(f"Error getting {content_type} with slug {slug}: {e}")
            return None

    def _is_duplicate_view(self, content_obj: ContentObject, client_ip: str) -> bool:
        """Check if this view is a duplicate within the deduplication window."""
        # Use cache for performance
        cache_key = self._get_dedup_cache_key(content_obj, client_ip)

        if cache.get(cache_key):
            return True

        # Check database as fallback
        content_type = ContentType.objects.get_for_model(content_obj)
        cutoff_time = timezone.now() - timedelta(hours=self.dedup_window_hours)

        existing_view = PageView.objects.filter(
            content_type=content_type,
            object_id=content_obj.pk,
            ip_address=client_ip,
            timestamp__gte=cutoff_time,
        ).exists()

        if not existing_view:
            # Set cache to prevent future duplicates
            cache.set(cache_key, True, timeout=self.dedup_window_hours * 3600)

        return existing_view

    def _create_page_view(
        self, content_obj: ContentObject, client_ip: str, request: HttpRequest
    ) -> None:
        """Create a new PageView record."""
        content_type = ContentType.objects.get_for_model(content_obj)

        # Extract additional metadata
        user_agent = request.META.get("HTTP_USER_AGENT", "")[:500]  # Truncate long user agents

        # PageView stores only content_type, object_id, ip_address and user_agent;
        # referer/path would require extra model fields before they could be saved.
        PageView.objects.create(
            content_type=content_type,
            object_id=content_obj.pk,
            ip_address=client_ip,
            user_agent=user_agent,
        )

        # Update cache for deduplication
        cache_key = self._get_dedup_cache_key(content_obj, client_ip)
        cache.set(cache_key, True, timeout=self.dedup_window_hours * 3600)

    def _get_dedup_cache_key(self, content_obj: ContentObject, client_ip: str) -> str:
        """Generate cache key for deduplication."""
        content_type = ContentType.objects.get_for_model(content_obj)
        return f"pageview_dedup:{content_type.id}:{content_obj.pk}:{client_ip}"

    def _get_client_ip(self, request: HttpRequest) -> Optional[str]:
        """Extract client IP address from request."""
        # Check for forwarded IP (common in production with load balancers)
        x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
        if x_forwarded_for:
            # Take the first IP in the chain (client IP)
            ip = x_forwarded_for.split(",")[0].strip()
            if self._is_valid_ip(ip):
                return ip

        # Check for real IP header (some proxy configurations)
        x_real_ip = request.META.get("HTTP_X_REAL_IP")
        if x_real_ip and self._is_valid_ip(x_real_ip):
            return x_real_ip

        # Fall back to remote address
        remote_addr = request.META.get("REMOTE_ADDR")
        if remote_addr and self._is_valid_ip(remote_addr):
            return remote_addr

        return None

    def _is_valid_ip(self, ip: str) -> bool:
        """Validate IP address format."""
        try:
            # Basic validation - check if it looks like an IPv4 address
            parts = ip.split(".")
            if len(parts) != 4:
                return False

            for part in parts:
                if not part.isdigit() or not 0 <= int(part) <= 255:
                    return False

            # Skip localhost and private IPs in production
            if getattr(settings, "SKIP_LOCAL_IPS", not settings.DEBUG):
                if ip.startswith(("127.", "192.168.", "10.")):
                    return False
                # 172.16.0.0/12 is the remaining private range
                if ip.startswith("172.") and 16 <= int(parts[1]) <= 31:
                    return False

            return True

        except (ValueError, IndexError):
            return False


def get_view_stats_for_content(content_obj: ContentObject, hours: int = 24) -> dict:
    """
    Helper function to get view statistics for content.

    Args:
        content_obj: The content object (Park or Ride)
        hours: Time window in hours for stats

    Returns:
        Dictionary with view statistics
    """
    try:
        content_type = ContentType.objects.get_for_model(content_obj)
        cutoff_time = timezone.now() - timedelta(hours=hours)

        total_views = PageView.objects.filter(
            content_type=content_type,
            object_id=content_obj.pk,
            timestamp__gte=cutoff_time,
        ).count()

        unique_views = (
            PageView.objects.filter(
                content_type=content_type,
                object_id=content_obj.pk,
                timestamp__gte=cutoff_time,
            )
            .values("ip_address")
            .distinct()
            .count()
        )

        return {
            "total_views": total_views,
            "unique_views": unique_views,
            "hours": hours,
            "content_type": content_type.model,
            "content_id": content_obj.pk,
        }

    except Exception as e:
        logger.error(f"Error getting view stats: {e}")
        return {"total_views": 0, "unique_views": 0, "hours": hours, "error": str(e)}
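A usage sketch (not part of the diff) for the helper above, using the cedar-point slug seeded by the test_trending command:

from apps.core.middleware import get_view_stats_for_content
from apps.parks.models import Park

park = Park.objects.get(slug="cedar-point")
stats = get_view_stats_for_content(park, hours=24)
print(stats["total_views"], stats["unique_views"])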
292
apps/core/migrations/0001_initial.py
Normal file
@@ -0,0 +1,292 @@
|
||||
# Generated by Django 5.2.6 on 2025-09-21 01:27
|
||||
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
("pghistory", "0007_auto_20250421_0444"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="PageView",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("timestamp", models.DateTimeField(auto_now_add=True, db_index=True)),
|
||||
("ip_address", models.GenericIPAddressField()),
|
||||
("user_agent", models.CharField(blank=True, max_length=512)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="page_views",
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="PageViewEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("timestamp", models.DateTimeField(auto_now_add=True)),
|
||||
("ip_address", models.GenericIPAddressField()),
|
||||
("user_agent", models.CharField(blank=True, max_length=512)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="core.pageview",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SlugHistory",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("object_id", models.CharField(max_length=50)),
|
||||
("old_slug", models.SlugField(max_length=200)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name_plural": "Slug histories",
|
||||
"ordering": ["-created_at"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SlugHistoryEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
("object_id", models.CharField(max_length=50)),
|
||||
("old_slug", models.SlugField(db_index=False, max_length=200)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
to="core.slughistory",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="HistoricalSlug",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("slug", models.SlugField(max_length=255)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"content_type",
|
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        blank=True,
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="historical_slugs",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "indexes": [
                    models.Index(
                        fields=["content_type", "object_id"],
                        name="core_histor_content_b4c470_idx",
                    ),
                    models.Index(fields=["slug"], name="core_histor_slug_8fd7b3_idx"),
                ],
                "unique_together": {("content_type", "slug")},
            },
        ),
        migrations.AddIndex(
            model_name="pageview",
            index=models.Index(
                fields=["timestamp"], name="core_pagevi_timesta_757ebb_idx"
            ),
        ),
        migrations.AddIndex(
            model_name="pageview",
            index=models.Index(
                fields=["content_type", "object_id"],
                name="core_pagevi_content_eda7ad_idx",
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="pageview",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;',
                    hash="1682d124ea3ba215e630c7cfcde929f7444cf247",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_ee1e1",
                    table="core_pageview",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="pageview",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;',
                    hash="4221b2dd6636cae454f8d69c0c1841c40c47e6a6",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_3c505",
                    table="core_pageview",
                    when="AFTER",
                ),
            ),
        ),
        migrations.AddIndex(
            model_name="slughistory",
            index=models.Index(
                fields=["content_type", "object_id"],
                name="core_slughi_content_8bbf56_idx",
            ),
        ),
        migrations.AddIndex(
            model_name="slughistory",
            index=models.Index(
                fields=["old_slug"], name="core_slughi_old_slu_aaef7f_idx"
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="slughistory",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "core_slughistoryevent" ("content_type_id", "created_at", "id", "object_id", "old_slug", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."object_id", NEW."old_slug", _pgh_attach_context(), NOW(), \'insert\', NEW."id"); RETURN NULL;',
                    hash="2a2a05025693c165b88e5eba7fcc23214749a78b",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_3002a",
                    table="core_slughistory",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="slughistory",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "core_slughistoryevent" ("content_type_id", "created_at", "id", "object_id", "old_slug", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."object_id", NEW."old_slug", _pgh_attach_context(), NOW(), \'update\', NEW."id"); RETURN NULL;',
                    hash="3ad197ccb6178668e762720341e45d3fd3216776",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_52030",
                    table="core_slughistory",
                    when="AFTER",
                ),
            ),
        ),
    ]
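The triggers above mean history rows are written by PostgreSQL itself, not by Django signals. A minimal sketch of the observable effect (the tracked object `some_park` is a hypothetical stand-in, not part of this migration):

from apps.core.analytics import PageView

pv = PageView.objects.create(
    content_object=some_park,   # hypothetical object being viewed
    ip_address="203.0.113.7",
)
# The AFTER INSERT trigger has already copied the row into
# core_pageviewevent with pgh_label='insert'; no extra Python code runs.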
0
apps/core/migrations/__init__.py
Normal file
19
apps/core/mixins/__init__.py
Normal file
@@ -0,0 +1,19 @@
from django.views.generic.list import MultipleObjectMixin


class HTMXFilterableMixin(MultipleObjectMixin):
    """
    A mixin that provides filtering capabilities for HTMX requests.
    """

    filter_class = None

    def get_queryset(self):
        queryset = super().get_queryset()
        self.filterset = self.filter_class(self.request.GET, queryset=queryset)
        return self.filterset.qs

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context["filter"] = self.filterset
        return context
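A usage sketch for the mixin (the Park model and ParkFilter, a django-filter FilterSet, are assumptions not shown in this diff):

from django.views.generic import ListView

class ParkListView(HTMXFilterableMixin, ListView):
    model = Park                # hypothetical model
    filter_class = ParkFilter   # hypothetical django-filter FilterSet
    template_name = "parks/park_list.html"
    # get_queryset() runs the filterset over the base queryset;
    # get_context_data() exposes it to the template as "filter".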
119
apps/core/models.py
Normal file
@@ -0,0 +1,119 @@
from django.db import models
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.utils.text import slugify
from apps.core.history import TrackedModel
import pghistory


@pghistory.track()
class SlugHistory(models.Model):
    """
    Model for tracking slug changes across all models that use slugs.
    Uses generic relations to work with any model.
    """

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.CharField(
        max_length=50
    )  # Using CharField to work with our custom IDs
    content_object = GenericForeignKey("content_type", "object_id")

    old_slug = models.SlugField(max_length=200)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["old_slug"]),
        ]
        verbose_name_plural = "Slug histories"
        ordering = ["-created_at"]

    def __str__(self):
        return f"Old slug '{self.old_slug}' for {self.content_object}"


class SluggedModel(TrackedModel):
    """
    Abstract base model that provides slug functionality with history tracking.
    """

    name = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200, unique=True)

    class Meta(TrackedModel.Meta):
        abstract = True

    def save(self, *args, **kwargs):
        # Get the current instance from DB if it exists
        if self.pk:
            try:
                old_instance = self.__class__.objects.get(pk=self.pk)
                # If slug has changed, save the old one to history
                if old_instance.slug != self.slug:
                    SlugHistory.objects.create(
                        content_type=ContentType.objects.get_for_model(self),
                        object_id=getattr(self, self.get_id_field_name()),
                        old_slug=old_instance.slug,
                    )
            except self.__class__.DoesNotExist:
                pass

        # Generate slug if not set
        if not self.slug:
            self.slug = slugify(self.name)

        super().save(*args, **kwargs)

    def get_id_field_name(self):
        """
        Returns the name of the read-only ID field for this model.
        Should be overridden by subclasses.
        """
        raise NotImplementedError(
            "Subclasses of SluggedModel must implement get_id_field_name()"
        )

    @classmethod
    def get_by_slug(cls, slug):
        """
        Get an object by its current or historical slug.
        Returns (object, is_old_slug) tuple.
        """
        try:
            # Try to get by current slug first
            return cls.objects.get(slug=slug), False
        except cls.DoesNotExist:
            # Check pghistory first if available
            try:
                import pghistory.models

                history_entries = pghistory.models.Events.objects.filter(
                    pgh_model=f"{cls._meta.app_label}.{cls._meta.model_name}", slug=slug
                ).order_by("-pgh_created_at")

                if history_entries:
                    history_entry = history_entries.first()
                    if history_entry:
                        return cls.objects.get(id=history_entry.pgh_obj_id), True
            except (ImportError, AttributeError):
                pass

            # Try to find in manual slug history as fallback
            history = (
                SlugHistory.objects.filter(
                    content_type=ContentType.objects.get_for_model(cls),
                    old_slug=slug,
                )
                .order_by("-created_at")
                .first()
            )

            if history:
                return (
                    cls.objects.get(**{cls().get_id_field_name(): history.object_id}),
                    True,
                )

            raise cls.DoesNotExist(f"{cls.__name__} with slug '{slug}' does not exist")
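A minimal sketch of get_by_slug in a detail view (Park here is a hypothetical SluggedModel subclass with get_absolute_url; neither is defined in this file):

from django.shortcuts import redirect, render

def park_detail(request, slug):
    park, is_old_slug = Park.get_by_slug(slug)
    if is_old_slug:
        # A historical slug matched: redirect permanently to the current URL
        return redirect(park.get_absolute_url(), permanent=True)
    return render(request, "parks/park_detail.html", {"park": park})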
5
apps/core/patches/__init__.py
Normal file
@@ -0,0 +1,5 @@
"""
Patches for third-party packages.
"""

# No patches currently applied
322
apps/core/selectors.py
Normal file
@@ -0,0 +1,322 @@
"""
Selectors for core functionality including map services and analytics.
Following Django styleguide pattern for separating data access from business logic.
"""

from typing import Optional, Dict, Any, List
from django.db.models import QuerySet, Q, Count
from django.contrib.gis.geos import Point, Polygon
from django.contrib.gis.measure import Distance
from django.utils import timezone
from datetime import timedelta

from .analytics import PageView
from apps.parks.models import Park
from apps.rides.models import Ride


def unified_locations_for_map(
    *,
    bounds: Optional[Polygon] = None,
    location_types: Optional[List[str]] = None,
    filters: Optional[Dict[str, Any]] = None,
) -> Dict[str, QuerySet]:
    """
    Get unified location data for map display across all location types.

    Args:
        bounds: Geographic boundary polygon
        location_types: List of location types to include ('park', 'ride')
        filters: Additional filter parameters

    Returns:
        Dictionary containing querysets for each location type
    """
    results = {}

    # Default to all location types if none specified
    if not location_types:
        location_types = ["park", "ride"]

    # Parks
    if "park" in location_types:
        park_queryset = (
            Park.objects.select_related("operator")
            .prefetch_related("location")
            .annotate(ride_count_calculated=Count("rides"))
        )

        if bounds:
            park_queryset = park_queryset.filter(location__coordinates__within=bounds)

        if filters:
            if "status" in filters:
                park_queryset = park_queryset.filter(status=filters["status"])
            if "operator" in filters:
                park_queryset = park_queryset.filter(operator=filters["operator"])

        results["parks"] = park_queryset.order_by("name")

    # Rides
    if "ride" in location_types:
        ride_queryset = Ride.objects.select_related(
            "park", "manufacturer"
        ).prefetch_related("park__location", "location")

        if bounds:
            ride_queryset = ride_queryset.filter(
                Q(location__coordinates__within=bounds)
                | Q(park__location__coordinates__within=bounds)
            )

        if filters:
            if "category" in filters:
                ride_queryset = ride_queryset.filter(category=filters["category"])
            if "manufacturer" in filters:
                ride_queryset = ride_queryset.filter(
                    manufacturer=filters["manufacturer"]
                )
            if "park" in filters:
                ride_queryset = ride_queryset.filter(park=filters["park"])

        results["rides"] = ride_queryset.order_by("park__name", "name")

    return results
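A quick usage sketch (the bounding box and "OPERATING" status value are illustrative assumptions):

from django.contrib.gis.geos import Polygon

# Viewport roughly covering Ohio, as (west, south, east, north)
viewport = Polygon.from_bbox((-84.8, 38.4, -80.5, 42.0))
data = unified_locations_for_map(
    bounds=viewport,
    location_types=["park"],
    filters={"status": "OPERATING"},
)
for park in data["parks"]:
    print(park.name, park.ride_count_calculated)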
def locations_near_point(
    *,
    point: Point,
    distance_km: float = 50,
    location_types: Optional[List[str]] = None,
    limit: int = 20,
) -> Dict[str, QuerySet]:
    """
    Get locations near a specific geographic point across all types.

    Args:
        point: Geographic point (longitude, latitude)
        distance_km: Maximum distance in kilometers
        location_types: List of location types to include
        limit: Maximum number of results per type

    Returns:
        Dictionary containing nearby locations by type
    """
    results = {}

    if not location_types:
        location_types = ["park", "ride"]

    # Parks near point
    if "park" in location_types:
        results["parks"] = (
            Park.objects.filter(
                location__coordinates__distance_lte=(
                    point,
                    Distance(km=distance_km),
                )
            )
            .select_related("operator")
            .prefetch_related("location")
            .distance(point)
            .order_by("distance")[:limit]
        )

    # Rides near point
    if "ride" in location_types:
        results["rides"] = (
            Ride.objects.filter(
                Q(
                    location__coordinates__distance_lte=(
                        point,
                        Distance(km=distance_km),
                    )
                )
                | Q(
                    park__location__coordinates__distance_lte=(
                        point,
                        Distance(km=distance_km),
                    )
                )
            )
            .select_related("park", "manufacturer")
            .prefetch_related("park__location")
            .distance(point)
            .order_by("distance")[:limit]
        )

    return results
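Note that the chained .distance(point) calls above rely on the legacy GeoQuerySet API, which was removed in Django 2.0; on current Django the equivalent is a Distance annotation. A hedged sketch under that assumption (field paths taken from the query above):

from django.contrib.gis.db.models.functions import Distance as DistanceFunc

nearby_parks = (
    Park.objects.filter(
        location__coordinates__distance_lte=(point, Distance(km=50))
    )
    .annotate(distance=DistanceFunc("location__coordinates", point))
    .order_by("distance")[:20]
)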
def search_all_locations(*, query: str, limit: int = 20) -> Dict[str, QuerySet]:
    """
    Search across all location types for a query string.

    Args:
        query: Search string
        limit: Maximum results per type

    Returns:
        Dictionary containing search results by type
    """
    results = {}

    # Search parks
    results["parks"] = (
        Park.objects.filter(
            Q(name__icontains=query)
            | Q(description__icontains=query)
            | Q(location__city__icontains=query)
            | Q(location__region__icontains=query)
        )
        .select_related("operator")
        .prefetch_related("location")
        .order_by("name")[:limit]
    )

    # Search rides
    results["rides"] = (
        Ride.objects.filter(
            Q(name__icontains=query)
            | Q(description__icontains=query)
            | Q(park__name__icontains=query)
            | Q(manufacturer__name__icontains=query)
        )
        .select_related("park", "manufacturer")
        .prefetch_related("park__location")
        .order_by("park__name", "name")[:limit]
    )

    return results
def page_views_for_analytics(
    *,
    start_date: Optional[timezone.datetime] = None,
    end_date: Optional[timezone.datetime] = None,
    path_pattern: Optional[str] = None,
) -> QuerySet[PageView]:
    """
    Get page views for analytics with optional filtering.

    Args:
        start_date: Start date for filtering
        end_date: End date for filtering
        path_pattern: URL path pattern to filter by

    Returns:
        QuerySet of page views
    """
    queryset = PageView.objects.all()

    if start_date:
        queryset = queryset.filter(timestamp__gte=start_date)

    if end_date:
        queryset = queryset.filter(timestamp__lte=end_date)

    if path_pattern:
        queryset = queryset.filter(path__icontains=path_pattern)

    return queryset.order_by("-timestamp")


def popular_pages_summary(*, days: int = 30) -> Dict[str, Any]:
    """
    Get summary of most popular pages in the last N days.

    Args:
        days: Number of days to analyze

    Returns:
        Dictionary containing popular pages statistics
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    # Most viewed pages
    popular_pages = (
        PageView.objects.filter(timestamp__gte=cutoff_date)
        .values("path")
        .annotate(view_count=Count("id"))
        .order_by("-view_count")[:10]
    )

    # Total page views
    total_views = PageView.objects.filter(timestamp__gte=cutoff_date).count()

    # Unique visitors (based on IP)
    unique_visitors = (
        PageView.objects.filter(timestamp__gte=cutoff_date)
        .values("ip_address")
        .distinct()
        .count()
    )

    return {
        "popular_pages": list(popular_pages),
        "total_views": total_views,
        "unique_visitors": unique_visitors,
        "period_days": days,
    }


def geographic_distribution_summary() -> Dict[str, Any]:
    """
    Get geographic distribution statistics for all locations.

    Returns:
        Dictionary containing geographic statistics
    """
    # Parks by country
    parks_by_country = (
        Park.objects.filter(location__country__isnull=False)
        .values("location__country")
        .annotate(count=Count("id"))
        .order_by("-count")
    )

    # Rides by country (through park location)
    rides_by_country = (
        Ride.objects.filter(park__location__country__isnull=False)
        .values("park__location__country")
        .annotate(count=Count("id"))
        .order_by("-count")
    )

    return {
        "parks_by_country": list(parks_by_country),
        "rides_by_country": list(rides_by_country),
    }


def system_health_metrics() -> Dict[str, Any]:
    """
    Get system health and activity metrics.

    Returns:
        Dictionary containing system health statistics
    """
    now = timezone.now()
    last_24h = now - timedelta(hours=24)
    last_7d = now - timedelta(days=7)

    return {
        "total_parks": Park.objects.count(),
        "operating_parks": Park.objects.filter(status="OPERATING").count(),
        "total_rides": Ride.objects.count(),
        "page_views_24h": PageView.objects.filter(timestamp__gte=last_24h).count(),
        "page_views_7d": PageView.objects.filter(timestamp__gte=last_7d).count(),
        "data_freshness": {
            "latest_park_update": (
                Park.objects.order_by("-updated_at").first().updated_at
                if Park.objects.exists()
                else None
            ),
            "latest_ride_update": (
                Ride.objects.order_by("-updated_at").first().updated_at
                if Ride.objects.exists()
                else None
            ),
        },
    }
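A minimal sketch of exposing these metrics through a JSON health endpoint (the view and URL wiring are assumptions; only system_health_metrics comes from this file):

from django.http import JsonResponse

def health_status(request):
    # Aggregate counts and freshness timestamps for monitoring dashboards
    return JsonResponse(system_health_metrics())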
27
apps/core/services/__init__.py
Normal file
@@ -0,0 +1,27 @@
"""
Core services for ThrillWiki unified map functionality.
"""

from .map_service import UnifiedMapService
from .clustering_service import ClusteringService
from .map_cache_service import MapCacheService
from .data_structures import (
    UnifiedLocation,
    LocationType,
    GeoBounds,
    MapFilters,
    MapResponse,
    ClusterData,
)

__all__ = [
    "UnifiedMapService",
    "ClusteringService",
    "MapCacheService",
    "UnifiedLocation",
    "LocationType",
    "GeoBounds",
    "MapFilters",
    "MapResponse",
    "ClusterData",
]
365
apps/core/services/clustering_service.py
Normal file
@@ -0,0 +1,365 @@
"""
Clustering service for map locations to improve performance and user experience.
"""

import math
from typing import List, Dict, Any, Optional
from dataclasses import dataclass
from collections import defaultdict

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    LocationType,
)


@dataclass
class ClusterPoint:
    """Internal representation of a point for clustering."""

    location: UnifiedLocation
    x: float  # Projected x coordinate
    y: float  # Projected y coordinate


class ClusteringService:
    """
    Handles location clustering for map display using a simple grid-based approach
    with zoom-level dependent clustering radius.
    """

    # Clustering configuration
    DEFAULT_RADIUS = 40  # pixels
    MIN_POINTS_TO_CLUSTER = 2
    MAX_ZOOM_FOR_CLUSTERING = 15
    MIN_ZOOM_FOR_CLUSTERING = 3

    # Zoom level configurations
    ZOOM_CONFIGS = {
        3: {"radius": 80, "min_points": 5},  # World level
        4: {"radius": 70, "min_points": 4},  # Continent level
        5: {"radius": 60, "min_points": 3},  # Country level
        6: {"radius": 50, "min_points": 3},  # Large region level
        7: {"radius": 45, "min_points": 2},  # Region level
        8: {"radius": 40, "min_points": 2},  # State level
        9: {"radius": 35, "min_points": 2},  # Metro area level
        10: {"radius": 30, "min_points": 2},  # City level
        11: {"radius": 25, "min_points": 2},  # District level
        12: {"radius": 20, "min_points": 2},  # Neighborhood level
        13: {"radius": 15, "min_points": 2},  # Block level
        14: {"radius": 10, "min_points": 2},  # Street level
        15: {"radius": 5, "min_points": 2},  # Building level
    }

    def __init__(self):
        self.cluster_id_counter = 0

    def should_cluster(self, zoom_level: int, point_count: int) -> bool:
        """Determine if clustering should be applied based on zoom level and point count."""
        if zoom_level > self.MAX_ZOOM_FOR_CLUSTERING:
            return False
        if zoom_level < self.MIN_ZOOM_FOR_CLUSTERING:
            return True

        config = self.ZOOM_CONFIGS.get(
            zoom_level, {"min_points": self.MIN_POINTS_TO_CLUSTER}
        )
        return point_count >= config["min_points"]

    def cluster_locations(
        self,
        locations: List[UnifiedLocation],
        zoom_level: int,
        bounds: Optional[GeoBounds] = None,
    ) -> tuple[List[UnifiedLocation], List[ClusterData]]:
        """
        Cluster locations based on zoom level and density.
        Returns (unclustered_locations, clusters).
        """
        if not locations or not self.should_cluster(zoom_level, len(locations)):
            return locations, []

        # Convert locations to projected coordinates for clustering
        cluster_points = self._project_locations(locations, bounds)

        # Get clustering configuration for zoom level
        config = self.ZOOM_CONFIGS.get(
            zoom_level,
            {
                "radius": self.DEFAULT_RADIUS,
                "min_points": self.MIN_POINTS_TO_CLUSTER,
            },
        )

        # Perform clustering
        clustered_groups = self._cluster_points(
            cluster_points, config["radius"], config["min_points"]
        )

        # Separate individual locations from clusters
        unclustered_locations = []
        clusters = []

        for group in clustered_groups:
            if len(group) < config["min_points"]:
                # Add individual locations
                unclustered_locations.extend([cp.location for cp in group])
            else:
                # Create cluster
                cluster = self._create_cluster(group)
                clusters.append(cluster)

        return unclustered_locations, clusters

    def _project_locations(
        self,
        locations: List[UnifiedLocation],
        bounds: Optional[GeoBounds] = None,
    ) -> List[ClusterPoint]:
        """Convert lat/lng coordinates to projected x/y for clustering calculations."""
        cluster_points = []

        # Use bounds or calculate from locations
        if not bounds:
            lats = [loc.latitude for loc in locations]
            lngs = [loc.longitude for loc in locations]
            bounds = GeoBounds(
                north=max(lats),
                south=min(lats),
                east=max(lngs),
                west=min(lngs),
            )

        # Simple equirectangular projection (good enough for clustering)
        center_lat = (bounds.north + bounds.south) / 2
        lat_scale = 111320  # meters per degree latitude
        lng_scale = 111320 * math.cos(
            math.radians(center_lat)
        )  # meters per degree longitude

        for location in locations:
            # Convert to meters relative to bounds center
            x = (location.longitude - (bounds.west + bounds.east) / 2) * lng_scale
            y = (location.latitude - (bounds.north + bounds.south) / 2) * lat_scale

            cluster_points.append(ClusterPoint(location=location, x=x, y=y))

        return cluster_points

    def _cluster_points(
        self, points: List[ClusterPoint], radius_pixels: int, min_points: int
    ) -> List[List[ClusterPoint]]:
        """
        Cluster points using a simple distance-based approach.
        Radius is in pixels, converted to meters based on zoom level.
        """
        # Convert pixel radius to meters (rough approximation)
        # At zoom level 10, 1 pixel ≈ 150 meters
        radius_meters = radius_pixels * 150

        clustered = [False] * len(points)
        clusters = []

        for i, point in enumerate(points):
            if clustered[i]:
                continue

            # Find all points within radius
            cluster_group = [point]
            clustered[i] = True

            for j, other_point in enumerate(points):
                if i == j or clustered[j]:
                    continue

                distance = self._calculate_distance(point, other_point)
                if distance <= radius_meters:
                    cluster_group.append(other_point)
                    clustered[j] = True

            clusters.append(cluster_group)

        return clusters

    def _calculate_distance(self, point1: ClusterPoint, point2: ClusterPoint) -> float:
        """Calculate Euclidean distance between two projected points in meters."""
        dx = point1.x - point2.x
        dy = point1.y - point2.y
        return math.sqrt(dx * dx + dy * dy)

    def _create_cluster(self, cluster_points: List[ClusterPoint]) -> ClusterData:
        """Create a ClusterData object from a group of points."""
        locations = [cp.location for cp in cluster_points]

        # Calculate cluster center (average position)
        avg_lat = sum(loc.latitude for loc in locations) / len(locations)
        avg_lng = sum(loc.longitude for loc in locations) / len(locations)

        # Calculate cluster bounds
        lats = [loc.latitude for loc in locations]
        lngs = [loc.longitude for loc in locations]
        cluster_bounds = GeoBounds(
            north=max(lats), south=min(lats), east=max(lngs), west=min(lngs)
        )

        # Collect location types in cluster
        types = set(loc.type for loc in locations)

        # Select representative location (highest weight)
        representative = self._select_representative_location(locations)

        # Generate cluster ID
        self.cluster_id_counter += 1
        cluster_id = f"cluster_{self.cluster_id_counter}"

        return ClusterData(
            id=cluster_id,
            coordinates=[avg_lat, avg_lng],
            count=len(locations),
            types=types,
            bounds=cluster_bounds,
            representative_location=representative,
        )

    def _select_representative_location(
        self, locations: List[UnifiedLocation]
    ) -> Optional[UnifiedLocation]:
        """Select the most representative location for a cluster."""
        if not locations:
            return None

        # Prioritize by: 1) Parks over rides/companies, 2) Higher weight, 3)
        # Better rating
        parks = [loc for loc in locations if loc.type == LocationType.PARK]
        if parks:
            return max(
                parks,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                ),
            )

        rides = [loc for loc in locations if loc.type == LocationType.RIDE]
        if rides:
            return max(
                rides,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                ),
            )

        companies = [loc for loc in locations if loc.type == LocationType.COMPANY]
        if companies:
            return max(companies, key=lambda x: x.cluster_weight)

        # Fall back to highest weight location
        return max(locations, key=lambda x: x.cluster_weight)

    def get_cluster_breakdown(self, clusters: List[ClusterData]) -> Dict[str, Any]:
        """Get statistics about clustering results."""
        if not clusters:
            return {
                "total_clusters": 0,
                "total_points_clustered": 0,
                "average_cluster_size": 0,
                "type_distribution": {},
                "category_distribution": {},
            }

        total_points = sum(cluster.count for cluster in clusters)
        type_counts = defaultdict(int)
        category_counts = defaultdict(int)

        for cluster in clusters:
            for location_type in cluster.types:
                type_counts[location_type.value] += cluster.count

            if cluster.representative_location:
                category_counts[cluster.representative_location.cluster_category] += 1

        return {
            "total_clusters": len(clusters),
            "total_points_clustered": total_points,
            "average_cluster_size": total_points / len(clusters),
            "largest_cluster_size": max(cluster.count for cluster in clusters),
            "smallest_cluster_size": min(cluster.count for cluster in clusters),
            "type_distribution": dict(type_counts),
            "category_distribution": dict(category_counts),
        }

    def expand_cluster(
        self, cluster: ClusterData, zoom_level: int
    ) -> List[UnifiedLocation]:
        """
        Expand a cluster to show individual locations (for drill-down functionality).
        This would typically require re-querying the database with the cluster bounds.
        """
        # This is a placeholder - in practice, this would re-query the database
        # with the cluster bounds and higher detail level
        return []


class SmartClusteringRules:
    """
    Advanced clustering rules that consider location types and importance.
    """

    @staticmethod
    def should_cluster_together(loc1: UnifiedLocation, loc2: UnifiedLocation) -> bool:
        """Determine if two locations should be clustered together."""

        # Same park rides should cluster together more readily
        if loc1.type == LocationType.RIDE and loc2.type == LocationType.RIDE:
            park1_id = loc1.metadata.get("park_id")
            park2_id = loc2.metadata.get("park_id")
            if park1_id and park2_id and park1_id == park2_id:
                return True

        # Major parks should resist clustering unless very close
        if (
            loc1.cluster_category == "major_park"
            or loc2.cluster_category == "major_park"
        ):
            return False

        # Similar types cluster more readily
        if loc1.type == loc2.type:
            return True

        # Different types can cluster but with higher threshold
        return False

    @staticmethod
    def calculate_cluster_priority(
        locations: List[UnifiedLocation],
    ) -> UnifiedLocation:
        """Select the representative location for a cluster based on priority rules."""
        # Prioritize by: 1) Parks over rides, 2) Higher weight, 3) Better
        # rating
        parks = [loc for loc in locations if loc.type == LocationType.PARK]
        if parks:
            return max(
                parks,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                    x.metadata.get("ride_count", 0) or 0,
                ),
            )

        rides = [loc for loc in locations if loc.type == LocationType.RIDE]
        if rides:
            return max(
                rides,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                ),
            )

        # Fall back to highest weight
        return max(locations, key=lambda x: x.cluster_weight)
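A short usage sketch for the service (the two UnifiedLocation fixtures are made up; coordinates are [lat, lng]):

svc = ClusteringService()
locs = [
    UnifiedLocation(id="park_1", type=LocationType.PARK, name="Park A",
                    coordinates=[41.48, -82.68]),
    UnifiedLocation(id="park_2", type=LocationType.PARK, name="Park B",
                    coordinates=[41.49, -82.69]),
]
singles, clusters = svc.cluster_locations(locs, zoom_level=7)
# At zoom 7 (min_points=2, radius 45px ≈ 6.75 km) the two nearby parks,
# roughly 1.4 km apart, merge into one cluster; above zoom 15 clustering
# is skipped and both come back in `singles`.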
254
apps/core/services/data_structures.py
Normal file
@@ -0,0 +1,254 @@
"""
Data structures for the unified map service.
"""

from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, List, Optional, Set, Any
# from django.contrib.gis.geos import Polygon  # Disabled temporarily for setup


class LocationType(Enum):
    """Types of locations supported by the map service."""

    PARK = "park"
    RIDE = "ride"
    COMPANY = "company"
    GENERIC = "generic"


@dataclass
class GeoBounds:
    """Geographic boundary box for spatial queries."""

    north: float
    south: float
    east: float
    west: float

    def __post_init__(self):
        """Validate bounds after initialization."""
        if self.north < self.south:
            raise ValueError("North bound must be greater than south bound")
        if self.east < self.west:
            raise ValueError("East bound must be greater than west bound")
        if not (-90 <= self.south <= 90 and -90 <= self.north <= 90):
            raise ValueError("Latitude bounds must be between -90 and 90")
        if not (-180 <= self.west <= 180 and -180 <= self.east <= 180):
            raise ValueError("Longitude bounds must be between -180 and 180")

    def to_polygon(self):  # Polygon type disabled for setup
        """Convert bounds to PostGIS Polygon for database queries."""
        # return Polygon.from_bbox((self.west, self.south, self.east, self.north))  # Disabled for setup
        return None  # Temporarily disabled

    def expand(self, factor: float = 1.1) -> "GeoBounds":
        """Expand bounds by factor for buffer queries."""
        center_lat = (self.north + self.south) / 2
        center_lng = (self.east + self.west) / 2

        lat_range = (self.north - self.south) * factor / 2
        lng_range = (self.east - self.west) * factor / 2

        return GeoBounds(
            north=min(90, center_lat + lat_range),
            south=max(-90, center_lat - lat_range),
            east=min(180, center_lng + lng_range),
            west=max(-180, center_lng - lng_range),
        )

    def contains_point(self, lat: float, lng: float) -> bool:
        """Check if a point is within these bounds."""
        return self.south <= lat <= self.north and self.west <= lng <= self.east

    def to_dict(self) -> Dict[str, float]:
        """Convert to dictionary for JSON serialization."""
        return {
            "north": self.north,
            "south": self.south,
            "east": self.east,
            "west": self.west,
        }


@dataclass
class MapFilters:
    """Filtering options for map queries."""

    location_types: Optional[Set[LocationType]] = None
    park_status: Optional[Set[str]] = None  # OPERATING, CLOSED_TEMP, etc.
    ride_types: Optional[Set[str]] = None
    company_roles: Optional[Set[str]] = None  # OPERATOR, MANUFACTURER, etc.
    search_query: Optional[str] = None
    min_rating: Optional[float] = None
    has_coordinates: bool = True
    country: Optional[str] = None
    state: Optional[str] = None
    city: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for caching and serialization."""
        return {
            "location_types": (
                [t.value for t in self.location_types] if self.location_types else None
            ),
            "park_status": (list(self.park_status) if self.park_status else None),
            "ride_types": list(self.ride_types) if self.ride_types else None,
            "company_roles": (list(self.company_roles) if self.company_roles else None),
            "search_query": self.search_query,
            "min_rating": self.min_rating,
            "has_coordinates": self.has_coordinates,
            "country": self.country,
            "state": self.state,
            "city": self.city,
        }


@dataclass
class UnifiedLocation:
    """Unified location interface for all location types."""

    id: str  # Composite: f"{type}_{id}"
    type: LocationType
    name: str
    coordinates: List[float]  # [lat, lng]
    address: Optional[str] = None
    metadata: Dict[str, Any] = field(default_factory=dict)
    type_data: Dict[str, Any] = field(default_factory=dict)
    cluster_weight: int = 1
    cluster_category: str = "default"

    @property
    def latitude(self) -> float:
        """Get latitude from coordinates."""
        return self.coordinates[0]

    @property
    def longitude(self) -> float:
        """Get longitude from coordinates."""
        return self.coordinates[1]

    def to_geojson_feature(self) -> Dict[str, Any]:
        """Convert to GeoJSON feature for mapping libraries."""
        return {
            "type": "Feature",
            "properties": {
                "id": self.id,
                "type": self.type.value,
                "name": self.name,
                "address": self.address,
                "metadata": self.metadata,
                "type_data": self.type_data,
                "cluster_weight": self.cluster_weight,
                "cluster_category": self.cluster_category,
            },
            "geometry": {
                "type": "Point",
                # GeoJSON uses lng, lat
                "coordinates": [self.longitude, self.latitude],
            },
        }

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            "id": self.id,
            "type": self.type.value,
            "name": self.name,
            "coordinates": list(self.coordinates),
            "address": self.address,
            "metadata": self.metadata,
            "type_data": self.type_data,
            "cluster_weight": self.cluster_weight,
            "cluster_category": self.cluster_category,
        }


@dataclass
class ClusterData:
    """Represents a cluster of locations for map display."""

    id: str
    coordinates: List[float]  # [lat, lng]
    count: int
    types: Set[LocationType]
    bounds: GeoBounds
    representative_location: Optional[UnifiedLocation] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            "id": self.id,
            "coordinates": list(self.coordinates),
            "count": self.count,
            "types": [t.value for t in self.types],
            "bounds": self.bounds.to_dict(),
            "representative": (
                self.representative_location.to_dict()
                if self.representative_location
                else None
            ),
        }


@dataclass
class MapResponse:
    """Response structure for map API calls."""

    locations: List[UnifiedLocation] = field(default_factory=list)
    clusters: List[ClusterData] = field(default_factory=list)
    bounds: Optional[GeoBounds] = None
    total_count: int = 0
    filtered_count: int = 0
    zoom_level: Optional[int] = None
    clustered: bool = False
    cache_hit: bool = False
    query_time_ms: Optional[int] = None
    filters_applied: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            "status": "success",
            "data": {
                "locations": [loc.to_dict() for loc in self.locations],
                "clusters": [cluster.to_dict() for cluster in self.clusters],
                "bounds": self.bounds.to_dict() if self.bounds else None,
                "total_count": self.total_count,
                "filtered_count": self.filtered_count,
                "zoom_level": self.zoom_level,
                "clustered": self.clustered,
            },
            "meta": {
                "cache_hit": self.cache_hit,
                "query_time_ms": self.query_time_ms,
                "filters_applied": self.filters_applied,
                "pagination": {
                    "has_more": False,  # TODO: Implement pagination
                    "total_pages": 1,
                },
            },
        }


@dataclass
class QueryPerformanceMetrics:
    """Performance metrics for query optimization."""

    query_time_ms: int
    db_query_count: int
    cache_hit: bool
    result_count: int
    bounds_used: bool
    clustering_used: bool

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for logging."""
        return {
            "query_time_ms": self.query_time_ms,
            "db_query_count": self.db_query_count,
            "cache_hit": self.cache_hit,
            "result_count": self.result_count,
            "bounds_used": self.bounds_used,
            "clustering_used": self.clustering_used,
        }
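A small sketch of the GeoBounds and UnifiedLocation workflow (the numbers are arbitrary):

bounds = GeoBounds(north=42.0, south=38.4, east=-80.5, west=-84.8)
buffered = bounds.expand(1.2)   # 20% buffer for edge-of-viewport queries
assert bounds.contains_point(41.48, -82.68)

loc = UnifiedLocation(
    id="park_1", type=LocationType.PARK, name="Park A",
    coordinates=[41.48, -82.68],
)
feature = loc.to_geojson_feature()
# Note the axis swap: coordinates stores [lat, lng], GeoJSON emits [lng, lat]
assert feature["geometry"]["coordinates"] == [-82.68, 41.48]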
320
apps/core/services/enhanced_cache_service.py
Normal file
@@ -0,0 +1,320 @@
"""
Enhanced caching service with multiple cache backends and strategies.
"""

from typing import Optional, Any, Dict, Callable
from django.core.cache import caches
import hashlib
import json
import logging
import time
from functools import wraps

logger = logging.getLogger(__name__)


# Define GeoBounds for type hinting
class GeoBounds:
    def __init__(self, min_lat: float, min_lng: float, max_lat: float, max_lng: float):
        self.min_lat = min_lat
        self.min_lng = min_lng
        self.max_lat = max_lat
        self.max_lng = max_lng


class EnhancedCacheService:
    """Comprehensive caching service with multiple cache backends"""

    def __init__(self):
        self.default_cache = caches["default"]
        try:
            self.api_cache = caches["api"]
        except Exception:
            # Fallback to default cache if api cache not configured
            self.api_cache = self.default_cache

    # L1: Query-level caching
    def cache_queryset(
        self,
        cache_key: str,
        queryset_func: Callable,
        timeout: int = 3600,
        **kwargs,
    ) -> Any:
        """Cache expensive querysets"""
        cached_result = self.default_cache.get(cache_key)
        if cached_result is None:
            start_time = time.time()
            result = queryset_func(**kwargs)
            duration = time.time() - start_time

            # Log cache miss and function execution time
            logger.info(
                f"Cache miss for key '{cache_key}', executed in {duration:.3f}s",
                extra={"cache_key": cache_key, "execution_time": duration},
            )

            self.default_cache.set(cache_key, result, timeout)
            return result

        logger.debug(f"Cache hit for key '{cache_key}'")
        return cached_result

    # L2: API response caching
    def cache_api_response(
        self,
        view_name: str,
        params: Dict,
        response_data: Any,
        timeout: int = 1800,
    ):
        """Cache API responses based on view and parameters"""
        cache_key = self._generate_api_cache_key(view_name, params)
        self.api_cache.set(cache_key, response_data, timeout)
        logger.debug(f"Cached API response for view '{view_name}'")

    def get_cached_api_response(self, view_name: str, params: Dict) -> Optional[Any]:
        """Retrieve cached API response"""
        cache_key = self._generate_api_cache_key(view_name, params)
        result = self.api_cache.get(cache_key)

        if result:
            logger.debug(f"Cache hit for API view '{view_name}'")
        else:
            logger.debug(f"Cache miss for API view '{view_name}'")

        return result

    # L3: Geographic caching (building on existing MapCacheService)
    def cache_geographic_data(
        self,
        bounds: "GeoBounds",
        data: Any,
        zoom_level: int,
        timeout: int = 1800,
    ):
        """Cache geographic data with spatial keys"""
        # Generate spatial cache key based on bounds and zoom level
        cache_key = (
            f"geo:{bounds.min_lat}:{bounds.min_lng}:"
            f"{bounds.max_lat}:{bounds.max_lng}:z{zoom_level}"
        )
        self.default_cache.set(cache_key, data, timeout)
        logger.debug(f"Cached geographic data for bounds {bounds}")

    def get_cached_geographic_data(
        self, bounds: "GeoBounds", zoom_level: int
    ) -> Optional[Any]:
        """Retrieve cached geographic data"""
        cache_key = (
            f"geo:{bounds.min_lat}:{bounds.min_lng}:"
            f"{bounds.max_lat}:{bounds.max_lng}:z{zoom_level}"
        )
        return self.default_cache.get(cache_key)

    # Cache invalidation utilities
    def invalidate_pattern(self, pattern: str):
        """Invalidate cache keys matching a pattern (if backend supports it)"""
        try:
            # For Redis cache backends
            if hasattr(self.default_cache, "delete_pattern"):
                deleted_count = self.default_cache.delete_pattern(pattern)
                logger.info(
                    f"Invalidated {deleted_count} cache keys matching pattern '{pattern}'"
                )
                return deleted_count
            else:
                logger.warning(
                    f"Cache backend does not support pattern deletion for pattern '{pattern}'"
                )
        except Exception as e:
            logger.error(f"Error invalidating cache pattern '{pattern}': {e}")

    def invalidate_model_cache(
        self, model_name: str, instance_id: Optional[int] = None
    ):
        """Invalidate cache keys related to a specific model"""
        if instance_id:
            pattern = f"*{model_name}:{instance_id}*"
        else:
            pattern = f"*{model_name}*"

        self.invalidate_pattern(pattern)

    # Cache warming utilities
    def warm_cache(
        self,
        cache_key: str,
        warm_func: Callable,
        timeout: int = 3600,
        **kwargs,
    ):
        """Proactively warm cache with data"""
        try:
            data = warm_func(**kwargs)
            self.default_cache.set(cache_key, data, timeout)
            logger.info(f"Warmed cache for key '{cache_key}'")
        except Exception as e:
            logger.error(f"Error warming cache for key '{cache_key}': {e}")

    def _generate_api_cache_key(self, view_name: str, params: Dict) -> str:
        """Generate consistent cache keys for API responses"""
        # Sort params to ensure consistent key generation
        params_str = json.dumps(params, sort_keys=True, default=str)
        params_hash = hashlib.md5(params_str.encode()).hexdigest()
        return f"api:{view_name}:{params_hash}"


# Cache decorators
def cache_api_response(timeout=1800, vary_on=None, key_prefix=""):
    """Decorator for caching API responses"""

    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            if request.method != "GET":
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix or view_func.__name__,
                (
                    str(request.user.id)
                    if request.user.is_authenticated
                    else "anonymous"
                ),
                str(hash(frozenset(request.GET.items()))),
            ]

            if vary_on:
                for field in vary_on:
                    cache_key_parts.append(str(getattr(request, field, "")))

            cache_key = ":".join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = cache_service.api_cache.get(cache_key)
            if cached_response:
                logger.debug(f"Cache hit for API view {view_func.__name__}")
                return cached_response

            # Execute view and cache result
            response = view_func(self, request, *args, **kwargs)
            if hasattr(response, "status_code") and response.status_code == 200:
                cache_service.api_cache.set(cache_key, response, timeout)
                logger.debug(f"Cached API response for view {view_func.__name__}")

            return response

        return wrapper

    return decorator


def cache_queryset_result(cache_key_template: str, timeout: int = 3600):
    """Decorator for caching queryset results"""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key from template and arguments
            cache_key = cache_key_template.format(*args, **kwargs)

            cache_service = EnhancedCacheService()
            return cache_service.cache_queryset(
                cache_key, func, timeout, *args, **kwargs
            )

        return wrapper

    return decorator


# Context manager for cache warming
class CacheWarmer:
    """Context manager for batch cache warming operations"""

    def __init__(self):
        self.cache_service = EnhancedCacheService()
        self.warm_operations = []

    def add(
        self,
        cache_key: str,
        warm_func: Callable,
        timeout: int = 3600,
        **kwargs,
    ):
        """Add a cache warming operation to the batch"""
        self.warm_operations.append(
            {
                "cache_key": cache_key,
                "warm_func": warm_func,
                "timeout": timeout,
                "kwargs": kwargs,
            }
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Execute all cache warming operations"""
        logger.info(f"Warming {len(self.warm_operations)} cache entries")

        for operation in self.warm_operations:
            try:
                # Unpack the stored kwargs so warm_func receives them directly
                # (passing kwargs=... as a literal keyword would misroute them)
                self.cache_service.warm_cache(
                    operation["cache_key"],
                    operation["warm_func"],
                    operation["timeout"],
                    **operation["kwargs"],
                )
            except Exception as e:
                logger.error(f"Error warming cache for {operation['cache_key']}: {e}")


# Cache statistics and monitoring
class CacheMonitor:
    """Monitor cache performance and statistics"""

    def __init__(self):
        self.cache_service = EnhancedCacheService()

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics if available"""
        stats = {}

        try:
            # Try to get Redis cache stats
            cache_backend = self.cache_service.default_cache.__class__.__name__

            if "Redis" in cache_backend:
                # Attempt to get Redis client and stats
                redis_client = self.cache_service.default_cache._cache.get_client()
                info = redis_client.info()
                stats["redis"] = {
                    "used_memory": info.get("used_memory_human"),
                    "connected_clients": info.get("connected_clients"),
                    "total_commands_processed": info.get("total_commands_processed"),
                    "keyspace_hits": info.get("keyspace_hits"),
                    "keyspace_misses": info.get("keyspace_misses"),
                }

                # Calculate hit rate
                hits = info.get("keyspace_hits", 0)
                misses = info.get("keyspace_misses", 0)
                if hits + misses > 0:
                    stats["redis"]["hit_rate"] = hits / (hits + misses) * 100
            else:
                # For local memory cache or other backends
                stats["cache_backend"] = cache_backend
                stats["message"] = f"Cache statistics not available for {cache_backend}"

        except Exception:
            # Don't log as error since this is expected for non-Redis backends
            cache_backend = self.cache_service.default_cache.__class__.__name__
            stats["cache_backend"] = cache_backend
            stats["message"] = f"Cache statistics not available for {cache_backend}"

        return stats

    def log_cache_performance(self):
        """Log cache performance metrics"""
        stats = self.get_cache_stats()
        if stats:
            logger.info("Cache performance statistics", extra=stats)
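A usage sketch for the batch warmer (popular_pages_summary is the selector defined earlier in this diff; the cache key string is an assumption):

with CacheWarmer() as warmer:
    warmer.add(
        "analytics:popular_pages:30d",
        popular_pages_summary,   # keyword-only selector: called as days=30
        timeout=3600,
        days=30,
    )
# On __exit__, each registered entry is computed once and stored in the
# default cache, so the first real request hits warm data.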
415
apps/core/services/entity_fuzzy_matching.py
Normal file
@@ -0,0 +1,415 @@
"""
Entity Fuzzy Matching Service for ThrillWiki

Provides intelligent entity matching when exact lookups fail, with authentication
prompts for suggesting new entity creation.

Features:
- Levenshtein distance for typo correction
- Phonetic matching using Soundex algorithm
- Partial name matching
- Priority-based scoring (parks > rides > companies)
- Authentication state-aware suggestions
"""

import re
from difflib import SequenceMatcher
from typing import List, Dict, Any, Optional
from dataclasses import dataclass
from enum import Enum

from django.db.models import Q

from apps.parks.models import Park
from apps.rides.models import Ride
from apps.parks.models import Company


class EntityType(Enum):
    """Supported entity types for fuzzy matching."""

    PARK = "park"
    RIDE = "ride"
    COMPANY = "company"


@dataclass
class FuzzyMatchResult:
    """Result of a fuzzy matching operation."""

    entity_type: EntityType
    entity: Any  # The actual model instance
    name: str
    slug: str
    score: float  # 0.0 to 1.0, higher is better match
    match_reason: str  # Description of why this was matched
    confidence: str  # 'high', 'medium', 'low'
    url: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for API responses."""
        return {
            "entity_type": self.entity_type.value,
            "name": self.name,
            "slug": self.slug,
            "score": round(self.score, 3),
            "match_reason": self.match_reason,
            "confidence": self.confidence,
            "url": self.url,
            "entity_id": getattr(self.entity, "id", None),
        }


@dataclass
class EntitySuggestion:
    """Suggestion for creating a new entity when no matches found."""

    suggested_name: str
    entity_type: EntityType
    requires_authentication: bool
    login_prompt: str
    signup_prompt: str
    creation_hint: str


class FuzzyMatchingAlgorithms:
    """Collection of fuzzy matching algorithms."""

    @staticmethod
    def levenshtein_distance(s1: str, s2: str) -> int:
        """Calculate Levenshtein distance between two strings."""
        if len(s1) < len(s2):
            return FuzzyMatchingAlgorithms.levenshtein_distance(s2, s1)

        if len(s2) == 0:
            return len(s1)

        previous_row = list(range(len(s2) + 1))
        for i, c1 in enumerate(s1):
            current_row = [i + 1]
            for j, c2 in enumerate(s2):
                insertions = previous_row[j + 1] + 1
                deletions = current_row[j] + 1
                substitutions = previous_row[j] + (c1 != c2)
                current_row.append(min(insertions, deletions, substitutions))
            previous_row = current_row

        return previous_row[-1]

    @staticmethod
    def similarity_ratio(s1: str, s2: str) -> float:
        """Calculate similarity ratio (0.0 to 1.0) using SequenceMatcher."""
        return SequenceMatcher(None, s1.lower(), s2.lower()).ratio()

    @staticmethod
    def soundex(name: str) -> str:
        """Generate Soundex code for phonetic matching."""
        name = re.sub(r"[^A-Za-z]", "", name.upper())
        if not name:
            return "0000"

        # Soundex algorithm
        soundex_map = {
            "BFPV": "1",
            "CGJKQSXZ": "2",
            "DT": "3",
            "L": "4",
            "MN": "5",
            "R": "6",
        }

        first_letter = name[0]
        name = name[1:]

        # Replace letters with numbers
        for letters, number in soundex_map.items():
            name = re.sub(f"[{letters}]", number, name)

        # Remove consecutive duplicates
        name = re.sub(r"(\d)\1+", r"\1", name)

        # Remove zeros
        name = re.sub("0", "", name)

        # Pad or truncate to 4 characters
        soundex_code = (first_letter + name + "000")[:4]
        return soundex_code

    @staticmethod
    def partial_match_score(query: str, target: str) -> float:
        """Calculate partial matching score for substring matches."""
        query_lower = query.lower()
        target_lower = target.lower()

        # Exact match
        if query_lower == target_lower:
            return 1.0

        # Starts with
        if target_lower.startswith(query_lower):
            return 0.8 + (len(query) / len(target)) * 0.15

        # Contains
        if query_lower in target_lower:
            return 0.6 + (len(query) / len(target)) * 0.2

        # Words match
        query_words = set(query_lower.split())
        target_words = set(target_lower.split())
        if query_words & target_words:
            intersection = len(query_words & target_words)
            union = len(query_words | target_words)
            return 0.4 + (intersection / union) * 0.3

        return 0.0
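# Editorial sketch (not part of the original file): tracing soundex() above.
# Note this variant keeps vowels, so its codes differ from textbook Soundex;
# two spellings still collide when only doubled consonants differ:
#
#   FuzzyMatchingAlgorithms.soundex("Millennium Force")  -> "MI4E"
#   FuzzyMatchingAlgorithms.soundex("Milennium Force")   -> "MI4E"
#
# Equal codes let the scoring pass below treat the misspelling as a phonetic
# match with a floor score of 0.7.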
class EntityFuzzyMatcher:
    """Main fuzzy matching service for entities."""

    # Matching thresholds
    HIGH_CONFIDENCE_THRESHOLD = 0.8
    MEDIUM_CONFIDENCE_THRESHOLD = 0.6
    LOW_CONFIDENCE_THRESHOLD = 0.4

    # Maximum results to consider
    MAX_CANDIDATES = 50
    MAX_RESULTS = 5

    def __init__(self):
        self.algorithms = FuzzyMatchingAlgorithms()

    def find_entity(
        self, query: str, entity_types: Optional[List[EntityType]] = None, user=None
    ) -> tuple[List[FuzzyMatchResult], Optional[EntitySuggestion]]:
        """
        Find entities matching the query with fuzzy matching.

        Args:
            query: Search query string
            entity_types: Limit search to specific entity types
            user: Current user for authentication context

        Returns:
            Tuple of (matches, suggestion_for_new_entity)
        """
        if not query or len(query.strip()) < 2:
            return [], None

        query = query.strip()
        entity_types = entity_types or [
            EntityType.PARK,
            EntityType.RIDE,
            EntityType.COMPANY,
        ]

        # Collect all potential matches
        candidates = []
        for entity_type in entity_types:
            candidates.extend(self._get_candidates(query, entity_type))

        # Score and rank candidates
        matches = self._score_and_rank_candidates(query, candidates)

        # Generate suggestion if no good matches found
        suggestion = None
        if not matches or matches[0].score < self.LOW_CONFIDENCE_THRESHOLD:
            suggestion = self._generate_entity_suggestion(query, entity_types, user)

        return matches[: self.MAX_RESULTS], suggestion

    def _get_candidates(
        self, query: str, entity_type: EntityType
    ) -> List[Dict[str, Any]]:
        """Get potential matching candidates for an entity type."""
        candidates = []

        if entity_type == EntityType.PARK:
            parks = Park.objects.filter(
                Q(name__icontains=query)
                | Q(slug__icontains=query.lower().replace(" ", "-"))
                | Q(former_names__icontains=query)
            )[: self.MAX_CANDIDATES]

            for park in parks:
                candidates.append(
                    {
                        "entity_type": EntityType.PARK,
                        "entity": park,
                        "name": park.name,
                        "slug": park.slug,
                        "search_names": [park.name],
                        "url": getattr(park, "get_absolute_url", lambda: None)(),
                        "priority_boost": 0.1,  # Parks get priority
                    }
                )

        elif entity_type == EntityType.RIDE:
            rides = Ride.objects.select_related("park").filter(
                Q(name__icontains=query)
                | Q(slug__icontains=query.lower().replace(" ", "-"))
                | Q(former_names__icontains=query)
                | Q(park__name__icontains=query)
            )[: self.MAX_CANDIDATES]

            for ride in rides:
                candidates.append(
                    {
                        "entity_type": EntityType.RIDE,
                        "entity": ride,
                        "name": ride.name,
                        "slug": ride.slug,
                        "search_names": [ride.name, f"{ride.park.name} {ride.name}"],
                        "url": getattr(ride, "get_absolute_url", lambda: None)(),
                        "priority_boost": 0.05,  # Rides get some priority
                    }
                )

        elif entity_type == EntityType.COMPANY:
            companies = Company.objects.filter(
                Q(name__icontains=query)
                | Q(slug__icontains=query.lower().replace(" ", "-"))
            )[: self.MAX_CANDIDATES]

            for company in companies:
                candidates.append(
                    {
                        "entity_type": EntityType.COMPANY,
                        "entity": company,
                        "name": company.name,
                        "slug": company.slug,
                        "search_names": [company.name],
                        "url": getattr(company, "get_absolute_url", lambda: None)(),
                        "priority_boost": 0.0,  # Companies get no priority boost
                    }
                )

        return candidates

    def _score_and_rank_candidates(
        self, query: str, candidates: List[Dict[str, Any]]
    ) -> List[FuzzyMatchResult]:
        """Score and rank all candidates using multiple algorithms."""
        scored_matches = []

        for candidate in candidates:
            best_score = 0.0
            best_reason = ""

            # Test against all search names for this candidate
            for search_name in candidate["search_names"]:
                # Algorithm 1: Sequence similarity
                similarity_score = self.algorithms.similarity_ratio(query, search_name)
                if similarity_score > best_score:
                    best_score = similarity_score
                    best_reason = f"Text similarity with '{search_name}'"

                # Algorithm 2: Partial matching
                partial_score = self.algorithms.partial_match_score(query, search_name)
                if partial_score > best_score:
                    best_score = partial_score
                    best_reason = f"Partial match with '{search_name}'"

                # Algorithm 3: Levenshtein distance
                if len(query) > 3 and len(search_name) > 3:
                    max_len = max(len(query), len(search_name))
                    distance = self.algorithms.levenshtein_distance(query, search_name)
                    lev_score = 1.0 - (distance / max_len)
                    if lev_score > best_score:
                        best_score = lev_score
                        best_reason = f"Similar spelling to '{search_name}'"

                # Algorithm 4: Soundex phonetic matching
                if len(query) > 2 and len(search_name) > 2:
                    query_soundex = self.algorithms.soundex(query)
                    name_soundex = self.algorithms.soundex(search_name)
                    if query_soundex == name_soundex and best_score < 0.7:
                        best_score = max(best_score, 0.7)
                        best_reason = f"Sounds like '{search_name}'"

            # Apply priority boost
            best_score += candidate["priority_boost"]
            best_score = min(1.0, best_score)  # Cap at 1.0

            # Determine confidence level
            if best_score >= self.HIGH_CONFIDENCE_THRESHOLD:
                confidence = "high"
            elif best_score >= self.MEDIUM_CONFIDENCE_THRESHOLD:
                confidence = "medium"
            else:
                confidence = "low"

            # Only include if above minimum threshold
            if best_score >= self.LOW_CONFIDENCE_THRESHOLD:
                match = FuzzyMatchResult(
                    entity_type=candidate["entity_type"],
                    entity=candidate["entity"],
                    name=candidate["name"],
                    slug=candidate["slug"],
                    score=best_score,
                    match_reason=best_reason,
                    confidence=confidence,
                    url=candidate["url"],
                )
                scored_matches.append(match)

        # Sort by score (highest first) and return
        return sorted(scored_matches, key=lambda x: x.score, reverse=True)

    def _generate_entity_suggestion(
        self, query: str, entity_types: List[EntityType], user
    ) -> EntitySuggestion:
        """Generate suggestion for creating new entity when no matches found."""

        # Determine most likely entity type based on query characteristics
        suggested_type = EntityType.PARK  # Default to park

        # Simple heuristics for entity type detection
        query_lower = query.lower()
        if any(
            word in query_lower
            for word in ["roller coaster", "ride", "coaster", "attraction"]
        ):
            suggested_type = EntityType.RIDE
        elif any(
            word in query_lower for word in ["inc", "corp", "company", "manufacturer"]
        ):
            suggested_type = EntityType.COMPANY
        elif EntityType.PARK in entity_types:
            suggested_type = EntityType.PARK
        elif entity_types:
            suggested_type = entity_types[0]

        # Clean up the suggested name
        suggested_name = " ".join(word.capitalize() for word in query.split())

        # Check if user is authenticated
        is_authenticated = (
            user and hasattr(user, "is_authenticated") and user.is_authenticated
        )

        # Generate appropriate prompts
        entity_name = suggested_type.value
        login_prompt = (
            f"Log in to suggest adding '{suggested_name}' as a new {entity_name}"
        )
        signup_prompt = (
            f"Sign up to contribute and add '{suggested_name}' to ThrillWiki"
        )
        creation_hint = (
            f"Help expand ThrillWiki by adding information about '{suggested_name}'"
        )

        return EntitySuggestion(
            suggested_name=suggested_name,
            entity_type=suggested_type,
            requires_authentication=not is_authenticated,
            login_prompt=login_prompt,
            signup_prompt=signup_prompt,
            creation_hint=creation_hint,
        )


# Global service instance
entity_fuzzy_matcher = EntityFuzzyMatcher()

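# Illustrative sketch (not part of the module): a typical call site for the
# global matcher; the view wiring shown is an assumption.
#
#   matches, suggestion = entity_fuzzy_matcher.find_entity(
#       "cedr point", entity_types=[EntityType.PARK], user=request.user
#   )
#   if matches and matches[0].confidence == "high":
#       return redirect(matches[0].url)
#   if suggestion:
#       context["prompt"] = suggestion.login_prompt
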
409
apps/core/services/location_adapters.py
Normal file
@@ -0,0 +1,409 @@
"""
Location adapters for converting between domain-specific models and UnifiedLocation.
"""

from typing import List, Optional

from django.db.models import QuerySet
from django.urls import NoReverseMatch, reverse

from .data_structures import (
    UnifiedLocation,
    LocationType,
    GeoBounds,
    MapFilters,
)
from apps.parks.models import ParkLocation, CompanyHeadquarters
from apps.rides.models import RideLocation


class BaseLocationAdapter:
    """Base adapter class for location conversions."""

    def to_unified_location(self, location_obj) -> Optional[UnifiedLocation]:
        """Convert model instance to UnifiedLocation."""
        raise NotImplementedError

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for this location type."""
        raise NotImplementedError

    def bulk_convert(self, queryset: QuerySet) -> List[UnifiedLocation]:
        """Convert multiple location objects efficiently."""
        unified_locations = []
        for obj in queryset:
            unified_loc = self.to_unified_location(obj)
            if unified_loc:
                unified_locations.append(unified_loc)
        return unified_locations


class ParkLocationAdapter(BaseLocationAdapter):
    """Converts Park/ParkLocation to UnifiedLocation."""

    def to_unified_location(
        self, location_obj: ParkLocation
    ) -> Optional[UnifiedLocation]:
        """Convert ParkLocation to UnifiedLocation."""
        if (
            not location_obj.point
            or location_obj.latitude is None
            or location_obj.longitude is None
        ):
            return None

        park = location_obj.park

        return UnifiedLocation(
            id=f"park_{park.id}",
            type=LocationType.PARK,
            name=park.name,
            coordinates=[float(location_obj.latitude), float(location_obj.longitude)],
            address=location_obj.formatted_address,
            metadata={
                "status": getattr(park, "status", "UNKNOWN"),
                "rating": (
                    float(park.average_rating)
                    if hasattr(park, "average_rating") and park.average_rating
                    else None
                ),
                "ride_count": getattr(park, "ride_count", 0),
                "coaster_count": getattr(park, "coaster_count", 0),
                "operator": (
                    park.operator.name
                    if hasattr(park, "operator") and park.operator
                    else None
                ),
                "city": location_obj.city,
                "state": location_obj.state,
                "country": location_obj.country,
            },
            type_data={
                "slug": park.slug,
                "opening_date": (
                    park.opening_date.isoformat()
                    if hasattr(park, "opening_date") and park.opening_date
                    else None
                ),
                "website": getattr(park, "website", ""),
                "operating_season": getattr(park, "operating_season", ""),
                "highway_exit": location_obj.highway_exit,
                "parking_notes": location_obj.parking_notes,
                "best_arrival_time": (
                    location_obj.best_arrival_time.strftime("%H:%M")
                    if location_obj.best_arrival_time
                    else None
                ),
                "seasonal_notes": location_obj.seasonal_notes,
                "url": self._get_park_url(park),
            },
            cluster_weight=self._calculate_park_weight(park),
            cluster_category=self._get_park_category(park),
        )

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for park locations."""
        queryset = ParkLocation.objects.select_related("park", "park__operator").filter(
            point__isnull=False
        )

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(point__within=bounds.to_polygon())

        # Park-specific filters
        if filters:
            if filters.park_status:
                queryset = queryset.filter(park__status__in=filters.park_status)
            if filters.search_query:
                queryset = queryset.filter(park__name__icontains=filters.search_query)
            if filters.country:
                queryset = queryset.filter(country=filters.country)
            if filters.state:
                queryset = queryset.filter(state=filters.state)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by("park__name")

    def _calculate_park_weight(self, park) -> int:
        """Calculate clustering weight based on park importance."""
        weight = 1
        if hasattr(park, "ride_count") and park.ride_count and park.ride_count > 20:
            weight += 2
        if (
            hasattr(park, "coaster_count")
            and park.coaster_count
            and park.coaster_count > 5
        ):
            weight += 1
        if (
            hasattr(park, "average_rating")
            and park.average_rating
            and park.average_rating > 4.0
        ):
            weight += 1
        return min(weight, 5)  # Cap at 5

    def _get_park_category(self, park) -> str:
        """Determine park category for clustering."""
        coaster_count = getattr(park, "coaster_count", 0) or 0
        ride_count = getattr(park, "ride_count", 0) or 0

        if coaster_count >= 10:
            return "major_park"
        elif ride_count >= 15:
            return "theme_park"
        else:
            return "small_park"

    def _get_park_url(self, park) -> str:
        """Get URL for park detail page."""
        try:
            return reverse("parks:detail", kwargs={"slug": park.slug})
        except NoReverseMatch:
            # Fall back to the conventional path if the URL name is not registered
            return f"/parks/{park.slug}/"


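# Illustrative sketch (not part of the module): converting a single row by hand.
# The query shown is an assumption; any ParkLocation with a point works.
#
#   adapter = ParkLocationAdapter()
#   row = ParkLocation.objects.select_related("park").filter(point__isnull=False).first()
#   unified = adapter.to_unified_location(row) if row else None
#   if unified:
#       print(unified.id, unified.cluster_category)  # e.g. "park_42 major_park"

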
class RideLocationAdapter(BaseLocationAdapter):
    """Converts Ride/RideLocation to UnifiedLocation."""

    def to_unified_location(
        self, location_obj: RideLocation
    ) -> Optional[UnifiedLocation]:
        """Convert RideLocation to UnifiedLocation."""
        if (
            not location_obj.point
            or location_obj.latitude is None
            or location_obj.longitude is None
        ):
            return None

        ride = location_obj.ride

        return UnifiedLocation(
            id=f"ride_{ride.id}",
            type=LocationType.RIDE,
            name=ride.name,
            coordinates=[float(location_obj.latitude), float(location_obj.longitude)],
            address=(
                f"{location_obj.park_area}, {ride.park.name}"
                if location_obj.park_area
                else ride.park.name
            ),
            metadata={
                "park_id": ride.park.id,
                "park_name": ride.park.name,
                "park_area": location_obj.park_area,
                "ride_type": getattr(ride, "ride_type", "Unknown"),
                "status": getattr(ride, "status", "UNKNOWN"),
                "rating": (
                    float(ride.average_rating)
                    if hasattr(ride, "average_rating") and ride.average_rating
                    else None
                ),
                "manufacturer": (
                    getattr(ride, "manufacturer", {}).get("name")
                    if hasattr(ride, "manufacturer")
                    else None
                ),
            },
            type_data={
                "slug": ride.slug,
                "opening_date": (
                    ride.opening_date.isoformat()
                    if hasattr(ride, "opening_date") and ride.opening_date
                    else None
                ),
                "height_requirement": getattr(ride, "height_requirement", ""),
                "duration_minutes": getattr(ride, "duration_minutes", None),
                "max_speed_mph": getattr(ride, "max_speed_mph", None),
                "entrance_notes": location_obj.entrance_notes,
                "accessibility_notes": location_obj.accessibility_notes,
                "url": self._get_ride_url(ride),
            },
            cluster_weight=self._calculate_ride_weight(ride),
            cluster_category=self._get_ride_category(ride),
        )

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for ride locations."""
        queryset = RideLocation.objects.select_related(
            "ride", "ride__park", "ride__park__operator"
        ).filter(point__isnull=False)

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(point__within=bounds.to_polygon())

        # Ride-specific filters
        if filters:
            if filters.ride_types:
                queryset = queryset.filter(ride__ride_type__in=filters.ride_types)
            if filters.search_query:
                queryset = queryset.filter(ride__name__icontains=filters.search_query)

        return queryset.order_by("ride__name")

    def _calculate_ride_weight(self, ride) -> int:
        """Calculate clustering weight based on ride importance."""
        weight = 1
        ride_type = getattr(ride, "ride_type", "").lower()
        if "coaster" in ride_type or "roller" in ride_type:
            weight += 1
        if (
            hasattr(ride, "average_rating")
            and ride.average_rating
            and ride.average_rating > 4.0
        ):
            weight += 1
        return min(weight, 3)  # Cap at 3 for rides

    def _get_ride_category(self, ride) -> str:
        """Determine ride category for clustering."""
        ride_type = getattr(ride, "ride_type", "").lower()
        if "coaster" in ride_type or "roller" in ride_type:
            return "coaster"
        elif "water" in ride_type or "splash" in ride_type:
            return "water_ride"
        else:
            return "other_ride"

    def _get_ride_url(self, ride) -> str:
        """Get URL for ride detail page."""
        try:
            return reverse("rides:detail", kwargs={"slug": ride.slug})
        except NoReverseMatch:
            # Fall back to the conventional path if the URL name is not registered
            return f"/rides/{ride.slug}/"


class CompanyLocationAdapter(BaseLocationAdapter):
    """Converts Company/CompanyHeadquarters to UnifiedLocation."""

    def to_unified_location(
        self, location_obj: CompanyHeadquarters
    ) -> Optional[UnifiedLocation]:
        """Convert CompanyHeadquarters to UnifiedLocation."""
        # Note: CompanyHeadquarters doesn't have coordinates, so we need to geocode
        # For now, we'll skip companies without coordinates
        # TODO: Implement geocoding service integration
        return None

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for company locations."""
        queryset = CompanyHeadquarters.objects.select_related("company")

        # Company-specific filters
        if filters:
            if filters.company_roles:
                queryset = queryset.filter(
                    company__roles__overlap=filters.company_roles
                )
            if filters.search_query:
                queryset = queryset.filter(
                    company__name__icontains=filters.search_query
                )
            if filters.country:
                queryset = queryset.filter(country=filters.country)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by("company__name")


# GenericLocationAdapter removed - generic location app is being deprecated
# All location functionality moved to domain-specific models (ParkLocation, RideLocation, etc.)


class LocationAbstractionLayer:
    """
    Abstraction layer handling different location model types.
    Implements the adapter pattern to provide unified access to all location types.
    """

    def __init__(self):
        self.adapters = {
            LocationType.PARK: ParkLocationAdapter(),
            LocationType.RIDE: RideLocationAdapter(),
            LocationType.COMPANY: CompanyLocationAdapter(),
            # LocationType.GENERIC: Removed - generic location app deprecated
        }

    def get_all_locations(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> List[UnifiedLocation]:
        """Get locations from all sources within bounds."""
        all_locations = []

        # Determine which location types to include
        location_types = (
            filters.location_types
            if filters and filters.location_types
            else set(LocationType)
        )

        for location_type in location_types:
            # Skip types without a registered adapter (e.g. the deprecated GENERIC)
            adapter = self.adapters.get(location_type)
            if adapter is None:
                continue
            queryset = adapter.get_queryset(bounds, filters)
            locations = adapter.bulk_convert(queryset)
            all_locations.extend(locations)

        return all_locations

    def get_locations_by_type(
        self,
        location_type: LocationType,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> List[UnifiedLocation]:
        """Get locations of a specific type."""
        adapter = self.adapters[location_type]
        queryset = adapter.get_queryset(bounds, filters)
        return adapter.bulk_convert(queryset)

    def get_location_by_id(
        self, location_type: LocationType, location_id: int
    ) -> Optional[UnifiedLocation]:
        """Get single location with full details."""
        adapter = self.adapters[location_type]

        try:
            if location_type == LocationType.PARK:
                obj = ParkLocation.objects.select_related("park", "park__operator").get(
                    park_id=location_id
                )
            elif location_type == LocationType.RIDE:
                obj = RideLocation.objects.select_related("ride", "ride__park").get(
                    ride_id=location_id
                )
            elif location_type == LocationType.COMPANY:
                obj = CompanyHeadquarters.objects.select_related("company").get(
                    company_id=location_id
                )
            # LocationType.GENERIC removed - generic location app deprecated
            else:
                return None

            return adapter.to_unified_location(obj)
        except Exception:
            return None

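# Illustrative sketch (not part of the module): one call site for the layer.
# The bounding-box values are arbitrary.
#
#   layer = LocationAbstractionLayer()
#   bounds = GeoBounds(north=42.0, south=41.0, east=-82.0, west=-83.0)
#   for loc in layer.get_locations_by_type(LocationType.PARK, bounds=bounds):
#       print(loc.name, loc.coordinates)

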
462
apps/core/services/location_search.py
Normal file
@@ -0,0 +1,462 @@
"""
Location-aware search service for ThrillWiki.

Integrates PostGIS location data with existing search functionality
to provide proximity-based search, location filtering, and geographic
search capabilities.
"""

from django.contrib.gis.db.models.functions import Distance  # distance annotations
from django.contrib.gis.measure import D  # radius measures, e.g. D(km=50)
from django.db.models import Q
from typing import Optional, List, Dict, Any, Set
from dataclasses import dataclass

from apps.parks.models import Park, Company, ParkLocation
from apps.rides.models import Ride


@dataclass
class LocationSearchFilters:
    """Filters for location-aware search queries."""

    # Text search
    search_query: Optional[str] = None

    # Location-based filters
    location_point: Optional[object] = None  # GEOS Point for proximity queries
    radius_km: Optional[float] = None
    location_types: Optional[Set[str]] = None  # 'park', 'ride', 'company'

    # Geographic filters
    country: Optional[str] = None
    state: Optional[str] = None
    city: Optional[str] = None

    # Content-specific filters
    park_status: Optional[List[str]] = None
    ride_types: Optional[List[str]] = None
    company_roles: Optional[List[str]] = None

    # Result options
    include_distance: bool = True
    max_results: int = 100


@dataclass
class LocationSearchResult:
    """Single search result with location data."""

    # Core data
    content_type: str  # 'park', 'ride', 'company'
    object_id: int
    name: str
    description: Optional[str] = None
    url: Optional[str] = None

    # Location data
    latitude: Optional[float] = None
    longitude: Optional[float] = None
    address: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    country: Optional[str] = None

    # Distance data (if proximity search)
    distance_km: Optional[float] = None

    # Additional metadata
    status: Optional[str] = None
    tags: Optional[List[str]] = None
    rating: Optional[float] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            "content_type": self.content_type,
            "object_id": self.object_id,
            "name": self.name,
            "description": self.description,
            "url": self.url,
            "location": {
                "latitude": self.latitude,
                "longitude": self.longitude,
                "address": self.address,
                "city": self.city,
                "state": self.state,
                "country": self.country,
            },
            "distance_km": self.distance_km,
            "status": self.status,
            "tags": self.tags or [],
            "rating": self.rating,
        }


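# Illustrative sketch (not part of the module): a 50 km proximity query around
# a point; note that GEOS Point takes (longitude, latitude).
#
#   from django.contrib.gis.geos import Point
#
#   filters = LocationSearchFilters(
#       search_query="coaster",
#       location_point=Point(-82.68, 41.48, srid=4326),
#       radius_km=50,
#       location_types={"park"},
#   )
#   results = location_search_service.search(filters)
#   payload = [r.to_dict() for r in results]

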
class LocationSearchService:
    """Service for performing location-aware searches across ThrillWiki content."""

    def search(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        """
        Perform a comprehensive location-aware search.

        Args:
            filters: Search filters and options

        Returns:
            List of search results with location data
        """
        results = []

        # Search each content type based on filters
        if not filters.location_types or "park" in filters.location_types:
            results.extend(self._search_parks(filters))

        if not filters.location_types or "ride" in filters.location_types:
            results.extend(self._search_rides(filters))

        if not filters.location_types or "company" in filters.location_types:
            results.extend(self._search_companies(filters))

        # Sort by distance if proximity search, otherwise alphabetically by name
        if filters.location_point and filters.include_distance:
            results.sort(key=lambda x: x.distance_km or float("inf"))
        else:
            results.sort(key=lambda x: x.name.lower())

        # Apply max results limit
        return results[: filters.max_results]

    def _search_parks(
        self, filters: LocationSearchFilters
    ) -> List[LocationSearchResult]:
        """Search parks with location data."""
        queryset = Park.objects.select_related("location", "operator").all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, "location__point")

        # Apply text search
        if filters.search_query:
            query = (
                Q(name__icontains=filters.search_query)
                | Q(description__icontains=filters.search_query)
                | Q(location__city__icontains=filters.search_query)
                | Q(location__state__icontains=filters.search_query)
                | Q(location__country__icontains=filters.search_query)
            )
            queryset = queryset.filter(query)

        # Apply park-specific filters
        if filters.park_status:
            queryset = queryset.filter(status__in=filters.park_status)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance("location__point", filters.location_point)
            ).order_by("distance")

        # Convert to search results
        results = []
        for park in queryset:
            result = LocationSearchResult(
                content_type="park",
                object_id=park.id,
                name=park.name,
                description=park.description,
                url=(
                    park.get_absolute_url()
                    if hasattr(park, "get_absolute_url")
                    else None
                ),
                status=park.get_status_display(),
                rating=(float(park.average_rating) if park.average_rating else None),
                tags=["park", park.status.lower()],
            )

            # Add location data
            if hasattr(park, "location") and park.location:
                location = park.location
                result.latitude = location.latitude
                result.longitude = location.longitude
                result.address = location.formatted_address
                result.city = location.city
                result.state = location.state
                result.country = location.country

            # Add distance if proximity search
            if (
                filters.location_point
                and filters.include_distance
                and hasattr(park, "distance")
            ):
                result.distance_km = float(park.distance.km)

            results.append(result)

        return results

    def _search_rides(
        self, filters: LocationSearchFilters
    ) -> List[LocationSearchResult]:
        """Search rides with location data."""
        queryset = Ride.objects.select_related("park", "location").all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, "location__point")

        # Apply text search
        if filters.search_query:
            query = (
                Q(name__icontains=filters.search_query)
                | Q(description__icontains=filters.search_query)
                | Q(park__name__icontains=filters.search_query)
                | Q(location__park_area__icontains=filters.search_query)
            )
            queryset = queryset.filter(query)

        # Apply ride-specific filters
        if filters.ride_types:
            queryset = queryset.filter(ride_type__in=filters.ride_types)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance("location__point", filters.location_point)
            ).order_by("distance")

        # Convert to search results
        results = []
        for ride in queryset:
            result = LocationSearchResult(
                content_type="ride",
                object_id=ride.id,
                name=ride.name,
                description=ride.description,
                url=(
                    ride.get_absolute_url()
                    if hasattr(ride, "get_absolute_url")
                    else None
                ),
                status=ride.status,
                tags=[
                    "ride",
                    ride.ride_type.lower() if ride.ride_type else "attraction",
                ],
            )

            # Add location data from ride location or park location
            location = None
            if hasattr(ride, "location") and ride.location:
                location = ride.location
                result.latitude = location.latitude
                result.longitude = location.longitude
                result.address = (
                    f"{ride.park.name} - {location.park_area}"
                    if location.park_area
                    else ride.park.name
                )

                # Add distance if proximity search
                if (
                    filters.location_point
                    and filters.include_distance
                    and hasattr(ride, "distance")
                ):
                    result.distance_km = float(ride.distance.km)

            # Fall back to park location if no specific ride location
            elif ride.park and hasattr(ride.park, "location") and ride.park.location:
                park_location = ride.park.location
                result.latitude = park_location.latitude
                result.longitude = park_location.longitude
                result.address = park_location.formatted_address
                result.city = park_location.city
                result.state = park_location.state
                result.country = park_location.country

            results.append(result)

        return results

    def _search_companies(
        self, filters: LocationSearchFilters
    ) -> List[LocationSearchResult]:
        """Search companies with headquarters location data."""
        queryset = Company.objects.select_related("headquarters").all()

        # Apply location filters
        queryset = self._apply_location_filters(
            queryset, filters, "headquarters__point"
        )

        # Apply text search
        if filters.search_query:
            query = (
                Q(name__icontains=filters.search_query)
                | Q(description__icontains=filters.search_query)
                | Q(headquarters__city__icontains=filters.search_query)
                | Q(headquarters__state_province__icontains=filters.search_query)
                | Q(headquarters__country__icontains=filters.search_query)
            )
            queryset = queryset.filter(query)

        # Apply company-specific filters
        if filters.company_roles:
            queryset = queryset.filter(roles__overlap=filters.company_roles)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance("headquarters__point", filters.location_point)
            ).order_by("distance")

        # Convert to search results
        results = []
        for company in queryset:
            result = LocationSearchResult(
                content_type="company",
                object_id=company.id,
                name=company.name,
                description=company.description,
                url=(
                    company.get_absolute_url()
                    if hasattr(company, "get_absolute_url")
                    else None
                ),
                tags=["company"] + (company.roles or []),
            )

            # Add location data
            if hasattr(company, "headquarters") and company.headquarters:
                hq = company.headquarters
                result.latitude = hq.latitude
                result.longitude = hq.longitude
                result.address = hq.formatted_address
                result.city = hq.city
                result.state = hq.state_province
                result.country = hq.country

            # Add distance if proximity search
            if (
                filters.location_point
                and filters.include_distance
                and hasattr(company, "distance")
            ):
                result.distance_km = float(company.distance.km)

            results.append(result)

        return results

    def _apply_location_filters(
        self, queryset, filters: LocationSearchFilters, point_field: str
    ):
        """Apply common location filters to a queryset."""

        # Proximity filter
        if filters.location_point and filters.radius_km:
            radius = D(km=filters.radius_km)
            queryset = queryset.filter(
                **{
                    f"{point_field}__distance_lte": (
                        filters.location_point,
                        radius,
                    )
                }
            )

        # Geographic filters - adjust field names based on model
        if filters.country:
            if "headquarters" in point_field:
                queryset = queryset.filter(
                    headquarters__country__icontains=filters.country
                )
            else:
                location_field = point_field.split("__")[0]
                queryset = queryset.filter(
                    **{f"{location_field}__country__icontains": filters.country}
                )

        if filters.state:
            if "headquarters" in point_field:
                queryset = queryset.filter(
                    headquarters__state_province__icontains=filters.state
                )
            else:
                location_field = point_field.split("__")[0]
                queryset = queryset.filter(
                    **{f"{location_field}__state__icontains": filters.state}
                )

        if filters.city:
            location_field = point_field.split("__")[0]
            queryset = queryset.filter(
                **{f"{location_field}__city__icontains": filters.city}
            )

        return queryset

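    # Illustrative note (not part of the module): the dynamic-kwargs idiom above
    # expands at runtime into an ordinary GeoDjango lookup, e.g. for parks:
    #
    #   queryset.filter(location__point__distance_lte=(point, D(km=50)))
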
    def suggest_locations(self, query: str, limit: int = 10) -> List[Dict[str, Any]]:
        """
        Get location suggestions for autocomplete.

        Args:
            query: Search query string
            limit: Maximum number of suggestions

        Returns:
            List of location suggestions
        """
        suggestions = []

        if len(query) < 2:
            return suggestions

        # Get park location suggestions
        park_locations = ParkLocation.objects.filter(
            Q(park__name__icontains=query)
            | Q(city__icontains=query)
            | Q(state__icontains=query)
        ).select_related("park")[: limit // 3]

        for location in park_locations:
            suggestions.append(
                {
                    "type": "park",
                    "name": location.park.name,
                    "address": location.formatted_address,
                    "coordinates": location.coordinates,
                    "url": (
                        location.park.get_absolute_url()
                        if hasattr(location.park, "get_absolute_url")
                        else None
                    ),
                }
            )

        # Get city suggestions
        cities = (
            ParkLocation.objects.filter(city__icontains=query)
            .values("city", "state", "country")
            .distinct()[: limit // 3]
        )

        for city_data in cities:
            suggestions.append(
                {
                    "type": "city",
                    "name": f"{city_data['city']}, {city_data['state']}",
                    "address": f"{city_data['city']}, {city_data['state']}, {city_data['country']}",
                    "coordinates": None,
                }
            )

        return suggestions[:limit]


# Global instance
location_search_service = LocationSearchService()

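# Illustrative sketch (not part of the module): autocomplete endpoint wiring.
# The result names shown are hypothetical.
#
#   suggestions = location_search_service.suggest_locations("ceda", limit=10)
#   # -> [{"type": "park", "name": "Cedar Point", ...},
#   #     {"type": "city", "name": "Cedar Rapids, Iowa", ...}]

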
434
apps/core/services/map_cache_service.py
Normal file
@@ -0,0 +1,434 @@
"""
Caching service for map data to improve performance and reduce database load.
"""

import hashlib
import json
import time
from typing import Dict, List, Optional, Any

from django.core.cache import cache
from django.utils import timezone

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    MapFilters,
    MapResponse,
    QueryPerformanceMetrics,
)


class MapCacheService:
    """
    Handles caching of map data with geographic partitioning and intelligent invalidation.
    """

    # Cache configuration
    DEFAULT_TTL = 3600  # 1 hour
    CLUSTER_TTL = 7200  # 2 hours (clusters change less frequently)
    LOCATION_DETAIL_TTL = 1800  # 30 minutes
    BOUNDS_CACHE_TTL = 1800  # 30 minutes

    # Cache key prefixes
    CACHE_PREFIX = "thrillwiki_map"
    LOCATIONS_PREFIX = f"{CACHE_PREFIX}:locations"
    CLUSTERS_PREFIX = f"{CACHE_PREFIX}:clusters"
    BOUNDS_PREFIX = f"{CACHE_PREFIX}:bounds"
    DETAIL_PREFIX = f"{CACHE_PREFIX}:detail"
    STATS_PREFIX = f"{CACHE_PREFIX}:stats"

    # Geographic partitioning settings
    GEOHASH_PRECISION = 6  # ~1.2km precision for cache partitioning

    def __init__(self):
        self.cache_stats = {
            "hits": 0,
            "misses": 0,
            "invalidations": 0,
            "geohash_partitions": 0,
        }

    def get_locations_cache_key(
        self,
        bounds: Optional[GeoBounds],
        filters: Optional[MapFilters],
        zoom_level: Optional[int] = None,
    ) -> str:
        """Generate cache key for location queries."""
        key_parts = [self.LOCATIONS_PREFIX]

        if bounds:
            # Use geohash for spatial locality
            geohash = self._bounds_to_geohash(bounds)
            key_parts.append(f"geo:{geohash}")

        if filters:
            # Create deterministic hash of filters
            filter_hash = self._hash_filters(filters)
            key_parts.append(f"filters:{filter_hash}")

        if zoom_level is not None:
            key_parts.append(f"zoom:{zoom_level}")

        return ":".join(key_parts)

    def get_clusters_cache_key(
        self,
        bounds: Optional[GeoBounds],
        filters: Optional[MapFilters],
        zoom_level: int,
    ) -> str:
        """Generate cache key for cluster queries."""
        key_parts = [self.CLUSTERS_PREFIX, f"zoom:{zoom_level}"]

        if bounds:
            geohash = self._bounds_to_geohash(bounds)
            key_parts.append(f"geo:{geohash}")

        if filters:
            filter_hash = self._hash_filters(filters)
            key_parts.append(f"filters:{filter_hash}")

        return ":".join(key_parts)

    def get_location_detail_cache_key(
        self, location_type: str, location_id: int
    ) -> str:
        """Generate cache key for individual location details."""
        return f"{self.DETAIL_PREFIX}:{location_type}:{location_id}"

    def cache_locations(
        self,
        cache_key: str,
        locations: List[UnifiedLocation],
        ttl: Optional[int] = None,
    ) -> None:
        """Cache location data."""
        try:
            # Convert locations to serializable format
            cache_data = {
                "locations": [loc.to_dict() for loc in locations],
                "cached_at": timezone.now().isoformat(),
                "count": len(locations),
            }

            cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL)
        except Exception as e:
            # Log error but don't fail the request
            print(f"Cache write error for key {cache_key}: {e}")

    def cache_clusters(
        self,
        cache_key: str,
        clusters: List[ClusterData],
        ttl: Optional[int] = None,
    ) -> None:
        """Cache cluster data."""
        try:
            cache_data = {
                "clusters": [cluster.to_dict() for cluster in clusters],
                "cached_at": timezone.now().isoformat(),
                "count": len(clusters),
            }

            cache.set(cache_key, cache_data, ttl or self.CLUSTER_TTL)
        except Exception as e:
            print(f"Cache write error for clusters {cache_key}: {e}")

    def cache_map_response(
        self, cache_key: str, response: MapResponse, ttl: Optional[int] = None
    ) -> None:
        """Cache complete map response."""
        try:
            cache_data = response.to_dict()
            cache_data["cached_at"] = timezone.now().isoformat()

            cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL)
        except Exception as e:
            print(f"Cache write error for response {cache_key}: {e}")

    def get_cached_locations(self, cache_key: str) -> Optional[List[UnifiedLocation]]:
        """Retrieve cached location data."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats["misses"] += 1
                return None

            self.cache_stats["hits"] += 1

            # Convert back to UnifiedLocation objects
            locations = []
            for loc_data in cache_data["locations"]:
                # Reconstruct UnifiedLocation from dictionary
                locations.append(self._dict_to_unified_location(loc_data))

            return locations

        except Exception as e:
            print(f"Cache read error for key {cache_key}: {e}")
            self.cache_stats["misses"] += 1
            return None

    def get_cached_clusters(self, cache_key: str) -> Optional[List[ClusterData]]:
        """Retrieve cached cluster data."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats["misses"] += 1
                return None

            self.cache_stats["hits"] += 1

            # Convert back to ClusterData objects
            clusters = []
            for cluster_data in cache_data["clusters"]:
                clusters.append(self._dict_to_cluster_data(cluster_data))

            return clusters

        except Exception as e:
            print(f"Cache read error for clusters {cache_key}: {e}")
            self.cache_stats["misses"] += 1
            return None

    def get_cached_map_response(self, cache_key: str) -> Optional[MapResponse]:
        """Retrieve cached map response."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats["misses"] += 1
                return None

            self.cache_stats["hits"] += 1

            # Convert back to MapResponse object
            return self._dict_to_map_response(cache_data["data"])

        except Exception as e:
            print(f"Cache read error for response {cache_key}: {e}")
            self.cache_stats["misses"] += 1
            return None

    def invalidate_location_cache(
        self, location_type: str, location_id: Optional[int] = None
    ) -> None:
        """Invalidate cache for specific location or all locations of a type."""
        try:
            if location_id:
                # Invalidate specific location detail
                detail_key = self.get_location_detail_cache_key(
                    location_type, location_id
                )
                cache.delete(detail_key)

            # Invalidate related location and cluster caches. Note that Django's
            # delete_many takes literal keys, so these wildcard strings are only
            # placeholders; a production system would use cache tagging or a
            # backend that supports pattern deletes.
            cache.delete_many(
                [f"{self.LOCATIONS_PREFIX}:*", f"{self.CLUSTERS_PREFIX}:*"]
            )

            self.cache_stats["invalidations"] += 1

        except Exception as e:
            print(f"Cache invalidation error: {e}")

    def invalidate_bounds_cache(self, bounds: GeoBounds) -> None:
        """Invalidate cache for specific geographic bounds."""
        try:
            geohash = self._bounds_to_geohash(bounds)
            pattern = f"{self.LOCATIONS_PREFIX}:geo:{geohash}*"

            # As above: delete_many only removes literal keys, so this pattern
            # is a placeholder until cache tagging or Redis SCAN is wired in.
            cache.delete_many([pattern])

            self.cache_stats["invalidations"] += 1

        except Exception as e:
            print(f"Bounds cache invalidation error: {e}")

    def clear_all_map_cache(self) -> None:
        """Clear all map-related cache data."""
        try:
            cache.delete_many(
                [
                    f"{self.LOCATIONS_PREFIX}:*",
                    f"{self.CLUSTERS_PREFIX}:*",
                    f"{self.BOUNDS_PREFIX}:*",
                    f"{self.DETAIL_PREFIX}:*",
                ]
            )

            self.cache_stats["invalidations"] += 1

        except Exception as e:
            print(f"Cache clear error: {e}")

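    # Illustrative sketch (not part of the module): a post_save signal handler
    # on Park would typically trigger invalidation like this.
    #
    #   map_cache.invalidate_location_cache("park", instance.pk)
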
    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache performance statistics."""
        total_requests = self.cache_stats["hits"] + self.cache_stats["misses"]
        hit_rate = (
            (self.cache_stats["hits"] / total_requests * 100)
            if total_requests > 0
            else 0
        )

        return {
            "hits": self.cache_stats["hits"],
            "misses": self.cache_stats["misses"],
            "hit_rate_percent": round(hit_rate, 2),
            "invalidations": self.cache_stats["invalidations"],
            "geohash_partitions": self.cache_stats["geohash_partitions"],
        }

    def record_performance_metrics(self, metrics: QueryPerformanceMetrics) -> None:
        """Record query performance metrics for analysis."""
        try:
            # 5-minute buckets
            stats_key = f"{self.STATS_PREFIX}:performance:{int(time.time() // 300)}"

            current_stats = cache.get(
                stats_key,
                {
                    "query_count": 0,
                    "total_time_ms": 0,
                    "cache_hits": 0,
                    "db_queries": 0,
                },
            )

            current_stats["query_count"] += 1
            current_stats["total_time_ms"] += metrics.query_time_ms
            current_stats["cache_hits"] += 1 if metrics.cache_hit else 0
            current_stats["db_queries"] += metrics.db_query_count

            cache.set(stats_key, current_stats, 3600)  # Keep for 1 hour

        except Exception as e:
            print(f"Performance metrics recording error: {e}")

    def _bounds_to_geohash(self, bounds: GeoBounds) -> str:
        """Convert geographic bounds to geohash for cache partitioning."""
        # Use center point of bounds for geohash
        center_lat = (bounds.north + bounds.south) / 2
        center_lng = (bounds.east + bounds.west) / 2

        # Simple geohash implementation (in production, use a library)
        return self._encode_geohash(center_lat, center_lng, self.GEOHASH_PRECISION)

    def _encode_geohash(self, lat: float, lng: float, precision: int) -> str:
        """Simple geohash encoding implementation."""
        # This is a simplified implementation
        # In production, use the `geohash` library
        lat_range = [-90.0, 90.0]
        lng_range = [-180.0, 180.0]

        geohash = ""
        bits = 0
        bit_count = 0
        even_bit = True

        while len(geohash) < precision:
            if even_bit:
                # longitude
                mid = (lng_range[0] + lng_range[1]) / 2
                if lng >= mid:
                    bits = (bits << 1) + 1
                    lng_range[0] = mid
                else:
                    bits = bits << 1
                    lng_range[1] = mid
            else:
                # latitude
                mid = (lat_range[0] + lat_range[1]) / 2
                if lat >= mid:
                    bits = (bits << 1) + 1
                    lat_range[0] = mid
                else:
                    bits = bits << 1
                    lat_range[1] = mid

            even_bit = not even_bit
            bit_count += 1

            if bit_count == 5:
                # Convert 5 bits to base32 character
                geohash += "0123456789bcdefghjkmnpqrstuvwxyz"[bits]
                bits = 0
                bit_count = 0

        return geohash

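    # Illustrative check (not part of the module): this implementation should
    # agree with standard geohash output; the classic reference point
    # (57.64911, 10.40744) encodes to "u4pruy" at precision 6.
    #
    #   MapCacheService()._encode_geohash(57.64911, 10.40744, 6)  # "u4pruy"
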
    def _hash_filters(self, filters: MapFilters) -> str:
        """Create deterministic hash of filters for cache keys."""
        filter_dict = filters.to_dict()
        # Sort to ensure consistent ordering
        filter_str = json.dumps(filter_dict, sort_keys=True)
        return hashlib.md5(filter_str.encode()).hexdigest()[:8]

    def _dict_to_unified_location(self, data: Dict[str, Any]) -> UnifiedLocation:
        """Convert dictionary back to UnifiedLocation object."""
        from .data_structures import LocationType

        return UnifiedLocation(
            id=data["id"],
            type=LocationType(data["type"]),
            name=data["name"],
            coordinates=list(data["coordinates"]),
            address=data.get("address"),
            metadata=data.get("metadata", {}),
            type_data=data.get("type_data", {}),
            cluster_weight=data.get("cluster_weight", 1),
            cluster_category=data.get("cluster_category", "default"),
        )

    def _dict_to_cluster_data(self, data: Dict[str, Any]) -> ClusterData:
        """Convert dictionary back to ClusterData object."""
        from .data_structures import LocationType

        bounds = GeoBounds(**data["bounds"])
        types = {LocationType(t) for t in data["types"]}

        representative = None
        if data.get("representative"):
            representative = self._dict_to_unified_location(data["representative"])

        return ClusterData(
            id=data["id"],
            coordinates=list(data["coordinates"]),
            count=data["count"],
            types=types,
            bounds=bounds,
            representative_location=representative,
        )

    def _dict_to_map_response(self, data: Dict[str, Any]) -> MapResponse:
        """Convert dictionary back to MapResponse object."""
        locations = [
            self._dict_to_unified_location(loc) for loc in data.get("locations", [])
        ]
        clusters = [
            self._dict_to_cluster_data(cluster) for cluster in data.get("clusters", [])
        ]

        bounds = None
        if data.get("bounds"):
            bounds = GeoBounds(**data["bounds"])

        return MapResponse(
            locations=locations,
            clusters=clusters,
            bounds=bounds,
            total_count=data.get("total_count", 0),
            filtered_count=data.get("filtered_count", 0),
            zoom_level=data.get("zoom_level"),
            clustered=data.get("clustered", False),
        )


# Global cache service instance
map_cache = MapCacheService()

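# Illustrative sketch (not part of the module): shape of a composed key.
# The geohash and filter hash shown are hypothetical values.
#
#   key = map_cache.get_locations_cache_key(bounds, filters, zoom_level=10)
#   # e.g. "thrillwiki_map:locations:geo:dpmuhj:filters:3f2a9c1b:zoom:10"

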
474
apps/core/services/map_service.py
Normal file
@@ -0,0 +1,474 @@
"""
Unified Map Service - Main orchestrating service for all map functionality.
"""

import time
from typing import List, Optional, Dict, Any, Set

from django.db import connection

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    MapFilters,
    MapResponse,
    LocationType,
    QueryPerformanceMetrics,
)
from .location_adapters import LocationAbstractionLayer
from .clustering_service import ClusteringService
from .map_cache_service import MapCacheService


class UnifiedMapService:
    """
    Main service orchestrating map data retrieval, filtering, clustering, and caching.
    Provides a unified interface for all location types with performance optimization.
    """

    # Performance thresholds
    MAX_UNCLUSTERED_POINTS = 500
    MAX_CLUSTERED_POINTS = 2000
    DEFAULT_ZOOM_LEVEL = 10

    def __init__(self):
        self.location_layer = LocationAbstractionLayer()
        self.clustering_service = ClusteringService()
        self.cache_service = MapCacheService()

    def get_map_data(
        self,
        *,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
        zoom_level: int = DEFAULT_ZOOM_LEVEL,
        cluster: bool = True,
        use_cache: bool = True,
    ) -> MapResponse:
        """
        Primary method for retrieving unified map data.

        Args:
            bounds: Geographic bounds to query within
            filters: Filtering criteria for locations
            zoom_level: Map zoom level for clustering decisions
            cluster: Whether to apply clustering
            use_cache: Whether to use cached data

        Returns:
            MapResponse with locations, clusters, and metadata
        """
        start_time = time.time()
        initial_query_count = len(connection.queries)
        cache_hit = False

        try:
            # Generate cache key
            cache_key = None
            if use_cache:
                cache_key = self._generate_cache_key(
                    bounds, filters, zoom_level, cluster
                )

                # Try to get from cache first
                cached_response = self.cache_service.get_cached_map_response(cache_key)
                if cached_response:
                    cached_response.cache_hit = True
                    cached_response.query_time_ms = int(
                        (time.time() - start_time) * 1000
                    )
                    return cached_response

            # Get locations from database
            locations = self._get_locations_from_db(bounds, filters)

            # Apply smart limiting based on zoom level and density
            locations = self._apply_smart_limiting(locations, bounds, zoom_level)

            # Determine if clustering should be applied
            should_cluster = cluster and self.clustering_service.should_cluster(
                zoom_level, len(locations)
            )

            # Apply clustering if needed
            clusters = []
            if should_cluster:
                locations, clusters = self.clustering_service.cluster_locations(
                    locations, zoom_level, bounds
                )

            # Calculate response bounds
            response_bounds = self._calculate_response_bounds(
                locations, clusters, bounds
            )

            # Create response
            response = MapResponse(
                locations=locations,
                clusters=clusters,
                bounds=response_bounds,
                total_count=len(locations) + sum(c.count for c in clusters),
                filtered_count=len(locations),
                zoom_level=zoom_level,
                clustered=should_cluster,
                cache_hit=cache_hit,
                query_time_ms=int((time.time() - start_time) * 1000),
                filters_applied=self._get_applied_filters_list(filters),
            )

            # Cache the response
            if use_cache and cache_key:
                self.cache_service.cache_map_response(cache_key, response)

            # Record performance metrics
            self._record_performance_metrics(
                start_time,
                initial_query_count,
                cache_hit,
                len(locations) + len(clusters),
                bounds is not None,
                should_cluster,
            )

            return response

        except Exception:
            # Return an empty response on failure
            return MapResponse(
                locations=[],
                clusters=[],
                total_count=0,
                filtered_count=0,
                query_time_ms=int((time.time() - start_time) * 1000),
                cache_hit=False,
            )

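    # Illustrative sketch (not part of the module): a map API view would
    # typically call this with viewport bounds parsed from the request.
    #
    #   service = UnifiedMapService()
    #   response = service.get_map_data(
    #       bounds=GeoBounds(north=42.0, south=41.0, east=-82.0, west=-83.0),
    #       zoom_level=9,
    #   )
    #   payload = response.to_dict()  # JSON-ready for the frontend
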
    def get_location_details(
        self, location_type: str, location_id: int
    ) -> Optional[UnifiedLocation]:
        """
        Get detailed information for a specific location.

        Args:
            location_type: Type of location (park, ride, company)
            location_id: ID of the location

        Returns:
            UnifiedLocation with full details or None if not found
        """
        try:
            # Check cache first
            cache_key = self.cache_service.get_location_detail_cache_key(
                location_type, location_id
            )
            cached_locations = self.cache_service.get_cached_locations(cache_key)
            if cached_locations:
                return cached_locations[0]

            # Get from database
            location_type_enum = LocationType(location_type.lower())
            location = self.location_layer.get_location_by_id(
                location_type_enum, location_id
            )

            # Cache the result
            if location:
                self.cache_service.cache_locations(
                    cache_key,
                    [location],
                    self.cache_service.LOCATION_DETAIL_TTL,
                )

            return location

        except Exception as e:
            print(f"Error getting location details: {e}")
            return None

    def search_locations(
        self,
        query: str,
        bounds: Optional[GeoBounds] = None,
        location_types: Optional[Set[LocationType]] = None,
        limit: int = 50,
    ) -> List[UnifiedLocation]:
        """
        Search locations with text query.

        Args:
            query: Search query string
            bounds: Optional geographic bounds to search within
            location_types: Optional set of location types to search
            limit: Maximum number of results

        Returns:
            List of matching UnifiedLocation objects
        """
        try:
            # Create search filters
            filters = MapFilters(
                search_query=query,
                location_types=location_types or {LocationType.PARK, LocationType.RIDE},
                has_coordinates=True,
            )

            # Get locations
            locations = self.location_layer.get_all_locations(bounds, filters)

            # Apply limit
            return locations[:limit]

        except Exception as e:
            print(f"Error searching locations: {e}")
            return []

    def get_locations_by_bounds(
        self,
        north: float,
        south: float,
        east: float,
        west: float,
        location_types: Optional[Set[LocationType]] = None,
        zoom_level: int = DEFAULT_ZOOM_LEVEL,
    ) -> MapResponse:
        """
        Get locations within specific geographic bounds.

        Args:
            north, south, east, west: Bounding box coordinates
            location_types: Optional filter for location types
            zoom_level: Map zoom level for optimization

        Returns:
            MapResponse with locations in bounds
        """
        try:
            bounds = GeoBounds(north=north, south=south, east=east, west=west)
            filters = (
                MapFilters(location_types=location_types) if location_types else None
            )

            return self.get_map_data(
                bounds=bounds, filters=filters, zoom_level=zoom_level
            )

        except ValueError:
            # Invalid bounds
            return MapResponse(
                locations=[], clusters=[], total_count=0, filtered_count=0
            )

def get_clustered_locations(
|
||||
self,
|
||||
zoom_level: int,
|
||||
bounds: Optional[GeoBounds] = None,
|
||||
filters: Optional[MapFilters] = None,
|
||||
) -> MapResponse:
|
||||
"""
|
||||
Get clustered location data for map display.
|
||||
|
||||
Args:
|
||||
zoom_level: Map zoom level for clustering configuration
|
||||
bounds: Optional geographic bounds
|
||||
filters: Optional filtering criteria
|
||||
|
||||
Returns:
|
||||
MapResponse with clustered data
|
||||
"""
|
||||
return self.get_map_data(
|
||||
bounds=bounds, filters=filters, zoom_level=zoom_level, cluster=True
|
||||
)
|
||||
|
||||
def get_locations_by_type(
|
||||
self,
|
||||
location_type: LocationType,
|
||||
bounds: Optional[GeoBounds] = None,
|
||||
limit: Optional[int] = None,
|
||||
) -> List[UnifiedLocation]:
|
||||
"""
|
||||
Get locations of a specific type.
|
||||
|
||||
Args:
|
||||
location_type: Type of locations to retrieve
|
||||
bounds: Optional geographic bounds
|
||||
limit: Optional limit on results
|
||||
|
||||
Returns:
|
||||
List of UnifiedLocation objects
|
||||
"""
|
||||
try:
|
||||
filters = MapFilters(location_types={location_type})
|
||||
locations = self.location_layer.get_locations_by_type(
|
||||
location_type, bounds, filters
|
||||
)
|
||||
|
||||
if limit:
|
||||
locations = locations[:limit]
|
||||
|
||||
return locations
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error getting locations by type: {e}")
|
||||
return []
|
||||
|
||||
def invalidate_cache(
|
||||
self,
|
||||
location_type: Optional[str] = None,
|
||||
location_id: Optional[int] = None,
|
||||
bounds: Optional[GeoBounds] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Invalidate cached map data.
|
||||
|
||||
Args:
|
||||
location_type: Optional specific location type to invalidate
|
||||
location_id: Optional specific location ID to invalidate
|
||||
bounds: Optional specific bounds to invalidate
|
||||
"""
|
||||
if location_type and location_id:
|
||||
self.cache_service.invalidate_location_cache(location_type, location_id)
|
||||
elif bounds:
|
||||
self.cache_service.invalidate_bounds_cache(bounds)
|
||||
else:
|
||||
self.cache_service.clear_all_map_cache()
|
||||
|
||||
def get_service_stats(self) -> Dict[str, Any]:
|
||||
"""Get service performance and usage statistics."""
|
||||
cache_stats = self.cache_service.get_cache_stats()
|
||||
|
||||
return {
|
||||
"cache_performance": cache_stats,
|
||||
"clustering_available": True,
|
||||
"supported_location_types": [t.value for t in LocationType],
|
||||
"max_unclustered_points": self.MAX_UNCLUSTERED_POINTS,
|
||||
"max_clustered_points": self.MAX_CLUSTERED_POINTS,
|
||||
"service_version": "1.0.0",
|
||||
}
|
||||
|
||||
def _get_locations_from_db(
|
||||
self, bounds: Optional[GeoBounds], filters: Optional[MapFilters]
|
||||
) -> List[UnifiedLocation]:
|
||||
"""Get locations from database using the abstraction layer."""
|
||||
return self.location_layer.get_all_locations(bounds, filters)
|
||||
|
||||
def _apply_smart_limiting(
|
||||
self,
|
||||
locations: List[UnifiedLocation],
|
||||
bounds: Optional[GeoBounds],
|
||||
zoom_level: int,
|
||||
) -> List[UnifiedLocation]:
|
||||
"""Apply intelligent limiting based on zoom level and density."""
|
||||
if zoom_level < 6: # Very zoomed out - show only major parks
|
||||
major_parks = [
|
||||
loc
|
||||
for loc in locations
|
||||
if (
|
||||
loc.type == LocationType.PARK
|
||||
and loc.cluster_category in ["major_park", "theme_park"]
|
||||
)
|
||||
]
|
||||
return major_parks[:200]
|
||||
elif zoom_level < 10: # Regional level
|
||||
return locations[:1000]
|
||||
else: # City level and closer
|
||||
return locations[: self.MAX_CLUSTERED_POINTS]
|
||||
|
||||
def _calculate_response_bounds(
|
||||
self,
|
||||
locations: List[UnifiedLocation],
|
||||
clusters: List[ClusterData],
|
||||
request_bounds: Optional[GeoBounds],
|
||||
) -> Optional[GeoBounds]:
|
||||
"""Calculate the actual bounds of the response data."""
|
||||
if request_bounds:
|
||||
return request_bounds
|
||||
|
||||
all_coords = []
|
||||
|
||||
# Add location coordinates
|
||||
for loc in locations:
|
||||
all_coords.append((loc.latitude, loc.longitude))
|
||||
|
||||
# Add cluster coordinates
|
||||
for cluster in clusters:
|
||||
all_coords.append(cluster.coordinates)
|
||||
|
||||
if not all_coords:
|
||||
return None
|
||||
|
||||
lats, lngs = zip(*all_coords)
|
||||
return GeoBounds(
|
||||
north=max(lats), south=min(lats), east=max(lngs), west=min(lngs)
|
||||
)
|
||||
|
||||
def _get_applied_filters_list(self, filters: Optional[MapFilters]) -> List[str]:
|
||||
"""Get list of applied filter types for metadata."""
|
||||
if not filters:
|
||||
return []
|
||||
|
||||
applied = []
|
||||
if filters.location_types:
|
||||
applied.append("location_types")
|
||||
if filters.search_query:
|
||||
applied.append("search_query")
|
||||
if filters.park_status:
|
||||
applied.append("park_status")
|
||||
if filters.ride_types:
|
||||
applied.append("ride_types")
|
||||
if filters.company_roles:
|
||||
applied.append("company_roles")
|
||||
if filters.min_rating:
|
||||
applied.append("min_rating")
|
||||
if filters.country:
|
||||
applied.append("country")
|
||||
if filters.state:
|
||||
applied.append("state")
|
||||
if filters.city:
|
||||
applied.append("city")
|
||||
|
||||
return applied
|
||||
|
||||
def _generate_cache_key(
|
||||
self,
|
||||
bounds: Optional[GeoBounds],
|
||||
filters: Optional[MapFilters],
|
||||
zoom_level: int,
|
||||
cluster: bool,
|
||||
) -> str:
|
||||
"""Generate cache key for the request."""
|
||||
if cluster:
|
||||
return self.cache_service.get_clusters_cache_key(
|
||||
bounds, filters, zoom_level
|
||||
)
|
||||
else:
|
||||
return self.cache_service.get_locations_cache_key(
|
||||
bounds, filters, zoom_level
|
||||
)
|
||||
|
||||
def _record_performance_metrics(
|
||||
self,
|
||||
start_time: float,
|
||||
initial_query_count: int,
|
||||
cache_hit: bool,
|
||||
result_count: int,
|
||||
bounds_used: bool,
|
||||
clustering_used: bool,
|
||||
) -> None:
|
||||
"""Record performance metrics for monitoring."""
|
||||
query_time_ms = int((time.time() - start_time) * 1000)
|
||||
db_query_count = len(connection.queries) - initial_query_count
|
||||
|
||||
metrics = QueryPerformanceMetrics(
|
||||
query_time_ms=query_time_ms,
|
||||
db_query_count=db_query_count,
|
||||
cache_hit=cache_hit,
|
||||
result_count=result_count,
|
||||
bounds_used=bounds_used,
|
||||
clustering_used=clustering_used,
|
||||
)
|
||||
|
||||
self.cache_service.record_performance_metrics(metrics)
|
||||
|
||||
|
||||
# Global service instance
|
||||
unified_map_service = UnifiedMapService()
|
||||
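
A minimal usage sketch for the service instance above. The import path and the coordinates are assumptions for illustration only:

# Hypothetical call sites; module path assumed from the service layout.
from apps.core.services.unified_map_service import unified_map_service

# Fetch everything visible in a bounding box (arbitrary coordinates).
response = unified_map_service.get_locations_by_bounds(
    north=41.52, south=41.44, east=-82.60, west=-82.75, zoom_level=12
)
print(response.total_count, response.cache_hit)

# Free-text search, limited to the first 10 matches.
matches = unified_map_service.search_locations("millennium", limit=10)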
200
apps/core/services/media_service.py
Normal file
@@ -0,0 +1,200 @@
"""
|
||||
Shared media service for ThrillWiki.
|
||||
|
||||
This module provides shared functionality for media upload, storage, and processing
|
||||
that can be used across all domain-specific media implementations.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any, Optional, Dict
|
||||
from datetime import datetime
|
||||
from django.core.files.uploadedfile import UploadedFile
|
||||
from django.conf import settings
|
||||
from PIL import Image, ExifTags
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MediaService:
|
||||
"""Shared service for media upload and processing operations."""
|
||||
|
||||
@staticmethod
|
||||
def generate_upload_path(
|
||||
domain: str, identifier: str, filename: str, subdirectory: Optional[str] = None
|
||||
) -> str:
|
||||
"""
|
||||
Generate standardized upload path for media files.
|
||||
|
||||
Args:
|
||||
domain: Domain type (e.g., 'park', 'ride')
|
||||
identifier: Object identifier (slug or id)
|
||||
filename: Original filename
|
||||
subdirectory: Optional subdirectory for organization
|
||||
|
||||
Returns:
|
||||
Standardized upload path
|
||||
"""
|
||||
# Always use .jpg extension for consistency
|
||||
base_filename = f"{identifier}.jpg"
|
||||
|
||||
if subdirectory:
|
||||
return f"{domain}/{subdirectory}/{identifier}/{base_filename}"
|
||||
else:
|
||||
return f"{domain}/{identifier}/{base_filename}"
|
||||
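
    # Illustrative example (hypothetical values):
    #   generate_upload_path("park", "cedar-point", "IMG_1234.png")
    #   -> "park/cedar-point/cedar-point.jpg"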

    @staticmethod
    def extract_exif_date(image_file: UploadedFile) -> Optional[datetime]:
        """
        Extract the date taken from image EXIF data.

        Args:
            image_file: Uploaded image file

        Returns:
            DateTime when photo was taken, or None if not available
        """
        try:
            with Image.open(image_file) as img:
                exif = img.getexif()
                if exif:
                    # Find the DateTime tag ID
                    for tag_id in ExifTags.TAGS:
                        if ExifTags.TAGS[tag_id] == "DateTimeOriginal":
                            if tag_id in exif:
                                # EXIF dates are typically in format: '2024:02:15 14:30:00'
                                date_str = exif[tag_id]
                                return datetime.strptime(date_str, "%Y:%m:%d %H:%M:%S")
                return None
        except Exception as e:
            logger.warning(f"Failed to extract EXIF date: {str(e)}")
            return None

    @staticmethod
    def validate_image_file(image_file: UploadedFile) -> tuple[bool, Optional[str]]:
        """
        Validate uploaded image file.

        Args:
            image_file: Uploaded image file

        Returns:
            Tuple of (is_valid, error_message)
        """
        try:
            # Check file size
            max_size = getattr(
                settings, "MAX_PHOTO_SIZE", 10 * 1024 * 1024
            )  # 10MB default
            if image_file.size > max_size:
                return (
                    False,
                    f"File size too large. Maximum size is {max_size // (1024 * 1024)}MB",
                )

            # Check file type
            allowed_types = getattr(
                settings,
                "ALLOWED_PHOTO_TYPES",
                ["image/jpeg", "image/png", "image/webp"],
            )
            if image_file.content_type not in allowed_types:
                return (
                    False,
                    f"File type not allowed. Allowed types: {', '.join(allowed_types)}",
                )

            # Try to open with PIL to validate it's a real image
            with Image.open(image_file) as img:
                img.verify()

            return True, None

        except Exception as e:
            return False, f"Invalid image file: {str(e)}"

    @staticmethod
    def process_image(
        image_file: UploadedFile,
        max_width: int = 1920,
        max_height: int = 1080,
        quality: int = 85,
    ) -> UploadedFile:
        """
        Process and optimize image file.

        Args:
            image_file: Original uploaded file
            max_width: Maximum width for resizing
            max_height: Maximum height for resizing
            quality: JPEG quality (1-100)

        Returns:
            Processed image file
        """
        try:
            with Image.open(image_file) as img:
                # Convert to RGB if necessary
                if img.mode in ("RGBA", "LA", "P"):
                    img = img.convert("RGB")

                # Resize if necessary
                if img.width > max_width or img.height > max_height:
                    img.thumbnail((max_width, max_height), Image.Resampling.LANCZOS)

                # Save processed image
                from io import BytesIO

                from django.core.files.uploadedfile import InMemoryUploadedFile

                output = BytesIO()
                img.save(output, format="JPEG", quality=quality, optimize=True)
                output.seek(0)

                return InMemoryUploadedFile(
                    output,
                    "ImageField",
                    f"{os.path.splitext(image_file.name)[0]}.jpg",
                    "image/jpeg",
                    output.getbuffer().nbytes,
                    None,
                )

        except Exception as e:
            logger.warning(f"Failed to process image, using original: {str(e)}")
            return image_file

    @staticmethod
    def generate_default_caption(username: str) -> str:
        """
        Generate default caption for uploaded photos.

        Args:
            username: Username of uploader

        Returns:
            Default caption string
        """
        from django.utils import timezone

        current_time = timezone.now()
        return f"Uploaded by {username} on {current_time.strftime('%B %d, %Y at %I:%M %p')}"

    @staticmethod
    def get_storage_stats() -> Dict[str, Any]:
        """
        Get media storage statistics.

        Returns:
            Dictionary with storage statistics
        """
        try:
            # This would need to be implemented based on your storage backend
            return {
                "total_files": 0,
                "total_size_bytes": 0,
                "storage_backend": "default",
                "available_space": "unknown",
            }
        except Exception as e:
            logger.error(f"Failed to get storage stats: {str(e)}")
            return {"error": str(e)}
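
A minimal sketch of how the pieces above compose in an upload flow, assuming a Django view with a file in request.FILES (the field name, domain, and slug are hypothetical):

from apps.core.services.media_service import MediaService

def handle_photo_upload(request):
    photo = request.FILES["photo"]  # assumed form field name
    is_valid, error = MediaService.validate_image_file(photo)
    if not is_valid:
        raise ValueError(error)
    taken_at = MediaService.extract_exif_date(photo)  # may be None
    processed = MediaService.process_image(photo)
    path = MediaService.generate_upload_path("park", "cedar-point", processed.name)
    caption = MediaService.generate_default_caption(request.user.username)
    return processed, path, caption, taken_at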
148
apps/core/services/media_url_service.py
Normal file
@@ -0,0 +1,148 @@
"""
|
||||
Media URL service for generating friendly URLs.
|
||||
|
||||
This service provides utilities for generating SEO-friendly URLs for media files
|
||||
while maintaining compatibility with Cloudflare Images.
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Optional, Dict, Any
|
||||
from django.utils.text import slugify
|
||||
|
||||
|
||||
class MediaURLService:
|
||||
"""Service for generating and parsing friendly media URLs."""
|
||||
|
||||
@staticmethod
|
||||
def generate_friendly_filename(caption: str, photo_id: int, extension: str = "jpg") -> str:
|
||||
"""
|
||||
Generate a friendly filename from photo caption and ID.
|
||||
|
||||
Args:
|
||||
caption: Photo caption
|
||||
photo_id: Photo database ID
|
||||
extension: File extension (default: jpg)
|
||||
|
||||
Returns:
|
||||
Friendly filename like "beautiful-park-entrance-123.jpg"
|
||||
"""
|
||||
if caption:
|
||||
# Clean and slugify the caption
|
||||
slug = slugify(caption)
|
||||
# Limit length to avoid overly long URLs
|
||||
if len(slug) > 50:
|
||||
slug = slug[:50].rsplit('-', 1)[0] # Cut at word boundary
|
||||
return f"{slug}-{photo_id}.{extension}"
|
||||
else:
|
||||
return f"photo-{photo_id}.{extension}"
|
||||
|
||||
@staticmethod
|
||||
def generate_park_photo_url(park_slug: str, caption: str, photo_id: int, variant: str = "public") -> str:
|
||||
"""
|
||||
Generate a friendly URL for a park photo.
|
||||
|
||||
Args:
|
||||
park_slug: Park slug
|
||||
caption: Photo caption
|
||||
photo_id: Photo database ID
|
||||
variant: Image variant (public, thumbnail, medium, large)
|
||||
|
||||
Returns:
|
||||
Friendly URL like "/parks/cedar-point/photos/beautiful-entrance-123.jpg"
|
||||
"""
|
||||
filename = MediaURLService.generate_friendly_filename(caption, photo_id)
|
||||
|
||||
# Add variant to filename if not public
|
||||
if variant != "public":
|
||||
name, ext = filename.rsplit('.', 1)
|
||||
filename = f"{name}-{variant}.{ext}"
|
||||
|
||||
return f"/parks/{park_slug}/photos/{filename}"
|
||||
|
||||
@staticmethod
|
||||
def generate_ride_photo_url(park_slug: str, ride_slug: str, caption: str, photo_id: int, variant: str = "public") -> str:
|
||||
"""
|
||||
Generate a friendly URL for a ride photo.
|
||||
|
||||
Args:
|
||||
park_slug: Park slug
|
||||
ride_slug: Ride slug
|
||||
caption: Photo caption
|
||||
photo_id: Photo database ID
|
||||
variant: Image variant
|
||||
|
||||
Returns:
|
||||
Friendly URL like "/parks/cedar-point/rides/millennium-force/photos/first-drop-456.jpg"
|
||||
"""
|
||||
filename = MediaURLService.generate_friendly_filename(caption, photo_id)
|
||||
|
||||
if variant != "public":
|
||||
name, ext = filename.rsplit('.', 1)
|
||||
filename = f"{name}-{variant}.{ext}"
|
||||
|
||||
return f"/parks/{park_slug}/rides/{ride_slug}/photos/{filename}"
|
||||
|
||||
@staticmethod
|
||||
def parse_photo_filename(filename: str) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Parse a friendly filename to extract photo ID and variant.
|
||||
|
||||
Args:
|
||||
filename: Filename like "beautiful-entrance-123-thumbnail.jpg"
|
||||
|
||||
Returns:
|
||||
Dict with photo_id and variant, or None if parsing fails
|
||||
"""
|
||||
# Remove extension
|
||||
name = filename.rsplit('.', 1)[0]
|
||||
|
||||
# Check for variant suffix
|
||||
variant = "public"
|
||||
variant_patterns = ["thumbnail", "medium", "large"]
|
||||
|
||||
for v in variant_patterns:
|
||||
if name.endswith(f"-{v}"):
|
||||
variant = v
|
||||
name = name[:-len(f"-{v}")]
|
||||
break
|
||||
|
||||
# Extract photo ID (should be the last number)
|
||||
match = re.search(r'-(\d+)$', name)
|
||||
if match:
|
||||
photo_id = int(match.group(1))
|
||||
return {
|
||||
"photo_id": photo_id,
|
||||
"variant": variant
|
||||
}
|
||||
|
||||
return None
|
||||
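
    # Illustrative round-trip (hypothetical values):
    #   parse_photo_filename("beautiful-entrance-123-thumbnail.jpg")
    #   -> {"photo_id": 123, "variant": "thumbnail"}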

    @staticmethod
    def get_cloudflare_url_with_fallback(cloudflare_image, variant: str = "public") -> Optional[str]:
        """
        Get Cloudflare URL with fallback handling.

        Args:
            cloudflare_image: CloudflareImage instance
            variant: Desired variant

        Returns:
            Cloudflare URL or None
        """
        if not cloudflare_image:
            return None

        try:
            # Try the specific variant first
            url = cloudflare_image.get_url(variant)
            if url:
                return url

            # Fallback to public URL
            if variant != "public":
                return cloudflare_image.public_url

        except Exception:
            pass

        return None
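
A minimal sketch of how the URL helpers compose; the slug, caption, and ID are hypothetical:

from apps.core.services.media_url_service import MediaURLService

# Build a friendly URL for a park photo and recover its ID from the filename.
url = MediaURLService.generate_park_photo_url(
    "cedar-point", "Beautiful entrance", 123, variant="thumbnail"
)
# url == "/parks/cedar-point/photos/beautiful-entrance-123-thumbnail.jpg"
parsed = MediaURLService.parse_photo_filename(url.rsplit("/", 1)[-1])
# parsed == {"photo_id": 123, "variant": "thumbnail"}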
407
apps/core/services/performance_monitoring.py
Normal file
@@ -0,0 +1,407 @@
"""
|
||||
Performance monitoring utilities and context managers.
|
||||
"""
|
||||
|
||||
import time
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
from functools import wraps
|
||||
from typing import Optional, Dict, Any, List
|
||||
from django.db import connection
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
|
||||
logger = logging.getLogger("performance")
|
||||
|
||||
|
||||
@contextmanager
|
||||
def monitor_performance(operation_name: str, **tags):
|
||||
"""Context manager for monitoring operation performance"""
|
||||
start_time = time.time()
|
||||
initial_queries = len(connection.queries)
|
||||
|
||||
# Create performance context
|
||||
performance_context = {
|
||||
"operation": operation_name,
|
||||
"start_time": start_time,
|
||||
"timestamp": timezone.now().isoformat(),
|
||||
**tags,
|
||||
}
|
||||
|
||||
try:
|
||||
yield performance_context
|
||||
except Exception as e:
|
||||
performance_context["error"] = str(e)
|
||||
performance_context["status"] = "error"
|
||||
raise
|
||||
else:
|
||||
performance_context["status"] = "success"
|
||||
finally:
|
||||
end_time = time.time()
|
||||
duration = end_time - start_time
|
||||
total_queries = len(connection.queries) - initial_queries
|
||||
|
||||
# Update performance context with final metrics
|
||||
performance_context.update(
|
||||
{
|
||||
"duration_seconds": duration,
|
||||
"duration_ms": round(duration * 1000, 2),
|
||||
"query_count": total_queries,
|
||||
"end_time": end_time,
|
||||
}
|
||||
)
|
||||
|
||||
# Log performance data
|
||||
log_level = (
|
||||
logging.WARNING if duration > 2.0 or total_queries > 10 else logging.INFO
|
||||
)
|
||||
logger.log(
|
||||
log_level,
|
||||
f"Performance: {operation_name} completed in {duration:.3f}s with {
|
||||
total_queries
|
||||
} queries",
|
||||
extra=performance_context,
|
||||
)
|
||||
|
||||
# Log slow operations with additional detail
|
||||
if duration > 2.0:
|
||||
logger.warning(
|
||||
f"Slow operation detected: {operation_name} took {duration:.3f}s",
|
||||
extra={
|
||||
"slow_operation": True,
|
||||
"threshold_exceeded": "duration",
|
||||
**performance_context,
|
||||
},
|
||||
)
|
||||
|
||||
if total_queries > 10:
|
||||
logger.warning(
|
||||
f"High query count: {operation_name} executed {total_queries} queries",
|
||||
extra={
|
||||
"high_query_count": True,
|
||||
"threshold_exceeded": "query_count",
|
||||
**performance_context,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def track_queries(operation_name: str, warn_threshold: int = 10):
|
||||
"""Context manager to track database queries for specific operations"""
|
||||
if not settings.DEBUG:
|
||||
yield
|
||||
return
|
||||
|
||||
initial_queries = len(connection.queries)
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
end_time = time.time()
|
||||
total_queries = len(connection.queries) - initial_queries
|
||||
execution_time = end_time - start_time
|
||||
|
||||
query_details = []
|
||||
if hasattr(connection, "queries") and total_queries > 0:
|
||||
recent_queries = connection.queries[-total_queries:]
|
||||
query_details = [
|
||||
{
|
||||
"sql": (
|
||||
query["sql"][:200] + "..."
|
||||
if len(query["sql"]) > 200
|
||||
else query["sql"]
|
||||
),
|
||||
"time": float(query["time"]),
|
||||
}
|
||||
for query in recent_queries
|
||||
]
|
||||
|
||||
performance_data = {
|
||||
"operation": operation_name,
|
||||
"query_count": total_queries,
|
||||
"execution_time": execution_time,
|
||||
"queries": query_details if settings.DEBUG else [],
|
||||
}
|
||||
|
||||
if total_queries > warn_threshold or execution_time > 1.0:
|
||||
logger.warning(
|
||||
f"Performance concern in {operation_name}: "
|
||||
f"{total_queries} queries, {execution_time:.2f}s",
|
||||
extra=performance_data,
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
f"Query tracking for {operation_name}: "
|
||||
f"{total_queries} queries, {execution_time:.2f}s",
|
||||
extra=performance_data,
|
||||
)
|
||||
|
||||
|
||||
class PerformanceProfiler:
|
||||
"""Advanced performance profiling with detailed metrics"""
|
||||
|
||||
def __init__(self, name: str):
|
||||
self.name = name
|
||||
self.start_time = None
|
||||
self.end_time = None
|
||||
self.checkpoints = []
|
||||
self.initial_queries = 0
|
||||
self.memory_usage = {}
|
||||
|
||||
def start(self):
|
||||
"""Start profiling"""
|
||||
self.start_time = time.time()
|
||||
self.initial_queries = len(connection.queries)
|
||||
|
||||
# Track memory usage if psutil is available
|
||||
try:
|
||||
import psutil
|
||||
|
||||
process = psutil.Process()
|
||||
self.memory_usage["start"] = process.memory_info().rss
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
logger.debug(f"Started profiling: {self.name}")
|
||||
|
||||
def checkpoint(self, name: str):
|
||||
"""Add a checkpoint"""
|
||||
if self.start_time is None:
|
||||
logger.warning(f"Checkpoint '{name}' called before profiling started")
|
||||
return
|
||||
|
||||
current_time = time.time()
|
||||
elapsed = current_time - self.start_time
|
||||
queries_since_start = len(connection.queries) - self.initial_queries
|
||||
|
||||
checkpoint = {
|
||||
"name": name,
|
||||
"timestamp": current_time,
|
||||
"elapsed_seconds": elapsed,
|
||||
"queries_since_start": queries_since_start,
|
||||
}
|
||||
|
||||
# Memory usage if available
|
||||
try:
|
||||
import psutil
|
||||
|
||||
process = psutil.Process()
|
||||
checkpoint["memory_rss"] = process.memory_info().rss
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
self.checkpoints.append(checkpoint)
|
||||
logger.debug(f"Checkpoint '{name}' at {elapsed:.3f}s")
|
||||
|
||||
def stop(self):
|
||||
"""Stop profiling and log results"""
|
||||
if self.start_time is None:
|
||||
logger.warning("Profiling stopped before it was started")
|
||||
return
|
||||
|
||||
self.end_time = time.time()
|
||||
total_duration = self.end_time - self.start_time
|
||||
total_queries = len(connection.queries) - self.initial_queries
|
||||
|
||||
# Final memory usage
|
||||
try:
|
||||
import psutil
|
||||
|
||||
process = psutil.Process()
|
||||
self.memory_usage["end"] = process.memory_info().rss
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Create detailed profiling report
|
||||
report = {
|
||||
"profiler_name": self.name,
|
||||
"total_duration": total_duration,
|
||||
"total_queries": total_queries,
|
||||
"checkpoints": self.checkpoints,
|
||||
"memory_usage": self.memory_usage,
|
||||
"queries_per_second": (
|
||||
total_queries / total_duration if total_duration > 0 else 0
|
||||
),
|
||||
}
|
||||
|
||||
# Calculate checkpoint intervals
|
||||
if len(self.checkpoints) > 1:
|
||||
intervals = []
|
||||
for i in range(1, len(self.checkpoints)):
|
||||
prev = self.checkpoints[i - 1]
|
||||
curr = self.checkpoints[i]
|
||||
intervals.append(
|
||||
{
|
||||
"from": prev["name"],
|
||||
"to": curr["name"],
|
||||
"duration": curr["elapsed_seconds"] - prev["elapsed_seconds"],
|
||||
"queries": curr["queries_since_start"]
|
||||
- prev["queries_since_start"],
|
||||
}
|
||||
)
|
||||
report["checkpoint_intervals"] = intervals
|
||||
|
||||
# Log the complete report
|
||||
log_level = logging.WARNING if total_duration > 1.0 else logging.INFO
|
||||
logger.log(
|
||||
log_level,
|
||||
f"Profiling complete: {self.name} took {total_duration:.3f}s with {
|
||||
total_queries
|
||||
} queries",
|
||||
extra=report,
|
||||
)
|
||||
|
||||
return report
|
||||
|
||||
|
||||
@contextmanager
|
||||
def profile_operation(name: str):
|
||||
"""Context manager for detailed operation profiling"""
|
||||
profiler = PerformanceProfiler(name)
|
||||
profiler.start()
|
||||
|
||||
try:
|
||||
yield profiler
|
||||
finally:
|
||||
profiler.stop()
|
||||
|
||||
|
||||
class DatabaseQueryAnalyzer:
|
||||
"""Analyze database query patterns and performance"""
|
||||
|
||||
@staticmethod
|
||||
def analyze_queries(queries: List[Dict]) -> Dict[str, Any]:
|
||||
"""Analyze a list of queries for patterns and issues"""
|
||||
if not queries:
|
||||
return {}
|
||||
|
||||
total_time = sum(float(q.get("time", 0)) for q in queries)
|
||||
query_count = len(queries)
|
||||
|
||||
# Group queries by type
|
||||
query_types = {}
|
||||
for query in queries:
|
||||
sql = query.get("sql", "").strip().upper()
|
||||
query_type = sql.split()[0] if sql else "UNKNOWN"
|
||||
query_types[query_type] = query_types.get(query_type, 0) + 1
|
||||
|
||||
# Find slow queries (top 10% by time)
|
||||
sorted_queries = sorted(
|
||||
queries, key=lambda q: float(q.get("time", 0)), reverse=True
|
||||
)
|
||||
slow_query_count = max(1, query_count // 10)
|
||||
slow_queries = sorted_queries[:slow_query_count]
|
||||
|
||||
# Detect duplicate queries
|
||||
query_signatures = {}
|
||||
for query in queries:
|
||||
# Simplified signature - remove literals and normalize whitespace
|
||||
sql = query.get("sql", "")
|
||||
signature = " ".join(sql.split()) # Normalize whitespace
|
||||
query_signatures[signature] = query_signatures.get(signature, 0) + 1
|
||||
|
||||
duplicates = {
|
||||
sig: count for sig, count in query_signatures.items() if count > 1
|
||||
}
|
||||
|
||||
analysis = {
|
||||
"total_queries": query_count,
|
||||
"total_time": total_time,
|
||||
"average_time": total_time / query_count if query_count > 0 else 0,
|
||||
"query_types": query_types,
|
||||
"slow_queries": [
|
||||
{
|
||||
"sql": (
|
||||
q.get("sql", "")[:200] + "..."
|
||||
if len(q.get("sql", "")) > 200
|
||||
else q.get("sql", "")
|
||||
),
|
||||
"time": float(q.get("time", 0)),
|
||||
}
|
||||
for q in slow_queries
|
||||
],
|
||||
"duplicate_query_count": len(duplicates),
|
||||
"duplicate_queries": (
|
||||
duplicates
|
||||
if len(duplicates) <= 10
|
||||
else dict(list(duplicates.items())[:10])
|
||||
),
|
||||
}
|
||||
|
||||
return analysis
|
||||
|
||||
@classmethod
|
||||
def analyze_current_queries(cls) -> Dict[str, Any]:
|
||||
"""Analyze the current request's queries"""
|
||||
if hasattr(connection, "queries"):
|
||||
return cls.analyze_queries(connection.queries)
|
||||
return {}
|
||||
|
||||
|
||||
# Performance monitoring decorators
|
||||
def monitor_function_performance(operation_name: Optional[str] = None):
|
||||
"""Decorator to monitor function performance"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
name = operation_name or f"{func.__module__}.{func.__name__}"
|
||||
with monitor_performance(
|
||||
name, function=func.__name__, module=func.__module__
|
||||
):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
def track_database_queries(warn_threshold: int = 10):
|
||||
"""Decorator to track database queries for a function"""
|
||||
|
||||
def decorator(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
operation_name = f"{func.__module__}.{func.__name__}"
|
||||
with track_queries(operation_name, warn_threshold):
|
||||
return func(*args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
return decorator
|
||||
|
||||
|
||||
# Performance metrics collection
|
||||
class PerformanceMetrics:
|
||||
"""Collect and aggregate performance metrics"""
|
||||
|
||||
def __init__(self):
|
||||
self.metrics = []
|
||||
|
||||
def record_metric(self, name: str, value: float, tags: Optional[Dict] = None):
|
||||
"""Record a performance metric"""
|
||||
metric = {
|
||||
"name": name,
|
||||
"value": value,
|
||||
"timestamp": timezone.now().isoformat(),
|
||||
"tags": tags or {},
|
||||
}
|
||||
self.metrics.append(metric)
|
||||
|
||||
# Log the metric
|
||||
logger.info(f"Performance metric: {name} = {value}", extra=metric)
|
||||
|
||||
def get_metrics(self, name: Optional[str] = None) -> List[Dict]:
|
||||
"""Get recorded metrics, optionally filtered by name"""
|
||||
if name:
|
||||
return [m for m in self.metrics if m["name"] == name]
|
||||
return self.metrics.copy()
|
||||
|
||||
def clear_metrics(self):
|
||||
"""Clear all recorded metrics"""
|
||||
self.metrics.clear()
|
||||
|
||||
|
||||
# Global performance metrics instance
|
||||
performance_metrics = PerformanceMetrics()
|
||||
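
A minimal usage sketch for the monitoring utilities above; the operation and checkpoint names are arbitrary illustration values:

from apps.core.services.performance_monitoring import (
    monitor_function_performance,
    profile_operation,
)

@monitor_function_performance("parks.build_index")  # hypothetical operation name
def build_index():
    with profile_operation("index-rebuild") as profiler:
        profiler.checkpoint("loaded")   # after loading data
        profiler.checkpoint("indexed")  # after writing the index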
725
apps/core/services/trending_service.py
Normal file
@@ -0,0 +1,725 @@
"""
|
||||
Trending Service for calculating and caching trending content.
|
||||
|
||||
This service implements the weighted trending algorithm that combines:
|
||||
- View growth rates
|
||||
- Content ratings
|
||||
- Recency factors
|
||||
- Popularity metrics
|
||||
|
||||
Results are cached in Redis for performance optimization.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any
|
||||
from django.utils import timezone
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.cache import cache
|
||||
from django.db.models import Q
|
||||
|
||||
from apps.core.analytics import PageView
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TrendingService:
|
||||
"""
|
||||
Service for calculating trending content using weighted algorithm.
|
||||
|
||||
Algorithm Components:
|
||||
- View Growth Rate (40% weight): Recent view increase vs historical
|
||||
- Rating Score (30% weight): Average user rating normalized
|
||||
- Recency Factor (20% weight): How recently content was added/updated
|
||||
- Popularity Boost (10% weight): Total view count normalization
|
||||
"""
|
||||
|
||||
# Algorithm weights (must sum to 1.0)
|
||||
WEIGHT_VIEW_GROWTH = 0.4
|
||||
WEIGHT_RATING = 0.3
|
||||
WEIGHT_RECENCY = 0.2
|
||||
WEIGHT_POPULARITY = 0.1
|
||||
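
    # Worked example (illustrative component values): growth 0.6, rating 0.7,
    # recency 0.5, popularity 0.2 combine to
    #   0.6 * 0.4 + 0.7 * 0.3 + 0.5 * 0.2 + 0.2 * 0.1
    #   = 0.24 + 0.21 + 0.10 + 0.02 = 0.57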

    # Cache configuration
    CACHE_PREFIX = "trending"
    CACHE_TTL = 86400  # 24 hours (daily refresh)

    # Time windows for calculations
    CURRENT_PERIOD_HOURS = 168  # 7 days
    PREVIOUS_PERIOD_HOURS = 336  # 14 days (for previous 7-day window comparison)
    RECENCY_BASELINE_DAYS = 365

    def __init__(self):
        self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")

    def get_trending_content(
        self, content_type: str = "all", limit: int = 20, force_refresh: bool = False
    ) -> List[Dict[str, Any]]:
        """
        Get trending content using direct calculation.

        Args:
            content_type: 'parks', 'rides', or 'all'
            limit: Maximum number of results
            force_refresh: Skip cache and recalculate

        Returns:
            List of trending content in the exact frontend format
        """
        cache_key = f"trending:calculated:{content_type}:{limit}"

        if not force_refresh:
            cached_result = cache.get(cache_key)
            if cached_result is not None:
                self.logger.debug(
                    f"Returning cached trending results for {content_type}"
                )
                return cached_result

        self.logger.info(f"Getting trending content for {content_type}")

        try:
            # Calculate directly without Celery
            trending_items = []

            if content_type in ["all", "parks"]:
                park_items = self._calculate_trending_parks(
                    limit * 2 if content_type == "all" else limit
                )
                trending_items.extend(park_items)

            if content_type in ["all", "rides"]:
                ride_items = self._calculate_trending_rides(
                    limit * 2 if content_type == "all" else limit
                )
                trending_items.extend(ride_items)

            # Sort by trending score and apply the limit
            trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
            trending_items = trending_items[:limit]

            # Format results for API consumption
            formatted_results = self._format_trending_results(trending_items)

            # Cache results
            cache.set(cache_key, formatted_results, self.CACHE_TTL)

            self.logger.info(
                f"Calculated {len(formatted_results)} trending items for {content_type}"
            )
            return formatted_results

        except Exception as e:
            self.logger.error(f"Error getting trending content: {e}", exc_info=True)
            return []

    def get_new_content(
        self,
        content_type: str = "all",
        limit: int = 20,
        days_back: int = 30,
        force_refresh: bool = False,
    ) -> List[Dict[str, Any]]:
        """
        Get recently added content using direct calculation.

        Args:
            content_type: 'parks', 'rides', or 'all'
            limit: Maximum number of results
            days_back: How many days to look back
            force_refresh: Skip cache and recalculate

        Returns:
            List of new content in the exact frontend format
        """
        cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"

        if not force_refresh:
            cached_result = cache.get(cache_key)
            if cached_result is not None:
                self.logger.debug(
                    f"Returning cached new content results for {content_type}"
                )
                return cached_result

        self.logger.info(f"Getting new content for {content_type}")

        try:
            # Calculate directly without Celery
            cutoff_date = timezone.now() - timedelta(days=days_back)
            new_items = []

            if content_type in ["all", "parks"]:
                parks = self._get_new_parks(
                    cutoff_date, limit * 2 if content_type == "all" else limit
                )
                new_items.extend(parks)

            if content_type in ["all", "rides"]:
                rides = self._get_new_rides(
                    cutoff_date, limit * 2 if content_type == "all" else limit
                )
                new_items.extend(rides)

            # Sort by date added (most recent first) and apply the limit
            new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
            new_items = new_items[:limit]

            # Format results for API consumption
            formatted_results = self._format_new_content_results(new_items)

            # Cache results
            cache.set(cache_key, formatted_results, 1800)  # Cache for 30 minutes

            self.logger.info(
                f"Calculated {len(formatted_results)} new items for {content_type}"
            )
            return formatted_results

        except Exception as e:
            self.logger.error(f"Error getting new content: {e}", exc_info=True)
            return []

    def _calculate_trending_parks(self, limit: int) -> List[Dict[str, Any]]:
        """Calculate trending scores for parks."""
        parks = Park.objects.filter(status="OPERATING").select_related(
            "location", "operator", "card_image"
        )

        trending_parks = []

        for park in parks:
            try:
                score = self._calculate_content_score(park, "park")
                if score > 0:  # Only include items with positive trending scores
                    # Get opening date for date_opened field
                    opening_date = getattr(park, "opening_date", None)
                    if opening_date and isinstance(opening_date, datetime):
                        opening_date = opening_date.date()

                    # Get location fields
                    city = ""
                    state = ""
                    country = ""
                    try:
                        location = getattr(park, "location", None)
                        if location:
                            city = getattr(location, "city", "") or ""
                            state = getattr(location, "state", "") or ""
                            country = getattr(location, "country", "") or ""
                    except Exception:
                        pass

                    # Get card image URL
                    card_image_url = ""
                    if park.card_image and hasattr(park.card_image, "image"):
                        card_image_url = (
                            park.card_image.image.url if park.card_image.image else ""
                        )

                    # Get primary company (operator)
                    primary_company = park.operator.name if park.operator else ""

                    trending_parks.append(
                        {
                            "content_object": park,
                            "content_type": "park",
                            "trending_score": score,
                            "id": park.id,
                            "name": park.name,
                            "slug": park.slug,
                            "park": park.name,  # For parks, the park field is the park name itself
                            "category": "park",
                            "rating": (
                                float(park.average_rating)
                                if park.average_rating
                                else 0.0
                            ),
                            "date_opened": (
                                opening_date.isoformat() if opening_date else ""
                            ),
                            "url": park.url,
                            "card_image": card_image_url,
                            "city": city,
                            "state": state,
                            "country": country,
                            "primary_company": primary_company,
                        }
                    )
            except Exception as e:
                self.logger.warning(f"Error calculating score for park {park.id}: {e}")

        return trending_parks

    def _calculate_trending_rides(self, limit: int) -> List[Dict[str, Any]]:
        """Calculate trending scores for rides."""
        rides = Ride.objects.filter(status="OPERATING").select_related(
            "park", "park__location", "card_image"
        )

        trending_rides = []

        for ride in rides:
            try:
                score = self._calculate_content_score(ride, "ride")
                if score > 0:  # Only include items with positive trending scores
                    # Get opening date for date_opened field
                    opening_date = getattr(ride, "opening_date", None)
                    if opening_date and isinstance(opening_date, datetime):
                        opening_date = opening_date.date()

                    # Get card image URL
                    card_image_url = ""
                    if ride.card_image and hasattr(ride.card_image, "image"):
                        card_image_url = (
                            ride.card_image.image.url if ride.card_image.image else ""
                        )

                    trending_rides.append(
                        {
                            "content_object": ride,
                            "content_type": "ride",
                            "trending_score": score,
                            "id": ride.pk,  # Use pk instead of id
                            "name": ride.name,
                            "slug": ride.slug,
                            "park": ride.park.name if ride.park else "",
                            "category": "ride",
                            "rating": (
                                float(ride.average_rating)
                                if ride.average_rating
                                else 0.0
                            ),
                            "date_opened": (
                                opening_date.isoformat() if opening_date else ""
                            ),
                            "url": ride.url,
                            "park_url": ride.park.url if ride.park else "",
                            "card_image": card_image_url,
                        }
                    )
            except Exception as e:
                self.logger.warning(f"Error calculating score for ride {ride.pk}: {e}")

        return trending_rides

    def _calculate_content_score(self, content_obj: Any, content_type: str) -> float:
        """
        Calculate the weighted trending score for a content object.

        Returns:
            Float between 0.0 and 1.0 representing trending strength
        """
        try:
            # Get content type for PageView queries
            ct = ContentType.objects.get_for_model(content_obj)

            # 1. View Growth Score (40% weight)
            view_growth_score = self._calculate_view_growth_score(ct, content_obj.id)

            # 2. Rating Score (30% weight)
            rating_score = self._calculate_rating_score(content_obj)

            # 3. Recency Score (20% weight)
            recency_score = self._calculate_recency_score(content_obj)

            # 4. Popularity Score (10% weight)
            popularity_score = self._calculate_popularity_score(ct, content_obj.id)

            # Calculate weighted final score
            final_score = (
                view_growth_score * self.WEIGHT_VIEW_GROWTH
                + rating_score * self.WEIGHT_RATING
                + recency_score * self.WEIGHT_RECENCY
                + popularity_score * self.WEIGHT_POPULARITY
            )

            self.logger.debug(
                f"{content_type} {content_obj.id}: "
                f"growth={view_growth_score:.3f}, rating={rating_score:.3f}, "
                f"recency={recency_score:.3f}, popularity={popularity_score:.3f}, "
                f"final={final_score:.3f}"
            )

            return final_score

        except Exception as e:
            self.logger.error(
                f"Error calculating score for {content_type} {content_obj.id}: {e}"
            )
            return 0.0

    def _calculate_view_growth_score(
        self, content_type: ContentType, object_id: int
    ) -> float:
        """Calculate the normalized view growth score."""
        try:
            current_views, previous_views, growth_percentage = (
                PageView.get_views_growth(
                    content_type,
                    object_id,
                    self.CURRENT_PERIOD_HOURS,
                    self.PREVIOUS_PERIOD_HOURS,
                )
            )

            if previous_views == 0:
                # New content with views gets a boost
                return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0

            # Normalize growth percentage to a 0-1 scale:
            # 100% growth = 0.2, 500% growth = 1.0
            normalized_growth = (
                min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
            )
            return max(normalized_growth, 0.0)

        except Exception as e:
            self.logger.warning(f"Error calculating view growth: {e}")
            return 0.0

    def _calculate_rating_score(self, content_obj: Any) -> float:
        """Calculate the normalized rating score."""
        try:
            rating = getattr(content_obj, "average_rating", None)
            if rating is None or rating == 0:
                return 0.3  # Neutral score for unrated content

            # Normalize the rating from a 1-10 scale to a 0-1 scale:
            # a rating of 5 maps to ~0.44, 8 to ~0.78, 10 to 1.0
            return min(max((float(rating) - 1) / 9.0, 0.0), 1.0)

        except Exception as e:
            self.logger.warning(f"Error calculating rating score: {e}")
            return 0.3

    def _calculate_recency_score(self, content_obj: Any) -> float:
        """Calculate the recency score based on when content was added/updated."""
        try:
            # Use opening_date for parks/rides, or created_at as a fallback
            date_added = getattr(content_obj, "opening_date", None)
            if not date_added:
                date_added = getattr(content_obj, "created_at", None)
                if not date_added:
                    return 0.5  # Neutral score for unknown dates

            # Handle both date and datetime objects
            if hasattr(date_added, "date"):
                date_added = date_added.date()

            # Calculate days since added
            today = timezone.now().date()
            days_since_added = (today - date_added).days

            # Recency score: newer content gets higher scores.
            # 0 days = 1.0, 30 days = 0.8, 365 days = 0.1, >365 days = 0.0
            if days_since_added <= 0:
                return 1.0
            elif days_since_added <= 30:
                return 1.0 - (days_since_added / 30.0) * 0.2  # 1.0 to 0.8
            elif days_since_added <= self.RECENCY_BASELINE_DAYS:
                return (
                    0.8
                    - ((days_since_added - 30) / (self.RECENCY_BASELINE_DAYS - 30))
                    * 0.7
                )  # 0.8 to 0.1
            else:
                return 0.0

        except Exception as e:
            self.logger.warning(f"Error calculating recency score: {e}")
            return 0.5
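
    # Worked example (illustrative): content added 100 days ago scores
    #   0.8 - ((100 - 30) / (365 - 30)) * 0.7 ≈ 0.65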

    def _calculate_popularity_score(
        self, content_type: ContentType, object_id: int
    ) -> float:
        """Calculate the popularity score based on total view count."""
        try:
            total_views = PageView.get_total_views_count(
                content_type,
                object_id,
                hours=168,  # Last 7 days
            )

            # Normalize views to a 0-1 scale:
            # 0 views = 0.0, 100 views = 0.5, 1000+ views = 1.0
            if total_views == 0:
                return 0.0
            elif total_views <= 100:
                return total_views / 200.0  # 0.0 to 0.5
            else:
                return min(0.5 + (total_views - 100) / 1800.0, 1.0)  # 0.5 to 1.0

        except Exception as e:
            self.logger.warning(f"Error calculating popularity score: {e}")
            return 0.0

    def _get_new_parks(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
        """Get recently added parks."""
        new_parks = (
            Park.objects.filter(
                Q(created_at__gte=cutoff_date)
                | Q(opening_date__gte=cutoff_date.date()),
                status="OPERATING",
            )
            .select_related("location", "operator", "card_image")
            .order_by("-created_at", "-opening_date")[:limit]
        )

        results = []
        for park in new_parks:
            date_added = park.opening_date or park.created_at
            # Handle datetime to date conversion
            if date_added:
                # If it's a datetime, convert to date
                if isinstance(date_added, datetime):
                    date_added = date_added.date()
                # If it's already a date, keep it as is

            # Get opening date for date_opened field
            opening_date = getattr(park, "opening_date", None)
            if opening_date and isinstance(opening_date, datetime):
                opening_date = opening_date.date()

            # Get location fields
            city = ""
            state = ""
            country = ""
            try:
                location = getattr(park, "location", None)
                if location:
                    city = getattr(location, "city", "") or ""
                    state = getattr(location, "state", "") or ""
                    country = getattr(location, "country", "") or ""
            except Exception:
                pass

            # Get card image URL
            card_image_url = ""
            if park.card_image and hasattr(park.card_image, "image"):
                card_image_url = (
                    park.card_image.image.url if park.card_image.image else ""
                )

            # Get primary company (operator)
            primary_company = park.operator.name if park.operator else ""

            results.append(
                {
                    "content_object": park,
                    "content_type": "park",
                    "id": park.pk,  # Use pk instead of id for Django compatibility
                    "name": park.name,
                    "slug": park.slug,
                    "park": park.name,  # For parks, the park field is the park name itself
                    "category": "park",
                    "date_added": date_added.isoformat() if date_added else "",
                    "date_opened": opening_date.isoformat() if opening_date else "",
                    "url": park.url,
                    "card_image": card_image_url,
                    "city": city,
                    "state": state,
                    "country": country,
                    "primary_company": primary_company,
                }
            )

        return results

    def _get_new_rides(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
        """Get recently added rides."""
        new_rides = (
            Ride.objects.filter(
                Q(created_at__gte=cutoff_date)
                | Q(opening_date__gte=cutoff_date.date()),
                status="OPERATING",
            )
            .select_related("park", "park__location", "card_image")
            .order_by("-created_at", "-opening_date")[:limit]
        )

        results = []
        for ride in new_rides:
            date_added = getattr(ride, "opening_date", None) or getattr(
                ride, "created_at", None
            )
            # Handle datetime to date conversion
            if date_added:
                # If it's a datetime, convert to date
                if isinstance(date_added, datetime):
                    date_added = date_added.date()
                # If it's already a date, keep it as is

            # Get opening date for date_opened field
            opening_date = getattr(ride, "opening_date", None)
            if opening_date and isinstance(opening_date, datetime):
                opening_date = opening_date.date()

            # Get card image URL
            card_image_url = ""
            if ride.card_image and hasattr(ride.card_image, "image"):
                card_image_url = (
                    ride.card_image.image.url if ride.card_image.image else ""
                )

            results.append(
                {
                    "content_object": ride,
                    "content_type": "ride",
                    "id": ride.pk,  # Use pk instead of id for Django compatibility
                    "name": ride.name,
                    "slug": ride.slug,
                    "park": ride.park.name if ride.park else "",
                    "category": "ride",
                    "date_added": date_added.isoformat() if date_added else "",
                    "date_opened": opening_date.isoformat() if opening_date else "",
                    "url": ride.url,
                    "park_url": ride.park.url if ride.park else "",
                    "card_image": card_image_url,
                }
            )

        return results

    def _format_trending_results(
        self, trending_items: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Format trending results for frontend consumption."""
        formatted_results = []

        for rank, item in enumerate(trending_items, 1):
            try:
                # Get view change for display
                content_obj = item["content_object"]
                ct = ContentType.objects.get_for_model(content_obj)
                current_views, previous_views, growth_percentage = (
                    PageView.get_views_growth(
                        ct,
                        content_obj.id,
                        self.CURRENT_PERIOD_HOURS,
                        self.PREVIOUS_PERIOD_HOURS,
                    )
                )

                # Format exactly as the frontend expects
                formatted_item = {
                    "id": item["id"],
                    "name": item["name"],
                    "park": item["park"],
                    "category": item["category"],
                    "rating": item["rating"],
                    "rank": rank,
                    "views": current_views,
                    "views_change": (
                        f"+{growth_percentage:.1f}%"
                        if growth_percentage > 0
                        else f"{growth_percentage:.1f}%"
                    ),
                    "slug": item["slug"],
                    "date_opened": item["date_opened"],
                    "url": item["url"],
                }

                # Add card_image for all items
                if item.get("card_image"):
                    formatted_item["card_image"] = item["card_image"]

                # Add park-specific fields
                if item["content_type"] == "park":
                    if item.get("city"):
                        formatted_item["city"] = item["city"]
                    if item.get("state"):
                        formatted_item["state"] = item["state"]
                    if item.get("country"):
                        formatted_item["country"] = item["country"]
                    if item.get("primary_company"):
                        formatted_item["primary_company"] = item["primary_company"]

                # Add park_url for rides
                if item.get("park_url"):
                    formatted_item["park_url"] = item["park_url"]

                formatted_results.append(formatted_item)

            except Exception as e:
                self.logger.warning(f"Error formatting trending item: {e}")

        return formatted_results

    def _format_new_content_results(
        self, new_items: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Format new content results for frontend consumption."""
        formatted_results = []

        for item in new_items:
            try:
                # Format exactly as the frontend expects
                formatted_item = {
                    "id": item["id"],
                    "name": item["name"],
                    "park": item["park"],
                    "category": item["category"],
                    "date_added": item["date_added"],
                    "date_opened": item["date_opened"],
                    "slug": item["slug"],
                    "url": item["url"],
                }

                # Add card_image for all items
                if item.get("card_image"):
                    formatted_item["card_image"] = item["card_image"]

                # Add park-specific fields
                if item["content_type"] == "park":
                    if item.get("city"):
                        formatted_item["city"] = item["city"]
                    if item.get("state"):
                        formatted_item["state"] = item["state"]
                    if item.get("country"):
                        formatted_item["country"] = item["country"]
                    if item.get("primary_company"):
                        formatted_item["primary_company"] = item["primary_company"]

                # Add park_url for rides
                if item.get("park_url"):
                    formatted_item["park_url"] = item["park_url"]

                formatted_results.append(formatted_item)

            except Exception as e:
                self.logger.warning(f"Error formatting new content item: {e}")

        return formatted_results

    def clear_cache(self, content_type: str = "all") -> None:
        """Clear trending and new content caches."""
        try:
            cache_patterns = [
                f"{self.CACHE_PREFIX}:trending:{content_type}:*",
                f"{self.CACHE_PREFIX}:new:{content_type}:*",
            ]

            if content_type == "all":
                cache_patterns.extend(
                    [
                        f"{self.CACHE_PREFIX}:trending:parks:*",
                        f"{self.CACHE_PREFIX}:trending:rides:*",
                        f"{self.CACHE_PREFIX}:new:parks:*",
                        f"{self.CACHE_PREFIX}:new:rides:*",
                    ]
                )

            # Note: this is a simplified cache clear.
            # In production, you might want to use cache.delete_many() or similar.
            cache.clear()
            self.logger.info(f"Cleared trending caches for {content_type}")

        except Exception as e:
            self.logger.error(f"Error clearing cache: {e}")


# Singleton service instance
trending_service = TrendingService()
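
A minimal usage sketch for the singleton above; the content type, limit, and days_back values are arbitrary:

from apps.core.services.trending_service import trending_service

# Returns cached results when available; force_refresh recalculates.
top_parks = trending_service.get_trending_content("parks", limit=10)
recent = trending_service.get_new_content("all", limit=5, days_back=14)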
5
apps/core/tasks/__init__.py
Normal file
@@ -0,0 +1,5 @@
"""
|
||||
Core tasks package for ThrillWiki.
|
||||
|
||||
This package contains all Celery tasks for the core application.
|
||||
"""
|
||||
607
apps/core/tasks/trending.py
Normal file
@@ -0,0 +1,607 @@
"""
|
||||
Trending calculation tasks for ThrillWiki.
|
||||
|
||||
This module contains Celery tasks for calculating and caching trending content.
|
||||
All tasks run asynchronously to avoid blocking the main application.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any
|
||||
from celery import shared_task
|
||||
from django.utils import timezone
|
||||
from django.core.cache import cache
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Q
|
||||
|
||||
from apps.core.analytics import PageView
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def calculate_trending_content(
|
||||
self, content_type: str = "all", limit: int = 50
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Calculate trending content using real analytics data.
|
||||
|
||||
This task runs periodically to update trending calculations based on:
|
||||
- View growth rates
|
||||
- Content ratings
|
||||
- Recency factors
|
||||
- Popularity metrics
|
||||
|
||||
Args:
|
||||
content_type: 'parks', 'rides', or 'all'
|
||||
limit: Maximum number of results to calculate
|
||||
|
||||
Returns:
|
||||
Dict containing trending results and metadata
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Starting trending calculation for {content_type}")
|
||||
|
||||
# Time windows for calculations
|
||||
current_period_hours = 168 # 7 days
|
||||
previous_period_hours = 336 # 14 days (for previous 7-day window comparison)
|
||||
|
||||
trending_items = []
|
||||
|
||||
if content_type in ["all", "parks"]:
|
||||
park_items = _calculate_trending_parks(
|
||||
current_period_hours,
|
||||
previous_period_hours,
|
||||
limit if content_type == "parks" else limit * 2,
|
||||
)
|
||||
trending_items.extend(park_items)
|
||||
|
||||
if content_type in ["all", "rides"]:
|
||||
ride_items = _calculate_trending_rides(
|
||||
current_period_hours,
|
||||
previous_period_hours,
|
||||
limit if content_type == "rides" else limit * 2,
|
||||
)
|
||||
trending_items.extend(ride_items)
|
||||
|
||||
# Sort by trending score and apply limit
|
||||
trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
|
||||
trending_items = trending_items[:limit]
|
||||
|
||||
# Format results for API consumption
|
||||
formatted_results = _format_trending_results(
|
||||
trending_items, current_period_hours, previous_period_hours
|
||||
)
|
||||
|
||||
# Cache results
|
||||
cache_key = f"trending:calculated:{content_type}:{limit}"
|
||||
cache.set(cache_key, formatted_results, 3600) # Cache for 1 hour
|
||||
|
||||
logger.info(
|
||||
f"Calculated {len(formatted_results)} trending items for {content_type}"
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"content_type": content_type,
|
||||
"count": len(formatted_results),
|
||||
"results": formatted_results,
|
||||
"calculated_at": timezone.now().isoformat(),
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating trending content: {e}", exc_info=True)
|
||||
# Retry the task
|
||||
raise self.retry(exc=e)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=30)
|
||||
def calculate_new_content(
|
||||
self, content_type: str = "all", days_back: int = 30, limit: int = 50
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Calculate new content based on opening dates and creation dates.
|
||||
|
||||
Args:
|
||||
content_type: 'parks', 'rides', or 'all'
|
||||
days_back: How many days to look back for new content
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
Dict containing new content results and metadata
|
||||
"""
|
||||
try:
|
||||
logger.info(f"Starting new content calculation for {content_type}")
|
||||
|
||||
cutoff_date = timezone.now() - timedelta(days=days_back)
|
||||
new_items = []
|
||||
|
||||
if content_type in ["all", "parks"]:
|
||||
parks = _get_new_parks(
|
||||
cutoff_date, limit if content_type == "parks" else limit * 2
|
||||
)
|
||||
new_items.extend(parks)
|
||||
|
||||
if content_type in ["all", "rides"]:
|
||||
rides = _get_new_rides(
|
||||
cutoff_date, limit if content_type == "rides" else limit * 2
|
||||
)
|
||||
new_items.extend(rides)
|
||||
|
||||
# Sort by date added (most recent first) and apply limit
|
||||
new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
|
||||
new_items = new_items[:limit]
|
||||
|
||||
# Format results for API consumption
|
||||
formatted_results = _format_new_content_results(new_items)
|
||||
|
||||
# Cache results
|
||||
cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
|
||||
cache.set(cache_key, formatted_results, 1800) # Cache for 30 minutes
|
||||
|
||||
logger.info(f"Calculated {len(formatted_results)} new items for {content_type}")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"content_type": content_type,
|
||||
"count": len(formatted_results),
|
||||
"results": formatted_results,
|
||||
"calculated_at": timezone.now().isoformat(),
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating new content: {e}", exc_info=True)
|
||||
raise self.retry(exc=e)
|
||||
|
||||
|
||||
@shared_task(bind=True)
|
||||
def warm_trending_cache(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Warm the trending cache by pre-calculating common queries.
|
||||
|
||||
This task runs periodically to ensure fast API responses.
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting trending cache warming")
|
||||
|
||||
# Common query combinations to pre-calculate
|
||||
queries = [
|
||||
{"content_type": "all", "limit": 20},
|
||||
{"content_type": "parks", "limit": 10},
|
||||
{"content_type": "rides", "limit": 10},
|
||||
{"content_type": "all", "limit": 50},
|
||||
]
|
||||
|
||||
results = {}
|
||||
|
||||
for query in queries:
|
||||
# Trigger trending calculation
|
||||
calculate_trending_content.delay(**query)
|
||||
|
||||
# Trigger new content calculation
|
||||
calculate_new_content.delay(**query)
|
||||
|
||||
results[f"trending_{query['content_type']}_{query['limit']}"] = "scheduled"
|
||||
results[f"new_content_{query['content_type']}_{query['limit']}"] = (
|
||||
"scheduled"
|
||||
)
|
||||
|
||||
logger.info("Trending cache warming completed")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"queries_scheduled": len(queries) * 2,
|
||||
"results": results,
|
||||
"warmed_at": timezone.now().isoformat(),
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error warming trending cache: {e}", exc_info=True)
|
||||
return {
|
||||
"success": False,
|
||||
"error": str(e),
|
||||
"warmed_at": timezone.now().isoformat(),
|
||||
}
|
||||
|
||||
|
||||
def _calculate_trending_parks(
|
||||
current_period_hours: int, previous_period_hours: int, limit: int
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Calculate trending scores for parks using real data."""
|
||||
parks = Park.objects.filter(status="OPERATING").select_related(
|
||||
"location", "operator"
|
||||
)
|
||||
|
||||
trending_parks = []
|
||||
|
||||
for park in parks:
|
||||
try:
|
||||
score = _calculate_content_score(
|
||||
park, "park", current_period_hours, previous_period_hours
|
||||
)
|
||||
if score > 0: # Only include items with positive trending scores
|
||||
trending_parks.append(
|
||||
{
|
||||
"content_object": park,
|
||||
"content_type": "park",
|
||||
"trending_score": score,
|
||||
"id": park.id,
|
||||
"name": park.name,
|
||||
"slug": park.slug,
|
||||
"location": (
|
||||
park.formatted_location if hasattr(park, "location") else ""
|
||||
),
|
||||
"category": "park",
|
||||
"rating": (
|
||||
float(park.average_rating) if park.average_rating else 0.0
|
||||
),
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Error calculating score for park {park.id}: {e}")
|
||||
|
||||
return trending_parks
|
||||
|
||||
|
||||
def _calculate_trending_rides(
|
||||
current_period_hours: int, previous_period_hours: int, limit: int
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Calculate trending scores for rides using real data."""
|
||||
rides = Ride.objects.filter(status="OPERATING").select_related(
|
||||
"park", "park__location"
|
||||
)
|
||||
|
||||
trending_rides = []
|
||||
|
||||
for ride in rides:
|
||||
try:
|
||||
score = _calculate_content_score(
|
||||
ride, "ride", current_period_hours, previous_period_hours
|
||||
)
|
||||
if score > 0: # Only include items with positive trending scores
|
||||
# Get location from park
|
||||
location = ""
|
||||
if ride.park and hasattr(ride.park, "location") and ride.park.location:
|
||||
location = ride.park.formatted_location
|
||||
|
||||
trending_rides.append(
|
||||
{
|
||||
"content_object": ride,
|
||||
"content_type": "ride",
|
||||
"trending_score": score,
|
||||
"id": ride.pk,
|
||||
"name": ride.name,
|
||||
"slug": ride.slug,
|
||||
"location": location,
|
||||
"category": "ride",
|
||||
"rating": (
|
||||
float(ride.average_rating) if ride.average_rating else 0.0
|
||||
),
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Error calculating score for ride {ride.pk}: {e}")
|
||||
|
||||
return trending_rides
|
||||
|
||||
|
||||
def _calculate_content_score(
|
||||
content_obj: Any,
|
||||
content_type: str,
|
||||
current_period_hours: int,
|
||||
previous_period_hours: int,
|
||||
) -> float:
|
||||
"""
|
||||
Calculate weighted trending score for content object using real analytics data.
|
||||
|
||||
Algorithm Components:
|
||||
- View Growth Rate (40% weight): Recent view increase vs historical
|
||||
- Rating Score (30% weight): Average user rating normalized
|
||||
- Recency Factor (20% weight): How recently content was added/updated
|
||||
- Popularity Boost (10% weight): Total view count normalization
|
||||
|
||||
Returns:
|
||||
Float between 0.0 and 1.0 representing trending strength
|
||||
"""
|
||||
try:
|
||||
# Get content type for PageView queries
|
||||
ct = ContentType.objects.get_for_model(content_obj)
|
||||
|
||||
# 1. View Growth Score (40% weight)
|
||||
view_growth_score = _calculate_view_growth_score(
|
||||
ct, content_obj.id, current_period_hours, previous_period_hours
|
||||
)
|
||||
|
||||
# 2. Rating Score (30% weight)
|
||||
rating_score = _calculate_rating_score(content_obj)
|
||||
|
||||
# 3. Recency Score (20% weight)
|
||||
recency_score = _calculate_recency_score(content_obj)
|
||||
|
||||
# 4. Popularity Score (10% weight)
|
||||
popularity_score = _calculate_popularity_score(
|
||||
ct, content_obj.id, current_period_hours
|
||||
)
|
||||
|
||||
# Calculate weighted final score
|
||||
final_score = (
|
||||
view_growth_score * 0.4
|
||||
+ rating_score * 0.3
|
||||
+ recency_score * 0.2
|
||||
+ popularity_score * 0.1
|
||||
)
|
||||
|
||||
logger.debug(
|
||||
f"{content_type} {content_obj.id}: "
|
||||
f"growth={view_growth_score:.3f}, rating={rating_score:.3f}, "
|
||||
f"recency={recency_score:.3f}, popularity={popularity_score:.3f}, "
|
||||
f"final={final_score:.3f}"
|
||||
)
|
||||
|
||||
return final_score
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error calculating score for {content_type} {content_obj.id}: {e}"
|
||||
)
|
||||
return 0.0
|
||||
|
||||
|
||||
def _calculate_view_growth_score(
|
||||
content_type: ContentType,
|
||||
object_id: int,
|
||||
current_period_hours: int,
|
||||
previous_period_hours: int,
|
||||
) -> float:
|
||||
"""Calculate normalized view growth score using real PageView data."""
|
||||
try:
|
||||
current_views, previous_views, growth_percentage = PageView.get_views_growth(
|
||||
content_type,
|
||||
object_id,
|
||||
current_period_hours,
|
||||
previous_period_hours,
|
||||
)
|
||||
|
||||
if previous_views == 0:
|
||||
# New content with views gets boost
|
||||
return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0
|
||||
|
||||
# Normalize growth percentage to 0-1 scale
|
||||
# 100% growth = 0.5, 500% growth = 1.0
|
||||
normalized_growth = (
|
||||
min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
|
||||
)
|
||||
return max(normalized_growth, 0.0)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error calculating view growth: {e}")
|
||||
return 0.0
|
||||
|
||||
|
||||
def _calculate_rating_score(content_obj: Any) -> float:
|
||||
"""Calculate normalized rating score."""
|
||||
try:
|
||||
rating = getattr(content_obj, "average_rating", None)
|
||||
if rating is None or rating == 0:
|
||||
return 0.3 # Neutral score for unrated content
|
||||
|
||||
# Normalize rating from 1-10 scale to 0-1 scale
|
||||
# Rating of 5 = 0.4, Rating of 8 = 0.7, Rating of 10 = 1.0
|
||||
return min(max((float(rating) - 1) / 9.0, 0.0), 1.0)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error calculating rating score: {e}")
|
||||
return 0.3
|
||||
|
||||
|
||||
def _calculate_recency_score(content_obj: Any) -> float:
|
||||
"""Calculate recency score based on when content was added/updated."""
|
||||
try:
|
||||
# Use opening_date for parks/rides, or created_at as fallback
|
||||
date_added = getattr(content_obj, "opening_date", None)
|
||||
if not date_added:
|
||||
date_added = getattr(content_obj, "created_at", None)
|
||||
if not date_added:
|
||||
return 0.5 # Neutral score for unknown dates
|
||||
|
||||
# Handle both date and datetime objects
|
||||
if hasattr(date_added, "date"):
|
||||
date_added = date_added.date()
|
||||
|
||||
# Calculate days since added
|
||||
today = timezone.now().date()
|
||||
days_since_added = (today - date_added).days
|
||||
|
||||
# Recency score: newer content gets higher scores
|
||||
# 0 days = 1.0, 30 days = 0.8, 365 days = 0.1, >365 days = 0.0
|
||||
if days_since_added <= 0:
|
||||
return 1.0
|
||||
elif days_since_added <= 30:
|
||||
return 1.0 - (days_since_added / 30.0) * 0.2 # 1.0 to 0.8
|
||||
elif days_since_added <= 365:
|
||||
return 0.8 - ((days_since_added - 30) / (365 - 30)) * 0.7 # 0.8 to 0.1
|
||||
else:
|
||||
return 0.0
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error calculating recency score: {e}")
|
||||
return 0.5
|
||||
|
||||
|
||||
def _calculate_popularity_score(
|
||||
content_type: ContentType, object_id: int, hours: int
|
||||
) -> float:
|
||||
"""Calculate popularity score based on total view count."""
|
||||
try:
|
||||
total_views = PageView.get_total_views_count(
|
||||
content_type, object_id, hours=hours
|
||||
)
|
||||
|
||||
# Normalize views to 0-1 scale
|
||||
# 0 views = 0.0, 100 views = 0.5, 1000+ views = 1.0
|
||||
if total_views == 0:
|
||||
return 0.0
|
||||
elif total_views <= 100:
|
||||
return total_views / 200.0 # 0.0 to 0.5
|
||||
else:
|
||||
return min(0.5 + (total_views - 100) / 1800.0, 1.0) # 0.5 to 1.0
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error calculating popularity score: {e}")
|
||||
return 0.0
|
||||
|
||||
|
||||
def _get_new_parks(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
|
||||
"""Get recently added parks using real data."""
|
||||
new_parks = (
|
||||
Park.objects.filter(
|
||||
Q(created_at__gte=cutoff_date) | Q(opening_date__gte=cutoff_date.date()),
|
||||
status="OPERATING",
|
||||
)
|
||||
.select_related("location", "operator")
|
||||
.order_by("-created_at", "-opening_date")[:limit]
|
||||
)
|
||||
|
||||
results = []
|
||||
for park in new_parks:
|
||||
date_added = park.opening_date or park.created_at
|
||||
if date_added:
|
||||
if isinstance(date_added, datetime):
|
||||
date_added = date_added.date()
|
||||
|
||||
opening_date = getattr(park, "opening_date", None)
|
||||
if opening_date and isinstance(opening_date, datetime):
|
||||
opening_date = opening_date.date()
|
||||
|
||||
results.append(
|
||||
{
|
||||
"content_object": park,
|
||||
"content_type": "park",
|
||||
"id": park.pk,
|
||||
"name": park.name,
|
||||
"slug": park.slug,
|
||||
"park": park.name, # For parks, park field is the park name itself
|
||||
"category": "park",
|
||||
"date_added": date_added.isoformat() if date_added else "",
|
||||
"date_opened": opening_date.isoformat() if opening_date else "",
|
||||
}
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def _get_new_rides(cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
|
||||
"""Get recently added rides using real data."""
|
||||
new_rides = (
|
||||
Ride.objects.filter(
|
||||
Q(created_at__gte=cutoff_date) | Q(opening_date__gte=cutoff_date.date()),
|
||||
status="OPERATING",
|
||||
)
|
||||
.select_related("park", "park__location")
|
||||
.order_by("-created_at", "-opening_date")[:limit]
|
||||
)
|
||||
|
||||
results = []
|
||||
for ride in new_rides:
|
||||
date_added = getattr(ride, "opening_date", None) or getattr(
|
||||
ride, "created_at", None
|
||||
)
|
||||
if date_added:
|
||||
if isinstance(date_added, datetime):
|
||||
date_added = date_added.date()
|
||||
|
||||
opening_date = getattr(ride, "opening_date", None)
|
||||
if opening_date and isinstance(opening_date, datetime):
|
||||
opening_date = opening_date.date()
|
||||
|
||||
results.append(
|
||||
{
|
||||
"content_object": ride,
|
||||
"content_type": "ride",
|
||||
"id": ride.pk,
|
||||
"name": ride.name,
|
||||
"slug": ride.slug,
|
||||
"park": ride.park.name if ride.park else "",
|
||||
"category": "ride",
|
||||
"date_added": date_added.isoformat() if date_added else "",
|
||||
"date_opened": opening_date.isoformat() if opening_date else "",
|
||||
}
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def _format_trending_results(
|
||||
trending_items: List[Dict[str, Any]],
|
||||
current_period_hours: int,
|
||||
previous_period_hours: int,
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Format trending results for frontend consumption."""
|
||||
formatted_results = []
|
||||
|
||||
for rank, item in enumerate(trending_items, 1):
|
||||
try:
|
||||
# Get view change for display
|
||||
content_obj = item["content_object"]
|
||||
ct = ContentType.objects.get_for_model(content_obj)
|
||||
current_views, previous_views, growth_percentage = (
|
||||
PageView.get_views_growth(
|
||||
ct,
|
||||
content_obj.id,
|
||||
current_period_hours,
|
||||
previous_period_hours,
|
||||
)
|
||||
)
|
||||
|
||||
# Format exactly as frontend expects
|
||||
formatted_item = {
|
||||
"id": item["id"],
|
||||
"name": item["name"],
|
||||
"location": item["location"],
|
||||
"category": item["category"],
|
||||
"rating": item["rating"],
|
||||
"rank": rank,
|
||||
"views": current_views,
|
||||
"views_change": (
|
||||
f"+{growth_percentage:.1f}%"
|
||||
if growth_percentage > 0
|
||||
else f"{growth_percentage:.1f}%"
|
||||
),
|
||||
"slug": item["slug"],
|
||||
}
|
||||
|
||||
formatted_results.append(formatted_item)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error formatting trending item: {e}")
|
||||
|
||||
return formatted_results
|
||||
|
||||
|
||||
def _format_new_content_results(
|
||||
new_items: List[Dict[str, Any]],
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""Format new content results for frontend consumption."""
|
||||
formatted_results = []
|
||||
|
||||
for item in new_items:
|
||||
try:
|
||||
# Format exactly as frontend expects
|
||||
formatted_item = {
|
||||
"id": item["id"],
|
||||
"name": item["name"],
|
||||
"park": item["park"],
|
||||
"category": item["category"],
|
||||
"date_added": item["date_added"],
|
||||
"date_opened": item["date_opened"],
|
||||
"slug": item["slug"],
|
||||
}
|
||||
|
||||
formatted_results.append(formatted_item)
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Error formatting new content item: {e}")
|
||||
|
||||
return formatted_results
|
||||
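To make the weighting in _calculate_content_score concrete, here is a small worked example; the component values are invented for illustration and simply plugged into the 40/30/20/10 combination above:

# Invented component scores for one park:
view_growth_score = 0.60   # strong week-over-week view growth
rating_score = 0.78        # average_rating of 8 -> (8 - 1) / 9
recency_score = 0.80       # opened roughly 30 days ago
popularity_score = 0.50    # about 100 views in the current window

final_score = (
    view_growth_score * 0.4   # 0.240
    + rating_score * 0.3      # 0.234
    + recency_score * 0.2     # 0.160
    + popularity_score * 0.1  # 0.050
)
assert round(final_score, 3) == 0.684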
1
apps/core/tests.py
Normal file
1
apps/core/tests.py
Normal file
@@ -0,0 +1 @@
# Create your tests here.
26
apps/core/urls.py
Normal file
26
apps/core/urls.py
Normal file
@@ -0,0 +1,26 @@
"""
Core app URL configuration.
"""

from django.urls import path, include
from .views.entity_search import (
    EntityFuzzySearchView,
    EntityNotFoundView,
    QuickEntitySuggestionView,
)

app_name = "core"

# Entity search endpoints
entity_patterns = [
    path("search/", EntityFuzzySearchView.as_view(), name="entity_fuzzy_search"),
    path("not-found/", EntityNotFoundView.as_view(), name="entity_not_found"),
    path(
        "suggestions/", QuickEntitySuggestionView.as_view(), name="entity_suggestions"
    ),
]

urlpatterns = [
    # Entity fuzzy matching and search endpoints
    path("entities/", include(entity_patterns)),
]
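A minimal sketch of resolving these routes by name, assuming the core app's urlpatterns are included under the "core" namespace in the project's root URLconf (the mount point is not shown in this diff):

from django.urls import reverse

# Assumes something like path("api/core/", include("apps.core.urls"))
# in the root URLconf; adjust the namespace/prefix to your project.
search_url = reverse("core:entity_fuzzy_search")   # -> .../entities/search/
suggest_url = reverse("core:entity_suggestions")   # -> .../entities/suggestions/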
1
apps/core/urls/__init__.py
Normal file
1
apps/core/urls/__init__.py
Normal file
@@ -0,0 +1 @@
# URLs package for core app
35
apps/core/urls/map_urls.py
Normal file
35
apps/core/urls/map_urls.py
Normal file
@@ -0,0 +1,35 @@
"""
URL patterns for the unified map service API.
"""

from django.urls import path
from ..views.map_views import (
    MapLocationsView,
    MapLocationDetailView,
    MapSearchView,
    MapBoundsView,
    MapStatsView,
    MapCacheView,
)

app_name = "map_api"

urlpatterns = [
    # Main map data endpoint
    path("locations/", MapLocationsView.as_view(), name="locations"),
    # Location detail endpoint
    path(
        "locations/<str:location_type>/<int:location_id>/",
        MapLocationDetailView.as_view(),
        name="location_detail",
    ),
    # Search endpoint
    path("search/", MapSearchView.as_view(), name="search"),
    # Bounds-based query endpoint
    path("bounds/", MapBoundsView.as_view(), name="bounds"),
    # Service statistics endpoint
    path("stats/", MapStatsView.as_view(), name="stats"),
    # Cache management endpoints
    path("cache/", MapCacheView.as_view(), name="cache"),
    path("cache/invalidate/", MapCacheView.as_view(), name="cache_invalidate"),
]
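As a rough client-side illustration of the locations endpoint (documented later in this diff on MapLocationsView), a bounded, clustered query might look like the following; the host and mount prefix are assumptions, since they depend on where map_urls is included:

import requests

# Hypothetical host and prefix; adjust to wherever map_urls is mounted.
resp = requests.get(
    "https://example.com/api/map/locations/",
    params={
        "north": 42.0, "south": 41.0, "east": -82.0, "west": -83.0,  # bounding box
        "zoom": 12,            # clamped server-side to 1-20
        "types": "park,ride",  # comma-separated location types
        "cluster": "true",     # enable clustering
        "page": 1, "page_size": 50,
    },
    timeout=10,
)
resp.raise_for_status()
payload = resp.json()  # {"status": "success", "data": [...], ...}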
39
apps/core/urls/maps.py
Normal file
39
apps/core/urls/maps.py
Normal file
@@ -0,0 +1,39 @@
"""
URL patterns for map views.
Includes both HTML views and HTMX endpoints.
"""

from django.urls import path
from ..views.maps import (
    UniversalMapView,
    ParkMapView,
    NearbyLocationsView,
    LocationFilterView,
    LocationSearchView,
    MapBoundsUpdateView,
    LocationDetailModalView,
    LocationListView,
)

app_name = "maps"

urlpatterns = [
    # Main map views
    path("", UniversalMapView.as_view(), name="universal_map"),
    path("parks/", ParkMapView.as_view(), name="park_map"),
    path("nearby/", NearbyLocationsView.as_view(), name="nearby_locations"),
    path("list/", LocationListView.as_view(), name="location_list"),
    # HTMX endpoints for dynamic updates
    path("htmx/filter/", LocationFilterView.as_view(), name="htmx_filter"),
    path("htmx/search/", LocationSearchView.as_view(), name="htmx_search"),
    path(
        "htmx/bounds/",
        MapBoundsUpdateView.as_view(),
        name="htmx_bounds_update",
    ),
    path(
        "htmx/location/<str:location_type>/<int:location_id>/",
        LocationDetailModalView.as_view(),
        name="htmx_location_detail",
    ),
]
24
apps/core/urls/search.py
Normal file
24
apps/core/urls/search.py
Normal file
@@ -0,0 +1,24 @@
from django.urls import path
from apps.core.views.search import (
    AdaptiveSearchView,
    FilterFormView,
    LocationSearchView,
    LocationSuggestionsView,
)
from apps.rides.views import RideSearchView

app_name = "search"

urlpatterns = [
    path("parks/", AdaptiveSearchView.as_view(), name="search"),
    path("parks/filters/", FilterFormView.as_view(), name="filter_form"),
    path("rides/", RideSearchView.as_view(), name="ride_search"),
    path("rides/results/", RideSearchView.as_view(), name="ride_search_results"),
    # Location-aware search
    path("location/", LocationSearchView.as_view(), name="location_search"),
    path(
        "location/suggestions/",
        LocationSuggestionsView.as_view(),
        name="location_suggestions",
    ),
]
1
apps/core/utils/__init__.py
Normal file
1
apps/core/utils/__init__.py
Normal file
@@ -0,0 +1 @@
# Core utilities
429
apps/core/utils/query_optimization.py
Normal file
429
apps/core/utils/query_optimization.py
Normal file
@@ -0,0 +1,429 @@
"""
Database query optimization utilities and helpers.
"""

import time
import logging
from contextlib import contextmanager
from typing import Optional, Dict, Any, List, Type
from django.db import connection, models
from django.db.models import QuerySet, Prefetch, Count, Avg, Max
from django.db.models.fields.reverse_related import ManyToManyRel
from django.conf import settings
from django.core.cache import cache
from django.core.exceptions import FieldDoesNotExist

logger = logging.getLogger("query_optimization")


@contextmanager
def track_queries(
    operation_name: str, warn_threshold: int = 10, time_threshold: float = 1.0
):
    """
    Context manager to track database queries for specific operations.

    Args:
        operation_name: Name of the operation being tracked
        warn_threshold: Number of queries that triggers a warning
        time_threshold: Execution time in seconds that triggers a warning
    """
    if not settings.DEBUG:
        yield
        return

    initial_queries = len(connection.queries)
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        total_queries = len(connection.queries) - initial_queries
        execution_time = end_time - start_time

        # Collect query details
        query_details = []
        if hasattr(connection, "queries") and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            query_details = [
                {
                    "sql": (
                        query["sql"][:500] + "..."
                        if len(query["sql"]) > 500
                        else query["sql"]
                    ),
                    "time": float(query["time"]),
                    "duplicate_count": sum(
                        1 for q in recent_queries if q["sql"] == query["sql"]
                    ),
                }
                for query in recent_queries
            ]

        performance_data = {
            "operation": operation_name,
            "query_count": total_queries,
            "execution_time": execution_time,
            "queries": query_details if settings.DEBUG else [],
            "slow_queries": [
                q for q in query_details if q["time"] > 0.1
            ],  # Queries slower than 100ms
        }

        # Log warnings for performance issues
        if total_queries > warn_threshold or execution_time > time_threshold:
            logger.warning(
                f"Performance concern in {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data,
            )
        else:
            logger.debug(
                f"Query tracking for {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data,
            )


class QueryOptimizer:
    """Utility class for common query optimization patterns."""

    @staticmethod
    def optimize_park_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize a Park queryset with proper select_related and prefetch_related.
        """
        return (
            queryset.select_related("location", "operator", "created_by")
            .prefetch_related("areas", "rides__manufacturer", "reviews__user")
            .annotate(
                # distinct=True avoids inflated counts from the reviews join
                ride_count=Count("rides", distinct=True),
                average_rating=Avg("reviews__rating"),
                latest_review_date=Max("reviews__created_at"),
            )
        )

    @staticmethod
    def optimize_ride_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize a Ride queryset with proper relationships.
        """
        return (
            queryset.select_related(
                "park", "park__location", "manufacturer", "created_by"
            )
            .prefetch_related("reviews__user", "media_items")
            .annotate(
                review_count=Count("reviews"),
                average_rating=Avg("reviews__rating"),
                latest_review_date=Max("reviews__created_at"),
            )
        )

    @staticmethod
    def optimize_user_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize a User queryset for profile views.
        """
        return queryset.prefetch_related(
            Prefetch("park_reviews", to_attr="cached_park_reviews"),
            Prefetch("ride_reviews", to_attr="cached_ride_reviews"),
            "authored_parks",
            "authored_rides",
        ).annotate(
            # distinct=True avoids row multiplication when several
            # multi-valued relations are joined in the same query
            total_reviews=Count("park_reviews", distinct=True)
            + Count("ride_reviews", distinct=True),
            parks_authored=Count("authored_parks", distinct=True),
            rides_authored=Count("authored_rides", distinct=True),
        )

    @staticmethod
    def create_bulk_queryset(model: Type[models.Model], ids: List[int]) -> QuerySet:
        """
        Create an optimized queryset for bulk operations.
        """
        queryset = model.objects.filter(id__in=ids)

        # Apply model-specific optimizations
        if hasattr(model, "_meta") and model._meta.model_name == "park":
            return QueryOptimizer.optimize_park_queryset(queryset)
        elif hasattr(model, "_meta") and model._meta.model_name == "ride":
            return QueryOptimizer.optimize_ride_queryset(queryset)
        elif hasattr(model, "_meta") and model._meta.model_name == "user":
            return QueryOptimizer.optimize_user_queryset(queryset)

        return queryset


class QueryCache:
    """Caching utilities for expensive queries."""

    @staticmethod
    def cache_queryset_result(
        cache_key: str, queryset_func, timeout: int = 3600, **kwargs
    ):
        """
        Cache the result of an expensive queryset operation.

        Args:
            cache_key: Unique key for caching
            queryset_func: Function that returns the queryset result
            timeout: Cache timeout in seconds
            **kwargs: Arguments to pass to queryset_func
        """
        # Try to get from cache first
        cached_result = cache.get(cache_key)
        if cached_result is not None:
            logger.debug(f"Cache hit for queryset: {cache_key}")
            return cached_result

        # Execute the expensive operation
        with track_queries(f"cache_miss_{cache_key}"):
            result = queryset_func(**kwargs)

        # Cache the result
        cache.set(cache_key, result, timeout)
        logger.debug(f"Cached queryset result: {cache_key}")

        return result

    @staticmethod
    def invalidate_model_cache(model_name: str, instance_id: Optional[int] = None):
        """
        Invalidate cache keys related to a specific model.

        Args:
            model_name: Name of the model (e.g., 'park', 'ride')
            instance_id: Specific instance ID, if applicable
        """
        # Pattern-based cache invalidation (works with Redis)
        if instance_id:
            pattern = f"*{model_name}_{instance_id}*"
        else:
            pattern = f"*{model_name}*"

        try:
            # For Redis cache backends that support pattern deletion
            if hasattr(cache, "delete_pattern"):
                deleted_count = cache.delete_pattern(pattern)
                logger.info(
                    f"Invalidated {deleted_count} cache keys for pattern: {pattern}"
                )
            else:
                logger.warning(
                    f"Cache backend does not support pattern deletion: {pattern}"
                )
        except Exception as e:
            logger.error(f"Error invalidating cache pattern {pattern}: {e}")


class IndexAnalyzer:
    """Analyze and suggest database indexes."""

    @staticmethod
    def analyze_slow_queries(min_time: float = 0.1) -> List[Dict[str, Any]]:
        """
        Analyze slow queries from the current request.

        Args:
            min_time: Minimum query time in seconds to consider "slow"
        """
        if not hasattr(connection, "queries"):
            return []

        slow_queries = []
        for query in connection.queries:
            query_time = float(query.get("time", 0))
            if query_time >= min_time:
                slow_queries.append(
                    {
                        "sql": query["sql"],
                        "time": query_time,
                        "analysis": IndexAnalyzer._analyze_query_sql(query["sql"]),
                    }
                )

        return slow_queries

    @staticmethod
    def _analyze_query_sql(sql: str) -> Dict[str, Any]:
        """
        Analyze SQL to suggest potential optimizations.
        """
        sql_upper = sql.upper()
        analysis = {
            "has_where_clause": "WHERE" in sql_upper,
            "has_join": any(
                join in sql_upper
                for join in ["JOIN", "INNER JOIN", "LEFT JOIN", "RIGHT JOIN"]
            ),
            "has_order_by": "ORDER BY" in sql_upper,
            "has_group_by": "GROUP BY" in sql_upper,
            "has_like": "LIKE" in sql_upper,
            "table_scans": [],
            "suggestions": [],
        }

        # Detect potential table scans
        if "WHERE" not in sql_upper and "SELECT COUNT(*) FROM" not in sql_upper:
            analysis["table_scans"].append("Query may be doing a full table scan")

        # Suggest indexes based on patterns
        if analysis["has_where_clause"] and not analysis["has_join"]:
            analysis["suggestions"].append(
                "Consider adding indexes on WHERE clause columns"
            )

        if analysis["has_order_by"]:
            analysis["suggestions"].append(
                "Consider adding indexes on ORDER BY columns"
            )

        # Flag LIKE patterns that start with a wildcard, since those
        # cannot use a B-tree index
        if analysis["has_like"] and "LIKE '%" in sql_upper:
            analysis["suggestions"].append(
                "LIKE queries with leading wildcards cannot use indexes efficiently"
            )

        return analysis

    @staticmethod
    def suggest_model_indexes(model: Type[models.Model]) -> List[str]:
        """
        Suggest database indexes for a Django model based on its fields.
        """
        suggestions = []
        opts = model._meta

        # Foreign key fields should have indexes (Django adds these
        # automatically)
        for field in opts.fields:
            if isinstance(field, models.ForeignKey):
                suggestions.append(
                    f"Index on {field.name} (automatically created by Django)"
                )

        # Suggest composite indexes for common query patterns
        date_fields = [
            f.name
            for f in opts.fields
            if isinstance(f, (models.DateField, models.DateTimeField))
        ]
        status_fields = [
            f.name
            for f in opts.fields
            if f.name in ["status", "is_active", "is_published"]
        ]

        if date_fields and status_fields:
            for date_field in date_fields:
                for status_field in status_fields:
                    suggestions.append(
                        f"Composite index on ({status_field}, {date_field}) for filtered date queries"
                    )

        # Suggest indexes for fields commonly used in WHERE clauses
        common_filter_fields = ["slug", "name", "created_at", "updated_at"]
        for field in opts.fields:
            if field.name in common_filter_fields and not field.db_index:
                suggestions.append(f"Consider adding db_index=True to {field.name}")

        return suggestions


def log_query_performance():
    """Decorator to log query performance for a function."""

    def decorator(func):
        def wrapper(*args, **kwargs):
            operation_name = f"{func.__module__}.{func.__name__}"
            with track_queries(operation_name):
                return func(*args, **kwargs)

        return wrapper

    return decorator


def optimize_queryset_for_serialization(
    queryset: QuerySet, fields: List[str]
) -> QuerySet:
    """
    Optimize a queryset for API serialization by only selecting needed fields.

    Args:
        queryset: The queryset to optimize
        fields: List of field names that will be serialized
    """
    # Extract foreign key fields that need select_related
    model = queryset.model
    opts = model._meta

    select_related_fields = []
    prefetch_related_fields = []

    for field_name in fields:
        try:
            field = opts.get_field(field_name)
            if isinstance(field, models.ForeignKey):
                select_related_fields.append(field_name)
            elif isinstance(field, (models.ManyToManyField, ManyToManyRel)):
                prefetch_related_fields.append(field_name)
        except FieldDoesNotExist:
            # Field might be a property or method, skip optimization
            continue

    # Apply optimizations
    if select_related_fields:
        queryset = queryset.select_related(*select_related_fields)

    if prefetch_related_fields:
        queryset = queryset.prefetch_related(*prefetch_related_fields)

    return queryset


# Query performance monitoring context manager
@contextmanager
def monitor_db_performance(operation_name: str):
    """
    Context manager that monitors database performance for an operation.
    """
    initial_queries = len(connection.queries) if hasattr(connection, "queries") else 0
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        duration = end_time - start_time

        if hasattr(connection, "queries"):
            total_queries = len(connection.queries) - initial_queries

            # Analyze queries for performance issues
            slow_queries = IndexAnalyzer.analyze_slow_queries(0.05)  # 50ms threshold

            performance_data = {
                "operation": operation_name,
                "duration": duration,
                "query_count": total_queries,
                "slow_query_count": len(slow_queries),
                # Limit to top 5 slow queries
                "slow_queries": slow_queries[:5],
            }

            # Log performance data
            if duration > 1.0 or total_queries > 15 or slow_queries:
                logger.warning(
                    f"Performance issue in {operation_name}: "
                    f"{duration:.3f}s, {total_queries} queries, "
                    f"{len(slow_queries)} slow",
                    extra=performance_data,
                )
            else:
                logger.debug(
                    f"DB performance for {operation_name}: "
                    f"{duration:.3f}s, {total_queries} queries",
                    extra=performance_data,
                )
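A short usage sketch for these helpers; the function name and call site are invented for illustration, while the helpers themselves are the ones defined in this file:

from apps.core.utils.query_optimization import (
    QueryCache,
    QueryOptimizer,
    log_query_performance,
    track_queries,
)


@log_query_performance()
def build_park_index():
    # Hypothetical call site; Park is the model referenced earlier in this diff.
    from apps.parks.models import Park

    with track_queries("park_index", warn_threshold=5):
        parks = QueryOptimizer.optimize_park_queryset(Park.objects.all())
        return list(parks[:25])


# Cache the expensive result for 10 minutes; the key is arbitrary.
top_parks = QueryCache.cache_queryset_result(
    "park_index:top25", build_park_index, timeout=600
)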
1
apps/core/views/__init__.py
Normal file
1
apps/core/views/__init__.py
Normal file
@@ -0,0 +1 @@
# Core views
347
apps/core/views/entity_search.py
Normal file
347
apps/core/views/entity_search.py
Normal file
@@ -0,0 +1,347 @@
"""
Entity search views with fuzzy matching and authentication prompts.
"""

from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny
from django.views.decorators.csrf import csrf_exempt
from django.utils.decorators import method_decorator
from typing import Optional, List

from ..services.entity_fuzzy_matching import (
    entity_fuzzy_matcher,
    EntityType,
)


class EntityFuzzySearchView(APIView):
    """
    API endpoint for fuzzy entity search with authentication prompts.

    Handles entity lookup failures by providing intelligent suggestions and
    authentication prompts for entity creation.
    """

    permission_classes = [AllowAny]  # Allow both authenticated and anonymous users

    def post(self, request):
        """
        Perform fuzzy entity search.

        Request body:
        {
            "query": "entity name to search",
            "entity_types": ["park", "ride", "company"],  // optional
            "include_suggestions": true  // optional, default true
        }

        Response:
        {
            "success": true,
            "query": "original query",
            "matches": [
                {
                    "entity_type": "park",
                    "name": "Cedar Point",
                    "slug": "cedar-point",
                    "score": 0.95,
                    "confidence": "high",
                    "match_reason": "Text similarity with 'Cedar Point'",
                    "url": "/parks/cedar-point/",
                    "entity_id": 123
                }
            ],
            "suggestion": {
                "suggested_name": "New Entity Name",
                "entity_type": "park",
                "requires_authentication": true,
                "login_prompt": "Log in to suggest adding...",
                "signup_prompt": "Sign up to contribute...",
                "creation_hint": "Help expand ThrillWiki..."
            },
            "user_authenticated": false
        }
        """
        try:
            # Parse request data
            query = request.data.get("query", "").strip()
            entity_types_raw = request.data.get(
                "entity_types", ["park", "ride", "company"]
            )
            include_suggestions = request.data.get("include_suggestions", True)

            # Validate query
            if not query or len(query) < 2:
                return Response(
                    {
                        "success": False,
                        "error": "Query must be at least 2 characters long",
                        "code": "INVALID_QUERY",
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Parse and validate entity types
            entity_types = []
            valid_types = {"park", "ride", "company"}

            for entity_type in entity_types_raw:
                if entity_type in valid_types:
                    entity_types.append(EntityType(entity_type))

            if not entity_types:
                entity_types = [EntityType.PARK, EntityType.RIDE, EntityType.COMPANY]

            # Perform fuzzy matching
            matches, suggestion = entity_fuzzy_matcher.find_entity(
                query=query, entity_types=entity_types, user=request.user
            )

            # Format response
            response_data = {
                "success": True,
                "query": query,
                "matches": [match.to_dict() for match in matches],
                "user_authenticated": (
                    request.user.is_authenticated
                    if hasattr(request.user, "is_authenticated")
                    else False
                ),
            }

            # Include suggestion if requested and available
            if include_suggestions and suggestion:
                response_data["suggestion"] = {
                    "suggested_name": suggestion.suggested_name,
                    "entity_type": suggestion.entity_type.value,
                    "requires_authentication": suggestion.requires_authentication,
                    "login_prompt": suggestion.login_prompt,
                    "signup_prompt": suggestion.signup_prompt,
                    "creation_hint": suggestion.creation_hint,
                }

            return Response(response_data, status=status.HTTP_200_OK)

        except Exception as e:
            return Response(
                {
                    "success": False,
                    "error": f"Internal server error: {str(e)}",
                    "code": "INTERNAL_ERROR",
                },
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


class EntityNotFoundView(APIView):
    """
    Endpoint specifically for handling entity not found scenarios.

    This view is called when normal entity lookup fails and provides
    fuzzy matching suggestions along with authentication prompts.
    """

    permission_classes = [AllowAny]

    def post(self, request):
        """
        Handle entity not found with suggestions.

        Request body:
        {
            "original_query": "what user searched for",
            "attempted_slug": "slug-that-failed",  // optional
            "entity_type": "park",  // optional, inferred from context
            "context": {  // optional context information
                "park_slug": "park-slug-if-searching-for-ride",
                "source_page": "page where search originated"
            }
        }
        """
        try:
            original_query = request.data.get("original_query", "").strip()
            attempted_slug = request.data.get("attempted_slug", "")
            entity_type_hint = request.data.get("entity_type")
            context = request.data.get("context", {})

            if not original_query:
                return Response(
                    {
                        "success": False,
                        "error": "original_query is required",
                        "code": "MISSING_QUERY",
                    },
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Determine entity types to search based on context
            entity_types = []
            if entity_type_hint:
                try:
                    entity_types = [EntityType(entity_type_hint)]
                except ValueError:
                    pass

            # If we have park context, prioritize ride searches
            if context.get("park_slug") and not entity_types:
                entity_types = [EntityType.RIDE, EntityType.PARK]

            # Default to all types if not specified
            if not entity_types:
                entity_types = [EntityType.PARK, EntityType.RIDE, EntityType.COMPANY]

            # Try fuzzy matching on the original query
            matches, suggestion = entity_fuzzy_matcher.find_entity(
                query=original_query, entity_types=entity_types, user=request.user
            )

            # If no matches on the original query, try the attempted slug
            if not matches and attempted_slug:
                # Convert slug back to readable name for fuzzy matching
                slug_as_name = attempted_slug.replace("-", " ").title()
                matches, suggestion = entity_fuzzy_matcher.find_entity(
                    query=slug_as_name, entity_types=entity_types, user=request.user
                )

            # Prepare response with detailed context
            response_data = {
                "success": True,
                "original_query": original_query,
                "attempted_slug": attempted_slug,
                "context": context,
                "matches": [match.to_dict() for match in matches],
                "user_authenticated": (
                    request.user.is_authenticated
                    if hasattr(request.user, "is_authenticated")
                    else False
                ),
                "has_matches": len(matches) > 0,
            }

            # Always include suggestion for entity not found scenarios
            if suggestion:
                response_data["suggestion"] = {
                    "suggested_name": suggestion.suggested_name,
                    "entity_type": suggestion.entity_type.value,
                    "requires_authentication": suggestion.requires_authentication,
                    "login_prompt": suggestion.login_prompt,
                    "signup_prompt": suggestion.signup_prompt,
                    "creation_hint": suggestion.creation_hint,
                }

            return Response(response_data, status=status.HTTP_200_OK)

        except Exception as e:
            return Response(
                {
                    "success": False,
                    "error": f"Internal server error: {str(e)}",
                    "code": "INTERNAL_ERROR",
                },
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )


@method_decorator(csrf_exempt, name="dispatch")
class QuickEntitySuggestionView(APIView):
    """
    Lightweight endpoint for quick entity suggestions (e.g., autocomplete).
    """

    permission_classes = [AllowAny]

    def get(self, request):
        """
        Get quick entity suggestions.

        Query parameters:
        - q: query string
        - types: comma-separated entity types (park,ride,company)
        - limit: max results (default 5)
        """
        try:
            query = request.GET.get("q", "").strip()
            types_param = request.GET.get("types", "park,ride,company")
            limit = min(int(request.GET.get("limit", 5)), 10)  # Cap at 10

            if not query or len(query) < 2:
                return Response(
                    {"suggestions": [], "query": query}, status=status.HTTP_200_OK
                )

            # Parse entity types
            entity_types = []
            for type_str in types_param.split(","):
                type_str = type_str.strip()
                if type_str in ["park", "ride", "company"]:
                    entity_types.append(EntityType(type_str))

            if not entity_types:
                entity_types = [EntityType.PARK, EntityType.RIDE, EntityType.COMPANY]

            # Get fuzzy matches
            matches, _ = entity_fuzzy_matcher.find_entity(
                query=query, entity_types=entity_types, user=request.user
            )

            # Format as simple suggestions
            suggestions = []
            for match in matches[:limit]:
                suggestions.append(
                    {
                        "name": match.name,
                        "type": match.entity_type.value,
                        "slug": match.slug,
                        "url": match.url,
                        "score": match.score,
                        "confidence": match.confidence,
                    }
                )

            return Response(
                {"suggestions": suggestions, "query": query, "count": len(suggestions)},
                status=status.HTTP_200_OK,
            )

        except Exception as e:
            return Response(
                {"suggestions": [], "query": request.GET.get("q", ""), "error": str(e)},
                status=status.HTTP_200_OK,
            )  # Return 200 even on errors for autocomplete


# Utility function for other views to use
def get_entity_suggestions(
    query: str, entity_types: Optional[List[str]] = None, user=None
):
    """
    Utility function for other Django views to get entity suggestions.

    Args:
        query: Search query
        entity_types: List of entity type strings
        user: Django user object

    Returns:
        Tuple of (matches, suggestion)
    """
    try:
        # Convert string types to EntityType enums
        parsed_types = []
        if entity_types:
            for entity_type in entity_types:
                try:
                    parsed_types.append(EntityType(entity_type))
                except ValueError:
                    continue

        if not parsed_types:
            parsed_types = [EntityType.PARK, EntityType.RIDE, EntityType.COMPANY]

        return entity_fuzzy_matcher.find_entity(
            query=query, entity_types=parsed_types, user=user
        )
    except Exception:
        return [], None
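A quick illustration of exercising the suggestions endpoint with Django's test client; the URL prefix assumes entity_patterns is mounted under /api/core/entities/, which is not confirmed by this diff:

from django.test import Client

client = Client()
resp = client.get(
    "/api/core/entities/suggestions/",  # assumed mount point for entity_patterns
    {"q": "cedar pont", "types": "park", "limit": 3},
)
data = resp.json()
# e.g. {"suggestions": [{"name": "Cedar Point", "type": "park", ...}], "count": 1, ...}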
689
apps/core/views/map_views.py
Normal file
689
apps/core/views/map_views.py
Normal file
@@ -0,0 +1,689 @@
|
||||
"""
|
||||
API views for the unified map service.
|
||||
Enhanced with proper error handling, pagination, and performance optimizations.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Any, Optional
|
||||
from django.http import JsonResponse, HttpRequest
|
||||
from django.views.decorators.cache import cache_page
|
||||
from django.views.decorators.gzip import gzip_page
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views import View
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.conf import settings
|
||||
import time
|
||||
|
||||
from ..services.map_service import unified_map_service
|
||||
from ..services.data_structures import GeoBounds, MapFilters, LocationType
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MapAPIView(View):
|
||||
"""Base view for map API endpoints with common functionality."""
|
||||
|
||||
# Pagination settings
|
||||
DEFAULT_PAGE_SIZE = 50
|
||||
MAX_PAGE_SIZE = 200
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
"""Add CORS headers, compression, and handle preflight requests."""
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
response = super().dispatch(request, *args, **kwargs)
|
||||
|
||||
# Add CORS headers for API access
|
||||
response["Access-Control-Allow-Origin"] = "*"
|
||||
response["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
|
||||
response["Access-Control-Allow-Headers"] = "Content-Type, Authorization"
|
||||
|
||||
# Add performance headers
|
||||
response["X-Response-Time"] = f"{(time.time() - start_time) * 1000:.2f}ms"
|
||||
|
||||
# Add compression hint for large responses
|
||||
if hasattr(response, "content") and len(response.content) > 1024:
|
||||
response["Content-Encoding"] = "gzip"
|
||||
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"API error in {request.path}: {str(e)}",
|
||||
exc_info=True,
|
||||
)
|
||||
return self._error_response("An internal server error occurred", status=500)
|
||||
|
||||
def options(self, request, *args, **kwargs):
|
||||
"""Handle preflight CORS requests."""
|
||||
return JsonResponse({}, status=200)
|
||||
|
||||
def _parse_bounds(self, request: HttpRequest) -> Optional[GeoBounds]:
|
||||
"""Parse geographic bounds from request parameters."""
|
||||
try:
|
||||
north = request.GET.get("north")
|
||||
south = request.GET.get("south")
|
||||
east = request.GET.get("east")
|
||||
west = request.GET.get("west")
|
||||
|
||||
if all(param is not None for param in [north, south, east, west]):
|
||||
bounds = GeoBounds(
|
||||
north=float(north),
|
||||
south=float(south),
|
||||
east=float(east),
|
||||
west=float(west),
|
||||
)
|
||||
|
||||
# Validate bounds
|
||||
if not (-90 <= bounds.south <= bounds.north <= 90):
|
||||
raise ValidationError("Invalid latitude bounds")
|
||||
if not (-180 <= bounds.west <= bounds.east <= 180):
|
||||
raise ValidationError("Invalid longitude bounds")
|
||||
|
||||
return bounds
|
||||
return None
|
||||
except (ValueError, TypeError) as e:
|
||||
raise ValidationError(f"Invalid bounds parameters: {e}")
|
||||
|
||||
def _parse_pagination(self, request: HttpRequest) -> Dict[str, int]:
|
||||
"""Parse pagination parameters from request."""
|
||||
try:
|
||||
page = max(1, int(request.GET.get("page", 1)))
|
||||
page_size = min(
|
||||
self.MAX_PAGE_SIZE,
|
||||
max(
|
||||
1,
|
||||
int(request.GET.get("page_size", self.DEFAULT_PAGE_SIZE)),
|
||||
),
|
||||
)
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
return {
|
||||
"page": page,
|
||||
"page_size": page_size,
|
||||
"offset": offset,
|
||||
"limit": page_size,
|
||||
}
|
||||
except (ValueError, TypeError):
|
||||
return {
|
||||
"page": 1,
|
||||
"page_size": self.DEFAULT_PAGE_SIZE,
|
||||
"offset": 0,
|
||||
"limit": self.DEFAULT_PAGE_SIZE,
|
||||
}
|
||||
|
||||
def _parse_filters(self, request: HttpRequest) -> Optional[MapFilters]:
|
||||
"""Parse filtering parameters from request."""
|
||||
try:
|
||||
filters = MapFilters()
|
||||
|
||||
# Location types
|
||||
location_types_param = request.GET.get("types")
|
||||
if location_types_param:
|
||||
type_strings = location_types_param.split(",")
|
||||
valid_types = {lt.value for lt in LocationType}
|
||||
filters.location_types = {
|
||||
LocationType(t.strip())
|
||||
for t in type_strings
|
||||
if t.strip() in valid_types
|
||||
}
|
||||
|
||||
# Park status
|
||||
park_status_param = request.GET.get("park_status")
|
||||
if park_status_param:
|
||||
filters.park_status = set(park_status_param.split(","))
|
||||
|
||||
# Ride types
|
||||
ride_types_param = request.GET.get("ride_types")
|
||||
if ride_types_param:
|
||||
filters.ride_types = set(ride_types_param.split(","))
|
||||
|
||||
# Company roles
|
||||
company_roles_param = request.GET.get("company_roles")
|
||||
if company_roles_param:
|
||||
filters.company_roles = set(company_roles_param.split(","))
|
||||
|
||||
# Search query with length validation
|
||||
search_query = request.GET.get("q") or request.GET.get("search")
|
||||
if search_query and len(search_query.strip()) >= 2:
|
||||
filters.search_query = search_query.strip()
|
||||
|
||||
# Rating filter with validation
|
||||
min_rating_param = request.GET.get("min_rating")
|
||||
if min_rating_param:
|
||||
min_rating = float(min_rating_param)
|
||||
if 0 <= min_rating <= 10:
|
||||
filters.min_rating = min_rating
|
||||
|
||||
# Geographic filters with validation
|
||||
country = request.GET.get("country", "").strip()
|
||||
if country and len(country) >= 2:
|
||||
filters.country = country
|
||||
|
||||
state = request.GET.get("state", "").strip()
|
||||
if state and len(state) >= 2:
|
||||
filters.state = state
|
||||
|
||||
city = request.GET.get("city", "").strip()
|
||||
if city and len(city) >= 2:
|
||||
filters.city = city
|
||||
|
||||
# Coordinates requirement
|
||||
has_coordinates_param = request.GET.get("has_coordinates")
|
||||
if has_coordinates_param is not None:
|
||||
filters.has_coordinates = has_coordinates_param.lower() in [
|
||||
"true",
|
||||
"1",
|
||||
"yes",
|
||||
]
|
||||
|
||||
return (
|
||||
filters
|
||||
if any(
|
||||
[
|
||||
filters.location_types,
|
||||
filters.park_status,
|
||||
filters.ride_types,
|
||||
filters.company_roles,
|
||||
filters.search_query,
|
||||
filters.min_rating,
|
||||
filters.country,
|
||||
filters.state,
|
||||
filters.city,
|
||||
]
|
||||
)
|
||||
else None
|
||||
)
|
||||
|
||||
except (ValueError, TypeError) as e:
|
||||
raise ValidationError(f"Invalid filter parameters: {e}")
|
||||
|
||||
def _parse_zoom_level(self, request: HttpRequest) -> int:
|
||||
"""Parse zoom level from request with default."""
|
||||
try:
|
||||
zoom_param = request.GET.get("zoom", "10")
|
||||
zoom_level = int(zoom_param)
|
||||
return max(1, min(20, zoom_level)) # Clamp between 1 and 20
|
||||
except (ValueError, TypeError):
|
||||
return 10 # Default zoom level
|
||||
|
||||
def _create_paginated_response(
|
||||
self,
|
||||
data: list,
|
||||
total_count: int,
|
||||
pagination: Dict[str, int],
|
||||
request: HttpRequest,
|
||||
) -> Dict[str, Any]:
|
||||
"""Create paginated response with metadata."""
|
||||
total_pages = (total_count + pagination["page_size"] - 1) // pagination[
|
||||
"page_size"
|
||||
]
|
||||
|
||||
# Build pagination URLs
|
||||
base_url = request.build_absolute_uri(request.path)
|
||||
query_params = request.GET.copy()
|
||||
|
||||
next_url = None
|
||||
if pagination["page"] < total_pages:
|
||||
query_params["page"] = pagination["page"] + 1
|
||||
next_url = f"{base_url}?{query_params.urlencode()}"
|
||||
|
||||
prev_url = None
|
||||
if pagination["page"] > 1:
|
||||
query_params["page"] = pagination["page"] - 1
|
||||
prev_url = f"{base_url}?{query_params.urlencode()}"
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"data": data,
|
||||
"pagination": {
|
||||
"page": pagination["page"],
|
||||
"page_size": pagination["page_size"],
|
||||
"total_pages": total_pages,
|
||||
"total_count": total_count,
|
||||
"has_next": pagination["page"] < total_pages,
|
||||
"has_previous": pagination["page"] > 1,
|
||||
"next_url": next_url,
|
||||
"previous_url": prev_url,
|
||||
},
|
||||
}
|
||||
|
||||
    def _error_response(
        self,
        message: str,
        status: int = 400,
        error_code: str = None,
        details: Dict[str, Any] = None,
    ) -> JsonResponse:
        """Return standardized error response with enhanced information."""
        response_data = {
            "status": "error",
            "message": message,
            "timestamp": time.time(),
            "data": None,
        }

        if error_code:
            response_data["error_code"] = error_code

        if details:
            response_data["details"] = details

        # Add request ID for debugging in production
        if hasattr(settings, "DEBUG") and not settings.DEBUG:
            response_data["request_id"] = getattr(self.request, "id", None)

        return JsonResponse(response_data, status=status)

    def _success_response(
        self, data: Any, message: str = None, metadata: Dict[str, Any] = None
    ) -> JsonResponse:
        """Return standardized success response."""
        response_data = {
            "status": "success",
            "data": data,
            "timestamp": time.time(),
        }

        if message:
            response_data["message"] = message

        if metadata:
            response_data["metadata"] = metadata

        return JsonResponse(response_data)


class MapLocationsView(MapAPIView):
    """
    API endpoint for getting map locations with optional clustering.

    GET /api/map/locations/
    Parameters:
    - north, south, east, west: Bounding box coordinates
    - zoom: Zoom level (1-20)
    - types: Comma-separated location types (park,ride,company,generic)
    - cluster: Whether to enable clustering (true/false)
    - q: Search query
    - park_status: Park status filter
    - ride_types: Ride type filter
    - min_rating: Minimum rating filter
    - country, state, city: Geographic filters
    """

    @method_decorator(cache_page(300))  # Cache for 5 minutes
    @method_decorator(gzip_page)  # Compress large responses
    def get(self, request: HttpRequest) -> JsonResponse:
        """Get map locations with optional clustering and filtering."""
        try:
            # Parse parameters
            bounds = self._parse_bounds(request)
            filters = self._parse_filters(request)
            zoom_level = self._parse_zoom_level(request)
            pagination = self._parse_pagination(request)

            # Clustering preference
            cluster_param = request.GET.get("cluster", "true")
            enable_clustering = cluster_param.lower() in ["true", "1", "yes"]

            # Cache preference
            use_cache_param = request.GET.get("cache", "true")
            use_cache = use_cache_param.lower() in ["true", "1", "yes"]

            # Validate request
            if not enable_clustering and not bounds and not filters:
                return self._error_response(
                    "Either bounds, filters, or clustering must be specified for non-clustered requests",
                    error_code="MISSING_PARAMETERS",
                )

            # Get map data
            response = unified_map_service.get_map_data(
                bounds=bounds,
                filters=filters,
                zoom_level=zoom_level,
                cluster=enable_clustering,
                use_cache=use_cache,
            )

            # Handle pagination for non-clustered results
            if not enable_clustering and response.locations:
                start_idx = pagination["offset"]
                end_idx = start_idx + pagination["limit"]
                paginated_locations = response.locations[start_idx:end_idx]

                return JsonResponse(
                    self._create_paginated_response(
                        [loc.to_dict() for loc in paginated_locations],
                        len(response.locations),
                        pagination,
                        request,
                    )
                )

            # For clustered results, return as-is with metadata
            response_dict = response.to_dict()

            return self._success_response(
                response_dict,
                metadata={
                    "clustered": response.clustered,
                    "cache_hit": response.cache_hit,
                    "query_time_ms": response.query_time_ms,
                    "filters_applied": response.filters_applied,
                },
            )

        except ValidationError as e:
            logger.warning(f"Validation error in MapLocationsView: {str(e)}")
            return self._error_response(str(e), 400, error_code="VALIDATION_ERROR")
        except Exception as e:
            logger.error(f"Error in MapLocationsView: {str(e)}", exc_info=True)
            return self._error_response(
                "Failed to retrieve map locations",
                500,
                error_code="INTERNAL_ERROR",
            )


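# Usage sketch (illustrative, not part of the original file): exercising the
# endpoint above with Django's test client. The path comes from the class
# docstring; the parameter names match what get() parses.
def _example_map_locations_call(client):
    """Fetch clustered locations for a bounding box (sketch only)."""
    return client.get(
        "/api/map/locations/",
        {
            "north": "42.1", "south": "41.9", "east": "-71.0", "west": "-71.4",
            "zoom": "12", "types": "park,ride", "cluster": "true",
        },
    )

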
class MapLocationDetailView(MapAPIView):
    """
    API endpoint for getting detailed information about a specific location.

    GET /api/map/locations/<type>/<id>/
    """

    @method_decorator(cache_page(600))  # Cache for 10 minutes
    def get(
        self, request: HttpRequest, location_type: str, location_id: int
    ) -> JsonResponse:
        """Get detailed information for a specific location."""
        try:
            # Validate location type
            valid_types = [lt.value for lt in LocationType]
            if location_type not in valid_types:
                return self._error_response(
                    f"Invalid location type: {location_type}. "
                    f"Valid types: {', '.join(valid_types)}",
                    400,
                    error_code="INVALID_LOCATION_TYPE",
                )

            # Validate location ID
            if location_id <= 0:
                return self._error_response(
                    "Location ID must be a positive integer",
                    400,
                    error_code="INVALID_LOCATION_ID",
                )

            # Get location details
            location = unified_map_service.get_location_details(
                location_type, location_id
            )

            if not location:
                return self._error_response(
                    f"Location not found: {location_type}/{location_id}",
                    404,
                    error_code="LOCATION_NOT_FOUND",
                )

            return self._success_response(
                location.to_dict(),
                metadata={
                    "location_type": location_type,
                    "location_id": location_id,
                },
            )

        except ValueError as e:
            logger.warning(f"Value error in MapLocationDetailView: {str(e)}")
            return self._error_response(str(e), 400, error_code="INVALID_PARAMETER")
        except Exception as e:
            logger.error(
                f"Error in MapLocationDetailView: {str(e)}",
                exc_info=True,
            )
            return self._error_response(
                "Failed to retrieve location details",
                500,
                error_code="INTERNAL_ERROR",
            )


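# Usage sketch (illustrative): a detail lookup for a park with id 123. A miss
# returns the standardized 404 envelope with error_code="LOCATION_NOT_FOUND".
def _example_location_detail_call(client):
    return client.get("/api/map/locations/park/123/")

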
class MapSearchView(MapAPIView):
    """
    API endpoint for searching locations by text query.

    GET /api/map/search/
    Parameters:
    - q: Search query (required)
    - north, south, east, west: Optional bounding box
    - types: Comma-separated location types
    - limit: Maximum results (default 50)
    """

    @method_decorator(gzip_page)  # Compress responses
    def get(self, request: HttpRequest) -> JsonResponse:
        """Search locations by text query with pagination."""
        try:
            # Get and validate search query
            query = request.GET.get("q", "").strip()
            if not query:
                return self._error_response(
                    "Search query 'q' parameter is required",
                    400,
                    error_code="MISSING_QUERY",
                )

            if len(query) < 2:
                return self._error_response(
                    "Search query must be at least 2 characters long",
                    400,
                    error_code="QUERY_TOO_SHORT",
                )

            # Parse parameters
            bounds = self._parse_bounds(request)
            pagination = self._parse_pagination(request)

            # Parse location types
            location_types = None
            types_param = request.GET.get("types")
            if types_param:
                try:
                    valid_types = {lt.value for lt in LocationType}
                    location_types = {
                        LocationType(t.strip())
                        for t in types_param.split(",")
                        if t.strip() in valid_types
                    }
                except ValueError:
                    return self._error_response(
                        "Invalid location types",
                        400,
                        error_code="INVALID_TYPES",
                    )

            # Set reasonable search limit (higher for search than general
            # listings)
            search_limit = min(500, pagination["page"] * pagination["page_size"])

            # Perform search
            locations = unified_map_service.search_locations(
                query=query,
                bounds=bounds,
                location_types=location_types,
                limit=search_limit,
            )

            # Apply pagination
            start_idx = pagination["offset"]
            end_idx = start_idx + pagination["limit"]
            paginated_locations = locations[start_idx:end_idx]

            return JsonResponse(
                self._create_paginated_response(
                    [loc.to_dict() for loc in paginated_locations],
                    len(locations),
                    pagination,
                    request,
                )
            )

        except ValidationError as e:
            logger.warning(f"Validation error in MapSearchView: {str(e)}")
            return self._error_response(str(e), 400, error_code="VALIDATION_ERROR")
        except ValueError as e:
            logger.warning(f"Value error in MapSearchView: {str(e)}")
            return self._error_response(str(e), 400, error_code="INVALID_PARAMETER")
        except Exception as e:
            logger.error(f"Error in MapSearchView: {str(e)}", exc_info=True)
            return self._error_response(
                "Search failed due to internal error",
                500,
                error_code="SEARCH_FAILED",
            )


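# Usage sketch (illustrative): a text search constrained to parks. Queries of
# fewer than two characters are rejected with error_code="QUERY_TOO_SHORT".
def _example_map_search_call(client):
    return client.get(
        "/api/map/search/",
        {"q": "coaster", "types": "park", "page": "1", "page_size": "20"},
    )

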
class MapBoundsView(MapAPIView):
    """
    API endpoint for getting locations within specific bounds.

    GET /api/map/bounds/
    Parameters:
    - north, south, east, west: Bounding box coordinates (required)
    - types: Comma-separated location types
    - zoom: Zoom level
    """

    @method_decorator(cache_page(300))  # Cache for 5 minutes
    def get(self, request: HttpRequest) -> JsonResponse:
        """Get locations within specific geographic bounds."""
        try:
            # Parse required bounds
            bounds = self._parse_bounds(request)
            if not bounds:
                return self._error_response(
                    "Bounds parameters required: north, south, east, west", 400
                )

            # Parse optional filters
            location_types = None
            types_param = request.GET.get("types")
            if types_param:
                location_types = {
                    LocationType(t.strip())
                    for t in types_param.split(",")
                    if t.strip() in [lt.value for lt in LocationType]
                }

            zoom_level = self._parse_zoom_level(request)

            # Get locations within bounds
            response = unified_map_service.get_locations_by_bounds(
                north=bounds.north,
                south=bounds.south,
                east=bounds.east,
                west=bounds.west,
                location_types=location_types,
                zoom_level=zoom_level,
            )

            return JsonResponse(response.to_dict())

        except ValidationError as e:
            return self._error_response(str(e), 400)
        except Exception as e:
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )


class MapStatsView(MapAPIView):
    """
    API endpoint for getting map service statistics and health information.

    GET /api/map/stats/
    """

    def get(self, request: HttpRequest) -> JsonResponse:
        """Get map service statistics and performance metrics."""
        try:
            stats = unified_map_service.get_service_stats()

            return JsonResponse({"status": "success", "data": stats})

        except Exception as e:
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )


class MapCacheView(MapAPIView):
    """
    API endpoint for cache management (admin only).

    DELETE /api/map/cache/
    POST /api/map/cache/invalidate/
    """

    def delete(self, request: HttpRequest) -> JsonResponse:
        """Clear all map cache (admin only)."""
        # TODO: Add admin permission check
        try:
            unified_map_service.invalidate_cache()

            return JsonResponse(
                {
                    "status": "success",
                    "message": "Map cache cleared successfully",
                }
            )

        except Exception as e:
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )

    def post(self, request: HttpRequest) -> JsonResponse:
        """Invalidate specific cache entries."""
        # TODO: Add admin permission check
        try:
            data = json.loads(request.body)

            location_type = data.get("location_type")
            location_id = data.get("location_id")
            bounds_data = data.get("bounds")

            bounds = None
            if bounds_data:
                bounds = GeoBounds(**bounds_data)

            unified_map_service.invalidate_cache(
                location_type=location_type,
                location_id=location_id,
                bounds=bounds,
            )

            return JsonResponse(
                {
                    "status": "success",
                    "message": "Cache invalidated successfully",
                }
            )

        except (json.JSONDecodeError, TypeError, ValueError) as e:
            return self._error_response(f"Invalid request data: {str(e)}", 400)
        except Exception as e:
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )


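# Request-body sketch (illustrative, not part of the original file): the JSON
# payload MapCacheView.post() accepts; the bounds dict is unpacked directly
# into GeoBounds(**bounds_data), so its keys must match that constructor.
EXAMPLE_CACHE_INVALIDATE_PAYLOAD = {
    "location_type": "park",
    "location_id": 123,
    "bounds": {"north": 42.1, "south": 41.9, "east": -71.0, "west": -71.4},
}
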
420
apps/core/views/maps.py
Normal file
@@ -0,0 +1,420 @@
"""
|
||||
HTML views for the unified map service.
|
||||
Provides web interfaces for map functionality with HTMX integration.
|
||||
"""
|
||||
|
||||
import json
|
||||
from typing import Dict, Any, Optional, Set
|
||||
from django.shortcuts import render
|
||||
from django.http import JsonResponse, HttpRequest, HttpResponse
|
||||
from django.views.generic import TemplateView, View
|
||||
from django.core.paginator import Paginator
|
||||
|
||||
from ..services.map_service import unified_map_service
|
||||
from ..services.data_structures import GeoBounds, MapFilters, LocationType
|
||||
|
||||
|
||||
class MapViewMixin:
|
||||
"""Mixin providing common functionality for map views."""
|
||||
|
||||
def get_map_context(self, request: HttpRequest) -> Dict[str, Any]:
|
||||
"""Get common context data for map views."""
|
||||
return {
|
||||
"map_api_urls": {
|
||||
"locations": "/api/map/locations/",
|
||||
"search": "/api/map/search/",
|
||||
"bounds": "/api/map/bounds/",
|
||||
"location_detail": "/api/map/locations/",
|
||||
},
|
||||
"location_types": [lt.value for lt in LocationType],
|
||||
"default_zoom": 10,
|
||||
"enable_clustering": True,
|
||||
"enable_search": True,
|
||||
}
|
||||
|
||||
def parse_location_types(self, request: HttpRequest) -> Optional[Set[LocationType]]:
|
||||
"""Parse location types from request parameters."""
|
||||
types_param = request.GET.get("types")
|
||||
if types_param:
|
||||
try:
|
||||
return {
|
||||
LocationType(t.strip())
|
||||
for t in types_param.split(",")
|
||||
if t.strip() in [lt.value for lt in LocationType]
|
||||
}
|
||||
except ValueError:
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
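# Behavior sketch (illustrative, not part of the original file): unknown type
# names are silently dropped rather than rejected, so "park,bogus" parses to
# {LocationType.PARK} while "bogus" alone parses to an empty set.
def _example_parse_types():
    from django.test import RequestFactory

    request = RequestFactory().get("/maps/", {"types": "park,bogus"})
    return MapViewMixin().parse_location_types(request)

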
class UniversalMapView(MapViewMixin, TemplateView):
    """
    Main universal map view showing all location types.

    URL: /maps/
    """

    template_name = "maps/universal_map.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Additional context for universal map
        context.update(
            {
                "page_title": "Interactive Map - All Locations",
                "map_type": "universal",
                "show_all_types": True,
                "initial_location_types": [lt.value for lt in LocationType],
                "filters_enabled": True,
            }
        )

        # Handle initial bounds from query parameters
        if all(
            param in self.request.GET for param in ["north", "south", "east", "west"]
        ):
            try:
                context["initial_bounds"] = {
                    "north": float(self.request.GET["north"]),
                    "south": float(self.request.GET["south"]),
                    "east": float(self.request.GET["east"]),
                    "west": float(self.request.GET["west"]),
                }
            except (ValueError, TypeError):
                pass

        return context


class ParkMapView(MapViewMixin, TemplateView):
    """
    Map view focused specifically on parks.

    URL: /maps/parks/
    """

    template_name = "maps/park_map.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Park-specific context
        context.update(
            {
                "page_title": "Theme Parks Map",
                "map_type": "parks",
                "show_all_types": False,
                "initial_location_types": [LocationType.PARK.value],
                "filters_enabled": True,
                "park_specific_filters": True,
            }
        )

        return context


class NearbyLocationsView(MapViewMixin, TemplateView):
    """
    View for showing locations near a specific point.

    URL: /maps/nearby/
    """

    template_name = "maps/nearby_locations.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Parse coordinates from query parameters
        lat = self.request.GET.get("lat")
        lng = self.request.GET.get("lng")
        radius = self.request.GET.get("radius", "50")  # Default 50km radius

        if lat and lng:
            try:
                center_lat = float(lat)
                center_lng = float(lng)
                # Clamp between 1-200km
                search_radius = min(200, max(1, float(radius)))

                context.update(
                    {
                        "page_title": f"Locations Near {center_lat:.4f}, {center_lng:.4f}",
                        "map_type": "nearby",
                        "center_coordinates": {
                            "lat": center_lat,
                            "lng": center_lng,
                        },
                        "search_radius": search_radius,
                        "show_radius_circle": True,
                    }
                )
            except (ValueError, TypeError):
                context["error"] = "Invalid coordinates provided"
        else:
            context.update(
                {
                    "page_title": "Nearby Locations",
                    "map_type": "nearby",
                    "prompt_for_location": True,
                }
            )

        return context


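# Usage sketch (illustrative, not part of the original file): the radius
# clamp above in action; an out-of-range radius comes back as 200.
def _example_nearby_context():
    from django.test import RequestFactory

    view = NearbyLocationsView()
    view.request = RequestFactory().get(
        "/maps/nearby/", {"lat": "28.4189", "lng": "-81.5812", "radius": "500"}
    )
    return view.get_context_data()

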
class LocationFilterView(MapViewMixin, View):
    """
    HTMX endpoint for updating map when filters change.

    URL: /maps/htmx/filter/
    """

    def get(self, request: HttpRequest) -> HttpResponse:
        """Return filtered location data for HTMX updates."""
        try:
            # Parse filter parameters
            location_types = self.parse_location_types(request)
            search_query = request.GET.get("q", "").strip()
            country = request.GET.get("country", "").strip()
            state = request.GET.get("state", "").strip()

            # Create filters
            filters = None
            if any([location_types, search_query, country, state]):
                filters = MapFilters(
                    location_types=location_types,
                    search_query=search_query or None,
                    country=country or None,
                    state=state or None,
                    has_coordinates=True,
                )

            # Get filtered locations
            map_response = unified_map_service.get_map_data(
                filters=filters,
                zoom_level=int(request.GET.get("zoom", "10")),
                cluster=request.GET.get("cluster", "true").lower() == "true",
            )

            # Return JSON response for HTMX
            return JsonResponse(
                {
                    "status": "success",
                    "data": map_response.to_dict(),
                    "filters_applied": map_response.filters_applied,
                }
            )

        except Exception as e:
            return JsonResponse({"status": "error", "message": str(e)}, status=400)


class LocationSearchView(MapViewMixin, View):
    """
    HTMX endpoint for real-time location search.

    URL: /maps/htmx/search/
    """

    def get(self, request: HttpRequest) -> HttpResponse:
        """Return search results for HTMX updates."""
        query = request.GET.get("q", "").strip()

        if not query or len(query) < 3:
            return render(
                request,
                "maps/partials/search_results.html",
                {
                    "results": [],
                    "query": query,
                    "message": "Enter at least 3 characters to search",
                },
            )

        try:
            # Parse optional location types
            location_types = self.parse_location_types(request)
            limit = min(20, max(5, int(request.GET.get("limit", "10"))))

            # Perform search
            results = unified_map_service.search_locations(
                query=query, location_types=location_types, limit=limit
            )

            return render(
                request,
                "maps/partials/search_results.html",
                {"results": results, "query": query, "count": len(results)},
            )

        except Exception as e:
            return render(
                request,
                "maps/partials/search_results.html",
                {"results": [], "query": query, "error": str(e)},
            )


class MapBoundsUpdateView(MapViewMixin, View):
    """
    HTMX endpoint for updating locations when map bounds change.

    URL: /maps/htmx/bounds/
    """

    def post(self, request: HttpRequest) -> HttpResponse:
        """Update map data when bounds change."""
        try:
            data = json.loads(request.body)

            # Parse bounds
            bounds = GeoBounds(
                north=float(data["north"]),
                south=float(data["south"]),
                east=float(data["east"]),
                west=float(data["west"]),
            )

            # Parse additional parameters
            zoom_level = int(data.get("zoom", 10))
            location_types = None
            if "types" in data:
                location_types = {
                    LocationType(t)
                    for t in data["types"]
                    if t in [lt.value for lt in LocationType]
                }

            # Location types are used directly in the service call

            # Get updated map data
            map_response = unified_map_service.get_locations_by_bounds(
                north=bounds.north,
                south=bounds.south,
                east=bounds.east,
                west=bounds.west,
                location_types=location_types,
                zoom_level=zoom_level,
            )

            return JsonResponse({"status": "success", "data": map_response.to_dict()})

        except (json.JSONDecodeError, ValueError, KeyError) as e:
            return JsonResponse(
                {
                    "status": "error",
                    "message": f"Invalid request data: {str(e)}",
                },
                status=400,
            )
        except Exception as e:
            return JsonResponse({"status": "error", "message": str(e)}, status=500)


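# Request-body sketch (illustrative, not part of the original file): the JSON
# document MapBoundsUpdateView.post() expects; "types" is optional and is
# filtered against the known LocationType values.
EXAMPLE_BOUNDS_UPDATE_PAYLOAD = {
    "north": 42.1,
    "south": 41.9,
    "east": -71.0,
    "west": -71.4,
    "zoom": 12,
    "types": ["park", "ride"],
}

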
class LocationDetailModalView(MapViewMixin, View):
    """
    HTMX endpoint for showing location details in modal.

    URL: /maps/htmx/location/<type>/<id>/
    """

    def get(
        self, request: HttpRequest, location_type: str, location_id: int
    ) -> HttpResponse:
        """Return location detail modal content."""
        try:
            # Validate location type
            if location_type not in [lt.value for lt in LocationType]:
                return render(
                    request,
                    "maps/partials/location_modal.html",
                    {"error": f"Invalid location type: {location_type}"},
                )

            # Get location details
            location = unified_map_service.get_location_details(
                location_type, location_id
            )

            if not location:
                return render(
                    request,
                    "maps/partials/location_modal.html",
                    {"error": "Location not found"},
                )

            return render(
                request,
                "maps/partials/location_modal.html",
                {"location": location, "location_type": location_type},
            )

        except Exception as e:
            return render(
                request, "maps/partials/location_modal.html", {"error": str(e)}
            )


class LocationListView(MapViewMixin, TemplateView):
    """
    View for listing locations with pagination (non-map view).

    URL: /maps/list/
    """

    template_name = "maps/location_list.html"
    paginate_by = 20

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Parse filters
        location_types = self.parse_location_types(self.request)
        search_query = self.request.GET.get("q", "").strip()
        country = self.request.GET.get("country", "").strip()
        state = self.request.GET.get("state", "").strip()

        # Create filters
        filters = None
        if any([location_types, search_query, country, state]):
            filters = MapFilters(
                location_types=location_types,
                search_query=search_query or None,
                country=country or None,
                state=state or None,
                has_coordinates=True,
            )

        # Get locations without clustering
        map_response = unified_map_service.get_map_data(
            filters=filters, cluster=False, use_cache=True
        )

        # Paginate results
        paginator = Paginator(map_response.locations, self.paginate_by)
        page_number = self.request.GET.get("page")
        page_obj = paginator.get_page(page_number)

        context.update(
            {
                "page_title": "All Locations",
                "locations": page_obj,
                "total_count": map_response.total_count,
                "applied_filters": filters,
                "location_types": [lt.value for lt in LocationType],
                "current_filters": {
                    "types": self.request.GET.getlist("types"),
                    "q": search_query,
                    "country": country,
                    "state": state,
                },
            }
        )

        return context

178
apps/core/views/search.py
Normal file
@@ -0,0 +1,178 @@
from django.views.generic import TemplateView
from django.http import JsonResponse

# from django.contrib.gis.geos import Point  # Disabled temporarily for setup
from apps.parks.models import Park
from apps.parks.filters import ParkFilter
from apps.core.services.location_search import (
    location_search_service,
    LocationSearchFilters,
)
from apps.core.forms.search import LocationSearchForm


class AdaptiveSearchView(TemplateView):
    template_name = "core/search/results.html"

    def get_queryset(self):
        """
        Get the base queryset, optimized with select_related and prefetch_related
        """
        return (
            Park.objects.select_related("operator", "property_owner")
            .prefetch_related("location", "photos")
            .all()
        )

    def get_filterset(self):
        """
        Get the filterset instance
        """
        return ParkFilter(self.request.GET, queryset=self.get_queryset())

    def get_context_data(self, **kwargs):
        """
        Add filtered results and filter form to context
        """
        context = super().get_context_data(**kwargs)
        filterset = self.get_filterset()

        # Check if location-based search is being used
        location_search = self.request.GET.get("location_search", "").strip()
        near_location = self.request.GET.get("near_location", "").strip()

        # Add location search context
        context.update(
            {
                "results": filterset.qs,
                "filters": filterset,
                "applied_filters": bool(
                    self.request.GET
                ),  # Check if any filters are applied
                "is_location_search": bool(location_search or near_location),
                "location_search_query": location_search or near_location,
            }
        )

        return context


class FilterFormView(TemplateView):
    """
    View for rendering just the filter form for HTMX updates
    """

    template_name = "core/search/filters.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        filterset = ParkFilter(self.request.GET, queryset=Park.objects.all())
        context["filters"] = filterset
        return context


class LocationSearchView(TemplateView):
    """
    Enhanced search view with comprehensive location search capabilities.
    """

    template_name = "core/search/location_results.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Build search filters from request parameters
        filters = self._build_search_filters()

        # Perform search
        results = location_search_service.search(filters)

        # Group results by type for better presentation
        grouped_results = {
            "parks": [r for r in results if r.content_type == "park"],
            "rides": [r for r in results if r.content_type == "ride"],
            "companies": [r for r in results if r.content_type == "company"],
        }

        context.update(
            {
                "results": results,
                "grouped_results": grouped_results,
                "total_results": len(results),
                "search_filters": filters,
                "has_location_filter": bool(filters.location_point),
                "search_form": LocationSearchForm(self.request.GET),
            }
        )

        return context

    def _build_search_filters(self) -> LocationSearchFilters:
        """Build LocationSearchFilters from request parameters."""
        form = LocationSearchForm(self.request.GET)
        form.is_valid()  # Populate cleaned_data

        # Parse location coordinates if provided. The module-level Point
        # import is disabled above, so import it lazily here; if GeoDjango
        # is unavailable, fall back to a plain text search.
        location_point = None
        lat = form.cleaned_data.get("lat")
        lng = form.cleaned_data.get("lng")
        if lat and lng:
            try:
                from django.contrib.gis.geos import Point

                location_point = Point(float(lng), float(lat), srid=4326)
            except (ImportError, ValueError, TypeError):
                location_point = None

        # Parse location types
        location_types = set()
        if form.cleaned_data.get("search_parks"):
            location_types.add("park")
        if form.cleaned_data.get("search_rides"):
            location_types.add("ride")
        if form.cleaned_data.get("search_companies"):
            location_types.add("company")

        # If no specific types selected, search all
        if not location_types:
            location_types = {"park", "ride", "company"}

        # Parse radius
        radius_km = None
        radius_str = form.cleaned_data.get("radius_km", "").strip()
        if radius_str:
            try:
                radius_km = float(radius_str)
                # Clamp between 1-500km
                radius_km = max(1, min(500, radius_km))
            except (ValueError, TypeError):
                radius_km = None

        return LocationSearchFilters(
            search_query=form.cleaned_data.get("q", "").strip() or None,
            location_point=location_point,
            radius_km=radius_km,
            location_types=location_types if location_types else None,
            country=form.cleaned_data.get("country", "").strip() or None,
            state=form.cleaned_data.get("state", "").strip() or None,
            city=form.cleaned_data.get("city", "").strip() or None,
            park_status=self.request.GET.getlist("park_status") or None,
            include_distance=True,
            max_results=int(self.request.GET.get("limit", 100)),
        )


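# Behavior sketch (illustrative; the form field names come from the code
# above, the query string is hypothetical): for a request like
#   ?q=orlando&lat=28.42&lng=-81.58&radius_km=25&search_parks=on
# _build_search_filters() yields search_query="orlando", a Point built as
# Point(lng, lat) when GeoDjango is importable, radius_km clamped into the
# 1-500 range, and location_types={"park"}.

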
class LocationSuggestionsView(TemplateView):
    """
    AJAX endpoint for location search suggestions.
    """

    def get(self, request, *args, **kwargs):
        query = request.GET.get("q", "").strip()
        try:
            limit = int(request.GET.get("limit", 10))
        except (ValueError, TypeError):
            # Guard against a non-numeric limit parameter
            limit = 10

        if len(query) < 2:
            return JsonResponse({"suggestions": []})

        try:
            suggestions = location_search_service.suggest_locations(query, limit)
            return JsonResponse({"suggestions": suggestions})
        except Exception as e:
            return JsonResponse({"error": str(e)}, status=500)

62
apps/core/views/views.py
Normal file
@@ -0,0 +1,62 @@
from typing import Any, Dict, Optional, Type

from django.shortcuts import redirect
from django.urls import reverse
from django.views.generic import DetailView
from django.views import View
from django.http import HttpRequest, HttpResponse
from django.db.models import Model


class SlugRedirectMixin(View):
    """
    Mixin that handles redirects for old slugs.
    Requires the model to inherit from SluggedModel and view to inherit from DetailView.
    """

    model: Optional[Type[Model]] = None
    slug_url_kwarg: str = "slug"
    object: Optional[Model] = None

    def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
        # Only apply slug redirect logic to DetailViews
        if not isinstance(self, DetailView):
            return super().dispatch(request, *args, **kwargs)

        # Get the object using current or historical slug
        try:
            self.object = self.get_object()  # type: ignore
            # Check if we used an old slug
            current_slug = kwargs.get(self.slug_url_kwarg)
            if current_slug and current_slug != getattr(self.object, "slug", None):
                # Get the URL pattern name from the view
                url_pattern = self.get_redirect_url_pattern()
                # Build kwargs for reverse()
                reverse_kwargs = self.get_redirect_url_kwargs()
                # Redirect to the current slug URL
                return redirect(
                    reverse(url_pattern, kwargs=reverse_kwargs), permanent=True
                )
            return super().dispatch(request, *args, **kwargs)
        except Exception as e:
            # Fall back to normal dispatch; a DoesNotExist here lets the
            # DetailView raise its usual 404.
            if self.model and hasattr(self.model, "DoesNotExist"):
                if isinstance(e, self.model.DoesNotExist):  # type: ignore
                    return super().dispatch(request, *args, **kwargs)
            return super().dispatch(request, *args, **kwargs)

    def get_redirect_url_pattern(self) -> str:
        """
        Get the URL pattern name for redirects.
        Should be overridden by subclasses.
        """
        raise NotImplementedError(
            "Subclasses must implement get_redirect_url_pattern()"
        )

    def get_redirect_url_kwargs(self) -> Dict[str, Any]:
        """
        Get the kwargs for reverse() when redirecting.
        Should be overridden by subclasses if they need custom kwargs.
        """
        if not self.object:
            return {}
        return {self.slug_url_kwarg: getattr(self.object, "slug", "")}

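# Usage sketch (illustrative, not part of the original file): a DetailView
# wired to redirect requests that arrive via an outdated slug. The
# "parks:detail" URL name is an assumption.
from apps.parks.models import Park


class ExampleParkDetailView(SlugRedirectMixin, DetailView):
    model = Park  # assumed to inherit from SluggedModel
    slug_url_kwarg = "slug"

    def get_redirect_url_pattern(self) -> str:
        return "parks:detail"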