Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git (synced 2025-12-20 09:31:09 -05:00)
Refactor test utilities and enhance ASGI settings
- Cleaned up and standardized assertions in ApiTestMixin for API response validation.
- Updated ASGI settings to use os.environ for setting the DJANGO_SETTINGS_MODULE.
- Removed unused imports and improved formatting in settings.py.
- Refactored URL patterns in urls.py for better readability and organization.
- Enhanced view functions in views.py for consistency and clarity.
- Added .flake8 configuration for linting and style enforcement.
- Introduced type stubs for django-environ to improve type checking with Pylance.
@@ -1,29 +1,26 @@
from django.contrib import admin
from django.contrib.contenttypes.models import ContentType
from django.utils.html import format_html

from .models import SlugHistory


@admin.register(SlugHistory)
class SlugHistoryAdmin(admin.ModelAdmin):
    list_display = ['content_object_link', 'old_slug', 'created_at']
    list_filter = ['content_type', 'created_at']
    search_fields = ['old_slug', 'object_id']
    readonly_fields = ['content_type', 'object_id', 'old_slug', 'created_at']
    date_hierarchy = 'created_at'
    ordering = ['-created_at']
    list_display = ["content_object_link", "old_slug", "created_at"]
    list_filter = ["content_type", "created_at"]
    search_fields = ["old_slug", "object_id"]
    readonly_fields = ["content_type", "object_id", "old_slug", "created_at"]
    date_hierarchy = "created_at"
    ordering = ["-created_at"]

    def content_object_link(self, obj):
        """Create a link to the related object's admin page"""
        try:
            url = obj.content_object.get_absolute_url()
            return format_html(
                '<a href="{}">{}</a>',
                url,
                str(obj.content_object)
            )
            return format_html('<a href="{}">{}</a>', url, str(obj.content_object))
        except (AttributeError, ValueError):
            return str(obj.content_object)

    content_object_link.short_description = 'Object'

    content_object_link.short_description = "Object"

    def has_add_permission(self, request):
        """Disable manual creation of slug history records"""
@@ -3,47 +3,49 @@ from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from django.db.models import Count
from django.conf import settings


class PageView(models.Model):
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE, related_name='page_views')
    content_type = models.ForeignKey(
        ContentType, on_delete=models.CASCADE, related_name="page_views"
    )
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')

    content_object = GenericForeignKey("content_type", "object_id")

    timestamp = models.DateTimeField(auto_now_add=True, db_index=True)
    ip_address = models.GenericIPAddressField()
    user_agent = models.CharField(max_length=512, blank=True)

    class Meta:
        indexes = [
            models.Index(fields=['timestamp']),
            models.Index(fields=['content_type', 'object_id']),
            models.Index(fields=["timestamp"]),
            models.Index(fields=["content_type", "object_id"]),
        ]

    @classmethod
    def get_trending_items(cls, model_class, hours=24, limit=10):
        """Get trending items of a specific model class based on views in last X hours.

        Args:
            model_class: The model class to get trending items for (e.g., Park, Ride)
            hours (int): Number of hours to look back for views (default: 24)
            limit (int): Maximum number of items to return (default: 10)

        Returns:
            QuerySet: The trending items ordered by view count
        """
        content_type = ContentType.objects.get_for_model(model_class)
        cutoff = timezone.now() - timezone.timedelta(hours=hours)

        # Query through the ContentType relationship
        item_ids = cls.objects.filter(
            content_type=content_type,
            timestamp__gte=cutoff
        ).values('object_id').annotate(
            view_count=Count('id')
        ).filter(
            view_count__gt=0
        ).order_by('-view_count').values_list('object_id', flat=True)[:limit]
        item_ids = (
            cls.objects.filter(content_type=content_type, timestamp__gte=cutoff)
            .values("object_id")
            .annotate(view_count=Count("id"))
            .filter(view_count__gt=0)
            .order_by("-view_count")
            .values_list("object_id", flat=True)[:limit]
        )

        # Get the actual items in the correct order
        if item_ids:
@@ -51,7 +53,8 @@ class PageView(models.Model):
            id_list = list(item_ids)
            # Use Case/When to preserve the ordering
            from django.db.models import Case, When

            preserved = Case(*[When(pk=pk, then=pos) for pos, pk in enumerate(id_list)])
            return model_class.objects.filter(pk__in=id_list).order_by(preserved)

        return model_class.objects.none()
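A usage sketch for the refactored classmethod (not part of this commit; the Park import path and its name field are assumptions):

# Hypothetical caller of PageView.get_trending_items:
from parks.models import Park          # assumption: parks app defines Park
from analytics.models import PageView  # assumption: PageView lives in analytics

# Ten most-viewed parks over the last 48 hours, ordered by view count.
for park in PageView.get_trending_items(Park, hours=48, limit=10):
    print(park.name)  # assumption: Park has a name field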
@@ -3,15 +3,21 @@ Custom exception handling for ThrillWiki API.

Provides standardized error responses following Django styleguide patterns.
"""

import logging
from typing import Any, Dict, Optional

from django.http import Http404
from django.core.exceptions import PermissionDenied, ValidationError as DjangoValidationError
from django.core.exceptions import (
    PermissionDenied,
    ValidationError as DjangoValidationError,
)
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import exception_handler
from rest_framework.exceptions import ValidationError as DRFValidationError, NotFound, PermissionDenied as DRFPermissionDenied
from rest_framework.exceptions import (
    ValidationError as DRFValidationError,
    NotFound,
    PermissionDenied as DRFPermissionDenied,
)

from ..exceptions import ThrillWikiException
from ..logging import get_logger, log_exception
@@ -19,106 +25,133 @@ from ..logging import get_logger, log_exception
logger = get_logger(__name__)


def custom_exception_handler(exc: Exception, context: Dict[str, Any]) -> Optional[Response]:
def custom_exception_handler(
    exc: Exception, context: Dict[str, Any]
) -> Optional[Response]:
    """
    Custom exception handler for DRF that provides standardized error responses.

    Returns:
        Response with standardized error format or None to fallback to default handler
    """
    # Call REST framework's default exception handler first
    response = exception_handler(exc, context)

    if response is not None:
        # Standardize the error response format
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': _get_error_code(exc),
                'message': _get_error_message(exc, response.data),
                'details': _get_error_details(exc, response.data),
            "status": "error",
            "error": {
                "code": _get_error_code(exc),
                "message": _get_error_message(exc, response.data),
                "details": _get_error_details(exc, response.data),
            },
            'data': None,
            "data": None,
        }

        # Add request context for debugging
        if hasattr(context.get('request'), 'user'):
            custom_response_data['error']['request_user'] = str(context['request'].user)

        if hasattr(context.get("request"), "user"):
            custom_response_data["error"]["request_user"] = str(context["request"].user)

        # Log the error for monitoring
        log_exception(logger, exc, context={'response_status': response.status_code}, request=context.get('request'))

        log_exception(
            logger,
            exc,
            context={"response_status": response.status_code},
            request=context.get("request"),
        )

        response.data = custom_response_data

    # Handle ThrillWiki custom exceptions
    elif isinstance(exc, ThrillWikiException):
        custom_response_data = {
            'status': 'error',
            'error': exc.to_dict(),
            'data': None,
            "status": "error",
            "error": exc.to_dict(),
            "data": None,
        }

        log_exception(logger, exc, context={'response_status': exc.status_code}, request=context.get('request'))

        log_exception(
            logger,
            exc,
            context={"response_status": exc.status_code},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=exc.status_code)

    # Handle specific Django exceptions that DRF doesn't catch
    elif isinstance(exc, DjangoValidationError):
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': 'VALIDATION_ERROR',
                'message': 'Validation failed',
                'details': _format_django_validation_errors(exc),
            "status": "error",
            "error": {
                "code": "VALIDATION_ERROR",
                "message": "Validation failed",
                "details": _format_django_validation_errors(exc),
            },
            'data': None,
            "data": None,
        }

        log_exception(logger, exc, context={'response_status': status.HTTP_400_BAD_REQUEST}, request=context.get('request'))

        log_exception(
            logger,
            exc,
            context={"response_status": status.HTTP_400_BAD_REQUEST},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=status.HTTP_400_BAD_REQUEST)

    elif isinstance(exc, Http404):
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': 'NOT_FOUND',
                'message': 'Resource not found',
                'details': str(exc) if str(exc) else None,
            "status": "error",
            "error": {
                "code": "NOT_FOUND",
                "message": "Resource not found",
                "details": str(exc) if str(exc) else None,
            },
            'data': None,
            "data": None,
        }

        log_exception(logger, exc, context={'response_status': status.HTTP_404_NOT_FOUND}, request=context.get('request'))

        log_exception(
            logger,
            exc,
            context={"response_status": status.HTTP_404_NOT_FOUND},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=status.HTTP_404_NOT_FOUND)

    elif isinstance(exc, PermissionDenied):
        custom_response_data = {
            'status': 'error',
            'error': {
                'code': 'PERMISSION_DENIED',
                'message': 'Permission denied',
                'details': str(exc) if str(exc) else None,
            "status": "error",
            "error": {
                "code": "PERMISSION_DENIED",
                "message": "Permission denied",
                "details": str(exc) if str(exc) else None,
            },
            'data': None,
            "data": None,
        }

        log_exception(logger, exc, context={'response_status': status.HTTP_403_FORBIDDEN}, request=context.get('request'))

        log_exception(
            logger,
            exc,
            context={"response_status": status.HTTP_403_FORBIDDEN},
            request=context.get("request"),
        )
        response = Response(custom_response_data, status=status.HTTP_403_FORBIDDEN)

    return response


def _get_error_code(exc: Exception) -> str:
    """Extract or determine error code from exception."""
    if hasattr(exc, 'default_code'):
    if hasattr(exc, "default_code"):
        return exc.default_code.upper()

    if isinstance(exc, DRFValidationError):
        return 'VALIDATION_ERROR'
        return "VALIDATION_ERROR"
    elif isinstance(exc, NotFound):
        return 'NOT_FOUND'
        return "NOT_FOUND"
    elif isinstance(exc, DRFPermissionDenied):
        return 'PERMISSION_DENIED'
        return "PERMISSION_DENIED"

    return exc.__class__.__name__.upper()


@@ -126,47 +159,47 @@ def _get_error_message(exc: Exception, response_data: Any) -> str:
    """Extract user-friendly error message."""
    if isinstance(response_data, dict):
        # Handle DRF validation errors
        if 'detail' in response_data:
            return str(response_data['detail'])
        elif 'non_field_errors' in response_data:
            errors = response_data['non_field_errors']
        if "detail" in response_data:
            return str(response_data["detail"])
        elif "non_field_errors" in response_data:
            errors = response_data["non_field_errors"]
            return errors[0] if isinstance(errors, list) and errors else str(errors)
        elif isinstance(response_data, dict) and len(response_data) == 1:
            key, value = next(iter(response_data.items()))
            if isinstance(value, list) and value:
                return f"{key}: {value[0]}"
            return f"{key}: {value}"

    # Fallback to exception message
    return str(exc) if str(exc) else 'An error occurred'
    return str(exc) if str(exc) else "An error occurred"


def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str, Any]]:
    """Extract detailed error information for debugging."""
    if isinstance(response_data, dict) and len(response_data) > 1:
        return response_data

    if hasattr(exc, 'detail') and isinstance(exc.detail, dict):
    if hasattr(exc, "detail") and isinstance(exc.detail, dict):
        return exc.detail

    return None


def _format_django_validation_errors(exc: DjangoValidationError) -> Dict[str, Any]:
def _format_django_validation_errors(
    exc: DjangoValidationError,
) -> Dict[str, Any]:
    """Format Django ValidationError for API response."""
    if hasattr(exc, 'error_dict'):
    if hasattr(exc, "error_dict"):
        # Field-specific errors
        return {
            field: [str(error) for error in errors]
            for field, errors in exc.error_dict.items()
        }
    elif hasattr(exc, 'error_list'):
    elif hasattr(exc, "error_list"):
        # Non-field errors
        return {
            'non_field_errors': [str(error) for error in exc.error_list]
        }

    return {'non_field_errors': [str(exc)]}
        return {"non_field_errors": [str(error) for error in exc.error_list]}

    return {"non_field_errors": [str(exc)]}


# Removed _log_api_error - using centralized logging instead
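For this handler to take effect, DRF must be pointed at it in settings. A minimal sketch; the dotted module path is an assumption, not shown in the diff:

# settings.py
REST_FRAMEWORK = {
    # Assumed path to the module refactored above.
    "EXCEPTION_HANDLER": "core.api.exception_handlers.custom_exception_handler",
}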
@@ -12,79 +12,79 @@ class ApiMixin:
    """
    Base mixin for API views providing standardized response formatting.
    """

    def create_response(
        self,
        *,
        data: Any = None,
        self,
        *,
        data: Any = None,
        message: Optional[str] = None,
        status_code: int = status.HTTP_200_OK,
        pagination: Optional[Dict[str, Any]] = None,
        metadata: Optional[Dict[str, Any]] = None
        metadata: Optional[Dict[str, Any]] = None,
    ) -> Response:
        """
        Create standardized API response.

        Args:
            data: Response data
            message: Optional success message
            status_code: HTTP status code
            pagination: Pagination information
            metadata: Additional metadata

        Returns:
            Standardized Response object
        """
        response_data = {
            'status': 'success' if status_code < 400 else 'error',
            'data': data,
            "status": "success" if status_code < 400 else "error",
            "data": data,
        }

        if message:
            response_data['message'] = message

            response_data["message"] = message

        if pagination:
            response_data['pagination'] = pagination

            response_data["pagination"] = pagination

        if metadata:
            response_data['metadata'] = metadata

            response_data["metadata"] = metadata

        return Response(response_data, status=status_code)

    def create_error_response(
        self,
        *,
        message: str,
        status_code: int = status.HTTP_400_BAD_REQUEST,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None
        details: Optional[Dict[str, Any]] = None,
    ) -> Response:
        """
        Create standardized error response.

        Args:
            message: Error message
            status_code: HTTP status code
            error_code: Optional error code
            details: Additional error details

        Returns:
            Standardized error Response object
        """
        error_data = {
            'code': error_code or 'GENERIC_ERROR',
            'message': message,
            "code": error_code or "GENERIC_ERROR",
            "message": message,
        }

        if details:
            error_data['details'] = details

            error_data["details"] = details

        response_data = {
            'status': 'error',
            'error': error_data,
            'data': None,
            "status": "error",
            "error": error_data,
            "data": None,
        }

        return Response(response_data, status=status_code)


@@ -92,37 +92,37 @@ class CreateApiMixin(ApiMixin):
    """
    Mixin for create API endpoints with standardized input/output handling.
    """

    def create(self, request: Request, *args, **kwargs) -> Response:
        """Handle POST requests for creating resources."""
        serializer = self.get_input_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        # Create the object using the service layer
        obj = self.perform_create(**serializer.validated_data)

        # Serialize the output
        output_serializer = self.get_output_serializer(obj)

        return self.create_response(
            data=output_serializer.data,
            status_code=status.HTTP_201_CREATED,
            message="Resource created successfully"
            message="Resource created successfully",
        )

    def perform_create(self, **validated_data):
        """
        Override this method to implement object creation logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_create")

    def get_input_serializer(self, *args, **kwargs):
        """Get the input serializer for validation."""
        return self.InputSerializer(*args, **kwargs)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)


@@ -130,35 +130,37 @@ class UpdateApiMixin(ApiMixin):
    """
    Mixin for update API endpoints with standardized input/output handling.
    """

    def update(self, request: Request, *args, **kwargs) -> Response:
        """Handle PUT/PATCH requests for updating resources."""
        instance = self.get_object()
        serializer = self.get_input_serializer(data=request.data, partial=kwargs.get('partial', False))
        serializer = self.get_input_serializer(
            data=request.data, partial=kwargs.get("partial", False)
        )
        serializer.is_valid(raise_exception=True)

        # Update the object using the service layer
        updated_obj = self.perform_update(instance, **serializer.validated_data)

        # Serialize the output
        output_serializer = self.get_output_serializer(updated_obj)

        return self.create_response(
            data=output_serializer.data,
            message="Resource updated successfully"
            message="Resource updated successfully",
        )

    def perform_update(self, instance, **validated_data):
        """
        Override this method to implement object update logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_update")

    def get_input_serializer(self, *args, **kwargs):
        """Get the input serializer for validation."""
        return self.InputSerializer(*args, **kwargs)

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)
@@ -168,29 +170,31 @@ class ListApiMixin(ApiMixin):
    """
    Mixin for list API endpoints with pagination and filtering.
    """

    def list(self, request: Request, *args, **kwargs) -> Response:
        """Handle GET requests for listing resources."""
        # Use selector to get filtered queryset
        queryset = self.get_queryset()

        # Apply pagination
        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_output_serializer(page, many=True)
            return self.get_paginated_response(serializer.data)

        # No pagination
        serializer = self.get_output_serializer(queryset, many=True)
        return self.create_response(data=serializer.data)

    def get_queryset(self):
        """
        Override this method to use selector patterns.
        Should call selector functions, not access model managers directly.
        """
        raise NotImplementedError("Subclasses must implement get_queryset using selectors")

        raise NotImplementedError(
            "Subclasses must implement get_queryset using selectors"
        )

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)
@@ -200,21 +204,23 @@ class RetrieveApiMixin(ApiMixin):
    """
    Mixin for retrieve API endpoints.
    """

    def retrieve(self, request: Request, *args, **kwargs) -> Response:
        """Handle GET requests for retrieving a single resource."""
        instance = self.get_object()
        serializer = self.get_output_serializer(instance)

        return self.create_response(data=serializer.data)

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError("Subclasses must implement get_object using selectors")

        raise NotImplementedError(
            "Subclasses must implement get_object using selectors"
        )

    def get_output_serializer(self, *args, **kwargs):
        """Get the output serializer for response."""
        return self.OutputSerializer(*args, **kwargs)
@@ -224,29 +230,31 @@ class DestroyApiMixin(ApiMixin):
    """
    Mixin for delete API endpoints.
    """

    def destroy(self, request: Request, *args, **kwargs) -> Response:
        """Handle DELETE requests for destroying resources."""
        instance = self.get_object()

        # Delete using service layer
        self.perform_destroy(instance)

        return self.create_response(
            status_code=status.HTTP_204_NO_CONTENT,
            message="Resource deleted successfully"
            message="Resource deleted successfully",
        )

    def perform_destroy(self, instance):
        """
        Override this method to implement object deletion logic.
        Should use service layer methods.
        """
        raise NotImplementedError("Subclasses must implement perform_destroy")

    def get_object(self):
        """
        Override this method to use selector patterns.
        Should call selector functions for optimized queries.
        """
        raise NotImplementedError("Subclasses must implement get_object using selectors")
        raise NotImplementedError(
            "Subclasses must implement get_object using selectors"
        )
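A sketch of how these mixins are meant to compose with a plain APIView; the serializer fields and the park_create service are assumptions for illustration, not part of the diff:

from rest_framework import serializers
from rest_framework.views import APIView

class ParkCreateApi(CreateApiMixin, APIView):
    class InputSerializer(serializers.Serializer):
        name = serializers.CharField()

    class OutputSerializer(serializers.Serializer):
        id = serializers.IntegerField()
        name = serializers.CharField()

    def post(self, request, *args, **kwargs):
        # Route POST to the mixin's create() implementation.
        return self.create(request, *args, **kwargs)

    def perform_create(self, **validated_data):
        from parks.services import park_create  # assumption: service-layer helper
        return park_create(**validated_data)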
@@ -1,5 +1,6 @@
from django.apps import AppConfig


class CoreConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'core'
    default_auto_field = "django.db.models.BigAutoField"
    name = "core"
@@ -6,102 +6,127 @@ import hashlib
import json
import time
from functools import wraps
from typing import Optional, List, Callable, Any
from django.core.cache import cache
from django.http import JsonResponse
from typing import Optional, List, Callable
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control, never_cache
from django.views.decorators.vary import vary_on_headers
from rest_framework.response import Response
from core.services.enhanced_cache_service import EnhancedCacheService
import logging

logger = logging.getLogger(__name__)


def cache_api_response(timeout=1800, vary_on=None, key_prefix='api', cache_backend='api'):
def cache_api_response(
    timeout=1800, vary_on=None, key_prefix="api", cache_backend="api"
):
    """
    Advanced decorator for caching API responses with flexible configuration

    Args:
        timeout: Cache timeout in seconds
        vary_on: List of request attributes to vary cache on
        key_prefix: Prefix for cache keys
        cache_backend: Cache backend to use
    """

    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            # Only cache GET requests
            if request.method != 'GET':
            if request.method != "GET":
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix,
                view_func.__name__,
                str(request.user.id) if request.user.is_authenticated else 'anonymous',
                (
                    str(request.user.id)
                    if request.user.is_authenticated
                    else "anonymous"
                ),
                str(hash(frozenset(request.GET.items()))),
            ]

            # Add URL parameters to cache key
            if args:
                cache_key_parts.append(str(hash(args)))
            if kwargs:
                cache_key_parts.append(str(hash(frozenset(kwargs.items()))))

            # Add custom vary_on fields
            if vary_on:
                for field in vary_on:
                    value = getattr(request, field, '')
                    value = getattr(request, field, "")
                    cache_key_parts.append(str(value))

            cache_key = ':'.join(cache_key_parts)

            cache_key = ":".join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = getattr(cache_service, cache_backend + '_cache').get(cache_key)

            cached_response = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_response:
                logger.debug(f"Cache hit for API view {view_func.__name__}", extra={
                    'cache_key': cache_key,
                    'view': view_func.__name__,
                    'cache_hit': True
                })
                logger.debug(
                    f"Cache hit for API view {view_func.__name__}",
                    extra={
                        "cache_key": cache_key,
                        "view": view_func.__name__,
                        "cache_hit": True,
                    },
                )
                return cached_response

            # Execute view and cache result
            start_time = time.time()
            response = view_func(self, request, *args, **kwargs)
            execution_time = time.time() - start_time

            # Only cache successful responses
            if hasattr(response, 'status_code') and response.status_code == 200:
                getattr(cache_service, cache_backend + '_cache').set(cache_key, response, timeout)
                logger.debug(f"Cached API response for view {view_func.__name__}", extra={
                    'cache_key': cache_key,
                    'view': view_func.__name__,
                    'execution_time': execution_time,
                    'cache_timeout': timeout,
                    'cache_miss': True
                })
            if hasattr(response, "status_code") and response.status_code == 200:
                getattr(cache_service, cache_backend + "_cache").set(
                    cache_key, response, timeout
                )
                logger.debug(
                    f"Cached API response for view {view_func.__name__}",
                    extra={
                        "cache_key": cache_key,
                        "view": view_func.__name__,
                        "execution_time": execution_time,
                        "cache_timeout": timeout,
                        "cache_miss": True,
                    },
                )
            else:
                logger.debug(f"Not caching response for view {view_func.__name__} (status: {getattr(response, 'status_code', 'unknown')})")

                logger.debug(
                    f"Not caching response for view {
                        view_func.__name__} (status: {
                        getattr(
                            response,
                            'status_code',
                            'unknown')})"
                )

            return response

        return wrapper

    return decorator


def cache_queryset_result(cache_key_template: str, timeout: int = 3600, cache_backend='default'):
def cache_queryset_result(
    cache_key_template: str, timeout: int = 3600, cache_backend="default"
):
    """
    Decorator for caching expensive queryset operations

    Args:
        cache_key_template: Template for cache key (can use format placeholders)
        timeout: Cache timeout in seconds
        cache_backend: Cache backend to use
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
@@ -110,147 +135,171 @@ def cache_queryset_result(cache_key_template: str, timeout: int = 3600, cache_ba
                cache_key = cache_key_template.format(*args, **kwargs)
            except (KeyError, IndexError):
                # Fallback to simpler key generation
                cache_key = f"{cache_key_template}:{hash(str(args) + str(kwargs))}"

                cache_key = f"{cache_key_template}:{
                    hash(
                        str(args) +
                        str(kwargs))}"

            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + '_cache').get(cache_key)

            cached_result = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_result is not None:
                logger.debug(f"Cache hit for queryset operation: {func.__name__}")
                logger.debug(
                    f"Cache hit for queryset operation: {
                        func.__name__}"
                )
                return cached_result

            # Execute function and cache result
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            getattr(cache_service, cache_backend + '_cache').set(cache_key, result, timeout)
            logger.debug(f"Cached queryset result for {func.__name__}", extra={
                'cache_key': cache_key,
                'function': func.__name__,
                'execution_time': execution_time,
                'cache_timeout': timeout
            })

            getattr(cache_service, cache_backend + "_cache").set(
                cache_key, result, timeout
            )
            logger.debug(
                f"Cached queryset result for {func.__name__}",
                extra={
                    "cache_key": cache_key,
                    "function": func.__name__,
                    "execution_time": execution_time,
                    "cache_timeout": timeout,
                },
            )

            return result

        return wrapper

    return decorator


def invalidate_cache_on_save(model_name: str, cache_patterns: List[str] = None):
    """
    Decorator to invalidate cache when model instances are saved

    Args:
        model_name: Name of the model
        cache_patterns: List of cache key patterns to invalidate
    """

    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            result = func(self, *args, **kwargs)

            # Invalidate related cache entries
            cache_service = EnhancedCacheService()

            # Standard model cache invalidation
            instance_id = getattr(self, 'id', None)
            instance_id = getattr(self, "id", None)
            cache_service.invalidate_model_cache(model_name, instance_id)

            # Custom pattern invalidation
            if cache_patterns:
                for pattern in cache_patterns:
                    if instance_id:
                        pattern = pattern.format(model=model_name, id=instance_id)
                    cache_service.invalidate_pattern(pattern)

            logger.info(f"Invalidated cache for {model_name} after save", extra={
                'model': model_name,
                'instance_id': instance_id,
                'patterns': cache_patterns
            })

            logger.info(
                f"Invalidated cache for {model_name} after save",
                extra={
                    "model": model_name,
                    "instance_id": instance_id,
                    "patterns": cache_patterns,
                },
            )

            return result

        return wrapper

    return decorator
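A usage sketch for the response-caching decorator. Because the wrapper's signature is wrapper(self, request, ...), it applies to a method of a class-based view; the view, selector, and serializer names below are assumptions:

from rest_framework.response import Response
from rest_framework.views import APIView

class RideListApi(APIView):
    @cache_api_response(timeout=600, key_prefix="rides", cache_backend="api")
    def get(self, request, *args, **kwargs):
        # ride_list() and RideOutputSerializer are assumed helpers.
        from rides.selectors import ride_list
        from rides.serializers import RideOutputSerializer
        return Response(RideOutputSerializer(ride_list(), many=True).data)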
class CachedAPIViewMixin:
    """Mixin to add caching capabilities to API views"""

    cache_timeout = 1800  # 30 minutes default
    cache_vary_on = ['version']
    cache_key_prefix = 'api'
    cache_backend = 'api'

    @method_decorator(vary_on_headers('User-Agent', 'Accept-Language'))
    cache_vary_on = ["version"]
    cache_key_prefix = "api"
    cache_backend = "api"

    @method_decorator(vary_on_headers("User-Agent", "Accept-Language"))
    def dispatch(self, request, *args, **kwargs):
        """Add caching to the dispatch method"""
        if request.method == 'GET' and getattr(self, 'enable_caching', True):
        if request.method == "GET" and getattr(self, "enable_caching", True):
            return self._cached_dispatch(request, *args, **kwargs)
        return super().dispatch(request, *args, **kwargs)

    def _cached_dispatch(self, request, *args, **kwargs):
        """Handle cached dispatch for GET requests"""
        cache_key = self._generate_cache_key(request, *args, **kwargs)

        cache_service = EnhancedCacheService()
        cached_response = getattr(cache_service, self.cache_backend + '_cache').get(cache_key)

        cached_response = getattr(cache_service, self.cache_backend + "_cache").get(
            cache_key
        )

        if cached_response:
            logger.debug(f"Cache hit for view {self.__class__.__name__}")
            return cached_response

        # Execute view
        response = super().dispatch(request, *args, **kwargs)

        # Cache successful responses
        if hasattr(response, 'status_code') and response.status_code == 200:
            getattr(cache_service, self.cache_backend + '_cache').set(
        if hasattr(response, "status_code") and response.status_code == 200:
            getattr(cache_service, self.cache_backend + "_cache").set(
                cache_key, response, self.cache_timeout
            )
            logger.debug(f"Cached response for view {self.__class__.__name__}")

        return response

    def _generate_cache_key(self, request, *args, **kwargs):
        """Generate cache key for the request"""
        key_parts = [
            self.cache_key_prefix,
            self.__class__.__name__,
            request.method,
            str(request.user.id) if request.user.is_authenticated else 'anonymous',
            (str(request.user.id) if request.user.is_authenticated else "anonymous"),
            str(hash(frozenset(request.GET.items()))),
        ]

        if args:
            key_parts.append(str(hash(args)))
        if kwargs:
            key_parts.append(str(hash(frozenset(kwargs.items()))))

        # Add vary_on fields
        for field in self.cache_vary_on:
            value = getattr(request, field, '')
            value = getattr(request, field, "")
            key_parts.append(str(value))

        return ':'.join(key_parts)

        return ":".join(key_parts)


def smart_cache(
    timeout: int = 3600,
    key_func: Optional[Callable] = None,
    invalidate_on: Optional[List[str]] = None,
    cache_backend: str = 'default'
    cache_backend: str = "default",
):
    """
    Smart caching decorator that adapts to function arguments

    Args:
        timeout: Cache timeout in seconds
        key_func: Custom function to generate cache key
        invalidate_on: List of signals to invalidate cache on
        cache_backend: Cache backend to use
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
@@ -260,79 +309,96 @@ def smart_cache(
            else:
                # Default key generation
                key_data = {
                    'func': f"{func.__module__}.{func.__name__}",
                    'args': str(args),
                    'kwargs': json.dumps(kwargs, sort_keys=True, default=str)
                    "func": f"{func.__module__}.{func.__name__}",
                    "args": str(args),
                    "kwargs": json.dumps(kwargs, sort_keys=True, default=str),
                }
                key_string = json.dumps(key_data, sort_keys=True)
                cache_key = f"smart_cache:{hashlib.md5(key_string.encode()).hexdigest()}"

                cache_key = f"smart_cache:{
                    hashlib.md5(
                        key_string.encode()).hexdigest()}"

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_result = getattr(cache_service, cache_backend + '_cache').get(cache_key)

            cached_result = getattr(cache_service, cache_backend + "_cache").get(
                cache_key
            )

            if cached_result is not None:
                logger.debug(f"Smart cache hit for {func.__name__}")
                return cached_result

            # Execute function
            start_time = time.time()
            result = func(*args, **kwargs)
            execution_time = time.time() - start_time

            # Cache result
            getattr(cache_service, cache_backend + '_cache').set(cache_key, result, timeout)

            logger.debug(f"Smart cached result for {func.__name__}", extra={
                'cache_key': cache_key,
                'execution_time': execution_time,
                'function': func.__name__
            })

            getattr(cache_service, cache_backend + "_cache").set(
                cache_key, result, timeout
            )

            logger.debug(
                f"Smart cached result for {func.__name__}",
                extra={
                    "cache_key": cache_key,
                    "execution_time": execution_time,
                    "function": func.__name__,
                },
            )

            return result

        # Add cache invalidation if specified
        if invalidate_on:
            wrapper._cache_invalidate_on = invalidate_on
            wrapper._cache_backend = cache_backend

        return wrapper

    return decorator


def conditional_cache(condition_func: Callable, **cache_kwargs):
    """
    Cache decorator that only caches when condition is met

    Args:
        condition_func: Function that returns True if caching should be applied
        **cache_kwargs: Arguments passed to smart_cache
    """

    def decorator(func):
        cached_func = smart_cache(**cache_kwargs)(func)

        @wraps(func)
        def wrapper(*args, **kwargs):
            if condition_func(*args, **kwargs):
                return cached_func(*args, **kwargs)
            else:
                return func(*args, **kwargs)

        return wrapper

    return decorator


# Utility functions for cache key generation
def generate_user_cache_key(user, suffix: str = ''):
def generate_user_cache_key(user, suffix: str = ""):
    """Generate cache key based on user"""
    user_id = user.id if user.is_authenticated else 'anonymous'
    user_id = user.id if user.is_authenticated else "anonymous"
    return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}"


def generate_model_cache_key(model_instance, suffix: str = ''):
def generate_model_cache_key(model_instance, suffix: str = ""):
    """Generate cache key based on model instance"""
    model_name = model_instance._meta.model_name
    instance_id = model_instance.id
    return f"{model_name}:{instance_id}:{suffix}" if suffix else f"{model_name}:{instance_id}"
    return (
        f"{model_name}:{instance_id}:{suffix}"
        if suffix
        else f"{model_name}:{instance_id}"
    )


def generate_queryset_cache_key(queryset, params: dict = None):
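A sketch of smart_cache with a custom key function. The top of the wrapper is not shown in this hunk; the sketch assumes it calls key_func(*args, **kwargs) when one is provided, which the signature and the else-branch suggest:

@smart_cache(timeout=900, key_func=lambda park_id: f"park_stats:{park_id}")
def compute_park_stats(park_id: int) -> dict:
    # Placeholder for an expensive aggregation (illustrative only).
    return {"park_id": park_id, "views": 0}

stats = compute_park_stats(42)  # computed, then cached under "park_stats:42"
stats = compute_park_stats(42)  # served from the "default" cache backend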
@@ -8,34 +8,34 @@ from typing import Optional, Dict, Any

class ThrillWikiException(Exception):
    """Base exception for all ThrillWiki-specific errors."""

    default_message = "An error occurred"
    error_code = "THRILLWIKI_ERROR"
    status_code = 500

    def __init__(
        self,
        message: Optional[str] = None,
        self,
        message: Optional[str] = None,
        error_code: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None
        details: Optional[Dict[str, Any]] = None,
    ):
        self.message = message or self.default_message
        self.error_code = error_code or self.error_code
        self.details = details or {}
        super().__init__(self.message)

    def to_dict(self) -> Dict[str, Any]:
        """Convert exception to dictionary for API responses."""
        return {
            'error_code': self.error_code,
            'message': self.message,
            'details': self.details
            "error_code": self.error_code,
            "message": self.message,
            "details": self.details,
        }


class ValidationException(ThrillWikiException):
    """Raised when data validation fails."""

    default_message = "Validation failed"
    error_code = "VALIDATION_ERROR"
    status_code = 400
@@ -43,7 +43,7 @@ class ValidationException(ThrillWikiException):

class NotFoundError(ThrillWikiException):
    """Raised when a requested resource is not found."""

    default_message = "Resource not found"
    error_code = "NOT_FOUND"
    status_code = 404
@@ -51,7 +51,7 @@ class NotFoundError(ThrillWikiException):

class PermissionDeniedError(ThrillWikiException):
    """Raised when user lacks permission for an operation."""

    default_message = "Permission denied"
    error_code = "PERMISSION_DENIED"
    status_code = 403
@@ -59,7 +59,7 @@ class PermissionDeniedError(ThrillWikiException):

class BusinessLogicError(ThrillWikiException):
    """Raised when business logic constraints are violated."""

    default_message = "Business logic violation"
    error_code = "BUSINESS_LOGIC_ERROR"
    status_code = 400
@@ -67,7 +67,7 @@ class BusinessLogicError(ThrillWikiException):

class ExternalServiceError(ThrillWikiException):
    """Raised when external service calls fail."""

    default_message = "External service error"
    error_code = "EXTERNAL_SERVICE_ERROR"
    status_code = 502
@@ -75,127 +75,138 @@ class ExternalServiceError(ThrillWikiException):

# Domain-specific exceptions


class ParkError(ThrillWikiException):
    """Base exception for park-related errors."""

    error_code = "PARK_ERROR"


class ParkNotFoundError(NotFoundError):
    """Raised when a park is not found."""

    default_message = "Park not found"
    error_code = "PARK_NOT_FOUND"

    def __init__(self, park_slug: Optional[str] = None, **kwargs):
        if park_slug:
            kwargs['details'] = {'park_slug': park_slug}
            kwargs['message'] = f"Park with slug '{park_slug}' not found"
            kwargs["details"] = {"park_slug": park_slug}
            kwargs["message"] = f"Park with slug '{park_slug}' not found"
        super().__init__(**kwargs)


class ParkOperationError(BusinessLogicError):
    """Raised when park operation constraints are violated."""

    default_message = "Invalid park operation"
    error_code = "PARK_OPERATION_ERROR"


class RideError(ThrillWikiException):
    """Base exception for ride-related errors."""

    error_code = "RIDE_ERROR"


class RideNotFoundError(NotFoundError):
    """Raised when a ride is not found."""

    default_message = "Ride not found"
    error_code = "RIDE_NOT_FOUND"

    def __init__(self, ride_slug: Optional[str] = None, **kwargs):
        if ride_slug:
            kwargs['details'] = {'ride_slug': ride_slug}
            kwargs['message'] = f"Ride with slug '{ride_slug}' not found"
            kwargs["details"] = {"ride_slug": ride_slug}
            kwargs["message"] = f"Ride with slug '{ride_slug}' not found"
        super().__init__(**kwargs)


class RideOperationError(BusinessLogicError):
    """Raised when ride operation constraints are violated."""

    default_message = "Invalid ride operation"
    error_code = "RIDE_OPERATION_ERROR"


class LocationError(ThrillWikiException):
    """Base exception for location-related errors."""

    error_code = "LOCATION_ERROR"


class InvalidCoordinatesError(ValidationException):
    """Raised when geographic coordinates are invalid."""

    default_message = "Invalid geographic coordinates"
    error_code = "INVALID_COORDINATES"

    def __init__(self, latitude: Optional[float] = None, longitude: Optional[float] = None, **kwargs):

    def __init__(
        self,
        latitude: Optional[float] = None,
        longitude: Optional[float] = None,
        **kwargs,
    ):
        if latitude is not None or longitude is not None:
            kwargs['details'] = {'latitude': latitude, 'longitude': longitude}
            kwargs["details"] = {"latitude": latitude, "longitude": longitude}
        super().__init__(**kwargs)


class GeolocationError(ExternalServiceError):
    """Raised when geolocation services fail."""

    default_message = "Geolocation service unavailable"
    error_code = "GEOLOCATION_ERROR"


class ReviewError(ThrillWikiException):
    """Base exception for review-related errors."""

    error_code = "REVIEW_ERROR"


class ReviewModerationError(BusinessLogicError):
    """Raised when review moderation constraints are violated."""

    default_message = "Review moderation error"
    error_code = "REVIEW_MODERATION_ERROR"


class DuplicateReviewError(BusinessLogicError):
    """Raised when user tries to create duplicate reviews."""

    default_message = "User has already reviewed this item"
    error_code = "DUPLICATE_REVIEW"


class AccountError(ThrillWikiException):
    """Base exception for account-related errors."""

    error_code = "ACCOUNT_ERROR"


class InsufficientPermissionsError(PermissionDeniedError):
    """Raised when user lacks required permissions."""

    default_message = "Insufficient permissions"
    error_code = "INSUFFICIENT_PERMISSIONS"

    def __init__(self, required_permission: Optional[str] = None, **kwargs):
        if required_permission:
            kwargs['details'] = {'required_permission': required_permission}
            kwargs['message'] = f"Permission '{required_permission}' required"
            kwargs["details"] = {"required_permission": required_permission}
            kwargs["message"] = f"Permission '{required_permission}' required"
        super().__init__(**kwargs)


class EmailError(ExternalServiceError):
    """Raised when email operations fail."""

    default_message = "Email service error"
    error_code = "EMAIL_ERROR"


class CacheError(ThrillWikiException):
    """Raised when cache operations fail."""

    default_message = "Cache operation failed"
    error_code = "CACHE_ERROR"
    status_code = 500
@@ -203,11 +214,11 @@ class CacheError(ThrillWikiException):

class RoadTripError(ExternalServiceError):
    """Raised when road trip planning fails."""

    default_message = "Road trip planning error"
    error_code = "ROADTRIP_ERROR"

    def __init__(self, service_name: Optional[str] = None, **kwargs):
        if service_name:
            kwargs['details'] = {'service': service_name}
            kwargs["details"] = {"service": service_name}
        super().__init__(**kwargs)
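A sketch of the intended flow from a selector to the exception handler above; park_get and the Park model are assumptions:

def park_get(*, slug: str):
    from parks.models import Park  # assumption: parks app defines Park
    park = Park.objects.filter(slug=slug).first()
    if park is None:
        raise ParkNotFoundError(park_slug=slug)
    return park

# custom_exception_handler renders this as a 404 whose body (per to_dict()) is roughly:
# {"status": "error",
#  "error": {"error_code": "PARK_NOT_FOUND",
#            "message": "Park with slug 'epcot' not found",
#            "details": {"park_slug": "epcot"}},
#  "data": None}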
@@ -1,4 +1,5 @@
"""Core forms and form components."""

from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.utils.translation import gettext_lazy as _
@@ -8,20 +9,23 @@ from autocomplete import Autocomplete

class BaseAutocomplete(Autocomplete):
    """Base autocomplete class for consistent autocomplete behavior across the project.

    This class extends django-htmx-autocomplete's base Autocomplete class to provide:
    - Project-wide defaults for autocomplete behavior
    - Translation strings
    - Authentication enforcement
    - Sensible search configuration
    """

    # Search configuration
    minimum_search_length = 2  # More responsive than default 3
    max_results = 10  # Reasonable limit for performance

    # UI text configuration using gettext for i18n
    no_result_text = _("No matches found")
    narrow_search_text = _("Showing %(page_size)s of %(total)s matches. Please refine your search.")
    narrow_search_text = _(
        "Showing %(page_size)s of %(total)s matches. Please refine your search."
    )
    type_at_least_n_characters = _("Type at least %(n)s characters...")

    # Project-wide component settings
@@ -30,10 +34,10 @@ class BaseAutocomplete(Autocomplete):
    @staticmethod
    def auth_check(request):
        """Enforce authentication by default.

        This can be overridden in subclasses if public access is needed.
        Configure AUTOCOMPLETE_BLOCK_UNAUTHENTICATED in settings to disable.
        """
        block_unauth = getattr(settings, 'AUTOCOMPLETE_BLOCK_UNAUTHENTICATED', True)
        block_unauth = getattr(settings, "AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", True)
        if block_unauth and not request.user.is_authenticated:
            raise PermissionDenied(_("Authentication required"))
            raise PermissionDenied(_("Authentication required"))

@@ -1 +0,0 @@
from .search import LocationSearchForm
@@ -1,105 +1,168 @@
from django import forms
from django.utils.translation import gettext_lazy as _


class LocationSearchForm(forms.Form):
    """
    A comprehensive search form that includes text search, location-based
    search, and content type filtering for a unified search experience.
    """

    # Text search query
    q = forms.CharField(
        required=False,
        label=_("Search Query"),
        widget=forms.TextInput(attrs={
            'placeholder': _("Search parks, rides, companies..."),
            'class': 'w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
        widget=forms.TextInput(
            attrs={
                "placeholder": _("Search parks, rides, companies..."),
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    # Location-based search
    location = forms.CharField(
        required=False,
        label=_("Near Location"),
        widget=forms.TextInput(attrs={
            'placeholder': _("City, address, or coordinates..."),
            'id': 'location-input',
            'class': 'w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
        widget=forms.TextInput(
            attrs={
                "placeholder": _("City, address, or coordinates..."),
                "id": "location-input",
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    # Hidden fields for coordinates
    lat = forms.FloatField(required=False, widget=forms.HiddenInput(attrs={'id': 'lat-input'}))
    lng = forms.FloatField(required=False, widget=forms.HiddenInput(attrs={'id': 'lng-input'}))

    lat = forms.FloatField(
        required=False, widget=forms.HiddenInput(attrs={"id": "lat-input"})
    )
    lng = forms.FloatField(
        required=False, widget=forms.HiddenInput(attrs={"id": "lng-input"})
    )

    # Search radius
    radius_km = forms.ChoiceField(
        required=False,
        label=_("Search Radius"),
        choices=[
            ('', _("Any distance")),
            ('5', _("5 km")),
            ('10', _("10 km")),
            ('25', _("25 km")),
            ('50', _("50 km")),
            ('100', _("100 km")),
            ('200', _("200 km")),
            ("", _("Any distance")),
            ("5", _("5 km")),
            ("10", _("10 km")),
            ("25", _("25 km")),
            ("50", _("50 km")),
            ("100", _("100 km")),
            ("200", _("200 km")),
        ],
        widget=forms.Select(attrs={
            'class': 'w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
        widget=forms.Select(
            attrs={
                "class": (
                    "w-full px-3 py-2 border border-gray-300 rounded-md shadow-sm "
                    "focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 "
                    "dark:border-gray-600 dark:text-white"
                )
            }
        ),
    )

    # Content type filters
    search_parks = forms.BooleanField(
        required=False,
        initial=True,
        label=_("Search Parks"),
        widget=forms.CheckboxInput(attrs={'class': 'rounded border-gray-300 text-blue-600 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700'})
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    search_rides = forms.BooleanField(
        required=False,
        label=_("Search Rides"),
        widget=forms.CheckboxInput(attrs={'class': 'rounded border-gray-300 text-blue-600 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700'})
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )
    search_companies = forms.BooleanField(
        required=False,
        label=_("Search Companies"),
        widget=forms.CheckboxInput(attrs={'class': 'rounded border-gray-300 text-blue-600 focus:ring-blue-500 dark:border-gray-600 dark:bg-gray-700'})
        widget=forms.CheckboxInput(
            attrs={
                "class": (
                    "rounded border-gray-300 text-blue-600 focus:ring-blue-500 "
                    "dark:border-gray-600 dark:bg-gray-700"
                )
            }
        ),
    )

    # Geographic filters
    country = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("Country"),
            'class': 'w-full px-3 py-2 text-sm border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
        widget=forms.TextInput(
            attrs={
                "placeholder": _("Country"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    state = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("State/Region"),
            'class': 'w-full px-3 py-2 text-sm border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
        widget=forms.TextInput(
            attrs={
                "placeholder": _("State/Region"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )
    city = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("City"),
            'class': 'w-full px-3 py-2 text-sm border border-gray-300 rounded-md shadow-sm focus:ring-blue-500 focus:border-blue-500 dark:bg-gray-700 dark:border-gray-600 dark:text-white'
        })
        widget=forms.TextInput(
            attrs={
                "placeholder": _("City"),
                "class": (
                    "w-full px-3 py-2 text-sm border border-gray-300 rounded-md "
                    "shadow-sm focus:ring-blue-500 focus:border-blue-500 "
                    "dark:bg-gray-700 dark:border-gray-600 dark:text-white"
                ),
            }
        ),
    )

    def clean(self):
        cleaned_data = super().clean()

        # If lat/lng are provided, ensure location field is populated for display
        lat = cleaned_data.get('lat')
        lng = cleaned_data.get('lng')
        location = cleaned_data.get('location')

        # If lat/lng are provided, ensure location field is populated for
        # display
        lat = cleaned_data.get("lat")
        lng = cleaned_data.get("lng")
        location = cleaned_data.get("location")

        if lat and lng and not location:
            cleaned_data['location'] = f"{lat}, {lng}"

        return cleaned_data
            cleaned_data["location"] = f"{lat}, {lng}"

        return cleaned_data
@@ -7,105 +7,127 @@ import logging
from django.core.cache import cache
from django.db import connection
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import ServiceUnavailable, ServiceReturnedUnexpectedResult

logger = logging.getLogger(__name__)


class CacheHealthCheck(BaseHealthCheckBackend):
    """Check Redis cache connectivity and performance"""

    critical_service = True

    def check_status(self):
        try:
            # Test cache write/read performance
            test_key = 'health_check_test'
            test_value = 'test_value_' + str(int(time.time()))

            test_key = "health_check_test"
            test_value = "test_value_" + str(int(time.time()))

            start_time = time.time()
            cache.set(test_key, test_value, timeout=30)
            cached_value = cache.get(test_key)
            cache_time = time.time() - start_time

            if cached_value != test_value:
                self.add_error("Cache read/write test failed - values don't match")
                return

            # Check cache performance
            if cache_time > 0.1:  # Warn if cache operations take more than 100ms
                self.add_error(f"Cache performance degraded: {cache_time:.3f}s for read/write operation")
                self.add_error(
                    f"Cache performance degraded: {
                        cache_time:.3f}s for read/write operation"
                )
                return

            # Clean up test key
            cache.delete(test_key)

            # Additional Redis-specific checks if using django-redis
            try:
                from django_redis import get_redis_connection

                redis_client = get_redis_connection("default")
                info = redis_client.info()

                # Check memory usage
                used_memory = info.get('used_memory', 0)
                max_memory = info.get('maxmemory', 0)

                used_memory = info.get("used_memory", 0)
                max_memory = info.get("maxmemory", 0)

                if max_memory > 0:
                    memory_usage_percent = (used_memory / max_memory) * 100
                    if memory_usage_percent > 90:
                        self.add_error(f"Redis memory usage critical: {memory_usage_percent:.1f}%")
                        self.add_error(
                            f"Redis memory usage critical: {
                                memory_usage_percent:.1f}%"
                        )
                    elif memory_usage_percent > 80:
                        logger.warning(f"Redis memory usage high: {memory_usage_percent:.1f}%")

                        logger.warning(
                            f"Redis memory usage high: {
                                memory_usage_percent:.1f}%"
                        )

            except ImportError:
                # django-redis not available, skip additional checks
                pass
            except Exception as e:
                logger.warning(f"Could not get Redis info: {e}")

        except Exception as e:
            self.add_error(f"Cache service unavailable: {e}")


class DatabasePerformanceCheck(BaseHealthCheckBackend):
    """Check database performance and connectivity"""

    critical_service = False

    def check_status(self):
        try:
            start_time = time.time()

            # Test basic connectivity
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                result = cursor.fetchone()

                if result[0] != 1:
                    self.add_error("Database connectivity test failed")
                    return

            basic_query_time = time.time() - start_time

            # Test a more complex query (if it takes too long, there might be performance issues)

            # Test a more complex query (if it takes too long, there might be
            # performance issues)
            start_time = time.time()
            with connection.cursor() as cursor:
                cursor.execute("SELECT COUNT(*) FROM django_content_type")
                cursor.fetchone()

            complex_query_time = time.time() - start_time

            # Performance thresholds
            if basic_query_time > 1.0:
                self.add_error(f"Database responding slowly: basic query took {basic_query_time:.2f}s")
                self.add_error(
                    f"Database responding slowly: basic query took {
                        basic_query_time:.2f}s"
                )
            elif basic_query_time > 0.5:
                logger.warning(f"Database performance degraded: basic query took {basic_query_time:.2f}s")

                logger.warning(
                    f"Database performance degraded: basic query took {
                        basic_query_time:.2f}s"
                )

            if complex_query_time > 2.0:
                self.add_error(f"Database performance critical: complex query took {complex_query_time:.2f}s")
                self.add_error(
                    f"Database performance critical: complex query took {
                        complex_query_time:.2f}s"
                )
            elif complex_query_time > 1.0:
                logger.warning(f"Database performance slow: complex query took {complex_query_time:.2f}s")

                logger.warning(
                    f"Database performance slow: complex query took {
                        complex_query_time:.2f}s"
                )

            # Check database version and settings if possible
            try:
                with connection.cursor() as cursor:
@@ -114,162 +136,190 @@ class DatabasePerformanceCheck(BaseHealthCheckBackend):
                    logger.debug(f"Database version: {version}")
            except Exception as e:
                logger.debug(f"Could not get database version: {e}")

        except Exception as e:
            self.add_error(f"Database performance check failed: {e}")


class ApplicationHealthCheck(BaseHealthCheckBackend):
    """Check application-specific health indicators"""

    critical_service = False

    def check_status(self):
        try:
            # Check if we can import critical modules
            critical_modules = [
                'parks.models',
                'rides.models',
                'accounts.models',
                'core.services',
                "parks.models",
                "rides.models",
                "accounts.models",
                "core.services",
            ]

            for module_name in critical_modules:
                try:
                    __import__(module_name)
                except ImportError as e:
                    self.add_error(f"Critical module import failed: {module_name} - {e}")

                    self.add_error(
                        f"Critical module import failed: {module_name} - {e}"
                    )

            # Check if we can access critical models
            try:
                from parks.models import Park
                from rides.models import Ride
                from django.contrib.auth import get_user_model

                User = get_user_model()

                # Test that we can query these models (just count, don't load data)

                # Test that we can query these models (just count, don't load
                # data)
                park_count = Park.objects.count()
                ride_count = Ride.objects.count()
                user_count = User.objects.count()

                logger.debug(f"Model counts - Parks: {park_count}, Rides: {ride_count}, Users: {user_count}")

                logger.debug(
                    f"Model counts - Parks: {park_count}, Rides: {ride_count}, Users: {user_count}"
                )

            except Exception as e:
                self.add_error(f"Model access check failed: {e}")

            # Check media and static file configuration
            from django.conf import settings
            import os

            if not os.path.exists(settings.MEDIA_ROOT):
                self.add_error(f"Media directory does not exist: {settings.MEDIA_ROOT}")

                self.add_error(
                    f"Media directory does not exist: {
                        settings.MEDIA_ROOT}"
                )

            if not os.path.exists(settings.STATIC_ROOT) and not settings.DEBUG:
                self.add_error(f"Static directory does not exist: {settings.STATIC_ROOT}")

                self.add_error(
                    f"Static directory does not exist: {settings.STATIC_ROOT}"
                )

        except Exception as e:
            self.add_error(f"Application health check failed: {e}")


class ExternalServiceHealthCheck(BaseHealthCheckBackend):
    """Check external services and dependencies"""

    critical_service = False

    def check_status(self):
        # Check email service if configured
        try:
            from django.core.mail import get_connection
            from django.conf import settings

            if hasattr(settings, 'EMAIL_BACKEND') and 'console' not in settings.EMAIL_BACKEND:

            if (
                hasattr(settings, "EMAIL_BACKEND")
                and "console" not in settings.EMAIL_BACKEND
            ):
                # Only check if not using console backend
                connection = get_connection()
                if hasattr(connection, 'open'):
                if hasattr(connection, "open"):
                    try:
                        connection.open()
                        connection.close()
                    except Exception as e:
                        logger.warning(f"Email service check failed: {e}")
                        # Don't fail the health check for email issues in development

                        # Don't fail the health check for email issues in
                        # development

        except Exception as e:
            logger.debug(f"Email service check error: {e}")

        # Check if Sentry is configured and working
        try:
            import sentry_sdk

            if sentry_sdk.Hub.current.client:
                # Sentry is configured
                try:
                    # Test that we can capture a test message (this won't actually send to Sentry)
                    # Test that we can capture a test message (this won't
                    # actually send to Sentry)
                    with sentry_sdk.push_scope() as scope:
                        scope.set_tag("health_check", True)
                        # Don't actually send a message, just verify the SDK is working
                        # Don't actually send a message, just verify the SDK is
                        # working
                        logger.debug("Sentry SDK is operational")
                except Exception as e:
                    logger.warning(f"Sentry SDK check failed: {e}")

        except ImportError:
            logger.debug("Sentry SDK not installed")
        except Exception as e:
            logger.debug(f"Sentry check error: {e}")

        # Check Redis connection if configured
        try:
            from django.core.cache import caches
            from django.conf import settings

            cache_config = settings.CACHES.get('default', {})
            if 'redis' in cache_config.get('BACKEND', '').lower():

            cache_config = settings.CACHES.get("default", {})
            if "redis" in cache_config.get("BACKEND", "").lower():
                # Redis is configured, test basic connectivity
                redis_cache = caches['default']
                redis_cache.set('health_check_redis', 'test', 10)
                value = redis_cache.get('health_check_redis')
                if value != 'test':
                redis_cache = caches["default"]
                redis_cache.set("health_check_redis", "test", 10)
                value = redis_cache.get("health_check_redis")
                if value != "test":
                    self.add_error("Redis cache connectivity test failed")
                else:
                    redis_cache.delete('health_check_redis')

                    redis_cache.delete("health_check_redis")

        except Exception as e:
            logger.warning(f"Redis connectivity check failed: {e}")


class DiskSpaceHealthCheck(BaseHealthCheckBackend):
    """Check available disk space"""

    critical_service = False

    def check_status(self):
        try:
            import shutil
            from django.conf import settings

            # Check disk space for media directory
            media_usage = shutil.disk_usage(settings.MEDIA_ROOT)
            media_free_percent = (media_usage.free / media_usage.total) * 100

            # Check disk space for logs directory if it exists
            logs_dir = getattr(settings, 'BASE_DIR', '/tmp') / 'logs'
            logs_dir = getattr(settings, "BASE_DIR", "/tmp") / "logs"
            if logs_dir.exists():
                logs_usage = shutil.disk_usage(logs_dir)
                logs_free_percent = (logs_usage.free / logs_usage.total) * 100
            else:
                logs_free_percent = media_free_percent  # Use same as media

            # Alert thresholds
            if media_free_percent < 10:
                self.add_error(f"Critical disk space: {media_free_percent:.1f}% free in media directory")
                self.add_error(
                    f"Critical disk space: {
                        media_free_percent:.1f}% free in media directory"
                )
            elif media_free_percent < 20:
                logger.warning(f"Low disk space: {media_free_percent:.1f}% free in media directory")

                logger.warning(
                    f"Low disk space: {
                        media_free_percent:.1f}% free in media directory"
                )

            if logs_free_percent < 10:
                self.add_error(f"Critical disk space: {logs_free_percent:.1f}% free in logs directory")
                self.add_error(
                    f"Critical disk space: {
                        logs_free_percent:.1f}% free in logs directory"
                )
            elif logs_free_percent < 20:
                logger.warning(f"Low disk space: {logs_free_percent:.1f}% free in logs directory")

                logger.warning(
                    f"Low disk space: {
                        logs_free_percent:.1f}% free in logs directory"
                )

        except Exception as e:
            logger.warning(f"Disk space check failed: {e}")
            # Don't fail health check for disk space issues in development

@@ -5,16 +5,22 @@ from django.conf import settings
from typing import Any, Dict, Optional
from django.db.models import QuerySet


class DiffMixin:
    """Mixin to add diffing capabilities to models"""

    def get_prev_record(self) -> Optional[Any]:
        """Get the previous record for this instance"""
        try:
            return type(self).objects.filter(
                pgh_created_at__lt=self.pgh_created_at,
                pgh_obj_id=self.pgh_obj_id
            ).order_by('-pgh_created_at').first()
            return (
                type(self)
                .objects.filter(
                    pgh_created_at__lt=self.pgh_created_at,
                    pgh_obj_id=self.pgh_obj_id,
                )
                .order_by("-pgh_created_at")
                .first()
            )
        except (AttributeError, TypeError):
            return None

@@ -25,15 +31,20 @@ class DiffMixin:
            return {}

        skip_fields = {
            'pgh_id', 'pgh_created_at', 'pgh_label',
            'pgh_obj_id', 'pgh_context_id', '_state',
            'created_at', 'updated_at'
            "pgh_id",
            "pgh_created_at",
            "pgh_label",
            "pgh_obj_id",
            "pgh_context_id",
            "_state",
            "created_at",
            "updated_at",
        }

        changes = {}
        for field, value in self.__dict__.items():
            # Skip internal fields and those we don't want to track
            if field.startswith('_') or field in skip_fields or field.endswith('_id'):
            if field.startswith("_") or field in skip_fields or field.endswith("_id"):
                continue

            try:
@@ -41,16 +52,18 @@ class DiffMixin:
                new_value = value
                if old_value != new_value:
                    changes[field] = {
                        "old": str(old_value) if old_value is not None else "None",
                        "new": str(new_value) if new_value is not None else "None"
                        "old": (str(old_value) if old_value is not None else "None"),
                        "new": (str(new_value) if new_value is not None else "None"),
                    }
            except AttributeError:
                continue

        return changes


class TrackedModel(models.Model):
    """Abstract base class for models that need history tracking"""

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

@@ -61,16 +74,18 @@ class TrackedModel(models.Model):
        """Get all history records for this instance in chronological order"""
        event_model = self.events.model  # pghistory provides this automatically
        if event_model:
            return event_model.objects.filter(
                pgh_obj_id=self.pk
            ).order_by('-pgh_created_at')
            return event_model.objects.filter(pgh_obj_id=self.pk).order_by(
                "-pgh_created_at"
            )
        return self.__class__.objects.none()


class HistoricalSlug(models.Model):
    """Track historical slugs for models"""

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    content_object = GenericForeignKey("content_type", "object_id")
    slug = models.SlugField(max_length=255)
    created_at = models.DateTimeField(auto_now_add=True)
    user = models.ForeignKey(
@@ -78,14 +93,14 @@ class HistoricalSlug(models.Model):
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='historical_slugs'
        related_name="historical_slugs",
    )

    class Meta:
        unique_together = ('content_type', 'slug')
        unique_together = ("content_type", "slug")
        indexes = [
            models.Index(fields=['content_type', 'object_id']),
            models.Index(fields=['slug']),
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["slug"]),
        ]

    def __str__(self) -> str:

190
core/logging.py
@@ -12,48 +12,52 @@ from django.utils import timezone

class ThrillWikiFormatter(logging.Formatter):
    """Custom formatter for ThrillWiki logs with structured output."""

    def format(self, record):
        # Add timestamp if not present
        if not hasattr(record, 'timestamp'):
        if not hasattr(record, "timestamp"):
            record.timestamp = timezone.now().isoformat()

        # Add request context if available
        if hasattr(record, 'request'):
            record.request_id = getattr(record.request, 'id', 'unknown')
            record.user_id = getattr(record.request.user, 'id', 'anonymous') if hasattr(record.request, 'user') else 'unknown'
            record.path = getattr(record.request, 'path', 'unknown')
            record.method = getattr(record.request, 'method', 'unknown')

        if hasattr(record, "request"):
            record.request_id = getattr(record.request, "id", "unknown")
            record.user_id = (
                getattr(record.request.user, "id", "anonymous")
                if hasattr(record.request, "user")
                else "unknown"
            )
            record.path = getattr(record.request, "path", "unknown")
            record.method = getattr(record.request, "method", "unknown")

        # Structure the log message
        if hasattr(record, 'extra_data'):
        if hasattr(record, "extra_data"):
            record.structured_data = record.extra_data

        return super().format(record)


def get_logger(name: str) -> logging.Logger:
    """
    Get a configured logger for ThrillWiki components.

    Args:
        name: Logger name (usually __name__)

    Returns:
        Configured logger instance
    """
    logger = logging.getLogger(name)

    # Only configure if not already configured
    if not logger.handlers:
        handler = logging.StreamHandler(sys.stdout)
        formatter = ThrillWikiFormatter(
            fmt='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
            fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )
        handler.setFormatter(formatter)
        logger.addHandler(handler)
        logger.setLevel(logging.INFO if settings.DEBUG else logging.WARNING)

    return logger


@@ -63,11 +67,11 @@ def log_exception(
    *,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.ERROR
    level: int = logging.ERROR,
) -> None:
    """
    Log an exception with structured context.

    Args:
        logger: Logger instance
        exception: Exception to log
@@ -76,19 +80,30 @@ def log_exception(
        level: Log level
    """
    log_data = {
        'exception_type': exception.__class__.__name__,
        'exception_message': str(exception),
        'context': context or {}
        "exception_type": exception.__class__.__name__,
        "exception_message": str(exception),
        "context": context or {},
    }

    if request:
        log_data.update({
            'request_path': getattr(request, 'path', 'unknown'),
            'request_method': getattr(request, 'method', 'unknown'),
            'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown'
        })

    logger.log(level, f"Exception occurred: {exception}", extra={'extra_data': log_data}, exc_info=True)
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
            }
        )

    logger.log(
        level,
        f"Exception occurred: {exception}",
        extra={"extra_data": log_data},
        exc_info=True,
    )


def log_business_event(
@@ -98,11 +113,11 @@ def log_business_event(
    message: str,
    context: Optional[Dict[str, Any]] = None,
    request=None,
    level: int = logging.INFO
    level: int = logging.INFO,
) -> None:
    """
    Log a business event with structured context.

    Args:
        logger: Logger instance
        event_type: Type of business event
@@ -111,19 +126,22 @@ def log_business_event(
        request: Django request object
        level: Log level
    """
    log_data = {
        'event_type': event_type,
        'context': context or {}
    }

    log_data = {"event_type": event_type, "context": context or {}}

    if request:
        log_data.update({
            'request_path': getattr(request, 'path', 'unknown'),
            'request_method': getattr(request, 'method', 'unknown'),
            'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown'
        })

    logger.log(level, message, extra={'extra_data': log_data})
        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
            }
        )

    logger.log(level, message, extra={"extra_data": log_data})


def log_performance_metric(
@@ -132,11 +150,11 @@ def log_performance_metric(
    *,
    duration_ms: float,
    context: Optional[Dict[str, Any]] = None,
    level: int = logging.INFO
    level: int = logging.INFO,
) -> None:
    """
    Log a performance metric.

    Args:
        logger: Logger instance
        operation: Operation name
@@ -145,14 +163,14 @@ def log_performance_metric(
        level: Log level
    """
    log_data = {
        'metric_type': 'performance',
        'operation': operation,
        'duration_ms': duration_ms,
        'context': context or {}
        "metric_type": "performance",
        "operation": operation,
        "duration_ms": duration_ms,
        "context": context or {},
    }

    message = f"Performance: {operation} took {duration_ms:.2f}ms"
    logger.log(level, message, extra={'extra_data': log_data})
    logger.log(level, message, extra={"extra_data": log_data})


def log_api_request(
@@ -161,11 +179,11 @@ def log_api_request(
    *,
    response_status: Optional[int] = None,
    duration_ms: Optional[float] = None,
    level: int = logging.INFO
    level: int = logging.INFO,
) -> None:
    """
    Log an API request with context.

    Args:
        logger: Logger instance
        request: Django request object
@@ -174,21 +192,25 @@ def log_api_request(
        level: Log level
    """
    log_data = {
        'request_type': 'api',
        'path': getattr(request, 'path', 'unknown'),
        'method': getattr(request, 'method', 'unknown'),
        'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown',
        'response_status': response_status,
        'duration_ms': duration_ms
        "request_type": "api",
        "path": getattr(request, "path", "unknown"),
        "method": getattr(request, "method", "unknown"),
        "user_id": (
            getattr(request.user, "id", "anonymous")
            if hasattr(request, "user")
            else "unknown"
        ),
        "response_status": response_status,
        "duration_ms": duration_ms,
    }

    message = f"API Request: {request.method} {request.path}"
    if response_status:
        message += f" -> {response_status}"
    if duration_ms:
        message += f" ({duration_ms:.2f}ms)"

    logger.log(level, message, extra={'extra_data': log_data})

    logger.log(level, message, extra={"extra_data": log_data})


def log_security_event(
@@ -196,13 +218,13 @@ def log_security_event(
    event_type: str,
    *,
    message: str,
    severity: str = 'medium',
    severity: str = "medium",
    context: Optional[Dict[str, Any]] = None,
    request=None
    request=None,
) -> None:
    """
    Log a security-related event.

    Args:
        logger: Logger instance
        event_type: Type of security event
@@ -212,22 +234,28 @@ def log_security_event(
        request: Django request object
    """
    log_data = {
        'security_event': True,
        'event_type': event_type,
        'severity': severity,
        'context': context or {}
        "security_event": True,
        "event_type": event_type,
        "severity": severity,
        "context": context or {},
    }

    if request:
        log_data.update({
            'request_path': getattr(request, 'path', 'unknown'),
            'request_method': getattr(request, 'method', 'unknown'),
            'user_id': getattr(request.user, 'id', 'anonymous') if hasattr(request, 'user') else 'unknown',
            'remote_addr': request.META.get('REMOTE_ADDR', 'unknown'),
            'user_agent': request.META.get('HTTP_USER_AGENT', 'unknown')
        })

        log_data.update(
            {
                "request_path": getattr(request, "path", "unknown"),
                "request_method": getattr(request, "method", "unknown"),
                "user_id": (
                    getattr(request.user, "id", "anonymous")
                    if hasattr(request, "user")
                    else "unknown"
                ),
                "remote_addr": request.META.get("REMOTE_ADDR", "unknown"),
                "user_agent": request.META.get("HTTP_USER_AGENT", "unknown"),
            }
        )

    # Use WARNING for medium/high, ERROR for critical
    level = logging.ERROR if severity in ['high', 'critical'] else logging.WARNING

    logger.log(level, f"SECURITY: {message}", extra={'extra_data': log_data})
    level = logging.ERROR if severity in ["high", "critical"] else logging.WARNING

    logger.log(level, f"SECURITY: {message}", extra={"extra_data": log_data})

@@ -4,17 +4,18 @@ from parks.models import Park
from rides.models import Ride
from core.analytics import PageView


class Command(BaseCommand):
    help = 'Updates trending parks and rides cache based on views in the last 24 hours'
    help = "Updates trending parks and rides cache based on views in the last 24 hours"

    def handle(self, *args, **kwargs):
        """
        Updates the trending parks and rides in the cache.

        This command is designed to be run every hour via cron to keep the trending
        items up to date. It looks at page views from the last 24 hours and caches
        the top 10 most viewed parks and rides.

        The cached data is used by the home page to display trending items without
        having to query the database on every request.
        """
@@ -23,12 +24,12 @@ class Command(BaseCommand):
        trending_rides = PageView.get_trending_items(Ride, hours=24, limit=10)

        # Cache the results for 1 hour
        cache.set('trending_parks', trending_parks, 3600)  # 3600 seconds = 1 hour
        cache.set('trending_rides', trending_rides, 3600)
        cache.set("trending_parks", trending_parks, 3600)  # 3600 seconds = 1 hour
        cache.set("trending_rides", trending_rides, 3600)

        self.stdout.write(
            self.style.SUCCESS(
                'Successfully updated trending parks and rides. '
                'Cached 10 items each for parks and rides based on views in the last 24 hours.'
                "Successfully updated trending parks and rides. "
                "Cached 10 items each for parks and rides based on views in the last 24 hours."
            )
        )

158
core/managers.py
@@ -3,9 +3,9 @@ Custom managers and QuerySets for optimized database patterns.
Following Django styleguide best practices for database access.
"""

from typing import Optional, List, Dict, Any, Union
from typing import Optional, List, Union
from django.db import models
from django.db.models import Q, F, Count, Avg, Max, Min, Sum, Prefetch
from django.db.models import Q, Count, Avg, Max
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import Distance
from django.utils import timezone
@@ -14,53 +14,53 @@ from datetime import timedelta

class BaseQuerySet(models.QuerySet):
    """Base QuerySet with common optimizations and patterns."""

    def active(self):
        """Filter for active/enabled records."""
        if hasattr(self.model, 'is_active'):
        if hasattr(self.model, "is_active"):
            return self.filter(is_active=True)
        return self

    def published(self):
        """Filter for published records."""
        if hasattr(self.model, 'is_published'):
        """Filter for published records."""
        if hasattr(self.model, "is_published"):
            return self.filter(is_published=True)
        return self

    def recent(self, *, days: int = 30):
        """Filter for recently created records."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(created_at__gte=cutoff_date)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        """
        Full-text search across specified fields.

        Args:
            query: Search query string
            fields: List of field names to search (defaults to name, description)
        """
        if not query:
            return self

        if fields is None:
            fields = ['name', 'description'] if hasattr(self.model, 'name') else []

            fields = ["name", "description"] if hasattr(self.model, "name") else []

        q_objects = Q()
        for field in fields:
            if hasattr(self.model, field):
                q_objects |= Q(**{f"{field}__icontains": query})

        return self.filter(q_objects) if q_objects else self

    def with_stats(self):
        """Add basic statistics annotations."""
        return self

    def optimized_for_list(self):
        """Optimize queryset for list display."""
        return self.select_related().prefetch_related()

    def optimized_for_detail(self):
        """Optimize queryset for detail display."""
        return self.select_related().prefetch_related()
@@ -68,196 +68,206 @@ class BaseQuerySet(models.QuerySet):

class BaseManager(models.Manager):
    """Base manager with common patterns."""

    def get_queryset(self):
        return BaseQuerySet(self.model, using=self._db)

    def active(self):
        return self.get_queryset().active()

    def published(self):
        return self.get_queryset().published()

    def recent(self, *, days: int = 30):
        return self.get_queryset().recent(days=days)

    def search(self, *, query: str, fields: Optional[List[str]] = None):
        return self.get_queryset().search(query=query, fields=fields)


class LocationQuerySet(BaseQuerySet):
    """QuerySet for location-based models with geographic functionality."""

    def near_point(self, *, point: Point, distance_km: float = 50):
        """Filter locations near a geographic point."""
        if hasattr(self.model, 'point'):
            return self.filter(
                point__distance_lte=(point, Distance(km=distance_km))
            ).distance(point).order_by('distance')
        if hasattr(self.model, "point"):
            return (
                self.filter(point__distance_lte=(point, Distance(km=distance_km)))
                .distance(point)
                .order_by("distance")
            )
        return self

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        """Filter locations within geographic bounds."""
        if hasattr(self.model, 'point'):
        if hasattr(self.model, "point"):
            return self.filter(
                point__latitude__gte=south,
                point__latitude__lte=north,
                point__longitude__gte=west,
                point__longitude__lte=east
                point__longitude__lte=east,
            )
        return self

    def by_country(self, *, country: str):
        """Filter by country."""
        if hasattr(self.model, 'country'):
        if hasattr(self.model, "country"):
            return self.filter(country__iexact=country)
        return self

    def by_region(self, *, state: str):
        """Filter by state/region."""
        if hasattr(self.model, 'state'):
        if hasattr(self.model, "state"):
            return self.filter(state__iexact=state)
        return self

    def by_city(self, *, city: str):
        """Filter by city."""
        if hasattr(self.model, 'city'):
        if hasattr(self.model, "city"):
            return self.filter(city__iexact=city)
        return self


class LocationManager(BaseManager):
    """Manager for location-based models."""

    def get_queryset(self):
        return LocationQuerySet(self.model, using=self._db)

    def near_point(self, *, point: Point, distance_km: float = 50):
        return self.get_queryset().near_point(point=point, distance_km=distance_km)

    def within_bounds(self, *, north: float, south: float, east: float, west: float):
        return self.get_queryset().within_bounds(north=north, south=south, east=east, west=west)
        return self.get_queryset().within_bounds(
            north=north, south=south, east=east, west=west
        )


class ReviewableQuerySet(BaseQuerySet):
    """QuerySet for models that can be reviewed."""

    def with_review_stats(self):
        """Add review statistics annotations."""
        return self.annotate(
            review_count=Count('reviews', filter=Q(reviews__is_published=True)),
            average_rating=Avg('reviews__rating', filter=Q(reviews__is_published=True)),
            latest_review_date=Max('reviews__created_at', filter=Q(reviews__is_published=True))
            review_count=Count("reviews", filter=Q(reviews__is_published=True)),
            average_rating=Avg("reviews__rating", filter=Q(reviews__is_published=True)),
            latest_review_date=Max(
                "reviews__created_at", filter=Q(reviews__is_published=True)
            ),
        )

    def highly_rated(self, *, min_rating: float = 8.0):
        """Filter for highly rated items."""
        return self.with_review_stats().filter(average_rating__gte=min_rating)

    def recently_reviewed(self, *, days: int = 30):
        """Filter for items with recent reviews."""
        cutoff_date = timezone.now() - timedelta(days=days)
        return self.filter(reviews__created_at__gte=cutoff_date, reviews__is_published=True).distinct()
        return self.filter(
            reviews__created_at__gte=cutoff_date, reviews__is_published=True
        ).distinct()


class ReviewableManager(BaseManager):
    """Manager for reviewable models."""

    def get_queryset(self):
        return ReviewableQuerySet(self.model, using=self._db)

    def with_review_stats(self):
        return self.get_queryset().with_review_stats()

    def highly_rated(self, *, min_rating: float = 8.0):
        return self.get_queryset().highly_rated(min_rating=min_rating)


class HierarchicalQuerySet(BaseQuerySet):
    """QuerySet for hierarchical models (with parent/child relationships)."""

    def root_level(self):
        """Filter for root-level items (no parent)."""
        if hasattr(self.model, 'parent'):
        if hasattr(self.model, "parent"):
            return self.filter(parent__isnull=True)
        return self

    def children_of(self, *, parent_id: int):
        """Get children of a specific parent."""
        if hasattr(self.model, 'parent'):
        if hasattr(self.model, "parent"):
            return self.filter(parent_id=parent_id)
        return self

    def with_children_count(self):
        """Add count of children."""
        if hasattr(self.model, 'children'):
            return self.annotate(children_count=Count('children'))
        if hasattr(self.model, "children"):
            return self.annotate(children_count=Count("children"))
        return self


class HierarchicalManager(BaseManager):
    """Manager for hierarchical models."""

    def get_queryset(self):
        return HierarchicalQuerySet(self.model, using=self._db)

    def root_level(self):
        return self.get_queryset().root_level()


class TimestampedQuerySet(BaseQuerySet):
    """QuerySet for models with created_at/updated_at timestamps."""

    def created_between(self, *, start_date, end_date):
        """Filter by creation date range."""
        return self.filter(created_at__date__range=[start_date, end_date])

    def updated_since(self, *, since_date):
        """Filter for records updated since a date."""
        return self.filter(updated_at__gte=since_date)

    def by_creation_date(self, *, descending: bool = True):
        """Order by creation date."""
        order = '-created_at' if descending else 'created_at'
        order = "-created_at" if descending else "created_at"
        return self.order_by(order)


class TimestampedManager(BaseManager):
    """Manager for timestamped models."""

    def get_queryset(self):
        return TimestampedQuerySet(self.model, using=self._db)

    def created_between(self, *, start_date, end_date):
        return self.get_queryset().created_between(start_date=start_date, end_date=end_date)
        return self.get_queryset().created_between(
            start_date=start_date, end_date=end_date
        )


class StatusQuerySet(BaseQuerySet):
    """QuerySet for models with status fields."""

    def with_status(self, *, status: Union[str, List[str]]):
        """Filter by status."""
        if isinstance(status, list):
            return self.filter(status__in=status)
        return self.filter(status=status)

    def operating(self):
        """Filter for operating/active status."""
        return self.filter(status='OPERATING')

        return self.filter(status="OPERATING")

    def closed(self):
        """Filter for closed status."""
        return self.filter(status__in=['CLOSED_TEMP', 'CLOSED_PERM'])
        return self.filter(status__in=["CLOSED_TEMP", "CLOSED_PERM"])


class StatusManager(BaseManager):
    """Manager for status-based models."""

    def get_queryset(self):
        return StatusQuerySet(self.model, using=self._db)

    def operating(self):
        return self.get_queryset().operating()

    def closed(self):
        return self.get_queryset().closed()

@@ -8,15 +8,15 @@ from .performance_middleware import (
    PerformanceMiddleware,
    QueryCountMiddleware,
    DatabaseConnectionMiddleware,
    CachePerformanceMiddleware
    CachePerformanceMiddleware,
)

# Make all middleware classes available at the package level
__all__ = [
    'PageViewMiddleware',
    'PgHistoryContextMiddleware',
    'PerformanceMiddleware',
    'QueryCountMiddleware',
    'DatabaseConnectionMiddleware',
    'CachePerformanceMiddleware'
    "PageViewMiddleware",
    "PgHistoryContextMiddleware",
    "PerformanceMiddleware",
    "QueryCountMiddleware",
    "DatabaseConnectionMiddleware",
    "CachePerformanceMiddleware",
]

@@ -13,12 +13,19 @@ from core.analytics import PageView

class RequestContextProvider(pghistory.context):
    """Custom context provider for pghistory that extracts information from the request."""

    def __call__(self, request: WSGIRequest) -> dict:
        return {
            'user': str(request.user) if request.user and not isinstance(request.user, AnonymousUser) else None,
            'ip': request.META.get('REMOTE_ADDR'),
            'user_agent': request.META.get('HTTP_USER_AGENT'),
            'session_key': request.session.session_key if hasattr(request, 'session') else None
            "user": (
                str(request.user)
                if request.user and not isinstance(request.user, AnonymousUser)
                else None
            ),
            "ip": request.META.get("REMOTE_ADDR"),
            "user_agent": request.META.get("HTTP_USER_AGENT"),
            "session_key": (
                request.session.session_key if hasattr(request, "session") else None
            ),
        }


@@ -30,6 +37,7 @@ class PgHistoryContextMiddleware:
    """
    Middleware that ensures request object is available to pghistory context.
    """

    def __init__(self, get_response):
        self.get_response = get_response

@@ -40,14 +48,14 @@ class PgHistoryContextMiddleware:

class PageViewMiddleware(MiddlewareMixin):
    """Middleware to track page views for DetailView-based pages."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        # Only track GET requests
        if request.method != 'GET':
        if request.method != "GET":
            return None

        # Get view class if it exists
        view_class = getattr(view_func, 'view_class', None)
        view_class = getattr(view_func, "view_class", None)
        if not view_class or not issubclass(view_class, DetailView):
            return None

@@ -66,8 +74,8 @@ class PageViewMiddleware(MiddlewareMixin):
            PageView.objects.create(
                content_type=ContentType.objects.get_for_model(obj.__class__),
                object_id=obj.pk,
                ip_address=request.META.get('REMOTE_ADDR', ''),
                user_agent=request.META.get('HTTP_USER_AGENT', '')[:512]
                ip_address=request.META.get("REMOTE_ADDR", ""),
                user_agent=request.META.get("HTTP_USER_AGENT", "")[:512],
            )
        except Exception:
            # Fail silently to not interrupt the request

@@ -8,131 +8,169 @@ from django.db import connection
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings

performance_logger = logging.getLogger('performance')
performance_logger = logging.getLogger("performance")
logger = logging.getLogger(__name__)


class PerformanceMiddleware(MiddlewareMixin):
    """Middleware to collect performance metrics for each request"""

    def process_request(self, request):
        """Initialize performance tracking for the request"""
        request._performance_start_time = time.time()
        request._performance_initial_queries = len(connection.queries) if hasattr(connection, 'queries') else 0
        request._performance_initial_queries = (
            len(connection.queries) if hasattr(connection, "queries") else 0
        )
        return None

    def process_response(self, request, response):
        """Log performance metrics after response is ready"""
        # Skip performance tracking for certain paths
        skip_paths = ['/health/', '/admin/jsi18n/', '/static/', '/media/', '/__debug__/']
        skip_paths = [
            "/health/",
            "/admin/jsi18n/",
            "/static/",
            "/media/",
            "/__debug__/",
        ]
        if any(request.path.startswith(path) for path in skip_paths):
            return response

        # Calculate metrics
        end_time = time.time()
        start_time = getattr(request, '_performance_start_time', end_time)
        start_time = getattr(request, "_performance_start_time", end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, '_performance_initial_queries', 0)
        total_queries = len(connection.queries) - initial_queries if hasattr(connection, 'queries') else 0


        initial_queries = getattr(request, "_performance_initial_queries", 0)
        total_queries = (
            len(connection.queries) - initial_queries
            if hasattr(connection, "queries")
            else 0
        )

        # Get content length
        content_length = 0
        if hasattr(response, 'content'):
        if hasattr(response, "content"):
            content_length = len(response.content)
        elif hasattr(response, 'streaming_content'):
        elif hasattr(response, "streaming_content"):
            # For streaming responses, we can't easily measure content length
            content_length = -1

        # Build performance data
        performance_data = {
            'path': request.path,
            'method': request.method,
            'status_code': response.status_code,
            'duration_ms': round(duration * 1000, 2),
            'duration_seconds': round(duration, 3),
            'query_count': total_queries,
            'content_length_bytes': content_length,
            'user_id': getattr(request.user, 'id', None) if hasattr(request, 'user') and request.user.is_authenticated else None,
            'user_agent': request.META.get('HTTP_USER_AGENT', '')[:100],  # Truncate user agent
            'remote_addr': self._get_client_ip(request),
            "path": request.path,
            "method": request.method,
            "status_code": response.status_code,
            "duration_ms": round(duration * 1000, 2),
            "duration_seconds": round(duration, 3),
            "query_count": total_queries,
            "content_length_bytes": content_length,
            "user_id": (
                getattr(request.user, "id", None)
                if hasattr(request, "user") and request.user.is_authenticated
                else None
            ),
            "user_agent": request.META.get("HTTP_USER_AGENT", "")[
                :100
            ],  # Truncate user agent
            "remote_addr": self._get_client_ip(request),
        }

        # Add query details in debug mode
        if settings.DEBUG and hasattr(connection, 'queries') and total_queries > 0:
        if settings.DEBUG and hasattr(connection, "queries") and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            performance_data['queries'] = [
            performance_data["queries"] = [
                {
                    'sql': query['sql'][:200] + '...' if len(query['sql']) > 200 else query['sql'],
                    'time': float(query['time'])
                    "sql": (
                        query["sql"][:200] + "..."
                        if len(query["sql"]) > 200
                        else query["sql"]
                    ),
                    "time": float(query["time"]),
                }
                for query in recent_queries[-10:]  # Last 10 queries only
            ]

            # Identify slow queries
            slow_queries = [q for q in recent_queries if float(q['time']) > 0.1]
            slow_queries = [q for q in recent_queries if float(q["time"]) > 0.1]
            if slow_queries:
                performance_data['slow_query_count'] = len(slow_queries)
                performance_data['slowest_query_time'] = max(float(q['time']) for q in slow_queries)

                performance_data["slow_query_count"] = len(slow_queries)
                performance_data["slowest_query_time"] = max(
                    float(q["time"]) for q in slow_queries
                )

        # Determine log level based on performance
        log_level = self._get_log_level(duration, total_queries, response.status_code)

        # Log the performance data
        performance_logger.log(
            log_level,
            f"Request performance: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {response.status_code}",
            extra=performance_data
            extra=performance_data,
        )

        # Add performance headers for debugging (only in debug mode)
        if settings.DEBUG:
            response['X-Response-Time'] = f"{duration * 1000:.2f}ms"
            response['X-Query-Count'] = str(total_queries)
            if total_queries > 0 and hasattr(connection, 'queries'):
                total_query_time = sum(float(q['time']) for q in connection.queries[-total_queries:])
                response['X-Query-Time'] = f"{total_query_time * 1000:.2f}ms"

            response["X-Response-Time"] = f"{duration * 1000:.2f}ms"
            response["X-Query-Count"] = str(total_queries)
            if total_queries > 0 and hasattr(connection, "queries"):
                total_query_time = sum(
                    float(q["time"]) for q in connection.queries[-total_queries:]
                )
                response["X-Query-Time"] = f"{total_query_time * 1000:.2f}ms"

        return response

    def process_exception(self, request, exception):
        """Log performance data even when an exception occurs"""
        end_time = time.time()
        start_time = getattr(request, '_performance_start_time', end_time)
        start_time = getattr(request, "_performance_start_time", end_time)
        duration = end_time - start_time

        initial_queries = getattr(request, '_performance_initial_queries', 0)
        total_queries = len(connection.queries) - initial_queries if hasattr(connection, 'queries') else 0

        performance_data = {
            'path': request.path,
            'method': request.method,
            'status_code': 500,  # Exception occurred
            'duration_ms': round(duration * 1000, 2),
            'query_count': total_queries,
            'exception': str(exception),
            'exception_type': type(exception).__name__,
            'user_id': getattr(request.user, 'id', None) if hasattr(request, 'user') and request.user.is_authenticated else None,
        }

        performance_logger.error(
            f"Request exception: {request.method} {request.path} - "
            f"{duration:.3f}s, {total_queries} queries, {type(exception).__name__}: {exception}",
            extra=performance_data

        initial_queries = getattr(request, "_performance_initial_queries", 0)
        total_queries = (
            len(connection.queries) - initial_queries
            if hasattr(connection, "queries")
            else 0
        )

        performance_data = {
            "path": request.path,
            "method": request.method,
            "status_code": 500,  # Exception occurred
            "duration_ms": round(duration * 1000, 2),
            "query_count": total_queries,
            "exception": str(exception),
            "exception_type": type(exception).__name__,
            "user_id": (
                getattr(request.user, "id", None)
                if hasattr(request, "user") and request.user.is_authenticated
                else None
            ),
        }

        performance_logger.error(
            f"Request exception: {
                request.method} {
                request.path} - "
            f"{
                duration:.3f}s, {total_queries} queries, {
                type(exception).__name__}: {exception}",
            extra=performance_data,
        )

        return None  # Don't handle the exception, just log it

def _get_client_ip(self, request):
|
||||
"""Extract client IP address from request"""
|
||||
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
|
||||
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
|
||||
if x_forwarded_for:
|
||||
ip = x_forwarded_for.split(',')[0].strip()
|
||||
ip = x_forwarded_for.split(",")[0].strip()
|
||||
else:
|
||||
ip = request.META.get('REMOTE_ADDR', '')
|
||||
ip = request.META.get("REMOTE_ADDR", "")
|
||||
return ip
|
||||
|
||||
|
||||
def _get_log_level(self, duration, query_count, status_code):
|
||||
"""Determine appropriate log level based on performance metrics"""
|
||||
# Error responses
|
||||
@@ -140,7 +178,7 @@ class PerformanceMiddleware(MiddlewareMixin):
|
||||
return logging.ERROR
|
||||
elif status_code >= 400:
|
||||
return logging.WARNING
|
||||
|
||||
|
||||
# Performance-based log levels
|
||||
if duration > 5.0: # Very slow requests
|
||||
return logging.ERROR
|
||||
@@ -154,50 +192,55 @@ class PerformanceMiddleware(MiddlewareMixin):
|
||||
|
||||
class QueryCountMiddleware(MiddlewareMixin):
    """Middleware to track and limit query counts per request"""

    def __init__(self, get_response):
        self.get_response = get_response
        self.query_limit = getattr(settings, 'MAX_QUERIES_PER_REQUEST', 50)
        self.query_limit = getattr(settings, "MAX_QUERIES_PER_REQUEST", 50)
        super().__init__(get_response)

    def process_request(self, request):
        """Initialize query tracking"""
        request._query_count_start = len(connection.queries) if hasattr(connection, 'queries') else 0
        request._query_count_start = (
            len(connection.queries) if hasattr(connection, "queries") else 0
        )
        return None

    def process_response(self, request, response):
        """Check query count and warn if excessive"""
        if not hasattr(connection, 'queries'):
        if not hasattr(connection, "queries"):
            return response

        start_count = getattr(request, '_query_count_start', 0)
        start_count = getattr(request, "_query_count_start", 0)
        current_count = len(connection.queries)
        request_query_count = current_count - start_count

        if request_query_count > self.query_limit:
            logger.warning(
                f"Excessive query count: {request.path} executed {request_query_count} queries "
                f"(limit: {self.query_limit})",
                extra={
                    'path': request.path,
                    'method': request.method,
                    'query_count': request_query_count,
                    'query_limit': self.query_limit,
                    'excessive_queries': True
                }
                    "path": request.path,
                    "method": request.method,
                    "query_count": request_query_count,
                    "query_limit": self.query_limit,
                    "excessive_queries": True,
                },
            )

        return response


class DatabaseConnectionMiddleware(MiddlewareMixin):
    """Middleware to monitor database connection health"""

    def process_request(self, request):
        """Check database connection at start of request"""
        try:
            # Simple connection test
            from django.db import connection

            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
@@ -205,64 +248,70 @@ class DatabaseConnectionMiddleware(MiddlewareMixin):
            logger.error(
                f"Database connection failed at request start: {e}",
                extra={
                    'path': request.path,
                    'method': request.method,
                    'database_error': str(e)
                }
                    "path": request.path,
                    "method": request.method,
                    "database_error": str(e),
                },
            )
            # Don't block the request, let Django handle the database error

        return None

    def process_response(self, request, response):
        """Close database connections properly"""
        try:
            from django.db import connection

            connection.close()
        except Exception as e:
            logger.warning(f"Error closing database connection: {e}")

        return response


class CachePerformanceMiddleware(MiddlewareMixin):
    """Middleware to monitor cache performance"""

    def process_request(self, request):
        """Initialize cache performance tracking"""
        request._cache_hits = 0
        request._cache_misses = 0
        request._cache_start_time = time.time()
        return None

    def process_response(self, request, response):
        """Log cache performance metrics"""
        cache_duration = time.time() - getattr(request, '_cache_start_time', time.time())
        cache_hits = getattr(request, '_cache_hits', 0)
        cache_misses = getattr(request, '_cache_misses', 0)

        cache_duration = time.time() - getattr(
            request, "_cache_start_time", time.time()
        )
        cache_hits = getattr(request, "_cache_hits", 0)
        cache_misses = getattr(request, "_cache_misses", 0)

        if cache_hits + cache_misses > 0:
            hit_rate = (cache_hits / (cache_hits + cache_misses)) * 100

            cache_data = {
                'path': request.path,
                'cache_hits': cache_hits,
                'cache_misses': cache_misses,
                'cache_hit_rate': round(hit_rate, 2),
                'cache_operations': cache_hits + cache_misses,
                'cache_duration': round(cache_duration * 1000, 2)  # milliseconds
                "path": request.path,
                "cache_hits": cache_hits,
                "cache_misses": cache_misses,
                "cache_hit_rate": round(hit_rate, 2),
                "cache_operations": cache_hits + cache_misses,
                # milliseconds
                "cache_duration": round(cache_duration * 1000, 2),
            }

            # Log cache performance
            if hit_rate < 50 and cache_hits + cache_misses > 5:
                logger.warning(
                    f"Low cache hit rate for {request.path}: {hit_rate:.1f}%",
                    extra=cache_data
                    extra=cache_data,
                )
            else:
                logger.debug(
                    f"Cache performance for {request.path}: {hit_rate:.1f}% hit rate",
                    extra=cache_data
                    extra=cache_data,
                )

        return response

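Taken together, these middlewares only run once they are listed in settings. A minimal sketch of the wiring (the dotted module path core.middleware is an assumption about this project's layout; the setting name comes from QueryCountMiddleware above):

    # settings.py (sketch)
    MIDDLEWARE = [
        "django.middleware.security.SecurityMiddleware",
        # ... the rest of Django's stock middleware ...
        "core.middleware.PerformanceMiddleware",         # request timing + query stats
        "core.middleware.QueryCountMiddleware",          # warns past the query budget
        "core.middleware.DatabaseConnectionMiddleware",  # connection health check
        "core.middleware.CachePerformanceMiddleware",    # cache hit-rate logging
    ]

    # Read by QueryCountMiddleware via getattr(settings, ...); 50 is its default.
    MAX_QUERIES_PER_REQUEST = 50
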
@@ -45,7 +45,8 @@ class Migration(migrations.Migration):
                    name="core_slughi_content_8bbf56_idx",
                ),
                models.Index(
                    fields=["old_slug"], name="core_slughi_old_slu_aaef7f_idx"
                    fields=["old_slug"],
                    name="core_slughi_old_slu_aaef7f_idx",
                ),
            ],
        },
@@ -71,7 +71,10 @@ class Migration(migrations.Migration):
                    ),
                ),
                ("object_id", models.PositiveIntegerField()),
                ("timestamp", models.DateTimeField(auto_now_add=True, db_index=True)),
                (
                    "timestamp",
                    models.DateTimeField(auto_now_add=True, db_index=True),
                ),
                ("ip_address", models.GenericIPAddressField()),
                ("user_agent", models.CharField(blank=True, max_length=512)),
                (
@@ -86,7 +89,8 @@ class Migration(migrations.Migration):
            options={
                "indexes": [
                    models.Index(
                        fields=["timestamp"], name="core_pagevi_timesta_757ebb_idx"
                        fields=["timestamp"],
                        name="core_pagevi_timesta_757ebb_idx",
                    ),
                    models.Index(
                        fields=["content_type", "object_id"],

@@ -1,9 +1,11 @@
from django.views.generic.list import MultipleObjectMixin


class HTMXFilterableMixin(MultipleObjectMixin):
    """
    A mixin that provides filtering capabilities for HTMX requests.
    """

    filter_class = None

    def get_queryset(self):
@@ -13,5 +15,5 @@ class HTMXFilterableMixin(MultipleObjectMixin):

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['filter'] = self.filterset
        return context
        context["filter"] = self.filterset
        return context

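A sketch of how the mixin might be combined with a standard ListView and a django-filter FilterSet (ParkFilter, the import paths, and the template name are invented for illustration):

    from django.views.generic import ListView

    from core.mixins import HTMXFilterableMixin  # path assumed from this diff
    from parks.filters import ParkFilter          # hypothetical FilterSet
    from parks.models import Park

    class ParkListView(HTMXFilterableMixin, ListView):
        model = Park
        filter_class = ParkFilter
        template_name = "parks/park_list.html"
        # The template can render {{ filter.form }} next to the filtered
        # object_list; HTMX requests would typically swap just the list fragment.
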
@@ -4,33 +4,39 @@ from django.contrib.contenttypes.models import ContentType
from django.utils.text import slugify
from core.history import TrackedModel


class SlugHistory(models.Model):
    """
    Model for tracking slug changes across all models that use slugs.
    Uses generic relations to work with any model.
    """

    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.CharField(max_length=50)  # Using CharField to work with our custom IDs
    content_object = GenericForeignKey('content_type', 'object_id')

    object_id = models.CharField(
        max_length=50
    )  # Using CharField to work with our custom IDs
    content_object = GenericForeignKey("content_type", "object_id")

    old_slug = models.SlugField(max_length=200)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        indexes = [
            models.Index(fields=['content_type', 'object_id']),
            models.Index(fields=['old_slug']),
            models.Index(fields=["content_type", "object_id"]),
            models.Index(fields=["old_slug"]),
        ]
        verbose_name_plural = 'Slug histories'
        ordering = ['-created_at']
        verbose_name_plural = "Slug histories"
        ordering = ["-created_at"]

    def __str__(self):
        return f"Old slug '{self.old_slug}' for {self.content_object}"


class SluggedModel(TrackedModel):
    """
    Abstract base model that provides slug functionality with history tracking.
    """

    name = models.CharField(max_length=200)
    slug = models.SlugField(max_length=200, unique=True)

@@ -47,7 +53,7 @@ class SluggedModel(TrackedModel):
                SlugHistory.objects.create(
                    content_type=ContentType.objects.get_for_model(self),
                    object_id=getattr(self, self.get_id_field_name()),
                    old_slug=old_instance.slug
                    old_slug=old_instance.slug,
                )
            except self.__class__.DoesNotExist:
                pass
@@ -81,24 +87,27 @@ class SluggedModel(TrackedModel):
        history_model = cls.get_history_model()
        history_entry = (
            history_model.objects.filter(slug=slug)
            .order_by('-pgh_created_at')
            .order_by("-pgh_created_at")
            .first()
        )

        if history_entry:
            return cls.objects.get(id=history_entry.pgh_obj_id), True

        # Try to find in manual slug history as fallback
        history = SlugHistory.objects.filter(
            content_type=ContentType.objects.get_for_model(cls),
            old_slug=slug
        ).order_by('-created_at').first()

        if history:
            return cls.objects.get(
                **{cls.get_id_field_name(): history.object_id}
            ), True

        raise cls.DoesNotExist(
            f"{cls.__name__} with slug '{slug}' does not exist"
        history = (
            SlugHistory.objects.filter(
                content_type=ContentType.objects.get_for_model(cls),
                old_slug=slug,
            )
            .order_by("-created_at")
            .first()
        )

        if history:
            return (
                cls.objects.get(**{cls.get_id_field_name(): history.object_id}),
                True,
            )

        raise cls.DoesNotExist(f"{cls.__name__} with slug '{slug}' does not exist")

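The history fallback above is what makes permanent redirects from stale URLs possible. A minimal view sketch, assuming the classmethod excerpted here is exposed as Park.get_by_slug(slug) returning (instance, matched_via_history) — the method name and import path are stand-ins, not confirmed by this diff:

    from django.shortcuts import redirect, render

    from parks.models import Park  # assumed location of a SluggedModel subclass

    def park_detail(request, slug):
        # (instance, True) means the slug was resolved through SlugHistory.
        park, used_old_slug = Park.get_by_slug(slug)
        if used_old_slug:
            # Stale slug: permanently redirect to the canonical URL.
            return redirect(park.get_absolute_url(), permanent=True)
        return render(request, "parks/park_detail.html", {"park": park})
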
@@ -3,8 +3,8 @@ Selectors for core functionality including map services and analytics.
Following Django styleguide pattern for separating data access from business logic.
"""

from typing import Optional, Dict, Any, List, Union
from django.db.models import QuerySet, Q, F, Count, Avg
from typing import Optional, Dict, Any, List
from django.db.models import QuerySet, Q, Count
from django.contrib.gis.geos import Point, Polygon
from django.contrib.gis.measure import Distance
from django.utils import timezone
@@ -16,284 +16,307 @@ from rides.models import Ride


def unified_locations_for_map(
    *,
    *,
    bounds: Optional[Polygon] = None,
    location_types: Optional[List[str]] = None,
    filters: Optional[Dict[str, Any]] = None
    filters: Optional[Dict[str, Any]] = None,
) -> Dict[str, QuerySet]:
    """
    Get unified location data for map display across all location types.

    Args:
        bounds: Geographic boundary polygon
        location_types: List of location types to include ('park', 'ride')
        filters: Additional filter parameters

    Returns:
        Dictionary containing querysets for each location type
    """
    results = {}

    # Default to all location types if none specified
    if not location_types:
        location_types = ['park', 'ride']
        location_types = ["park", "ride"]

    # Parks
    if 'park' in location_types:
        park_queryset = Park.objects.select_related(
            'operator'
        ).prefetch_related(
            'location'
        ).annotate(
            ride_count_calculated=Count('rides')
    if "park" in location_types:
        park_queryset = (
            Park.objects.select_related("operator")
            .prefetch_related("location")
            .annotate(ride_count_calculated=Count("rides"))
        )

        if bounds:
            park_queryset = park_queryset.filter(
                location__coordinates__within=bounds
            )
            park_queryset = park_queryset.filter(location__coordinates__within=bounds)

        if filters:
            if 'status' in filters:
                park_queryset = park_queryset.filter(status=filters['status'])
            if 'operator' in filters:
                park_queryset = park_queryset.filter(operator=filters['operator'])

        results['parks'] = park_queryset.order_by('name')

            if "status" in filters:
                park_queryset = park_queryset.filter(status=filters["status"])
            if "operator" in filters:
                park_queryset = park_queryset.filter(operator=filters["operator"])

        results["parks"] = park_queryset.order_by("name")

    # Rides
    if 'ride' in location_types:
    if "ride" in location_types:
        ride_queryset = Ride.objects.select_related(
            'park',
            'manufacturer'
        ).prefetch_related(
            'park__location',
            'location'
        )
            "park", "manufacturer"
        ).prefetch_related("park__location", "location")

        if bounds:
            ride_queryset = ride_queryset.filter(
                Q(location__coordinates__within=bounds) |
                Q(park__location__coordinates__within=bounds)
                Q(location__coordinates__within=bounds)
                | Q(park__location__coordinates__within=bounds)
            )

        if filters:
            if 'category' in filters:
                ride_queryset = ride_queryset.filter(category=filters['category'])
            if 'manufacturer' in filters:
                ride_queryset = ride_queryset.filter(manufacturer=filters['manufacturer'])
            if 'park' in filters:
                ride_queryset = ride_queryset.filter(park=filters['park'])

        results['rides'] = ride_queryset.order_by('park__name', 'name')

            if "category" in filters:
                ride_queryset = ride_queryset.filter(category=filters["category"])
            if "manufacturer" in filters:
                ride_queryset = ride_queryset.filter(
                    manufacturer=filters["manufacturer"]
                )
            if "park" in filters:
                ride_queryset = ride_queryset.filter(park=filters["park"])

        results["rides"] = ride_queryset.order_by("park__name", "name")

    return results


def locations_near_point(
    *,
    point: Point,
    *,
    point: Point,
    distance_km: float = 50,
    location_types: Optional[List[str]] = None,
    limit: int = 20
    limit: int = 20,
) -> Dict[str, QuerySet]:
    """
    Get locations near a specific geographic point across all types.

    Args:
        point: Geographic point (longitude, latitude)
        distance_km: Maximum distance in kilometers
        location_types: List of location types to include
        limit: Maximum number of results per type

    Returns:
        Dictionary containing nearby locations by type
    """
    results = {}

    if not location_types:
        location_types = ['park', 'ride']
        location_types = ["park", "ride"]

    # Parks near point
    if 'park' in location_types:
        results['parks'] = Park.objects.filter(
            location__coordinates__distance_lte=(point, Distance(km=distance_km))
        ).select_related(
            'operator'
        ).prefetch_related(
            'location'
        ).distance(point).order_by('distance')[:limit]

    if "park" in location_types:
        results["parks"] = (
            Park.objects.filter(
                location__coordinates__distance_lte=(
                    point,
                    Distance(km=distance_km),
                )
            )
            .select_related("operator")
            .prefetch_related("location")
            .distance(point)
            .order_by("distance")[:limit]
        )

    # Rides near point
    if 'ride' in location_types:
        results['rides'] = Ride.objects.filter(
            Q(location__coordinates__distance_lte=(point, Distance(km=distance_km))) |
            Q(park__location__coordinates__distance_lte=(point, Distance(km=distance_km)))
        ).select_related(
            'park',
            'manufacturer'
        ).prefetch_related(
            'park__location'
        ).distance(point).order_by('distance')[:limit]

    if "ride" in location_types:
        results["rides"] = (
            Ride.objects.filter(
                Q(
                    location__coordinates__distance_lte=(
                        point,
                        Distance(km=distance_km),
                    )
                )
                | Q(
                    park__location__coordinates__distance_lte=(
                        point,
                        Distance(km=distance_km),
                    )
                )
            )
            .select_related("park", "manufacturer")
            .prefetch_related("park__location")
            .distance(point)
            .order_by("distance")[:limit]
        )

    return results


def search_all_locations(*, query: str, limit: int = 20) -> Dict[str, QuerySet]:
    """
    Search across all location types for a query string.

    Args:
        query: Search string
        limit: Maximum results per type

    Returns:
        Dictionary containing search results by type
    """
    results = {}

    # Search parks
    results['parks'] = Park.objects.filter(
        Q(name__icontains=query) |
        Q(description__icontains=query) |
        Q(location__city__icontains=query) |
        Q(location__region__icontains=query)
    ).select_related(
        'operator'
    ).prefetch_related(
        'location'
    ).order_by('name')[:limit]

    results["parks"] = (
        Park.objects.filter(
            Q(name__icontains=query)
            | Q(description__icontains=query)
            | Q(location__city__icontains=query)
            | Q(location__region__icontains=query)
        )
        .select_related("operator")
        .prefetch_related("location")
        .order_by("name")[:limit]
    )

    # Search rides
    results['rides'] = Ride.objects.filter(
        Q(name__icontains=query) |
        Q(description__icontains=query) |
        Q(park__name__icontains=query) |
        Q(manufacturer__name__icontains=query)
    ).select_related(
        'park',
        'manufacturer'
    ).prefetch_related(
        'park__location'
    ).order_by('park__name', 'name')[:limit]

    results["rides"] = (
        Ride.objects.filter(
            Q(name__icontains=query)
            | Q(description__icontains=query)
            | Q(park__name__icontains=query)
            | Q(manufacturer__name__icontains=query)
        )
        .select_related("park", "manufacturer")
        .prefetch_related("park__location")
        .order_by("park__name", "name")[:limit]
    )

    return results


def page_views_for_analytics(
    *,
    *,
    start_date: Optional[timezone.datetime] = None,
    end_date: Optional[timezone.datetime] = None,
    path_pattern: Optional[str] = None
    path_pattern: Optional[str] = None,
) -> QuerySet[PageView]:
    """
    Get page views for analytics with optional filtering.

    Args:
        start_date: Start date for filtering
        end_date: End date for filtering
        path_pattern: URL path pattern to filter by

    Returns:
        QuerySet of page views
    """
    queryset = PageView.objects.all()

    if start_date:
        queryset = queryset.filter(timestamp__gte=start_date)

    if end_date:
        queryset = queryset.filter(timestamp__lte=end_date)

    if path_pattern:
        queryset = queryset.filter(path__icontains=path_pattern)

    return queryset.order_by('-timestamp')
    return queryset.order_by("-timestamp")


def popular_pages_summary(*, days: int = 30) -> Dict[str, Any]:
    """
    Get summary of most popular pages in the last N days.

    Args:
        days: Number of days to analyze

    Returns:
        Dictionary containing popular pages statistics
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    # Most viewed pages
    popular_pages = PageView.objects.filter(
        timestamp__gte=cutoff_date
    ).values('path').annotate(
        view_count=Count('id')
    ).order_by('-view_count')[:10]

    popular_pages = (
        PageView.objects.filter(timestamp__gte=cutoff_date)
        .values("path")
        .annotate(view_count=Count("id"))
        .order_by("-view_count")[:10]
    )

    # Total page views
    total_views = PageView.objects.filter(
        timestamp__gte=cutoff_date
    ).count()
    total_views = PageView.objects.filter(timestamp__gte=cutoff_date).count()

    # Unique visitors (based on IP)
    unique_visitors = PageView.objects.filter(
        timestamp__gte=cutoff_date
    ).values('ip_address').distinct().count()

    unique_visitors = (
        PageView.objects.filter(timestamp__gte=cutoff_date)
        .values("ip_address")
        .distinct()
        .count()
    )

    return {
        'popular_pages': list(popular_pages),
        'total_views': total_views,
        'unique_visitors': unique_visitors,
        'period_days': days
        "popular_pages": list(popular_pages),
        "total_views": total_views,
        "unique_visitors": unique_visitors,
        "period_days": days,
    }


def geographic_distribution_summary() -> Dict[str, Any]:
    """
    Get geographic distribution statistics for all locations.

    Returns:
        Dictionary containing geographic statistics
    """
    # Parks by country
    parks_by_country = Park.objects.filter(
        location__country__isnull=False
    ).values('location__country').annotate(
        count=Count('id')
    ).order_by('-count')

    parks_by_country = (
        Park.objects.filter(location__country__isnull=False)
        .values("location__country")
        .annotate(count=Count("id"))
        .order_by("-count")
    )

    # Rides by country (through park location)
    rides_by_country = Ride.objects.filter(
        park__location__country__isnull=False
    ).values('park__location__country').annotate(
        count=Count('id')
    ).order_by('-count')

    rides_by_country = (
        Ride.objects.filter(park__location__country__isnull=False)
        .values("park__location__country")
        .annotate(count=Count("id"))
        .order_by("-count")
    )

    return {
        'parks_by_country': list(parks_by_country),
        'rides_by_country': list(rides_by_country)
        "parks_by_country": list(parks_by_country),
        "rides_by_country": list(rides_by_country),
    }


def system_health_metrics() -> Dict[str, Any]:
    """
    Get system health and activity metrics.

    Returns:
        Dictionary containing system health statistics
    """
    now = timezone.now()
    last_24h = now - timedelta(hours=24)
    last_7d = now - timedelta(days=7)

    return {
        'total_parks': Park.objects.count(),
        'operating_parks': Park.objects.filter(status='OPERATING').count(),
        'total_rides': Ride.objects.count(),
        'page_views_24h': PageView.objects.filter(timestamp__gte=last_24h).count(),
        'page_views_7d': PageView.objects.filter(timestamp__gte=last_7d).count(),
        'data_freshness': {
            'latest_park_update': Park.objects.order_by('-updated_at').first().updated_at if Park.objects.exists() else None,
            'latest_ride_update': Ride.objects.order_by('-updated_at').first().updated_at if Ride.objects.exists() else None,
        }
        "total_parks": Park.objects.count(),
        "operating_parks": Park.objects.filter(status="OPERATING").count(),
        "total_rides": Ride.objects.count(),
        "page_views_24h": PageView.objects.filter(timestamp__gte=last_24h).count(),
        "page_views_7d": PageView.objects.filter(timestamp__gte=last_7d).count(),
        "data_freshness": {
            "latest_park_update": (
                Park.objects.order_by("-updated_at").first().updated_at
                if Park.objects.exists()
                else None
            ),
            "latest_ride_update": (
                Ride.objects.order_by("-updated_at").first().updated_at
                if Ride.objects.exists()
                else None
            ),
        },
    }

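One caveat on the nearby-location selectors above: the chained .distance(point).order_by('distance') calls come from the old GeoQuerySet API, which Django removed in 2.0. On current GeoDjango the same result is expressed with an annotation; a sketch (the helper name parks_near is invented, the lookup and function names are standard GeoDjango, the Park import path is assumed):

    from django.contrib.gis.db.models.functions import Distance as DistanceFunc
    from django.contrib.gis.geos import Point
    from django.contrib.gis.measure import D

    from parks.models import Park  # path assumed

    def parks_near(point: Point, km: float = 50, limit: int = 20):
        # Annotate each park with its distance to `point`, filter by radius,
        # then order by the annotation instead of calling .distance()/.order_by.
        return (
            Park.objects.filter(
                location__coordinates__distance_lte=(point, D(km=km))
            )
            .annotate(distance=DistanceFunc("location__coordinates", point))
            .select_related("operator")
            .order_by("distance")[:limit]
        )
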
@@ -11,17 +11,17 @@ from .data_structures import (
    GeoBounds,
    MapFilters,
    MapResponse,
    ClusterData
    ClusterData,
)

__all__ = [
    'UnifiedMapService',
    'ClusteringService',
    'MapCacheService',
    'UnifiedLocation',
    'LocationType',
    'GeoBounds',
    'MapFilters',
    'MapResponse',
    'ClusterData'
]
    "UnifiedMapService",
    "ClusteringService",
    "MapCacheService",
    "UnifiedLocation",
    "LocationType",
    "GeoBounds",
    "MapFilters",
    "MapResponse",
    "ClusterData",
]

@@ -3,21 +3,22 @@ Clustering service for map locations to improve performance and user experience.
"""

import math
from typing import List, Tuple, Dict, Any, Optional, Set
from typing import List, Tuple, Dict, Any, Optional
from dataclasses import dataclass
from collections import defaultdict

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    LocationType
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    LocationType,
)


@dataclass
class ClusterPoint:
    """Internal representation of a point for clustering."""

    location: UnifiedLocation
    x: float  # Projected x coordinate
    y: float  # Projected y coordinate
@@ -28,48 +29,50 @@ class ClusteringService:
    Handles location clustering for map display using a simple grid-based approach
    with zoom-level dependent clustering radius.
    """

    # Clustering configuration
    DEFAULT_RADIUS = 40  # pixels
    MIN_POINTS_TO_CLUSTER = 2
    MAX_ZOOM_FOR_CLUSTERING = 15
    MIN_ZOOM_FOR_CLUSTERING = 3

    # Zoom level configurations
    ZOOM_CONFIGS = {
        3: {'radius': 80, 'min_points': 5},  # World level
        4: {'radius': 70, 'min_points': 4},  # Continent level
        5: {'radius': 60, 'min_points': 3},  # Country level
        6: {'radius': 50, 'min_points': 3},  # Large region level
        7: {'radius': 45, 'min_points': 2},  # Region level
        8: {'radius': 40, 'min_points': 2},  # State level
        9: {'radius': 35, 'min_points': 2},  # Metro area level
        10: {'radius': 30, 'min_points': 2},  # City level
        11: {'radius': 25, 'min_points': 2},  # District level
        12: {'radius': 20, 'min_points': 2},  # Neighborhood level
        13: {'radius': 15, 'min_points': 2},  # Block level
        14: {'radius': 10, 'min_points': 2},  # Street level
        15: {'radius': 5, 'min_points': 2},  # Building level
        3: {"radius": 80, "min_points": 5},  # World level
        4: {"radius": 70, "min_points": 4},  # Continent level
        5: {"radius": 60, "min_points": 3},  # Country level
        6: {"radius": 50, "min_points": 3},  # Large region level
        7: {"radius": 45, "min_points": 2},  # Region level
        8: {"radius": 40, "min_points": 2},  # State level
        9: {"radius": 35, "min_points": 2},  # Metro area level
        10: {"radius": 30, "min_points": 2},  # City level
        11: {"radius": 25, "min_points": 2},  # District level
        12: {"radius": 20, "min_points": 2},  # Neighborhood level
        13: {"radius": 15, "min_points": 2},  # Block level
        14: {"radius": 10, "min_points": 2},  # Street level
        15: {"radius": 5, "min_points": 2},  # Building level
    }

    def __init__(self):
        self.cluster_id_counter = 0

    def should_cluster(self, zoom_level: int, point_count: int) -> bool:
        """Determine if clustering should be applied based on zoom level and point count."""
        if zoom_level > self.MAX_ZOOM_FOR_CLUSTERING:
            return False
        if zoom_level < self.MIN_ZOOM_FOR_CLUSTERING:
            return True

        config = self.ZOOM_CONFIGS.get(zoom_level, {'min_points': self.MIN_POINTS_TO_CLUSTER})
        return point_count >= config['min_points']

        config = self.ZOOM_CONFIGS.get(
            zoom_level, {"min_points": self.MIN_POINTS_TO_CLUSTER}
        )
        return point_count >= config["min_points"]

    def cluster_locations(
        self,
        locations: List[UnifiedLocation],
        self,
        locations: List[UnifiedLocation],
        zoom_level: int,
        bounds: Optional[GeoBounds] = None
        bounds: Optional[GeoBounds] = None,
    ) -> Tuple[List[UnifiedLocation], List[ClusterData]]:
        """
        Cluster locations based on zoom level and density.
@@ -77,42 +80,47 @@ class ClusteringService:
        """
        if not locations or not self.should_cluster(zoom_level, len(locations)):
            return locations, []

        # Convert locations to projected coordinates for clustering
        cluster_points = self._project_locations(locations, bounds)

        # Get clustering configuration for zoom level
        config = self.ZOOM_CONFIGS.get(zoom_level, {
            'radius': self.DEFAULT_RADIUS,
            'min_points': self.MIN_POINTS_TO_CLUSTER
        })

        config = self.ZOOM_CONFIGS.get(
            zoom_level,
            {
                "radius": self.DEFAULT_RADIUS,
                "min_points": self.MIN_POINTS_TO_CLUSTER,
            },
        )

        # Perform clustering
        clustered_groups = self._cluster_points(cluster_points, config['radius'], config['min_points'])

        clustered_groups = self._cluster_points(
            cluster_points, config["radius"], config["min_points"]
        )

        # Separate individual locations from clusters
        unclustered_locations = []
        clusters = []

        for group in clustered_groups:
            if len(group) < config['min_points']:
            if len(group) < config["min_points"]:
                # Add individual locations
                unclustered_locations.extend([cp.location for cp in group])
            else:
                # Create cluster
                cluster = self._create_cluster(group)
                clusters.append(cluster)

        return unclustered_locations, clusters

    def _project_locations(
        self,
        locations: List[UnifiedLocation],
        bounds: Optional[GeoBounds] = None
        self,
        locations: List[UnifiedLocation],
        bounds: Optional[GeoBounds] = None,
    ) -> List[ClusterPoint]:
        """Convert lat/lng coordinates to projected x/y for clustering calculations."""
        cluster_points = []

        # Use bounds or calculate from locations
        if not bounds:
            lats = [loc.latitude for loc in locations]
@@ -121,32 +129,27 @@ class ClusteringService:
                north=max(lats),
                south=min(lats),
                east=max(lngs),
                west=min(lngs)
                west=min(lngs),
            )

        # Simple equirectangular projection (good enough for clustering)
        center_lat = (bounds.north + bounds.south) / 2
        lat_scale = 111320  # meters per degree latitude
        lng_scale = 111320 * math.cos(math.radians(center_lat))  # meters per degree longitude

        lng_scale = 111320 * math.cos(
            math.radians(center_lat)
        )  # meters per degree longitude

        for location in locations:
            # Convert to meters relative to bounds center
            x = (location.longitude - (bounds.west + bounds.east) / 2) * lng_scale
            y = (location.latitude - (bounds.north + bounds.south) / 2) * lat_scale

            cluster_points.append(ClusterPoint(
                location=location,
                x=x,
                y=y
            ))

            cluster_points.append(ClusterPoint(location=location, x=x, y=y))

        return cluster_points

    def _cluster_points(
        self,
        points: List[ClusterPoint],
        radius_pixels: int,
        min_points: int
        self, points: List[ClusterPoint], radius_pixels: int, min_points: int
    ) -> List[List[ClusterPoint]]:
        """
        Cluster points using a simple distance-based approach.
@@ -155,134 +158,142 @@ class ClusteringService:
        # Convert pixel radius to meters (rough approximation)
        # At zoom level 10, 1 pixel ≈ 150 meters
        radius_meters = radius_pixels * 150

        clustered = [False] * len(points)
        clusters = []

        for i, point in enumerate(points):
            if clustered[i]:
                continue

            # Find all points within radius
            cluster_group = [point]
            clustered[i] = True

            for j, other_point in enumerate(points):
                if i == j or clustered[j]:
                    continue

                distance = self._calculate_distance(point, other_point)
                if distance <= radius_meters:
                    cluster_group.append(other_point)
                    clustered[j] = True

            clusters.append(cluster_group)

        return clusters

    def _calculate_distance(self, point1: ClusterPoint, point2: ClusterPoint) -> float:
        """Calculate Euclidean distance between two projected points in meters."""
        dx = point1.x - point2.x
        dy = point1.y - point2.y
        return math.sqrt(dx * dx + dy * dy)

    def _create_cluster(self, cluster_points: List[ClusterPoint]) -> ClusterData:
        """Create a ClusterData object from a group of points."""
        locations = [cp.location for cp in cluster_points]

        # Calculate cluster center (average position)
        avg_lat = sum(loc.latitude for loc in locations) / len(locations)
        avg_lng = sum(loc.longitude for loc in locations) / len(locations)

        # Calculate cluster bounds
        lats = [loc.latitude for loc in locations]
        lngs = [loc.longitude for loc in locations]
        cluster_bounds = GeoBounds(
            north=max(lats),
            south=min(lats),
            east=max(lngs),
            west=min(lngs)
            north=max(lats), south=min(lats), east=max(lngs), west=min(lngs)
        )

        # Collect location types in cluster
        types = set(loc.type for loc in locations)

        # Select representative location (highest weight)
        representative = self._select_representative_location(locations)

        # Generate cluster ID
        self.cluster_id_counter += 1
        cluster_id = f"cluster_{self.cluster_id_counter}"

        return ClusterData(
            id=cluster_id,
            coordinates=(avg_lat, avg_lng),
            count=len(locations),
            types=types,
            bounds=cluster_bounds,
            representative_location=representative
            representative_location=representative,
        )

    def _select_representative_location(self, locations: List[UnifiedLocation]) -> Optional[UnifiedLocation]:

    def _select_representative_location(
        self, locations: List[UnifiedLocation]
    ) -> Optional[UnifiedLocation]:
        """Select the most representative location for a cluster."""
        if not locations:
            return None

        # Prioritize by: 1) Parks over rides/companies, 2) Higher weight, 3) Better rating

        # Prioritize by: 1) Parks over rides/companies, 2) Higher weight, 3)
        # Better rating
        parks = [loc for loc in locations if loc.type == LocationType.PARK]
        if parks:
            return max(parks, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0
            ))

            return max(
                parks,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                ),
            )

        rides = [loc for loc in locations if loc.type == LocationType.RIDE]
        if rides:
            return max(rides, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0
            ))

            return max(
                rides,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                ),
            )

        companies = [loc for loc in locations if loc.type == LocationType.COMPANY]
        if companies:
            return max(companies, key=lambda x: x.cluster_weight)

        # Fall back to highest weight location
        return max(locations, key=lambda x: x.cluster_weight)

    def get_cluster_breakdown(self, clusters: List[ClusterData]) -> Dict[str, Any]:
        """Get statistics about clustering results."""
        if not clusters:
            return {
                'total_clusters': 0,
                'total_points_clustered': 0,
                'average_cluster_size': 0,
                'type_distribution': {},
                'category_distribution': {}
                "total_clusters": 0,
                "total_points_clustered": 0,
                "average_cluster_size": 0,
                "type_distribution": {},
                "category_distribution": {},
            }

        total_points = sum(cluster.count for cluster in clusters)
        type_counts = defaultdict(int)
        category_counts = defaultdict(int)

        for cluster in clusters:
            for location_type in cluster.types:
                type_counts[location_type.value] += cluster.count

            if cluster.representative_location:
                category_counts[cluster.representative_location.cluster_category] += 1

        return {
            'total_clusters': len(clusters),
            'total_points_clustered': total_points,
            'average_cluster_size': total_points / len(clusters),
            'largest_cluster_size': max(cluster.count for cluster in clusters),
            'smallest_cluster_size': min(cluster.count for cluster in clusters),
            'type_distribution': dict(type_counts),
            'category_distribution': dict(category_counts)
            "total_clusters": len(clusters),
            "total_points_clustered": total_points,
            "average_cluster_size": total_points / len(clusters),
            "largest_cluster_size": max(cluster.count for cluster in clusters),
            "smallest_cluster_size": min(cluster.count for cluster in clusters),
            "type_distribution": dict(type_counts),
            "category_distribution": dict(category_counts),
        }

    def expand_cluster(self, cluster: ClusterData, zoom_level: int) -> List[UnifiedLocation]:

    def expand_cluster(
        self, cluster: ClusterData, zoom_level: int
    ) -> List[UnifiedLocation]:
        """
        Expand a cluster to show individual locations (for drill-down functionality).
        This would typically require re-querying the database with the cluster bounds.
@@ -296,47 +307,59 @@ class SmartClusteringRules:
    """
    Advanced clustering rules that consider location types and importance.
    """

    @staticmethod
    def should_cluster_together(loc1: UnifiedLocation, loc2: UnifiedLocation) -> bool:
        """Determine if two locations should be clustered together."""

        # Same park rides should cluster together more readily
        if loc1.type == LocationType.RIDE and loc2.type == LocationType.RIDE:
            park1_id = loc1.metadata.get('park_id')
            park2_id = loc2.metadata.get('park_id')
            park1_id = loc1.metadata.get("park_id")
            park2_id = loc2.metadata.get("park_id")
            if park1_id and park2_id and park1_id == park2_id:
                return True

        # Major parks should resist clustering unless very close
        if (loc1.cluster_category == "major_park" or loc2.cluster_category == "major_park"):
        if (
            loc1.cluster_category == "major_park"
            or loc2.cluster_category == "major_park"
        ):
            return False

        # Similar types cluster more readily
        if loc1.type == loc2.type:
            return True

        # Different types can cluster but with higher threshold
        return False

    @staticmethod
    def calculate_cluster_priority(locations: List[UnifiedLocation]) -> UnifiedLocation:
    def calculate_cluster_priority(
        locations: List[UnifiedLocation],
    ) -> UnifiedLocation:
        """Select the representative location for a cluster based on priority rules."""
        # Prioritize by: 1) Parks over rides, 2) Higher weight, 3) Better rating
        # Prioritize by: 1) Parks over rides, 2) Higher weight, 3) Better
        # rating
        parks = [loc for loc in locations if loc.type == LocationType.PARK]
        if parks:
            return max(parks, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0,
                x.metadata.get('ride_count', 0) or 0
            ))

            return max(
                parks,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                    x.metadata.get("ride_count", 0) or 0,
                ),
            )

        rides = [loc for loc in locations if loc.type == LocationType.RIDE]
        if rides:
            return max(rides, key=lambda x: (
                x.cluster_weight,
                x.metadata.get('rating', 0) or 0
            ))

            return max(
                rides,
                key=lambda x: (
                    x.cluster_weight,
                    x.metadata.get("rating", 0) or 0,
                ),
            )

        # Fall back to highest weight
        return max(locations, key=lambda x: x.cluster_weight)
        return max(locations, key=lambda x: x.cluster_weight)

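The projection and threshold arithmetic above is simple to sanity-check by hand. A self-contained sketch reproducing the same equirectangular math (the 111320 and 150 constants are copied from the code; the sample coordinates are invented):

    import math

    LAT_SCALE = 111320  # meters per degree latitude, as in _project_locations

    def projected_distance_m(lat1, lng1, lat2, lng2, center_lat):
        # Same approximation the service uses: scale degrees to meters,
        # then take the Euclidean distance.
        lng_scale = LAT_SCALE * math.cos(math.radians(center_lat))
        dx = (lng1 - lng2) * lng_scale
        dy = (lat1 - lat2) * LAT_SCALE
        return math.hypot(dx, dy)

    # Two points ~0.01 degrees apart near the equator sit ~1.6 km apart, so at
    # zoom 10 (radius 30 px -> 30 * 150 = 4500 m) they would share a cluster.
    d = projected_distance_m(0.0, 0.0, 0.01, 0.01, center_lat=0.005)
    assert 1000 < d < 4500
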
@@ -5,11 +5,12 @@ Data structures for the unified map service.
from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, List, Optional, Set, Tuple, Any
from django.contrib.gis.geos import Polygon, Point
from django.contrib.gis.geos import Polygon


class LocationType(Enum):
    """Types of locations supported by the map service."""

    PARK = "park"
    RIDE = "ride"
    COMPANY = "company"
@@ -19,11 +20,12 @@ class LocationType(Enum):

@dataclass
class GeoBounds:
    """Geographic boundary box for spatial queries."""

    north: float
    south: float
    east: float
    west: float

    def __post_init__(self):
        """Validate bounds after initialization."""
        if self.north < self.south:
@@ -34,44 +36,44 @@ class GeoBounds:
            raise ValueError("Latitude bounds must be between -90 and 90")
        if not (-180 <= self.west <= 180 and -180 <= self.east <= 180):
            raise ValueError("Longitude bounds must be between -180 and 180")

    def to_polygon(self) -> Polygon:
        """Convert bounds to PostGIS Polygon for database queries."""
        return Polygon.from_bbox((self.west, self.south, self.east, self.north))

    def expand(self, factor: float = 1.1) -> 'GeoBounds':

    def expand(self, factor: float = 1.1) -> "GeoBounds":
        """Expand bounds by factor for buffer queries."""
        center_lat = (self.north + self.south) / 2
        center_lng = (self.east + self.west) / 2

        lat_range = (self.north - self.south) * factor / 2
        lng_range = (self.east - self.west) * factor / 2

        return GeoBounds(
            north=min(90, center_lat + lat_range),
            south=max(-90, center_lat - lat_range),
            east=min(180, center_lng + lng_range),
            west=max(-180, center_lng - lng_range)
            west=max(-180, center_lng - lng_range),
        )

    def contains_point(self, lat: float, lng: float) -> bool:
        """Check if a point is within these bounds."""
        return (self.south <= lat <= self.north and
                self.west <= lng <= self.east)

        return self.south <= lat <= self.north and self.west <= lng <= self.east

    def to_dict(self) -> Dict[str, float]:
        """Convert to dictionary for JSON serialization."""
        return {
            'north': self.north,
            'south': self.south,
            'east': self.east,
            'west': self.west
            "north": self.north,
            "south": self.south,
            "east": self.east,
            "west": self.west,
        }


@dataclass
class MapFilters:
    """Filtering options for map queries."""

    location_types: Optional[Set[LocationType]] = None
    park_status: Optional[Set[str]] = None  # OPERATING, CLOSED_TEMP, etc.
    ride_types: Optional[Set[str]] = None
@@ -82,26 +84,29 @@ class MapFilters:
    country: Optional[str] = None
    state: Optional[str] = None
    city: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for caching and serialization."""
        return {
            'location_types': [t.value for t in self.location_types] if self.location_types else None,
            'park_status': list(self.park_status) if self.park_status else None,
            'ride_types': list(self.ride_types) if self.ride_types else None,
            'company_roles': list(self.company_roles) if self.company_roles else None,
            'search_query': self.search_query,
            'min_rating': self.min_rating,
            'has_coordinates': self.has_coordinates,
            'country': self.country,
            'state': self.state,
            'city': self.city,
            "location_types": (
                [t.value for t in self.location_types] if self.location_types else None
            ),
            "park_status": (list(self.park_status) if self.park_status else None),
            "ride_types": list(self.ride_types) if self.ride_types else None,
            "company_roles": (list(self.company_roles) if self.company_roles else None),
            "search_query": self.search_query,
            "min_rating": self.min_rating,
            "has_coordinates": self.has_coordinates,
            "country": self.country,
            "state": self.state,
            "city": self.city,
        }


@dataclass
class UnifiedLocation:
    """Unified location interface for all location types."""

    id: str  # Composite: f"{type}_{id}"
    type: LocationType
    name: str
@@ -111,77 +116,84 @@ class UnifiedLocation:
    type_data: Dict[str, Any] = field(default_factory=dict)
    cluster_weight: int = 1
    cluster_category: str = "default"

    @property
    def latitude(self) -> float:
        """Get latitude from coordinates."""
        return self.coordinates[0]

    @property
    def longitude(self) -> float:
        """Get longitude from coordinates."""
        return self.coordinates[1]

    def to_geojson_feature(self) -> Dict[str, Any]:
        """Convert to GeoJSON feature for mapping libraries."""
        return {
            'type': 'Feature',
            'properties': {
                'id': self.id,
                'type': self.type.value,
                'name': self.name,
                'address': self.address,
                'metadata': self.metadata,
                'type_data': self.type_data,
                'cluster_weight': self.cluster_weight,
                'cluster_category': self.cluster_category
            "type": "Feature",
            "properties": {
                "id": self.id,
                "type": self.type.value,
                "name": self.name,
                "address": self.address,
                "metadata": self.metadata,
                "type_data": self.type_data,
                "cluster_weight": self.cluster_weight,
                "cluster_category": self.cluster_category,
            },
            "geometry": {
                "type": "Point",
                # GeoJSON uses lng, lat
                "coordinates": [self.longitude, self.latitude],
            },
            'geometry': {
                'type': 'Point',
                'coordinates': [self.longitude, self.latitude]  # GeoJSON uses lng, lat
            }
        }

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            'id': self.id,
            'type': self.type.value,
            'name': self.name,
            'coordinates': list(self.coordinates),
            'address': self.address,
            'metadata': self.metadata,
            'type_data': self.type_data,
            'cluster_weight': self.cluster_weight,
            'cluster_category': self.cluster_category
            "id": self.id,
            "type": self.type.value,
            "name": self.name,
            "coordinates": list(self.coordinates),
            "address": self.address,
            "metadata": self.metadata,
            "type_data": self.type_data,
            "cluster_weight": self.cluster_weight,
            "cluster_category": self.cluster_category,
        }


@dataclass
class ClusterData:
    """Represents a cluster of locations for map display."""

    id: str
    coordinates: Tuple[float, float]  # (lat, lng)
    count: int
    types: Set[LocationType]
    bounds: GeoBounds
    representative_location: Optional[UnifiedLocation] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            'id': self.id,
            'coordinates': list(self.coordinates),
            'count': self.count,
            'types': [t.value for t in self.types],
            'bounds': self.bounds.to_dict(),
            'representative': self.representative_location.to_dict() if self.representative_location else None
            "id": self.id,
            "coordinates": list(self.coordinates),
            "count": self.count,
            "types": [t.value for t in self.types],
            "bounds": self.bounds.to_dict(),
            "representative": (
                self.representative_location.to_dict()
                if self.representative_location
                else None
            ),
        }


@dataclass
class MapResponse:
    """Response structure for map API calls."""

    locations: List[UnifiedLocation] = field(default_factory=list)
    clusters: List[ClusterData] = field(default_factory=list)
    bounds: Optional[GeoBounds] = None
@@ -192,49 +204,50 @@ class MapResponse:
    cache_hit: bool = False
    query_time_ms: Optional[int] = None
    filters_applied: List[str] = field(default_factory=list)

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON responses."""
        return {
            'status': 'success',
            'data': {
                'locations': [loc.to_dict() for loc in self.locations],
                'clusters': [cluster.to_dict() for cluster in self.clusters],
                'bounds': self.bounds.to_dict() if self.bounds else None,
                'total_count': self.total_count,
                'filtered_count': self.filtered_count,
                'zoom_level': self.zoom_level,
                'clustered': self.clustered
            "status": "success",
            "data": {
                "locations": [loc.to_dict() for loc in self.locations],
                "clusters": [cluster.to_dict() for cluster in self.clusters],
                "bounds": self.bounds.to_dict() if self.bounds else None,
                "total_count": self.total_count,
                "filtered_count": self.filtered_count,
                "zoom_level": self.zoom_level,
                "clustered": self.clustered,
            },
            "meta": {
                "cache_hit": self.cache_hit,
                "query_time_ms": self.query_time_ms,
                "filters_applied": self.filters_applied,
                "pagination": {
                    "has_more": False,  # TODO: Implement pagination
                    "total_pages": 1,
                },
            },
            'meta': {
                'cache_hit': self.cache_hit,
                'query_time_ms': self.query_time_ms,
                'filters_applied': self.filters_applied,
                'pagination': {
                    'has_more': False,  # TODO: Implement pagination
                    'total_pages': 1
                }
            }
        }


@dataclass
class QueryPerformanceMetrics:
    """Performance metrics for query optimization."""

    query_time_ms: int
    db_query_count: int
    cache_hit: bool
    result_count: int
    bounds_used: bool
    clustering_used: bool

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for logging."""
        return {
            'query_time_ms': self.query_time_ms,
            'db_query_count': self.db_query_count,
            'cache_hit': self.cache_hit,
            'result_count': self.result_count,
            'bounds_used': self.bounds_used,
            'clustering_used': self.clustering_used
        }
            "query_time_ms": self.query_time_ms,
            "db_query_count": self.db_query_count,
            "cache_hit": self.cache_hit,
            "result_count": self.result_count,
            "bounds_used": self.bounds_used,
            "clustering_used": self.clustering_used,
        }

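Because GeoBounds both validates and serializes itself, it can be exercised directly. A short sketch using only the methods shown above (the import path is assumed; the coordinates are invented values around Orlando, FL):

    from core.services.data_structures import GeoBounds  # path assumed

    bounds = GeoBounds(north=28.6, south=28.3, east=-81.2, west=-81.6)

    assert bounds.contains_point(28.45, -81.4)      # inside the box
    assert not bounds.contains_point(29.0, -81.4)   # north of the box

    expanded = bounds.expand(1.2)   # 20% buffer, clamped to valid lat/lng
    polygon = bounds.to_polygon()   # ready for __within spatial lookups
    payload = bounds.to_dict()      # {"north": 28.6, "south": 28.3, ...}
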
@@ -2,10 +2,8 @@
|
||||
Enhanced caching service with multiple cache backends and strategies.
|
||||
"""
|
||||
|
||||
from typing import Optional, Any, Dict, List, Callable
|
||||
from typing import Optional, Any, Dict, Callable
|
||||
from django.core.cache import caches
|
||||
from django.core.cache.utils import make_template_fragment_key
|
||||
from django.conf import settings
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
@@ -14,6 +12,7 @@ from functools import wraps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Define GeoBounds for type hinting
|
||||
class GeoBounds:
|
||||
def __init__(self, min_lat: float, min_lng: float, max_lat: float, max_lng: float):
|
||||
@@ -25,93 +24,134 @@ class GeoBounds:
|
||||
|
||||
class EnhancedCacheService:
|
||||
"""Comprehensive caching service with multiple cache backends"""
|
||||
|
||||
|
||||
def __init__(self):
|
||||
self.default_cache = caches['default']
|
||||
self.default_cache = caches["default"]
|
||||
try:
|
||||
self.api_cache = caches['api']
|
||||
self.api_cache = caches["api"]
|
||||
except Exception:
|
||||
# Fallback to default cache if api cache not configured
|
||||
self.api_cache = self.default_cache
|
||||
|
||||
|
||||
# L1: Query-level caching
|
||||
def cache_queryset(self, cache_key: str, queryset_func: Callable, timeout: int = 3600, **kwargs) -> Any:
|
||||
def cache_queryset(
|
||||
self,
|
||||
cache_key: str,
|
||||
queryset_func: Callable,
|
||||
timeout: int = 3600,
|
||||
**kwargs,
|
||||
) -> Any:
|
||||
"""Cache expensive querysets"""
|
||||
cached_result = self.default_cache.get(cache_key)
|
||||
if cached_result is None:
|
||||
start_time = time.time()
|
||||
result = queryset_func(**kwargs)
|
||||
duration = time.time() - start_time
|
||||
|
||||
|
||||
# Log cache miss and function execution time
|
||||
logger.info(
|
||||
f"Cache miss for key '{cache_key}', executed in {duration:.3f}s",
|
||||
extra={'cache_key': cache_key, 'execution_time': duration}
|
||||
f"Cache miss for key '{cache_key}', executed in {
|
||||
duration:.3f}s",
|
||||
extra={"cache_key": cache_key, "execution_time": duration},
|
||||
)
|
||||
|
||||
|
||||
self.default_cache.set(cache_key, result, timeout)
|
||||
return result
|
||||
|
||||
|
||||
logger.debug(f"Cache hit for key '{cache_key}'")
|
||||
return cached_result
|
||||
|
||||
# L2: API response caching
|
||||
def cache_api_response(self, view_name: str, params: Dict, response_data: Any, timeout: int = 1800):
|
||||
|
||||
# L2: API response caching
|
||||
def cache_api_response(
|
||||
self,
|
||||
view_name: str,
|
||||
params: Dict,
|
||||
response_data: Any,
|
||||
timeout: int = 1800,
|
||||
):
|
||||
"""Cache API responses based on view and parameters"""
|
||||
cache_key = self._generate_api_cache_key(view_name, params)
|
||||
self.api_cache.set(cache_key, response_data, timeout)
|
||||
logger.debug(f"Cached API response for view '{view_name}'")
|
||||
|
||||
|
||||
def get_cached_api_response(self, view_name: str, params: Dict) -> Optional[Any]:
|
||||
"""Retrieve cached API response"""
|
||||
cache_key = self._generate_api_cache_key(view_name, params)
|
||||
result = self.api_cache.get(cache_key)
|
||||
|
||||
|
||||
if result:
|
||||
logger.debug(f"Cache hit for API view '{view_name}'")
|
||||
else:
|
||||
logger.debug(f"Cache miss for API view '{view_name}'")
|
||||
|
||||
|
||||
return result
|
||||
|
||||
|
||||
# L3: Geographic caching (building on existing MapCacheService)
|
||||
def cache_geographic_data(self, bounds: 'GeoBounds', data: Any, zoom_level: int, timeout: int = 1800):
|
||||
def cache_geographic_data(
|
||||
self,
|
||||
bounds: "GeoBounds",
|
||||
data: Any,
|
||||
zoom_level: int,
|
||||
timeout: int = 1800,
|
||||
):
|
||||
"""Cache geographic data with spatial keys"""
|
||||
# Generate spatial cache key based on bounds and zoom level
|
||||
cache_key = f"geo:{bounds.min_lat}:{bounds.min_lng}:{bounds.max_lat}:{bounds.max_lng}:z{zoom_level}"
|
||||
cache_key = f"geo:{
|
||||
bounds.min_lat}:{
|
||||
bounds.min_lng}:{
|
||||
bounds.max_lat}:{
|
||||
bounds.max_lng}:z{zoom_level}"
|
||||
self.default_cache.set(cache_key, data, timeout)
|
||||
logger.debug(f"Cached geographic data for bounds {bounds}")
|
||||
|
||||
def get_cached_geographic_data(self, bounds: 'GeoBounds', zoom_level: int) -> Optional[Any]:
|
||||
|
||||
def get_cached_geographic_data(
|
||||
self, bounds: "GeoBounds", zoom_level: int
|
||||
) -> Optional[Any]:
|
||||
"""Retrieve cached geographic data"""
|
||||
cache_key = f"geo:{bounds.min_lat}:{bounds.min_lng}:{bounds.max_lat}:{bounds.max_lng}:z{zoom_level}"
|
||||
cache_key = f"geo:{
|
||||
bounds.min_lat}:{
|
||||
bounds.min_lng}:{
|
||||
bounds.max_lat}:{
|
||||
bounds.max_lng}:z{zoom_level}"
|
||||
return self.default_cache.get(cache_key)
|
||||
|
||||
|
||||
    # Cache invalidation utilities
    def invalidate_pattern(self, pattern: str):
        """Invalidate cache keys matching a pattern (if backend supports it)"""
        try:
            # For Redis cache backends
            if hasattr(self.default_cache, 'delete_pattern'):
            if hasattr(self.default_cache, "delete_pattern"):
                deleted_count = self.default_cache.delete_pattern(pattern)
                logger.info(f"Invalidated {deleted_count} cache keys matching pattern '{pattern}'")
                logger.info(
                    f"Invalidated {deleted_count} cache keys matching pattern '{pattern}'"
                )
                return deleted_count
            else:
                logger.warning(f"Cache backend does not support pattern deletion for pattern '{pattern}'")
                logger.warning(
                    f"Cache backend does not support pattern deletion for pattern '{pattern}'"
                )
        except Exception as e:
            logger.error(f"Error invalidating cache pattern '{pattern}': {e}")

    def invalidate_model_cache(self, model_name: str, instance_id: Optional[int] = None):

    def invalidate_model_cache(
        self, model_name: str, instance_id: Optional[int] = None
    ):
        """Invalidate cache keys related to a specific model"""
        if instance_id:
            pattern = f"*{model_name}:{instance_id}*"
        else:
            pattern = f"*{model_name}*"

        self.invalidate_pattern(pattern)
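    # A minimal sketch; the model name is illustrative, and pattern deletion
    # only takes effect on backends that expose delete_pattern (e.g. django-redis):
    #
    #     service.invalidate_model_cache("park", instance_id=42)  # drops *park:42* keys
    #     service.invalidate_model_cache("park")                  # drops all *park* keys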
    # Cache warming utilities
    def warm_cache(self, cache_key: str, warm_func: Callable, timeout: int = 3600, **kwargs):
    def warm_cache(
        self,
        cache_key: str,
        warm_func: Callable,
        timeout: int = 3600,
        **kwargs,
    ):
        """Proactively warm cache with data"""
        try:
            data = warm_func(**kwargs)
@@ -119,7 +159,7 @@ class EnhancedCacheService:
            logger.info(f"Warmed cache for key '{cache_key}'")
        except Exception as e:
            logger.error(f"Error warming cache for key '{cache_key}': {e}")

    def _generate_api_cache_key(self, view_name: str, params: Dict) -> str:
        """Generate consistent cache keys for API responses"""
        # Sort params to ensure consistent key generation
@@ -129,124 +169,150 @@ class EnhancedCacheService:
# Cache decorators
def cache_api_response(timeout=1800, vary_on=None, key_prefix=''):
def cache_api_response(timeout=1800, vary_on=None, key_prefix=""):
    """Decorator for caching API responses"""

    def decorator(view_func):
        @wraps(view_func)
        def wrapper(self, request, *args, **kwargs):
            if request.method != 'GET':
            if request.method != "GET":
                return view_func(self, request, *args, **kwargs)

            # Generate cache key based on view, user, and parameters
            cache_key_parts = [
                key_prefix or view_func.__name__,
                str(request.user.id) if request.user.is_authenticated else 'anonymous',
                str(hash(frozenset(request.GET.items())))
                (
                    str(request.user.id)
                    if request.user.is_authenticated
                    else "anonymous"
                ),
                str(hash(frozenset(request.GET.items()))),
            ]

            if vary_on:
                for field in vary_on:
                    cache_key_parts.append(str(getattr(request, field, '')))

            cache_key = ':'.join(cache_key_parts)

                    cache_key_parts.append(str(getattr(request, field, "")))

            cache_key = ":".join(cache_key_parts)

            # Try to get from cache
            cache_service = EnhancedCacheService()
            cached_response = cache_service.api_cache.get(cache_key)
            if cached_response:
                logger.debug(f"Cache hit for API view {view_func.__name__}")
                return cached_response

            # Execute view and cache result
            response = view_func(self, request, *args, **kwargs)
            if hasattr(response, 'status_code') and response.status_code == 200:
            if hasattr(response, "status_code") and response.status_code == 200:
                cache_service.api_cache.set(cache_key, response, timeout)
                logger.debug(f"Cached API response for view {view_func.__name__}")

                logger.debug(
                    f"Cached API response for view {
                        view_func.__name__}"
                )

            return response

        return wrapper

    return decorator
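# A minimal sketch of applying the decorator to a class-based view method;
# the view class, payload, and vary_on field are illustrative:
from django.http import JsonResponse
from django.views import View


class ParkListView(View):
    @cache_api_response(timeout=600, vary_on=["LANGUAGE_CODE"], key_prefix="park-list")
    def get(self, request, *args, **kwargs):
        return JsonResponse({"results": []})  # placeholder payload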
def cache_queryset_result(cache_key_template: str, timeout: int = 3600):
    """Decorator for caching queryset results"""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # Generate cache key from template and arguments
            cache_key = cache_key_template.format(*args, **kwargs)

            cache_service = EnhancedCacheService()
            return cache_service.cache_queryset(cache_key, func, timeout, *args, **kwargs)
            return cache_service.cache_queryset(
                cache_key, func, timeout, *args, **kwargs
            )

        return wrapper

    return decorator
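# A minimal sketch; the key template is filled from keyword arguments, so the
# decorated function is best called with keywords only (the function and model
# filter below are illustrative):
@cache_queryset_result("top_parks:{country}", timeout=900)
def top_parks(country):
    return list(Park.objects.filter(location__country=country)[:10])


top_parks(country="US")  # first call runs the query, later calls hit the cache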
# Context manager for cache warming
class CacheWarmer:
    """Context manager for batch cache warming operations"""

    def __init__(self):
        self.cache_service = EnhancedCacheService()
        self.warm_operations = []

    def add(self, cache_key: str, warm_func: Callable, timeout: int = 3600, **kwargs):

    def add(
        self,
        cache_key: str,
        warm_func: Callable,
        timeout: int = 3600,
        **kwargs,
    ):
        """Add a cache warming operation to the batch"""
        self.warm_operations.append({
            'cache_key': cache_key,
            'warm_func': warm_func,
            'timeout': timeout,
            'kwargs': kwargs
        })

        self.warm_operations.append(
            {
                "cache_key": cache_key,
                "warm_func": warm_func,
                "timeout": timeout,
                "kwargs": kwargs,
            }
        )

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Execute all cache warming operations"""
        logger.info(f"Warming {len(self.warm_operations)} cache entries")

        for operation in self.warm_operations:
            try:
                self.cache_service.warm_cache(**operation)
            except Exception as e:
                logger.error(f"Error warming cache for {operation['cache_key']}: {e}")
                logger.error(
                    f"Error warming cache for {
                        operation['cache_key']}: {e}"
                )
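# A minimal sketch of batch warming; keys and callables are illustrative, and
# the warm functions tolerate keyword passthrough since queued kwargs are
# forwarded when the batch runs on exit:
with CacheWarmer() as warmer:
    warmer.add("parks:all", lambda **_: list(Park.objects.all()), timeout=900)
    warmer.add("rides:all", lambda **_: list(Ride.objects.all()), timeout=900)
# leaving the block executes every queued warm_cache() call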
# Cache statistics and monitoring
class CacheMonitor:
    """Monitor cache performance and statistics"""

    def __init__(self):
        self.cache_service = EnhancedCacheService()

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache statistics if available"""
        stats = {}

        try:
            # Redis cache stats
            if hasattr(self.cache_service.default_cache, '_cache'):
            if hasattr(self.cache_service.default_cache, "_cache"):
                redis_client = self.cache_service.default_cache._cache.get_client()
                info = redis_client.info()
                stats['redis'] = {
                    'used_memory': info.get('used_memory_human'),
                    'connected_clients': info.get('connected_clients'),
                    'total_commands_processed': info.get('total_commands_processed'),
                    'keyspace_hits': info.get('keyspace_hits'),
                    'keyspace_misses': info.get('keyspace_misses'),
                stats["redis"] = {
                    "used_memory": info.get("used_memory_human"),
                    "connected_clients": info.get("connected_clients"),
                    "total_commands_processed": info.get("total_commands_processed"),
                    "keyspace_hits": info.get("keyspace_hits"),
                    "keyspace_misses": info.get("keyspace_misses"),
                }

                # Calculate hit rate
                hits = info.get('keyspace_hits', 0)
                misses = info.get('keyspace_misses', 0)
                hits = info.get("keyspace_hits", 0)
                misses = info.get("keyspace_misses", 0)
                if hits + misses > 0:
                    stats['redis']['hit_rate'] = hits / (hits + misses) * 100
                    stats["redis"]["hit_rate"] = hits / (hits + misses) * 100
        except Exception as e:
            logger.error(f"Error getting cache stats: {e}")

        return stats

    def log_cache_performance(self):
        """Log cache performance metrics"""
        stats = self.get_cache_stats()
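    # A minimal sketch; the hit_rate key only appears when Redis reported any
    # keyspace activity:
    #
    #     monitor = CacheMonitor()
    #     stats = monitor.get_cache_stats()
    #     print(stats.get("redis", {}).get("hit_rate"))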
@@ -2,29 +2,37 @@
Location adapters for converting between domain-specific models and UnifiedLocation.
"""

from typing import List, Optional, Dict, Any
from django.db import models
from typing import List, Optional
from django.db.models import QuerySet
from django.urls import reverse

from .data_structures import UnifiedLocation, LocationType, GeoBounds, MapFilters
from parks.models.location import ParkLocation
from rides.models.location import RideLocation
from parks.models.companies import CompanyHeadquarters
from .data_structures import (
    UnifiedLocation,
    LocationType,
    GeoBounds,
    MapFilters,
)
from parks.models import ParkLocation, CompanyHeadquarters
from rides.models import RideLocation
from location.models import Location


class BaseLocationAdapter:
    """Base adapter class for location conversions."""

    def to_unified_location(self, location_obj) -> Optional[UnifiedLocation]:
        """Convert model instance to UnifiedLocation."""
        raise NotImplementedError

    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for this location type."""
        raise NotImplementedError

    def bulk_convert(self, queryset: QuerySet) -> List[UnifiedLocation]:
        """Convert multiple location objects efficiently."""
        unified_locations = []
@@ -37,14 +45,16 @@ class BaseLocationAdapter:
class ParkLocationAdapter(BaseLocationAdapter):
    """Converts Park/ParkLocation to UnifiedLocation."""

    def to_unified_location(self, park_location: ParkLocation) -> Optional[UnifiedLocation]:

    def to_unified_location(
        self, park_location: ParkLocation
    ) -> Optional[UnifiedLocation]:
        """Convert ParkLocation to UnifiedLocation."""
        if not park_location.point:
            return None

        park = park_location.park

        return UnifiedLocation(
            id=f"park_{park.id}",
            type=LocationType.PARK,
@@ -52,41 +62,60 @@ class ParkLocationAdapter(BaseLocationAdapter):
            coordinates=(park_location.latitude, park_location.longitude),
            address=park_location.formatted_address,
            metadata={
                'status': getattr(park, 'status', 'UNKNOWN'),
                'rating': float(park.average_rating) if hasattr(park, 'average_rating') and park.average_rating else None,
                'ride_count': getattr(park, 'ride_count', 0),
                'coaster_count': getattr(park, 'coaster_count', 0),
                'operator': park.operator.name if hasattr(park, 'operator') and park.operator else None,
                'city': park_location.city,
                'state': park_location.state,
                'country': park_location.country,
                "status": getattr(park, "status", "UNKNOWN"),
                "rating": (
                    float(park.average_rating)
                    if hasattr(park, "average_rating") and park.average_rating
                    else None
                ),
                "ride_count": getattr(park, "ride_count", 0),
                "coaster_count": getattr(park, "coaster_count", 0),
                "operator": (
                    park.operator.name
                    if hasattr(park, "operator") and park.operator
                    else None
                ),
                "city": park_location.city,
                "state": park_location.state,
                "country": park_location.country,
            },
            type_data={
                'slug': park.slug,
                'opening_date': park.opening_date.isoformat() if hasattr(park, 'opening_date') and park.opening_date else None,
                'website': getattr(park, 'website', ''),
                'operating_season': getattr(park, 'operating_season', ''),
                'highway_exit': park_location.highway_exit,
                'parking_notes': park_location.parking_notes,
                'best_arrival_time': park_location.best_arrival_time.strftime('%H:%M') if park_location.best_arrival_time else None,
                'seasonal_notes': park_location.seasonal_notes,
                'url': self._get_park_url(park),
                "slug": park.slug,
                "opening_date": (
                    park.opening_date.isoformat()
                    if hasattr(park, "opening_date") and park.opening_date
                    else None
                ),
                "website": getattr(park, "website", ""),
                "operating_season": getattr(park, "operating_season", ""),
                "highway_exit": park_location.highway_exit,
                "parking_notes": park_location.parking_notes,
                "best_arrival_time": (
                    park_location.best_arrival_time.strftime("%H:%M")
                    if park_location.best_arrival_time
                    else None
                ),
                "seasonal_notes": park_location.seasonal_notes,
                "url": self._get_park_url(park),
            },
            cluster_weight=self._calculate_park_weight(park),
            cluster_category=self._get_park_category(park)
            cluster_category=self._get_park_category(park),
        )
    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for park locations."""
        queryset = ParkLocation.objects.select_related(
            'park', 'park__operator'
        ).filter(point__isnull=False)

        queryset = ParkLocation.objects.select_related("park", "park__operator").filter(
            point__isnull=False
        )

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(point__within=bounds.to_polygon())

        # Park-specific filters
        if filters:
            if filters.park_status:
@@ -99,170 +128,212 @@ class ParkLocationAdapter(BaseLocationAdapter):
                queryset = queryset.filter(state=filters.state)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by('park__name')

        return queryset.order_by("park__name")
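    # A minimal sketch, assuming GeoBounds.to_polygon() yields a GEOS polygon
    # and that MapFilters accepts these keywords (an assumption):
    #
    #     adapter = ParkLocationAdapter()
    #     qs = adapter.get_queryset(bounds=bounds, filters=MapFilters(park_status=["OPERATING"]))
    #     unified = adapter.bulk_convert(qs)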
    def _calculate_park_weight(self, park) -> int:
        """Calculate clustering weight based on park importance."""
        weight = 1
        if hasattr(park, 'ride_count') and park.ride_count and park.ride_count > 20:
        if hasattr(park, "ride_count") and park.ride_count and park.ride_count > 20:
            weight += 2
        if hasattr(park, 'coaster_count') and park.coaster_count and park.coaster_count > 5:
        if (
            hasattr(park, "coaster_count")
            and park.coaster_count
            and park.coaster_count > 5
        ):
            weight += 1
        if hasattr(park, 'average_rating') and park.average_rating and park.average_rating > 4.0:
        if (
            hasattr(park, "average_rating")
            and park.average_rating
            and park.average_rating > 4.0
        ):
            weight += 1
        return min(weight, 5)  # Cap at 5

    def _get_park_category(self, park) -> str:
        """Determine park category for clustering."""
        coaster_count = getattr(park, 'coaster_count', 0) or 0
        ride_count = getattr(park, 'ride_count', 0) or 0

        coaster_count = getattr(park, "coaster_count", 0) or 0
        ride_count = getattr(park, "ride_count", 0) or 0

        if coaster_count >= 10:
            return "major_park"
        elif ride_count >= 15:
            return "theme_park"
        else:
            return "small_park"

    def _get_park_url(self, park) -> str:
        """Get URL for park detail page."""
        try:
            return reverse('parks:detail', kwargs={'slug': park.slug})
        except:
            return reverse("parks:detail", kwargs={"slug": park.slug})
        except BaseException:
            return f"/parks/{park.slug}/"
class RideLocationAdapter(BaseLocationAdapter):
    """Converts Ride/RideLocation to UnifiedLocation."""

    def to_unified_location(self, ride_location: RideLocation) -> Optional[UnifiedLocation]:

    def to_unified_location(
        self, ride_location: RideLocation
    ) -> Optional[UnifiedLocation]:
        """Convert RideLocation to UnifiedLocation."""
        if not ride_location.point:
            return None

        ride = ride_location.ride

        return UnifiedLocation(
            id=f"ride_{ride.id}",
            type=LocationType.RIDE,
            name=ride.name,
            coordinates=(ride_location.latitude, ride_location.longitude),
            address=f"{ride_location.park_area}, {ride.park.name}" if ride_location.park_area else ride.park.name,
            address=(
                f"{ride_location.park_area}, {ride.park.name}"
                if ride_location.park_area
                else ride.park.name
            ),
            metadata={
                'park_id': ride.park.id,
                'park_name': ride.park.name,
                'park_area': ride_location.park_area,
                'ride_type': getattr(ride, 'ride_type', 'Unknown'),
                'status': getattr(ride, 'status', 'UNKNOWN'),
                'rating': float(ride.average_rating) if hasattr(ride, 'average_rating') and ride.average_rating else None,
                'manufacturer': getattr(ride, 'manufacturer', {}).get('name') if hasattr(ride, 'manufacturer') else None,
                "park_id": ride.park.id,
                "park_name": ride.park.name,
                "park_area": ride_location.park_area,
                "ride_type": getattr(ride, "ride_type", "Unknown"),
                "status": getattr(ride, "status", "UNKNOWN"),
                "rating": (
                    float(ride.average_rating)
                    if hasattr(ride, "average_rating") and ride.average_rating
                    else None
                ),
                "manufacturer": (
                    getattr(ride, "manufacturer", {}).get("name")
                    if hasattr(ride, "manufacturer")
                    else None
                ),
            },
            type_data={
                'slug': ride.slug,
                'opening_date': ride.opening_date.isoformat() if hasattr(ride, 'opening_date') and ride.opening_date else None,
                'height_requirement': getattr(ride, 'height_requirement', ''),
                'duration_minutes': getattr(ride, 'duration_minutes', None),
                'max_speed_mph': getattr(ride, 'max_speed_mph', None),
                'entrance_notes': ride_location.entrance_notes,
                'accessibility_notes': ride_location.accessibility_notes,
                'url': self._get_ride_url(ride),
                "slug": ride.slug,
                "opening_date": (
                    ride.opening_date.isoformat()
                    if hasattr(ride, "opening_date") and ride.opening_date
                    else None
                ),
                "height_requirement": getattr(ride, "height_requirement", ""),
                "duration_minutes": getattr(ride, "duration_minutes", None),
                "max_speed_mph": getattr(ride, "max_speed_mph", None),
                "entrance_notes": ride_location.entrance_notes,
                "accessibility_notes": ride_location.accessibility_notes,
                "url": self._get_ride_url(ride),
            },
            cluster_weight=self._calculate_ride_weight(ride),
            cluster_category=self._get_ride_category(ride)
            cluster_category=self._get_ride_category(ride),
        )
    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for ride locations."""
        queryset = RideLocation.objects.select_related(
            'ride', 'ride__park', 'ride__park__operator'
            "ride", "ride__park", "ride__park__operator"
        ).filter(point__isnull=False)

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(point__within=bounds.to_polygon())

        # Ride-specific filters
        if filters:
            if filters.ride_types:
                queryset = queryset.filter(ride__ride_type__in=filters.ride_types)
            if filters.search_query:
                queryset = queryset.filter(ride__name__icontains=filters.search_query)

        return queryset.order_by('ride__name')

        return queryset.order_by("ride__name")
    def _calculate_ride_weight(self, ride) -> int:
        """Calculate clustering weight based on ride importance."""
        weight = 1
        ride_type = getattr(ride, 'ride_type', '').lower()
        if 'coaster' in ride_type or 'roller' in ride_type:
        ride_type = getattr(ride, "ride_type", "").lower()
        if "coaster" in ride_type or "roller" in ride_type:
            weight += 1
        if hasattr(ride, 'average_rating') and ride.average_rating and ride.average_rating > 4.0:
        if (
            hasattr(ride, "average_rating")
            and ride.average_rating
            and ride.average_rating > 4.0
        ):
            weight += 1
        return min(weight, 3)  # Cap at 3 for rides

    def _get_ride_category(self, ride) -> str:
        """Determine ride category for clustering."""
        ride_type = getattr(ride, 'ride_type', '').lower()
        if 'coaster' in ride_type or 'roller' in ride_type:
        ride_type = getattr(ride, "ride_type", "").lower()
        if "coaster" in ride_type or "roller" in ride_type:
            return "coaster"
        elif 'water' in ride_type or 'splash' in ride_type:
        elif "water" in ride_type or "splash" in ride_type:
            return "water_ride"
        else:
            return "other_ride"

    def _get_ride_url(self, ride) -> str:
        """Get URL for ride detail page."""
        try:
            return reverse('rides:detail', kwargs={'slug': ride.slug})
        except:
            return reverse("rides:detail", kwargs={"slug": ride.slug})
        except BaseException:
            return f"/rides/{ride.slug}/"
class CompanyLocationAdapter(BaseLocationAdapter):
    """Converts Company/CompanyHeadquarters to UnifiedLocation."""

    def to_unified_location(self, company_headquarters: CompanyHeadquarters) -> Optional[UnifiedLocation]:

    def to_unified_location(
        self, company_headquarters: CompanyHeadquarters
    ) -> Optional[UnifiedLocation]:
        """Convert CompanyHeadquarters to UnifiedLocation."""
        # Note: CompanyHeadquarters doesn't have coordinates, so we need to geocode
        # For now, we'll skip companies without coordinates
        # TODO: Implement geocoding service integration
        return None

    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for company locations."""
        queryset = CompanyHeadquarters.objects.select_related('company')

        queryset = CompanyHeadquarters.objects.select_related("company")

        # Company-specific filters
        if filters:
            if filters.company_roles:
                queryset = queryset.filter(company__roles__overlap=filters.company_roles)
                queryset = queryset.filter(
                    company__roles__overlap=filters.company_roles
                )
            if filters.search_query:
                queryset = queryset.filter(company__name__icontains=filters.search_query)
                queryset = queryset.filter(
                    company__name__icontains=filters.search_query
                )
            if filters.country:
                queryset = queryset.filter(country=filters.country)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by('company__name')

        return queryset.order_by("company__name")
class GenericLocationAdapter(BaseLocationAdapter):
    """Converts generic Location model to UnifiedLocation."""

    def to_unified_location(self, location: Location) -> Optional[UnifiedLocation]:
        """Convert generic Location to UnifiedLocation."""
        if not location.point and not (location.latitude and location.longitude):
            return None

        # Use point coordinates if available, fall back to lat/lng fields
        if location.point:
            coordinates = (location.point.y, location.point.x)
        else:
            coordinates = (float(location.latitude), float(location.longitude))

        return UnifiedLocation(
            id=f"generic_{location.id}",
            type=LocationType.GENERIC,
@@ -270,41 +341,50 @@ class GenericLocationAdapter(BaseLocationAdapter):
            coordinates=coordinates,
            address=location.get_formatted_address(),
            metadata={
                'location_type': location.location_type,
                'content_type': location.content_type.model if location.content_type else None,
                'object_id': location.object_id,
                'city': location.city,
                'state': location.state,
                'country': location.country,
                "location_type": location.location_type,
                "content_type": (
                    location.content_type.model if location.content_type else None
                ),
                "object_id": location.object_id,
                "city": location.city,
                "state": location.state,
                "country": location.country,
            },
            type_data={
                'created_at': location.created_at.isoformat() if location.created_at else None,
                'updated_at': location.updated_at.isoformat() if location.updated_at else None,
                "created_at": (
                    location.created_at.isoformat() if location.created_at else None
                ),
                "updated_at": (
                    location.updated_at.isoformat() if location.updated_at else None
                ),
            },
            cluster_weight=1,
            cluster_category="generic"
            cluster_category="generic",
        )
    def get_queryset(self, bounds: Optional[GeoBounds] = None,
                     filters: Optional[MapFilters] = None) -> QuerySet:

    def get_queryset(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> QuerySet:
        """Get optimized queryset for generic locations."""
        queryset = Location.objects.select_related('content_type').filter(
            models.Q(point__isnull=False) |
            models.Q(latitude__isnull=False, longitude__isnull=False)
        queryset = Location.objects.select_related("content_type").filter(
            models.Q(point__isnull=False)
            | models.Q(latitude__isnull=False, longitude__isnull=False)
        )

        # Spatial filtering
        if bounds:
            queryset = queryset.filter(
                models.Q(point__within=bounds.to_polygon()) |
                models.Q(
                models.Q(point__within=bounds.to_polygon())
                | models.Q(
                    latitude__gte=bounds.south,
                    latitude__lte=bounds.north,
                    longitude__gte=bounds.west,
                    longitude__lte=bounds.east
                    longitude__lte=bounds.east,
                )
            )

        # Generic filters
        if filters:
            if filters.search_query:
@@ -313,8 +393,8 @@ class GenericLocationAdapter(BaseLocationAdapter):
                queryset = queryset.filter(country=filters.country)
            if filters.city:
                queryset = queryset.filter(city=filters.city)

        return queryset.order_by('name')

        return queryset.order_by("name")
class LocationAbstractionLayer:
@@ -322,59 +402,78 @@ class LocationAbstractionLayer:
    Abstraction layer handling different location model types.
    Implements the adapter pattern to provide unified access to all location types.
    """

    def __init__(self):
        self.adapters = {
            LocationType.PARK: ParkLocationAdapter(),
            LocationType.RIDE: RideLocationAdapter(),
            LocationType.COMPANY: CompanyLocationAdapter(),
            LocationType.GENERIC: GenericLocationAdapter()
            LocationType.GENERIC: GenericLocationAdapter(),
        }

    def get_all_locations(self, bounds: Optional[GeoBounds] = None,
                          filters: Optional[MapFilters] = None) -> List[UnifiedLocation]:

    def get_all_locations(
        self,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> List[UnifiedLocation]:
        """Get locations from all sources within bounds."""
        all_locations = []

        # Determine which location types to include
        location_types = filters.location_types if filters and filters.location_types else set(LocationType)

        location_types = (
            filters.location_types
            if filters and filters.location_types
            else set(LocationType)
        )

        for location_type in location_types:
            adapter = self.adapters[location_type]
            queryset = adapter.get_queryset(bounds, filters)
            locations = adapter.bulk_convert(queryset)
            all_locations.extend(locations)

        return all_locations
    def get_locations_by_type(self, location_type: LocationType,
                              bounds: Optional[GeoBounds] = None,
                              filters: Optional[MapFilters] = None) -> List[UnifiedLocation]:

    def get_locations_by_type(
        self,
        location_type: LocationType,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None,
    ) -> List[UnifiedLocation]:
        """Get locations of specific type."""
        adapter = self.adapters[location_type]
        queryset = adapter.get_queryset(bounds, filters)
        return adapter.bulk_convert(queryset)

    def get_location_by_id(self, location_type: LocationType, location_id: int) -> Optional[UnifiedLocation]:

    def get_location_by_id(
        self, location_type: LocationType, location_id: int
    ) -> Optional[UnifiedLocation]:
        """Get single location with full details."""
        adapter = self.adapters[location_type]

        try:
            if location_type == LocationType.PARK:
                obj = ParkLocation.objects.select_related('park', 'park__operator').get(park_id=location_id)
                obj = ParkLocation.objects.select_related("park", "park__operator").get(
                    park_id=location_id
                )
            elif location_type == LocationType.RIDE:
                obj = RideLocation.objects.select_related('ride', 'ride__park').get(ride_id=location_id)
                obj = RideLocation.objects.select_related("ride", "ride__park").get(
                    ride_id=location_id
                )
            elif location_type == LocationType.COMPANY:
                obj = CompanyHeadquarters.objects.select_related('company').get(company_id=location_id)
                obj = CompanyHeadquarters.objects.select_related("company").get(
                    company_id=location_id
                )
            elif location_type == LocationType.GENERIC:
                obj = Location.objects.select_related('content_type').get(id=location_id)
                obj = Location.objects.select_related("content_type").get(
                    id=location_id
                )
            else:
                return None

            return adapter.to_unified_location(obj)
        except Exception:
            return None
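# A minimal sketch of the abstraction layer; the bounds value and id are
# illustrative:
layer = LocationAbstractionLayer()
visible = layer.get_all_locations(bounds=bounds)
parks_only = layer.get_locations_by_type(LocationType.PARK, bounds=bounds)
one_park = layer.get_location_by_id(LocationType.PARK, 42)  # None if missing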
# Import models after defining adapters to avoid circular imports
from django.db import models
@@ -8,41 +8,36 @@ search capabilities.

from django.contrib.gis.geos import Point
from django.contrib.gis.measure import Distance
from django.db.models import Q, Case, When, F, Value, CharField
from django.db.models.functions import Coalesce
from typing import Optional, List, Dict, Any, Tuple, Set
from django.db.models import Q
from typing import Optional, List, Dict, Any, Set
from dataclasses import dataclass

from parks.models import Park
from parks.models import Park, Company, ParkLocation
from rides.models import Ride
from parks.models.companies import Company
from parks.models.location import ParkLocation
from rides.models.location import RideLocation
from parks.models.companies import CompanyHeadquarters


@dataclass
class LocationSearchFilters:
    """Filters for location-aware search queries."""

    # Text search
    search_query: Optional[str] = None

    # Location-based filters
    location_point: Optional[Point] = None
    radius_km: Optional[float] = None
    location_types: Optional[Set[str]] = None  # 'park', 'ride', 'company'

    # Geographic filters
    country: Optional[str] = None
    state: Optional[str] = None
    city: Optional[str] = None

    # Content-specific filters
    park_status: Optional[List[str]] = None
    ride_types: Optional[List[str]] = None
    company_roles: Optional[List[str]] = None

    # Result options
    include_distance: bool = True
    max_results: int = 100
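# A minimal sketch of building a proximity filter; the coordinates and query
# are illustrative (a GEOS Point takes x=longitude, y=latitude):
example_filters = LocationSearchFilters(
    search_query="cedar",
    location_point=Point(-81.5, 41.5, srid=4326),
    radius_km=50.0,
    location_types={"park"},
)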
@@ -51,14 +46,14 @@ class LocationSearchFilters:
@dataclass
class LocationSearchResult:
    """Single search result with location data."""

    # Core data
    content_type: str  # 'park', 'ride', 'company'
    object_id: int
    name: str
    description: Optional[str] = None
    url: Optional[str] = None

    # Location data
    latitude: Optional[float] = None
    longitude: Optional[float] = None
@@ -66,114 +61,122 @@ class LocationSearchResult:
    city: Optional[str] = None
    state: Optional[str] = None
    country: Optional[str] = None

    # Distance data (if proximity search)
    distance_km: Optional[float] = None

    # Additional metadata
    status: Optional[str] = None
    tags: Optional[List[str]] = None
    rating: Optional[float] = None

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary for JSON serialization."""
        return {
            'content_type': self.content_type,
            'object_id': self.object_id,
            'name': self.name,
            'description': self.description,
            'url': self.url,
            'location': {
                'latitude': self.latitude,
                'longitude': self.longitude,
                'address': self.address,
                'city': self.city,
                'state': self.state,
                'country': self.country,
            "content_type": self.content_type,
            "object_id": self.object_id,
            "name": self.name,
            "description": self.description,
            "url": self.url,
            "location": {
                "latitude": self.latitude,
                "longitude": self.longitude,
                "address": self.address,
                "city": self.city,
                "state": self.state,
                "country": self.country,
            },
            'distance_km': self.distance_km,
            'status': self.status,
            'tags': self.tags or [],
            'rating': self.rating,
            "distance_km": self.distance_km,
            "status": self.status,
            "tags": self.tags or [],
            "rating": self.rating,
        }
class LocationSearchService:
    """Service for performing location-aware searches across ThrillWiki content."""

    def search(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        """
        Perform a comprehensive location-aware search.

        Args:
            filters: Search filters and options

        Returns:
            List of search results with location data
        """
        results = []

        # Search each content type based on filters
        if not filters.location_types or 'park' in filters.location_types:
        if not filters.location_types or "park" in filters.location_types:
            results.extend(self._search_parks(filters))

        if not filters.location_types or 'ride' in filters.location_types:

        if not filters.location_types or "ride" in filters.location_types:
            results.extend(self._search_rides(filters))

        if not filters.location_types or 'company' in filters.location_types:

        if not filters.location_types or "company" in filters.location_types:
            results.extend(self._search_companies(filters))

        # Sort by distance if proximity search, otherwise by relevance
        if filters.location_point and filters.include_distance:
            results.sort(key=lambda x: x.distance_km or float('inf'))
            results.sort(key=lambda x: x.distance_km or float("inf"))
        else:
            results.sort(key=lambda x: x.name.lower())

        # Apply max results limit
        return results[:filters.max_results]

    def _search_parks(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:
        return results[: filters.max_results]
    def _search_parks(
        self, filters: LocationSearchFilters
    ) -> List[LocationSearchResult]:
        """Search parks with location data."""
        queryset = Park.objects.select_related('location', 'operator').all()

        queryset = Park.objects.select_related("location", "operator").all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, 'location__point')

        queryset = self._apply_location_filters(queryset, filters, "location__point")

        # Apply text search
        if filters.search_query:
            query = Q(name__icontains=filters.search_query) | \
                    Q(description__icontains=filters.search_query) | \
                    Q(location__city__icontains=filters.search_query) | \
                    Q(location__state__icontains=filters.search_query) | \
                    Q(location__country__icontains=filters.search_query)
            query = (
                Q(name__icontains=filters.search_query)
                | Q(description__icontains=filters.search_query)
                | Q(location__city__icontains=filters.search_query)
                | Q(location__state__icontains=filters.search_query)
                | Q(location__country__icontains=filters.search_query)
            )
            queryset = queryset.filter(query)

        # Apply park-specific filters
        if filters.park_status:
            queryset = queryset.filter(status__in=filters.park_status)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance('location__point', filters.location_point)
            ).order_by('distance')

                distance=Distance("location__point", filters.location_point)
            ).order_by("distance")

        # Convert to search results
        results = []
        for park in queryset:
            result = LocationSearchResult(
                content_type='park',
                content_type="park",
                object_id=park.id,
                name=park.name,
                description=park.description,
                url=park.get_absolute_url() if hasattr(park, 'get_absolute_url') else None,
                url=(
                    park.get_absolute_url()
                    if hasattr(park, "get_absolute_url")
                    else None
                ),
                status=park.get_status_display(),
                rating=float(park.average_rating) if park.average_rating else None,
                tags=['park', park.status.lower()]
                rating=(float(park.average_rating) if park.average_rating else None),
                tags=["park", park.status.lower()],
            )

            # Add location data
            if hasattr(park, 'location') and park.location:
            if hasattr(park, "location") and park.location:
                location = park.location
                result.latitude = location.latitude
                result.longitude = location.longitude
@@ -181,67 +184,90 @@ class LocationSearchService:
                result.city = location.city
                result.state = location.state
                result.country = location.country

            # Add distance if proximity search
            if filters.location_point and filters.include_distance and hasattr(park, 'distance'):
            if (
                filters.location_point
                and filters.include_distance
                and hasattr(park, "distance")
            ):
                result.distance_km = float(park.distance.km)

            results.append(result)

        return results
    def _search_rides(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:

    def _search_rides(
        self, filters: LocationSearchFilters
    ) -> List[LocationSearchResult]:
        """Search rides with location data."""
        queryset = Ride.objects.select_related('park', 'location').all()

        queryset = Ride.objects.select_related("park", "location").all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, 'location__point')

        queryset = self._apply_location_filters(queryset, filters, "location__point")

        # Apply text search
        if filters.search_query:
            query = Q(name__icontains=filters.search_query) | \
                    Q(description__icontains=filters.search_query) | \
                    Q(park__name__icontains=filters.search_query) | \
                    Q(location__park_area__icontains=filters.search_query)
            query = (
                Q(name__icontains=filters.search_query)
                | Q(description__icontains=filters.search_query)
                | Q(park__name__icontains=filters.search_query)
                | Q(location__park_area__icontains=filters.search_query)
            )
            queryset = queryset.filter(query)

        # Apply ride-specific filters
        if filters.ride_types:
            queryset = queryset.filter(ride_type__in=filters.ride_types)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance('location__point', filters.location_point)
            ).order_by('distance')

                distance=Distance("location__point", filters.location_point)
            ).order_by("distance")

        # Convert to search results
        results = []
        for ride in queryset:
            result = LocationSearchResult(
                content_type='ride',
                content_type="ride",
                object_id=ride.id,
                name=ride.name,
                description=ride.description,
                url=ride.get_absolute_url() if hasattr(ride, 'get_absolute_url') else None,
                url=(
                    ride.get_absolute_url()
                    if hasattr(ride, "get_absolute_url")
                    else None
                ),
                status=ride.status,
                tags=['ride', ride.ride_type.lower() if ride.ride_type else 'attraction']
                tags=[
                    "ride",
                    ride.ride_type.lower() if ride.ride_type else "attraction",
                ],
            )

            # Add location data from ride location or park location
            location = None
            if hasattr(ride, 'location') and ride.location:
            if hasattr(ride, "location") and ride.location:
                location = ride.location
                result.latitude = location.latitude
                result.longitude = location.longitude
                result.address = f"{ride.park.name} - {location.park_area}" if location.park_area else ride.park.name

                result.address = (
                    f"{ride.park.name} - {location.park_area}"
                    if location.park_area
                    else ride.park.name
                )

                # Add distance if proximity search
                if filters.location_point and filters.include_distance and hasattr(ride, 'distance'):
                if (
                    filters.location_point
                    and filters.include_distance
                    and hasattr(ride, "distance")
                ):
                    result.distance_km = float(ride.distance.km)

            # Fall back to park location if no specific ride location
            elif ride.park and hasattr(ride.park, 'location') and ride.park.location:
            elif ride.park and hasattr(ride.park, "location") and ride.park.location:
                park_location = ride.park.location
                result.latitude = park_location.latitude
                result.longitude = park_location.longitude
@@ -249,51 +275,61 @@ class LocationSearchService:
                result.city = park_location.city
                result.state = park_location.state
                result.country = park_location.country

            results.append(result)

        return results
    def _search_companies(self, filters: LocationSearchFilters) -> List[LocationSearchResult]:

    def _search_companies(
        self, filters: LocationSearchFilters
    ) -> List[LocationSearchResult]:
        """Search companies with headquarters location data."""
        queryset = Company.objects.select_related('headquarters').all()

        queryset = Company.objects.select_related("headquarters").all()

        # Apply location filters
        queryset = self._apply_location_filters(queryset, filters, 'headquarters__point')

        queryset = self._apply_location_filters(
            queryset, filters, "headquarters__point"
        )

        # Apply text search
        if filters.search_query:
            query = Q(name__icontains=filters.search_query) | \
                    Q(description__icontains=filters.search_query) | \
                    Q(headquarters__city__icontains=filters.search_query) | \
                    Q(headquarters__state_province__icontains=filters.search_query) | \
                    Q(headquarters__country__icontains=filters.search_query)
            query = (
                Q(name__icontains=filters.search_query)
                | Q(description__icontains=filters.search_query)
                | Q(headquarters__city__icontains=filters.search_query)
                | Q(headquarters__state_province__icontains=filters.search_query)
                | Q(headquarters__country__icontains=filters.search_query)
            )
            queryset = queryset.filter(query)

        # Apply company-specific filters
        if filters.company_roles:
            queryset = queryset.filter(roles__overlap=filters.company_roles)

        # Add distance annotation if proximity search
        if filters.location_point and filters.include_distance:
            queryset = queryset.annotate(
                distance=Distance('headquarters__point', filters.location_point)
            ).order_by('distance')

                distance=Distance("headquarters__point", filters.location_point)
            ).order_by("distance")

        # Convert to search results
        results = []
        for company in queryset:
            result = LocationSearchResult(
                content_type='company',
                content_type="company",
                object_id=company.id,
                name=company.name,
                description=company.description,
                url=company.get_absolute_url() if hasattr(company, 'get_absolute_url') else None,
                tags=['company'] + (company.roles or [])
                url=(
                    company.get_absolute_url()
                    if hasattr(company, "get_absolute_url")
                    else None
                ),
                tags=["company"] + (company.roles or []),
            )

            # Add location data
            if hasattr(company, 'headquarters') and company.headquarters:
            if hasattr(company, "headquarters") and company.headquarters:
                hq = company.headquarters
                result.latitude = hq.latitude
                result.longitude = hq.longitude
@@ -301,93 +337,129 @@ class LocationSearchService:
                result.city = hq.city
                result.state = hq.state_province
                result.country = hq.country

            # Add distance if proximity search
            if filters.location_point and filters.include_distance and hasattr(company, 'distance'):
            if (
                filters.location_point
                and filters.include_distance
                and hasattr(company, "distance")
            ):
                result.distance_km = float(company.distance.km)

            results.append(result)

        return results
    def _apply_location_filters(self, queryset, filters: LocationSearchFilters, point_field: str):

    def _apply_location_filters(
        self, queryset, filters: LocationSearchFilters, point_field: str
    ):
        """Apply common location filters to a queryset."""

        # Proximity filter
        if filters.location_point and filters.radius_km:
            distance = Distance(km=filters.radius_km)
            queryset = queryset.filter(**{
                f'{point_field}__distance_lte': (filters.location_point, distance)
            })

            queryset = queryset.filter(
                **{
                    f"{point_field}__distance_lte": (
                        filters.location_point,
                        distance,
                    )
                }
            )

        # Geographic filters - adjust field names based on model
        if filters.country:
            if 'headquarters' in point_field:
                queryset = queryset.filter(headquarters__country__icontains=filters.country)
            if "headquarters" in point_field:
                queryset = queryset.filter(
                    headquarters__country__icontains=filters.country
                )
            else:
                location_field = point_field.split('__')[0]
                queryset = queryset.filter(**{f'{location_field}__country__icontains': filters.country})

                location_field = point_field.split("__")[0]
                queryset = queryset.filter(
                    **{f"{location_field}__country__icontains": filters.country}
                )

        if filters.state:
            if 'headquarters' in point_field:
                queryset = queryset.filter(headquarters__state_province__icontains=filters.state)
            if "headquarters" in point_field:
                queryset = queryset.filter(
                    headquarters__state_province__icontains=filters.state
                )
            else:
                location_field = point_field.split('__')[0]
                queryset = queryset.filter(**{f'{location_field}__state__icontains': filters.state})

                location_field = point_field.split("__")[0]
                queryset = queryset.filter(
                    **{f"{location_field}__state__icontains": filters.state}
                )

        if filters.city:
            location_field = point_field.split('__')[0]
            queryset = queryset.filter(**{f'{location_field}__city__icontains': filters.city})

            location_field = point_field.split("__")[0]
            queryset = queryset.filter(
                **{f"{location_field}__city__icontains": filters.city}
            )

        return queryset
    def suggest_locations(self, query: str, limit: int = 10) -> List[Dict[str, Any]]:
        """
        Get location suggestions for autocomplete.

        Args:
            query: Search query string
            limit: Maximum number of suggestions

        Returns:
            List of location suggestions
        """
        suggestions = []

        if len(query) < 2:
            return suggestions

        # Get park location suggestions
        park_locations = ParkLocation.objects.filter(
            Q(park__name__icontains=query) |
            Q(city__icontains=query) |
            Q(state__icontains=query)
        ).select_related('park')[:limit//3]

            Q(park__name__icontains=query)
            | Q(city__icontains=query)
            | Q(state__icontains=query)
        ).select_related("park")[: limit // 3]

        for location in park_locations:
            suggestions.append({
                'type': 'park',
                'name': location.park.name,
                'address': location.formatted_address,
                'coordinates': location.coordinates,
                'url': location.park.get_absolute_url() if hasattr(location.park, 'get_absolute_url') else None
            })

            suggestions.append(
                {
                    "type": "park",
                    "name": location.park.name,
                    "address": location.formatted_address,
                    "coordinates": location.coordinates,
                    "url": (
                        location.park.get_absolute_url()
                        if hasattr(location.park, "get_absolute_url")
                        else None
                    ),
                }
            )

        # Get city suggestions
        cities = ParkLocation.objects.filter(
            city__icontains=query
        ).values('city', 'state', 'country').distinct()[:limit//3]

        cities = (
            ParkLocation.objects.filter(city__icontains=query)
            .values("city", "state", "country")
            .distinct()[: limit // 3]
        )

        for city_data in cities:
            suggestions.append({
                'type': 'city',
                'name': f"{city_data['city']}, {city_data['state']}",
                'address': f"{city_data['city']}, {city_data['state']}, {city_data['country']}",
                'coordinates': None
            })

            suggestions.append(
                {
                    "type": "city",
                    "name": f"{
                        city_data['city']}, {
                        city_data['state']}",
                    "address": f"{
                        city_data['city']}, {
                        city_data['state']}, {
                        city_data['country']}",
                    "coordinates": None,
                }
            )

        return suggestions[:limit]

# Global instance
location_search_service = LocationSearchService()
location_search_service = LocationSearchService()
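# A minimal end-to-end sketch, reusing the example_filters built above;
# values are illustrative:
results = location_search_service.search(example_filters)
payload = [r.to_dict() for r in results]          # JSON-ready dictionaries
hints = location_search_service.suggest_locations("cedar", limit=5)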
@@ -5,20 +5,18 @@ Caching service for map data to improve performance and reduce database load.
import hashlib
import json
import time
from typing import Dict, List, Optional, Any, Union
from dataclasses import asdict
from typing import Dict, List, Optional, Any

from django.core.cache import cache
from django.conf import settings
from django.utils import timezone

from .data_structures import (
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    MapFilters,
    UnifiedLocation,
    ClusterData,
    GeoBounds,
    MapFilters,
    MapResponse,
    QueryPerformanceMetrics
    QueryPerformanceMetrics,
)
@@ -26,13 +24,13 @@ class MapCacheService:
    """
    Handles caching of map data with geographic partitioning and intelligent invalidation.
    """

    # Cache configuration
    DEFAULT_TTL = 3600  # 1 hour
    CLUSTER_TTL = 7200  # 2 hours (clusters change less frequently)
    LOCATION_DETAIL_TTL = 1800  # 30 minutes
    BOUNDS_CACHE_TTL = 1800  # 30 minutes

    # Cache key prefixes
    CACHE_PREFIX = "thrillwiki_map"
    LOCATIONS_PREFIX = f"{CACHE_PREFIX}:locations"
@@ -40,269 +38,304 @@ class MapCacheService:
    BOUNDS_PREFIX = f"{CACHE_PREFIX}:bounds"
    DETAIL_PREFIX = f"{CACHE_PREFIX}:detail"
    STATS_PREFIX = f"{CACHE_PREFIX}:stats"

    # Geographic partitioning settings
    GEOHASH_PRECISION = 6  # ~1.2km precision for cache partitioning

    def __init__(self):
        self.cache_stats = {
            'hits': 0,
            'misses': 0,
            'invalidations': 0,
            'geohash_partitions': 0
            "hits": 0,
            "misses": 0,
            "invalidations": 0,
            "geohash_partitions": 0,
        }
    def get_locations_cache_key(self, bounds: Optional[GeoBounds],
                                filters: Optional[MapFilters],
                                zoom_level: Optional[int] = None) -> str:

    def get_locations_cache_key(
        self,
        bounds: Optional[GeoBounds],
        filters: Optional[MapFilters],
        zoom_level: Optional[int] = None,
    ) -> str:
        """Generate cache key for location queries."""
        key_parts = [self.LOCATIONS_PREFIX]

        if bounds:
            # Use geohash for spatial locality
            geohash = self._bounds_to_geohash(bounds)
            key_parts.append(f"geo:{geohash}")

        if filters:
            # Create deterministic hash of filters
            filter_hash = self._hash_filters(filters)
            key_parts.append(f"filters:{filter_hash}")

        if zoom_level is not None:
            key_parts.append(f"zoom:{zoom_level}")

        return ":".join(key_parts)

    def get_clusters_cache_key(self, bounds: Optional[GeoBounds],
                               filters: Optional[MapFilters],
                               zoom_level: int) -> str:

    def get_clusters_cache_key(
        self,
        bounds: Optional[GeoBounds],
        filters: Optional[MapFilters],
        zoom_level: int,
    ) -> str:
        """Generate cache key for cluster queries."""
        key_parts = [self.CLUSTERS_PREFIX, f"zoom:{zoom_level}"]

        if bounds:
            geohash = self._bounds_to_geohash(bounds)
            key_parts.append(f"geo:{geohash}")

        if filters:
            filter_hash = self._hash_filters(filters)
            key_parts.append(f"filters:{filter_hash}")

        return ":".join(key_parts)

    def get_location_detail_cache_key(self, location_type: str, location_id: int) -> str:

    def get_location_detail_cache_key(
        self, location_type: str, location_id: int
    ) -> str:
        """Generate cache key for individual location details."""
        return f"{self.DETAIL_PREFIX}:{location_type}:{location_id}"
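    # A minimal sketch of key generation; _bounds_to_geohash and _hash_filters
    # are internal helpers, and the resulting key shape is illustrative:
    #
    #     svc = MapCacheService()
    #     key = svc.get_locations_cache_key(bounds, filters, zoom_level=12)
    #     # e.g. "thrillwiki_map:locations:geo:<geohash>:filters:<hash>:zoom:12"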
    def cache_locations(self, cache_key: str, locations: List[UnifiedLocation],
                        ttl: Optional[int] = None) -> None:

    def cache_locations(
        self,
        cache_key: str,
        locations: List[UnifiedLocation],
        ttl: Optional[int] = None,
    ) -> None:
        """Cache location data."""
        try:
            # Convert locations to serializable format
            cache_data = {
                'locations': [loc.to_dict() for loc in locations],
                'cached_at': timezone.now().isoformat(),
                'count': len(locations)
                "locations": [loc.to_dict() for loc in locations],
                "cached_at": timezone.now().isoformat(),
                "count": len(locations),
            }

            cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL)
        except Exception as e:
            # Log error but don't fail the request
            print(f"Cache write error for key {cache_key}: {e}")

    def cache_clusters(self, cache_key: str, clusters: List[ClusterData],
                       ttl: Optional[int] = None) -> None:

    def cache_clusters(
        self,
        cache_key: str,
        clusters: List[ClusterData],
        ttl: Optional[int] = None,
    ) -> None:
        """Cache cluster data."""
        try:
            cache_data = {
                'clusters': [cluster.to_dict() for cluster in clusters],
                'cached_at': timezone.now().isoformat(),
                'count': len(clusters)
                "clusters": [cluster.to_dict() for cluster in clusters],
                "cached_at": timezone.now().isoformat(),
                "count": len(clusters),
            }

            cache.set(cache_key, cache_data, ttl or self.CLUSTER_TTL)
        except Exception as e:
            print(f"Cache write error for clusters {cache_key}: {e}")

    def cache_map_response(self, cache_key: str, response: MapResponse,
                           ttl: Optional[int] = None) -> None:

    def cache_map_response(
        self, cache_key: str, response: MapResponse, ttl: Optional[int] = None
    ) -> None:
        """Cache complete map response."""
        try:
            cache_data = response.to_dict()
            cache_data['cached_at'] = timezone.now().isoformat()

            cache_data["cached_at"] = timezone.now().isoformat()

            cache.set(cache_key, cache_data, ttl or self.DEFAULT_TTL)
        except Exception as e:
            print(f"Cache write error for response {cache_key}: {e}")
    def get_cached_locations(self, cache_key: str) -> Optional[List[UnifiedLocation]]:
        """Retrieve cached location data."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats['misses'] += 1
                self.cache_stats["misses"] += 1
                return None

            self.cache_stats['hits'] += 1

            self.cache_stats["hits"] += 1

            # Convert back to UnifiedLocation objects
            locations = []
            for loc_data in cache_data['locations']:
            for loc_data in cache_data["locations"]:
                # Reconstruct UnifiedLocation from dictionary
                locations.append(self._dict_to_unified_location(loc_data))

            return locations

        except Exception as e:
            print(f"Cache read error for key {cache_key}: {e}")
            self.cache_stats['misses'] += 1
            self.cache_stats["misses"] += 1
            return None

    def get_cached_clusters(self, cache_key: str) -> Optional[List[ClusterData]]:
        """Retrieve cached cluster data."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats['misses'] += 1
                self.cache_stats["misses"] += 1
                return None

            self.cache_stats['hits'] += 1

            self.cache_stats["hits"] += 1

            # Convert back to ClusterData objects
            clusters = []
            for cluster_data in cache_data['clusters']:
            for cluster_data in cache_data["clusters"]:
                clusters.append(self._dict_to_cluster_data(cluster_data))

            return clusters

        except Exception as e:
            print(f"Cache read error for clusters {cache_key}: {e}")
            self.cache_stats['misses'] += 1
            self.cache_stats["misses"] += 1
            return None

    def get_cached_map_response(self, cache_key: str) -> Optional[MapResponse]:
        """Retrieve cached map response."""
        try:
            cache_data = cache.get(cache_key)
            if not cache_data:
                self.cache_stats['misses'] += 1
                self.cache_stats["misses"] += 1
                return None

            self.cache_stats['hits'] += 1

            self.cache_stats["hits"] += 1

            # Convert back to MapResponse object
            return self._dict_to_map_response(cache_data['data'])

            return self._dict_to_map_response(cache_data["data"])

        except Exception as e:
            print(f"Cache read error for response {cache_key}: {e}")
            self.cache_stats['misses'] += 1
            self.cache_stats["misses"] += 1
            return None
    def invalidate_location_cache(self, location_type: str, location_id: Optional[int] = None) -> None:

    def invalidate_location_cache(
        self, location_type: str, location_id: Optional[int] = None
    ) -> None:
        """Invalidate cache for specific location or all locations of a type."""
        try:
            if location_id:
                # Invalidate specific location detail
                detail_key = self.get_location_detail_cache_key(location_type, location_id)
                detail_key = self.get_location_detail_cache_key(
                    location_type, location_id
                )
                cache.delete(detail_key)

            # Invalidate related location and cluster caches
            # In a production system, you'd want more sophisticated cache tagging
            cache.delete_many([
                f"{self.LOCATIONS_PREFIX}:*",
                f"{self.CLUSTERS_PREFIX}:*"
            ])

            self.cache_stats['invalidations'] += 1

            # In a production system, you'd want more sophisticated cache
            # tagging
            cache.delete_many(
                [f"{self.LOCATIONS_PREFIX}:*", f"{self.CLUSTERS_PREFIX}:*"]
            )

            self.cache_stats["invalidations"] += 1

        except Exception as e:
            print(f"Cache invalidation error: {e}")

    def invalidate_bounds_cache(self, bounds: GeoBounds) -> None:
        """Invalidate cache for specific geographic bounds."""
        try:
            geohash = self._bounds_to_geohash(bounds)
            pattern = f"{self.LOCATIONS_PREFIX}:geo:{geohash}*"

            # In production, you'd use cache tagging or Redis SCAN
            # For now, we'll invalidate broader patterns
            cache.delete_many([pattern])

            self.cache_stats['invalidations'] += 1

            self.cache_stats["invalidations"] += 1

        except Exception as e:
            print(f"Bounds cache invalidation error: {e}")

    def clear_all_map_cache(self) -> None:
        """Clear all map-related cache data."""
        try:
            cache.delete_many([
                f"{self.LOCATIONS_PREFIX}:*",
                f"{self.CLUSTERS_PREFIX}:*",
                f"{self.BOUNDS_PREFIX}:*",
                f"{self.DETAIL_PREFIX}:*"
            ])

            self.cache_stats['invalidations'] += 1

            cache.delete_many(
                [
                    f"{self.LOCATIONS_PREFIX}:*",
                    f"{self.CLUSTERS_PREFIX}:*",
                    f"{self.BOUNDS_PREFIX}:*",
                    f"{self.DETAIL_PREFIX}:*",
                ]
            )

            self.cache_stats["invalidations"] += 1

        except Exception as e:
            print(f"Cache clear error: {e}")

    def get_cache_stats(self) -> Dict[str, Any]:
        """Get cache performance statistics."""
        total_requests = self.cache_stats['hits'] + self.cache_stats['misses']
        hit_rate = (self.cache_stats['hits'] / total_requests * 100) if total_requests > 0 else 0

        total_requests = self.cache_stats["hits"] + self.cache_stats["misses"]
        hit_rate = (
            (self.cache_stats["hits"] / total_requests * 100)
            if total_requests > 0
            else 0
        )

        return {
            'hits': self.cache_stats['hits'],
            'misses': self.cache_stats['misses'],
            'hit_rate_percent': round(hit_rate, 2),
            'invalidations': self.cache_stats['invalidations'],
            'geohash_partitions': self.cache_stats['geohash_partitions']
            "hits": self.cache_stats["hits"],
            "misses": self.cache_stats["misses"],
            "hit_rate_percent": round(hit_rate, 2),
            "invalidations": self.cache_stats["invalidations"],
            "geohash_partitions": self.cache_stats["geohash_partitions"],
        }

    def record_performance_metrics(self, metrics: QueryPerformanceMetrics) -> None:
        """Record query performance metrics for analysis."""
        try:
            stats_key = f"{self.STATS_PREFIX}:performance:{int(time.time() // 300)}"  # 5-minute buckets

            current_stats = cache.get(stats_key, {
                'query_count': 0,
                'total_time_ms': 0,
                'cache_hits': 0,
                'db_queries': 0
            })

            current_stats['query_count'] += 1
            current_stats['total_time_ms'] += metrics.query_time_ms
            current_stats['cache_hits'] += 1 if metrics.cache_hit else 0
            current_stats['db_queries'] += metrics.db_query_count

            # 5-minute buckets
            stats_key = f"{self.STATS_PREFIX}:performance:{int(time.time() // 300)}"

            current_stats = cache.get(
                stats_key,
                {
                    "query_count": 0,
                    "total_time_ms": 0,
                    "cache_hits": 0,
                    "db_queries": 0,
                },
            )

            current_stats["query_count"] += 1
            current_stats["total_time_ms"] += metrics.query_time_ms
            current_stats["cache_hits"] += 1 if metrics.cache_hit else 0
            current_stats["db_queries"] += metrics.db_query_count

            cache.set(stats_key, current_stats, 3600)  # Keep for 1 hour

        except Exception as e:
            print(f"Performance metrics recording error: {e}")
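# Sketch (not part of the commit): why `int(time.time() // 300)` buckets the
# metrics into five-minute windows: all timestamps inside one 300 s span
# floor-divide to the same integer, so they share a single stats key.
assert 1_699_999_800 // 300 == 1_700_000_099 // 300  # same bucket
assert 1_699_999_800 // 300 != 1_700_000_100 // 300  # next bucket starts
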
    def _bounds_to_geohash(self, bounds: GeoBounds) -> str:
        """Convert geographic bounds to geohash for cache partitioning."""
        # Use center point of bounds for geohash
        center_lat = (bounds.north + bounds.south) / 2
        center_lng = (bounds.east + bounds.west) / 2

        # Simple geohash implementation (in production, use a library)
        return self._encode_geohash(center_lat, center_lng, self.GEOHASH_PRECISION)

    def _encode_geohash(self, lat: float, lng: float, precision: int) -> str:
        """Simple geohash encoding implementation."""
        # This is a simplified implementation
        # In production, use the `geohash` library
        lat_range = [-90.0, 90.0]
        lng_range = [-180.0, 180.0]

        geohash = ""
        bits = 0
        bit_count = 0
        even_bit = True

        while len(geohash) < precision:
            if even_bit:
                # longitude
@@ -322,80 +355,84 @@ class MapCacheService:
            else:
                bits = bits << 1
                lat_range[1] = mid

            even_bit = not even_bit
            bit_count += 1

            if bit_count == 5:
                # Convert 5 bits to base32 character
                geohash += "0123456789bcdefghjkmnpqrstuvwxyz"[bits]
                bits = 0
                bit_count = 0

        return geohash

    def _hash_filters(self, filters: MapFilters) -> str:
        """Create deterministic hash of filters for cache keys."""
        filter_dict = filters.to_dict()
        # Sort to ensure consistent ordering
        filter_str = json.dumps(filter_dict, sort_keys=True)
        return hashlib.md5(filter_str.encode()).hexdigest()[:8]
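# Sketch (not part of the commit): the property _hash_filters relies on is that
# json.dumps(..., sort_keys=True) serializes logically equal dicts identically,
# so equivalent filters always land on the same 8-character key fragment.
import hashlib
import json

a = json.dumps({"country": "US", "min_rating": 4}, sort_keys=True)
b = json.dumps({"min_rating": 4, "country": "US"}, sort_keys=True)
assert hashlib.md5(a.encode()).hexdigest()[:8] == hashlib.md5(b.encode()).hexdigest()[:8]
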
    def _dict_to_unified_location(self, data: Dict[str, Any]) -> UnifiedLocation:
        """Convert dictionary back to UnifiedLocation object."""
        from .data_structures import LocationType

        return UnifiedLocation(
            id=data['id'],
            type=LocationType(data['type']),
            name=data['name'],
            coordinates=tuple(data['coordinates']),
            address=data.get('address'),
            metadata=data.get('metadata', {}),
            type_data=data.get('type_data', {}),
            cluster_weight=data.get('cluster_weight', 1),
            cluster_category=data.get('cluster_category', 'default')
            id=data["id"],
            type=LocationType(data["type"]),
            name=data["name"],
            coordinates=tuple(data["coordinates"]),
            address=data.get("address"),
            metadata=data.get("metadata", {}),
            type_data=data.get("type_data", {}),
            cluster_weight=data.get("cluster_weight", 1),
            cluster_category=data.get("cluster_category", "default"),
        )

    def _dict_to_cluster_data(self, data: Dict[str, Any]) -> ClusterData:
        """Convert dictionary back to ClusterData object."""
        from .data_structures import LocationType

        bounds = GeoBounds(**data['bounds'])
        types = {LocationType(t) for t in data['types']}

        bounds = GeoBounds(**data["bounds"])
        types = {LocationType(t) for t in data["types"]}

        representative = None
        if data.get('representative'):
            representative = self._dict_to_unified_location(data['representative'])

        if data.get("representative"):
            representative = self._dict_to_unified_location(data["representative"])

        return ClusterData(
            id=data['id'],
            coordinates=tuple(data['coordinates']),
            count=data['count'],
            id=data["id"],
            coordinates=tuple(data["coordinates"]),
            count=data["count"],
            types=types,
            bounds=bounds,
            representative_location=representative
            representative_location=representative,
        )

    def _dict_to_map_response(self, data: Dict[str, Any]) -> MapResponse:
        """Convert dictionary back to MapResponse object."""
        locations = [self._dict_to_unified_location(loc) for loc in data.get('locations', [])]
        clusters = [self._dict_to_cluster_data(cluster) for cluster in data.get('clusters', [])]

        locations = [
            self._dict_to_unified_location(loc) for loc in data.get("locations", [])
        ]
        clusters = [
            self._dict_to_cluster_data(cluster) for cluster in data.get("clusters", [])
        ]

        bounds = None
        if data.get('bounds'):
            bounds = GeoBounds(**data['bounds'])

        if data.get("bounds"):
            bounds = GeoBounds(**data["bounds"])

        return MapResponse(
            locations=locations,
            clusters=clusters,
            bounds=bounds,
            total_count=data.get('total_count', 0),
            filtered_count=data.get('filtered_count', 0),
            zoom_level=data.get('zoom_level'),
            clustered=data.get('clustered', False)
            total_count=data.get("total_count", 0),
            filtered_count=data.get("filtered_count", 0),
            zoom_level=data.get("zoom_level"),
            clustered=data.get("clustered", False),
        )


# Global cache service instance
map_cache = MapCacheService()
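# Usage sketch (assumed call pattern, not part of this commit): a read-through
# cache around a location query, composing the key helper and cachers above.
key = map_cache.get_locations_cache_key(bounds=None, filters=None, zoom_level=8)
locations = map_cache.get_cached_locations(key)
if locations is None:
    locations = []  # load from the database here on a cache miss
    map_cache.cache_locations(key, locations)
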
@@ -5,7 +5,6 @@ Unified Map Service - Main orchestrating service for all map functionality.
import time
from typing import List, Optional, Dict, Any, Set
from django.db import connection
from django.utils import timezone

from .data_structures import (
    UnifiedLocation,
@@ -14,7 +13,7 @@ from .data_structures import (
    MapFilters,
    MapResponse,
    LocationType,
    QueryPerformanceMetrics
    QueryPerformanceMetrics,
)
from .location_adapters import LocationAbstractionLayer
from .clustering_service import ClusteringService
@@ -26,17 +25,17 @@ class UnifiedMapService:
    Main service orchestrating map data retrieval, filtering, clustering, and caching.
    Provides a unified interface for all location types with performance optimization.
    """

    # Performance thresholds
    MAX_UNCLUSTERED_POINTS = 500
    MAX_CLUSTERED_POINTS = 2000
    DEFAULT_ZOOM_LEVEL = 10

    def __init__(self):
        self.location_layer = LocationAbstractionLayer()
        self.clustering_service = ClusteringService()
        self.cache_service = MapCacheService()

    def get_map_data(
        self,
        *,
@@ -44,57 +43,65 @@ class UnifiedMapService:
        filters: Optional[MapFilters] = None,
        zoom_level: int = DEFAULT_ZOOM_LEVEL,
        cluster: bool = True,
        use_cache: bool = True
        use_cache: bool = True,
    ) -> MapResponse:
        """
        Primary method for retrieving unified map data.

        Args:
            bounds: Geographic bounds to query within
            filters: Filtering criteria for locations
            zoom_level: Map zoom level for clustering decisions
            cluster: Whether to apply clustering
            use_cache: Whether to use cached data

        Returns:
            MapResponse with locations, clusters, and metadata
        """
        start_time = time.time()
        initial_query_count = len(connection.queries)
        cache_hit = False

        try:
            # Generate cache key
            cache_key = None
            if use_cache:
                cache_key = self._generate_cache_key(bounds, filters, zoom_level, cluster)

                cache_key = self._generate_cache_key(
                    bounds, filters, zoom_level, cluster
                )

                # Try to get from cache first
                cached_response = self.cache_service.get_cached_map_response(cache_key)
                if cached_response:
                    cached_response.cache_hit = True
                    cached_response.query_time_ms = int((time.time() - start_time) * 1000)
                    cached_response.query_time_ms = int(
                        (time.time() - start_time) * 1000
                    )
                    return cached_response

            # Get locations from database
            locations = self._get_locations_from_db(bounds, filters)

            # Apply smart limiting based on zoom level and density
            locations = self._apply_smart_limiting(locations, bounds, zoom_level)

            # Determine if clustering should be applied
            should_cluster = cluster and self.clustering_service.should_cluster(zoom_level, len(locations))

            should_cluster = cluster and self.clustering_service.should_cluster(
                zoom_level, len(locations)
            )

            # Apply clustering if needed
            clusters = []
            if should_cluster:
                locations, clusters = self.clustering_service.cluster_locations(
                    locations, zoom_level, bounds
                )

            # Calculate response bounds
            response_bounds = self._calculate_response_bounds(locations, clusters, bounds)

            response_bounds = self._calculate_response_bounds(
                locations, clusters, bounds
            )

            # Create response
            response = MapResponse(
                locations=locations,
@@ -106,22 +113,26 @@ class UnifiedMapService:
                clustered=should_cluster,
                cache_hit=cache_hit,
                query_time_ms=int((time.time() - start_time) * 1000),
                filters_applied=self._get_applied_filters_list(filters)
                filters_applied=self._get_applied_filters_list(filters),
            )

            # Cache the response
            if use_cache and cache_key:
                self.cache_service.cache_map_response(cache_key, response)

            # Record performance metrics
            self._record_performance_metrics(
                start_time, initial_query_count, cache_hit, len(locations) + len(clusters),
                bounds is not None, should_cluster
                start_time,
                initial_query_count,
                cache_hit,
                len(locations) + len(clusters),
                bounds is not None,
                should_cluster,
            )

            return response

        except Exception as e:

        except Exception:
            # Return error response
            return MapResponse(
                locations=[],
@@ -129,58 +140,67 @@ class UnifiedMapService:
                total_count=0,
                filtered_count=0,
                query_time_ms=int((time.time() - start_time) * 1000),
                cache_hit=False
                cache_hit=False,
            )
    def get_location_details(self, location_type: str, location_id: int) -> Optional[UnifiedLocation]:

    def get_location_details(
        self, location_type: str, location_id: int
    ) -> Optional[UnifiedLocation]:
        """
        Get detailed information for a specific location.

        Args:
            location_type: Type of location (park, ride, company, generic)
            location_id: ID of the location

        Returns:
            UnifiedLocation with full details or None if not found
        """
        try:
            # Check cache first
            cache_key = self.cache_service.get_location_detail_cache_key(location_type, location_id)
            cache_key = self.cache_service.get_location_detail_cache_key(
                location_type, location_id
            )
            cached_locations = self.cache_service.get_cached_locations(cache_key)
            if cached_locations:
                return cached_locations[0] if cached_locations else None

            # Get from database
            location_type_enum = LocationType(location_type.lower())
            location = self.location_layer.get_location_by_id(location_type_enum, location_id)

            location = self.location_layer.get_location_by_id(
                location_type_enum, location_id
            )

            # Cache the result
            if location:
                self.cache_service.cache_locations(cache_key, [location],
                                                   self.cache_service.LOCATION_DETAIL_TTL)

                self.cache_service.cache_locations(
                    cache_key,
                    [location],
                    self.cache_service.LOCATION_DETAIL_TTL,
                )

            return location

        except Exception as e:
            print(f"Error getting location details: {e}")
            return None

    def search_locations(
        self,
        query: str,
        bounds: Optional[GeoBounds] = None,
        location_types: Optional[Set[LocationType]] = None,
        limit: int = 50
        limit: int = 50,
    ) -> List[UnifiedLocation]:
        """
        Search locations with text query.

        Args:
            query: Search query string
            bounds: Optional geographic bounds to search within
            location_types: Optional set of location types to search
            limit: Maximum number of results

        Returns:
            List of matching UnifiedLocation objects
        """
@@ -189,19 +209,19 @@ class UnifiedMapService:
            filters = MapFilters(
                search_query=query,
                location_types=location_types or {LocationType.PARK, LocationType.RIDE},
                has_coordinates=True
                has_coordinates=True,
            )

            # Get locations
            locations = self.location_layer.get_all_locations(bounds, filters)

            # Apply limit
            return locations[:limit]

        except Exception as e:
            print(f"Error searching locations: {e}")
            return []

    def get_locations_by_bounds(
        self,
        north: float,
@@ -209,94 +229,97 @@ class UnifiedMapService:
        east: float,
        west: float,
        location_types: Optional[Set[LocationType]] = None,
        zoom_level: int = DEFAULT_ZOOM_LEVEL
        zoom_level: int = DEFAULT_ZOOM_LEVEL,
    ) -> MapResponse:
        """
        Get locations within specific geographic bounds.

        Args:
            north, south, east, west: Bounding box coordinates
            location_types: Optional filter for location types
            zoom_level: Map zoom level for optimization

        Returns:
            MapResponse with locations in bounds
        """
        try:
            bounds = GeoBounds(north=north, south=south, east=east, west=west)
            filters = MapFilters(location_types=location_types) if location_types else None

            return self.get_map_data(bounds=bounds, filters=filters, zoom_level=zoom_level)

        except ValueError as e:
            filters = (
                MapFilters(location_types=location_types) if location_types else None
            )

            return self.get_map_data(
                bounds=bounds, filters=filters, zoom_level=zoom_level
            )

        except ValueError:
            # Invalid bounds
            return MapResponse(
                locations=[],
                clusters=[],
                total_count=0,
                filtered_count=0
                locations=[], clusters=[], total_count=0, filtered_count=0
            )

    def get_clustered_locations(
        self,
        zoom_level: int,
        bounds: Optional[GeoBounds] = None,
        filters: Optional[MapFilters] = None
        filters: Optional[MapFilters] = None,
    ) -> MapResponse:
        """
        Get clustered location data for map display.

        Args:
            zoom_level: Map zoom level for clustering configuration
            bounds: Optional geographic bounds
            filters: Optional filtering criteria

        Returns:
            MapResponse with clustered data
        """
        return self.get_map_data(
            bounds=bounds,
            filters=filters,
            zoom_level=zoom_level,
            cluster=True
            bounds=bounds, filters=filters, zoom_level=zoom_level, cluster=True
        )

    def get_locations_by_type(
        self,
        location_type: LocationType,
        bounds: Optional[GeoBounds] = None,
        limit: Optional[int] = None
        limit: Optional[int] = None,
    ) -> List[UnifiedLocation]:
        """
        Get locations of a specific type.

        Args:
            location_type: Type of locations to retrieve
            bounds: Optional geographic bounds
            limit: Optional limit on results

        Returns:
            List of UnifiedLocation objects
        """
        try:
            filters = MapFilters(location_types={location_type})
            locations = self.location_layer.get_locations_by_type(location_type, bounds, filters)

            locations = self.location_layer.get_locations_by_type(
                location_type, bounds, filters
            )

            if limit:
                locations = locations[:limit]

            return locations

        except Exception as e:
            print(f"Error getting locations by type: {e}")
            return []
    def invalidate_cache(self, location_type: Optional[str] = None,
                         location_id: Optional[int] = None,
                         bounds: Optional[GeoBounds] = None) -> None:

    def invalidate_cache(
        self,
        location_type: Optional[str] = None,
        location_id: Optional[int] = None,
        bounds: Optional[GeoBounds] = None,
    ) -> None:
        """
        Invalidate cached map data.

        Args:
            location_type: Optional specific location type to invalidate
            location_id: Optional specific location ID to invalidate
@@ -308,121 +331,144 @@ class UnifiedMapService:
            self.cache_service.invalidate_bounds_cache(bounds)
        else:
            self.cache_service.clear_all_map_cache()

    def get_service_stats(self) -> Dict[str, Any]:
        """Get service performance and usage statistics."""
        cache_stats = self.cache_service.get_cache_stats()

        return {
            'cache_performance': cache_stats,
            'clustering_available': True,
            'supported_location_types': [t.value for t in LocationType],
            'max_unclustered_points': self.MAX_UNCLUSTERED_POINTS,
            'max_clustered_points': self.MAX_CLUSTERED_POINTS,
            'service_version': '1.0.0'
            "cache_performance": cache_stats,
            "clustering_available": True,
            "supported_location_types": [t.value for t in LocationType],
            "max_unclustered_points": self.MAX_UNCLUSTERED_POINTS,
            "max_clustered_points": self.MAX_CLUSTERED_POINTS,
            "service_version": "1.0.0",
        }

    def _get_locations_from_db(self, bounds: Optional[GeoBounds],
                               filters: Optional[MapFilters]) -> List[UnifiedLocation]:

    def _get_locations_from_db(
        self, bounds: Optional[GeoBounds], filters: Optional[MapFilters]
    ) -> List[UnifiedLocation]:
        """Get locations from database using the abstraction layer."""
        return self.location_layer.get_all_locations(bounds, filters)

    def _apply_smart_limiting(self, locations: List[UnifiedLocation],
                              bounds: Optional[GeoBounds], zoom_level: int) -> List[UnifiedLocation]:

    def _apply_smart_limiting(
        self,
        locations: List[UnifiedLocation],
        bounds: Optional[GeoBounds],
        zoom_level: int,
    ) -> List[UnifiedLocation]:
        """Apply intelligent limiting based on zoom level and density."""
        if zoom_level < 6:  # Very zoomed out - show only major parks
            major_parks = [
                loc for loc in locations
                if (loc.type == LocationType.PARK and
                    loc.cluster_category in ['major_park', 'theme_park'])
                loc
                for loc in locations
                if (
                    loc.type == LocationType.PARK
                    and loc.cluster_category in ["major_park", "theme_park"]
                )
            ]
            return major_parks[:200]
        elif zoom_level < 10:  # Regional level
            return locations[:1000]
        else:  # City level and closer
            return locations[:self.MAX_CLUSTERED_POINTS]

    def _calculate_response_bounds(self, locations: List[UnifiedLocation],
                                   clusters: List[ClusterData],
                                   request_bounds: Optional[GeoBounds]) -> Optional[GeoBounds]:
            return locations[: self.MAX_CLUSTERED_POINTS]

    def _calculate_response_bounds(
        self,
        locations: List[UnifiedLocation],
        clusters: List[ClusterData],
        request_bounds: Optional[GeoBounds],
    ) -> Optional[GeoBounds]:
        """Calculate the actual bounds of the response data."""
        if request_bounds:
            return request_bounds

        all_coords = []

        # Add location coordinates
        for loc in locations:
            all_coords.append((loc.latitude, loc.longitude))

        # Add cluster coordinates
        for cluster in clusters:
            all_coords.append(cluster.coordinates)

        if not all_coords:
            return None

        lats, lngs = zip(*all_coords)
        return GeoBounds(
            north=max(lats),
            south=min(lats),
            east=max(lngs),
            west=min(lngs)
            north=max(lats), south=min(lats), east=max(lngs), west=min(lngs)
        )

    def _get_applied_filters_list(self, filters: Optional[MapFilters]) -> List[str]:
        """Get list of applied filter types for metadata."""
        if not filters:
            return []

        applied = []
        if filters.location_types:
            applied.append('location_types')
            applied.append("location_types")
        if filters.search_query:
            applied.append('search_query')
            applied.append("search_query")
        if filters.park_status:
            applied.append('park_status')
            applied.append("park_status")
        if filters.ride_types:
            applied.append('ride_types')
            applied.append("ride_types")
        if filters.company_roles:
            applied.append('company_roles')
            applied.append("company_roles")
        if filters.min_rating:
            applied.append('min_rating')
            applied.append("min_rating")
        if filters.country:
            applied.append('country')
            applied.append("country")
        if filters.state:
            applied.append('state')
            applied.append("state")
        if filters.city:
            applied.append('city')

            applied.append("city")

        return applied

    def _generate_cache_key(self, bounds: Optional[GeoBounds], filters: Optional[MapFilters],
                            zoom_level: int, cluster: bool) -> str:

    def _generate_cache_key(
        self,
        bounds: Optional[GeoBounds],
        filters: Optional[MapFilters],
        zoom_level: int,
        cluster: bool,
    ) -> str:
        """Generate cache key for the request."""
        if cluster:
            return self.cache_service.get_clusters_cache_key(bounds, filters, zoom_level)
            return self.cache_service.get_clusters_cache_key(
                bounds, filters, zoom_level
            )
        else:
            return self.cache_service.get_locations_cache_key(bounds, filters, zoom_level)

    def _record_performance_metrics(self, start_time: float, initial_query_count: int,
                                    cache_hit: bool, result_count: int, bounds_used: bool,
                                    clustering_used: bool) -> None:
            return self.cache_service.get_locations_cache_key(
                bounds, filters, zoom_level
            )

    def _record_performance_metrics(
        self,
        start_time: float,
        initial_query_count: int,
        cache_hit: bool,
        result_count: int,
        bounds_used: bool,
        clustering_used: bool,
    ) -> None:
        """Record performance metrics for monitoring."""
        query_time_ms = int((time.time() - start_time) * 1000)
        db_query_count = len(connection.queries) - initial_query_count

        metrics = QueryPerformanceMetrics(
            query_time_ms=query_time_ms,
            db_query_count=db_query_count,
            cache_hit=cache_hit,
            result_count=result_count,
            bounds_used=bounds_used,
            clustering_used=clustering_used
            clustering_used=clustering_used,
        )

        self.cache_service.record_performance_metrics(metrics)


# Global service instance
unified_map_service = UnifiedMapService()
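# Usage sketch (assumed call pattern, not part of this commit): fetching
# clustered map data for a viewport through the module-level instance above.
bounds = GeoBounds(north=42.1, south=41.9, east=-71.0, west=-71.3)
response = unified_map_service.get_map_data(bounds=bounds, zoom_level=9)
print(response.total_count, response.cache_hit, response.query_time_ms)
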
@@ -11,7 +11,7 @@ from django.db import connection
from django.conf import settings
from django.utils import timezone

logger = logging.getLogger('performance')
logger = logging.getLogger("performance")


@contextmanager
@@ -19,63 +19,69 @@ def monitor_performance(operation_name: str, **tags):
    """Context manager for monitoring operation performance"""
    start_time = time.time()
    initial_queries = len(connection.queries)

    # Create performance context
    performance_context = {
        'operation': operation_name,
        'start_time': start_time,
        'timestamp': timezone.now().isoformat(),
        **tags
        "operation": operation_name,
        "start_time": start_time,
        "timestamp": timezone.now().isoformat(),
        **tags,
    }

    try:
        yield performance_context
    except Exception as e:
        performance_context['error'] = str(e)
        performance_context['status'] = 'error'
        performance_context["error"] = str(e)
        performance_context["status"] = "error"
        raise
    else:
        performance_context['status'] = 'success'
        performance_context["status"] = "success"
    finally:
        end_time = time.time()
        duration = end_time - start_time
        total_queries = len(connection.queries) - initial_queries

        # Update performance context with final metrics
        performance_context.update({
            'duration_seconds': duration,
            'duration_ms': round(duration * 1000, 2),
            'query_count': total_queries,
            'end_time': end_time,
        })

        performance_context.update(
            {
                "duration_seconds": duration,
                "duration_ms": round(duration * 1000, 2),
                "query_count": total_queries,
                "end_time": end_time,
            }
        )

        # Log performance data
        log_level = logging.WARNING if duration > 2.0 or total_queries > 10 else logging.INFO
        log_level = (
            logging.WARNING if duration > 2.0 or total_queries > 10 else logging.INFO
        )
        logger.log(
            log_level,
            f"Performance: {operation_name} completed in {duration:.3f}s with {total_queries} queries",
            extra=performance_context
            extra=performance_context,
        )

        # Log slow operations with additional detail
        if duration > 2.0:
            logger.warning(
                f"Slow operation detected: {operation_name} took {duration:.3f}s",
                extra={
                    'slow_operation': True,
                    'threshold_exceeded': 'duration',
                    **performance_context
                }
                    "slow_operation": True,
                    "threshold_exceeded": "duration",
                    **performance_context,
                },
            )

        if total_queries > 10:
            logger.warning(
                f"High query count: {operation_name} executed {total_queries} queries",
                extra={
                    'high_query_count': True,
                    'threshold_exceeded': 'query_count',
                    **performance_context
                }
                    "high_query_count": True,
                    "threshold_exceeded": "query_count",
                    **performance_context,
                },
            )
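# Usage sketch (assumed call pattern, not part of this commit): timing a block
# of work; duration and query count are logged when the block exits.
with monitor_performance("load_park_page", park_id=42) as ctx:
    pass  # run the work to be measured here; ctx carries the metadata tags
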
@@ -85,52 +91,56 @@ def track_queries(operation_name: str, warn_threshold: int = 10):
    if not settings.DEBUG:
        yield
        return

    initial_queries = len(connection.queries)
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        total_queries = len(connection.queries) - initial_queries
        execution_time = end_time - start_time

        query_details = []
        if hasattr(connection, 'queries') and total_queries > 0:
        if hasattr(connection, "queries") and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            query_details = [
                {
                    'sql': query['sql'][:200] + '...' if len(query['sql']) > 200 else query['sql'],
                    'time': float(query['time'])
                    "sql": (
                        query["sql"][:200] + "..."
                        if len(query["sql"]) > 200
                        else query["sql"]
                    ),
                    "time": float(query["time"]),
                }
                for query in recent_queries
            ]

        performance_data = {
            'operation': operation_name,
            'query_count': total_queries,
            'execution_time': execution_time,
            'queries': query_details if settings.DEBUG else []
            "operation": operation_name,
            "query_count": total_queries,
            "execution_time": execution_time,
            "queries": query_details if settings.DEBUG else [],
        }

        if total_queries > warn_threshold or execution_time > 1.0:
            logger.warning(
                f"Performance concern in {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
                extra=performance_data,
            )
        else:
            logger.debug(
                f"Query tracking for {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
                extra=performance_data,
            )
class PerformanceProfiler:
    """Advanced performance profiling with detailed metrics"""

    def __init__(self, name: str):
        self.name = name
        self.start_time = None
@@ -138,100 +148,110 @@ class PerformanceProfiler:
        self.checkpoints = []
        self.initial_queries = 0
        self.memory_usage = {}

    def start(self):
        """Start profiling"""
        self.start_time = time.time()
        self.initial_queries = len(connection.queries)

        # Track memory usage if psutil is available
        try:
            import psutil

            process = psutil.Process()
            self.memory_usage['start'] = process.memory_info().rss
            self.memory_usage["start"] = process.memory_info().rss
        except ImportError:
            pass

        logger.debug(f"Started profiling: {self.name}")

    def checkpoint(self, name: str):
        """Add a checkpoint"""
        if self.start_time is None:
            logger.warning(f"Checkpoint '{name}' called before profiling started")
            return

        current_time = time.time()
        elapsed = current_time - self.start_time
        queries_since_start = len(connection.queries) - self.initial_queries

        checkpoint = {
            'name': name,
            'timestamp': current_time,
            'elapsed_seconds': elapsed,
            'queries_since_start': queries_since_start,
            "name": name,
            "timestamp": current_time,
            "elapsed_seconds": elapsed,
            "queries_since_start": queries_since_start,
        }

        # Memory usage if available
        try:
            import psutil

            process = psutil.Process()
            checkpoint['memory_rss'] = process.memory_info().rss
            checkpoint["memory_rss"] = process.memory_info().rss
        except ImportError:
            pass

        self.checkpoints.append(checkpoint)
        logger.debug(f"Checkpoint '{name}' at {elapsed:.3f}s")

    def stop(self):
        """Stop profiling and log results"""
        if self.start_time is None:
            logger.warning("Profiling stopped before it was started")
            return

        self.end_time = time.time()
        total_duration = self.end_time - self.start_time
        total_queries = len(connection.queries) - self.initial_queries

        # Final memory usage
        try:
            import psutil

            process = psutil.Process()
            self.memory_usage['end'] = process.memory_info().rss
            self.memory_usage["end"] = process.memory_info().rss
        except ImportError:
            pass

        # Create detailed profiling report
        report = {
            'profiler_name': self.name,
            'total_duration': total_duration,
            'total_queries': total_queries,
            'checkpoints': self.checkpoints,
            'memory_usage': self.memory_usage,
            'queries_per_second': total_queries / total_duration if total_duration > 0 else 0,
            "profiler_name": self.name,
            "total_duration": total_duration,
            "total_queries": total_queries,
            "checkpoints": self.checkpoints,
            "memory_usage": self.memory_usage,
            "queries_per_second": (
                total_queries / total_duration if total_duration > 0 else 0
            ),
        }

        # Calculate checkpoint intervals
        if len(self.checkpoints) > 1:
            intervals = []
            for i in range(1, len(self.checkpoints)):
                prev = self.checkpoints[i-1]
                prev = self.checkpoints[i - 1]
                curr = self.checkpoints[i]
                intervals.append({
                    'from': prev['name'],
                    'to': curr['name'],
                    'duration': curr['elapsed_seconds'] - prev['elapsed_seconds'],
                    'queries': curr['queries_since_start'] - prev['queries_since_start'],
                })
            report['checkpoint_intervals'] = intervals

                intervals.append(
                    {
                        "from": prev["name"],
                        "to": curr["name"],
                        "duration": curr["elapsed_seconds"] - prev["elapsed_seconds"],
                        "queries": curr["queries_since_start"]
                        - prev["queries_since_start"],
                    }
                )
            report["checkpoint_intervals"] = intervals

        # Log the complete report
        log_level = logging.WARNING if total_duration > 1.0 else logging.INFO
        logger.log(
            log_level,
            f"Profiling complete: {self.name} took {total_duration:.3f}s with {total_queries} queries",
            extra=report
            extra=report,
        )

        return report


@@ -240,7 +260,7 @@ def profile_operation(name: str):
    """Context manager for detailed operation profiling"""
    profiler = PerformanceProfiler(name)
    profiler.start()

    try:
        yield profiler
    finally:
@@ -249,60 +269,72 @@ def profile_operation(name: str):
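# Usage sketch (assumed call pattern, not part of this commit): profiling a
# multi-step operation with a named checkpoint between phases.
with profile_operation("rebuild_map_cache") as profiler:
    pass  # phase 1: load locations
    profiler.checkpoint("locations_loaded")
    pass  # phase 2: recompute clusters
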
class DatabaseQueryAnalyzer:
    """Analyze database query patterns and performance"""

    @staticmethod
    def analyze_queries(queries: List[Dict]) -> Dict[str, Any]:
        """Analyze a list of queries for patterns and issues"""
        if not queries:
            return {}

        total_time = sum(float(q.get('time', 0)) for q in queries)

        total_time = sum(float(q.get("time", 0)) for q in queries)
        query_count = len(queries)

        # Group queries by type
        query_types = {}
        for query in queries:
            sql = query.get('sql', '').strip().upper()
            query_type = sql.split()[0] if sql else 'UNKNOWN'
            sql = query.get("sql", "").strip().upper()
            query_type = sql.split()[0] if sql else "UNKNOWN"
            query_types[query_type] = query_types.get(query_type, 0) + 1

        # Find slow queries (top 10% by time)
        sorted_queries = sorted(queries, key=lambda q: float(q.get('time', 0)), reverse=True)
        sorted_queries = sorted(
            queries, key=lambda q: float(q.get("time", 0)), reverse=True
        )
        slow_query_count = max(1, query_count // 10)
        slow_queries = sorted_queries[:slow_query_count]

        # Detect duplicate queries
        query_signatures = {}
        for query in queries:
            # Simplified signature - remove literals and normalize whitespace
            sql = query.get('sql', '')
            signature = ' '.join(sql.split())  # Normalize whitespace
            sql = query.get("sql", "")
            signature = " ".join(sql.split())  # Normalize whitespace
            query_signatures[signature] = query_signatures.get(signature, 0) + 1

        duplicates = {sig: count for sig, count in query_signatures.items() if count > 1}

        duplicates = {
            sig: count for sig, count in query_signatures.items() if count > 1
        }

        analysis = {
            'total_queries': query_count,
            'total_time': total_time,
            'average_time': total_time / query_count if query_count > 0 else 0,
            'query_types': query_types,
            'slow_queries': [
            "total_queries": query_count,
            "total_time": total_time,
            "average_time": total_time / query_count if query_count > 0 else 0,
            "query_types": query_types,
            "slow_queries": [
                {
                    'sql': q.get('sql', '')[:200] + '...' if len(q.get('sql', '')) > 200 else q.get('sql', ''),
                    'time': float(q.get('time', 0))
                    "sql": (
                        q.get("sql", "")[:200] + "..."
                        if len(q.get("sql", "")) > 200
                        else q.get("sql", "")
                    ),
                    "time": float(q.get("time", 0)),
                }
                for q in slow_queries
            ],
            'duplicate_query_count': len(duplicates),
            'duplicate_queries': duplicates if len(duplicates) <= 10 else dict(list(duplicates.items())[:10]),
            "duplicate_query_count": len(duplicates),
            "duplicate_queries": (
                duplicates
                if len(duplicates) <= 10
                else dict(list(duplicates.items())[:10])
            ),
        }

        return analysis

    @classmethod
    def analyze_current_queries(cls) -> Dict[str, Any]:
        """Analyze the current request's queries"""
        if hasattr(connection, 'queries'):
        if hasattr(connection, "queries"):
            return cls.analyze_queries(connection.queries)
        return {}
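# Usage sketch (assumed call pattern, not part of this commit): checking the
# current request's captured queries (DEBUG=True) for repeated statements,
# the usual N+1 signal.
report = DatabaseQueryAnalyzer.analyze_current_queries()
if report and report["duplicate_query_count"] > 0:
    print("possible N+1:", report["duplicate_query_count"], "repeated statements")
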
@@ -310,57 +342,62 @@ class DatabaseQueryAnalyzer:
# Performance monitoring decorators
def monitor_function_performance(operation_name: Optional[str] = None):
    """Decorator to monitor function performance"""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            name = operation_name or f"{func.__module__}.{func.__name__}"
            with monitor_performance(name, function=func.__name__, module=func.__module__):
            with monitor_performance(
                name, function=func.__name__, module=func.__module__
            ):
                return func(*args, **kwargs)

        return wrapper

    return decorator


def track_database_queries(warn_threshold: int = 10):
    """Decorator to track database queries for a function"""

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            operation_name = f"{func.__module__}.{func.__name__}"
            with track_queries(operation_name, warn_threshold):
                return func(*args, **kwargs)

        return wrapper

    return decorator
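# Usage sketch (assumed call pattern, not part of this commit): stacking both
# decorators on a hypothetical helper so each call is timed and query-audited.
@monitor_function_performance("parks.load_detail")
@track_database_queries(warn_threshold=5)
def load_park_detail(park_id):
    return None  # fetch and render the park here
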
# Performance metrics collection
class PerformanceMetrics:
    """Collect and aggregate performance metrics"""

    def __init__(self):
        self.metrics = []

    def record_metric(self, name: str, value: float, tags: Optional[Dict] = None):
        """Record a performance metric"""
        metric = {
            'name': name,
            'value': value,
            'timestamp': timezone.now().isoformat(),
            'tags': tags or {}
            "name": name,
            "value": value,
            "timestamp": timezone.now().isoformat(),
            "tags": tags or {},
        }
        self.metrics.append(metric)

        # Log the metric
        logger.info(
            f"Performance metric: {name} = {value}",
            extra=metric
        )

        logger.info(f"Performance metric: {name} = {value}", extra=metric)

    def get_metrics(self, name: Optional[str] = None) -> List[Dict]:
        """Get recorded metrics, optionally filtered by name"""
        if name:
            return [m for m in self.metrics if m['name'] == name]
            return [m for m in self.metrics if m["name"] == name]
        return self.metrics.copy()

    def clear_metrics(self):
        """Clear all recorded metrics"""
        self.metrics.clear()
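# Usage sketch (assumed call pattern, not part of this commit): recording and
# reading back a metric with the collector above.
metrics = PerformanceMetrics()
metrics.record_metric("map.query_time_ms", 42.0, tags={"endpoint": "locations"})
print(metrics.get_metrics("map.query_time_ms"))
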
@@ -1,3 +1 @@
from django.test import TestCase

# Create your tests here.
@@ -9,29 +9,27 @@ from ..views.map_views import (
    MapSearchView,
    MapBoundsView,
    MapStatsView,
    MapCacheView
    MapCacheView,
)

app_name = 'map_api'
app_name = "map_api"

urlpatterns = [
    # Main map data endpoint
    path('locations/', MapLocationsView.as_view(), name='locations'),

    path("locations/", MapLocationsView.as_view(), name="locations"),
    # Location detail endpoint
    path('locations/<str:location_type>/<int:location_id>/',
         MapLocationDetailView.as_view(), name='location_detail'),

    path(
        "locations/<str:location_type>/<int:location_id>/",
        MapLocationDetailView.as_view(),
        name="location_detail",
    ),
    # Search endpoint
    path('search/', MapSearchView.as_view(), name='search'),

    path("search/", MapSearchView.as_view(), name="search"),
    # Bounds-based query endpoint
    path('bounds/', MapBoundsView.as_view(), name='bounds'),

    path("bounds/", MapBoundsView.as_view(), name="bounds"),
    # Service statistics endpoint
    path('stats/', MapStatsView.as_view(), name='stats'),

    path("stats/", MapStatsView.as_view(), name="stats"),
    # Cache management endpoints
    path('cache/', MapCacheView.as_view(), name='cache'),
    path('cache/invalidate/', MapCacheView.as_view(), name='cache_invalidate'),
]
    path("cache/", MapCacheView.as_view(), name="cache"),
    path("cache/invalidate/", MapCacheView.as_view(), name="cache_invalidate"),
]
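# Usage sketch (assumed call pattern, not part of this commit): resolving the
# namespaced detail route defined above with django.urls.reverse.
from django.urls import reverse

url = reverse(
    "map_api:location_detail",
    kwargs={"location_type": "park", "location_id": 1},
)
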
@@ -15,19 +15,25 @@ from ..views.maps import (
    LocationListView,
)

app_name = 'maps'
app_name = "maps"

urlpatterns = [
    # Main map views
    path('', UniversalMapView.as_view(), name='universal_map'),
    path('parks/', ParkMapView.as_view(), name='park_map'),
    path('nearby/', NearbyLocationsView.as_view(), name='nearby_locations'),
    path('list/', LocationListView.as_view(), name='location_list'),

    path("", UniversalMapView.as_view(), name="universal_map"),
    path("parks/", ParkMapView.as_view(), name="park_map"),
    path("nearby/", NearbyLocationsView.as_view(), name="nearby_locations"),
    path("list/", LocationListView.as_view(), name="location_list"),
    # HTMX endpoints for dynamic updates
    path('htmx/filter/', LocationFilterView.as_view(), name='htmx_filter'),
    path('htmx/search/', LocationSearchView.as_view(), name='htmx_search'),
    path('htmx/bounds/', MapBoundsUpdateView.as_view(), name='htmx_bounds_update'),
    path('htmx/location/<str:location_type>/<int:location_id>/',
         LocationDetailModalView.as_view(), name='htmx_location_detail'),
]
    path("htmx/filter/", LocationFilterView.as_view(), name="htmx_filter"),
    path("htmx/search/", LocationSearchView.as_view(), name="htmx_search"),
    path(
        "htmx/bounds/",
        MapBoundsUpdateView.as_view(),
        name="htmx_bounds_update",
    ),
    path(
        "htmx/location/<str:location_type>/<int:location_id>/",
        LocationDetailModalView.as_view(),
        name="htmx_location_detail",
    ),
]
@@ -3,19 +3,22 @@ from core.views.search import (
    AdaptiveSearchView,
    FilterFormView,
    LocationSearchView,
    LocationSuggestionsView
    LocationSuggestionsView,
)
from rides.views import RideSearchView

app_name = 'search'
app_name = "search"

urlpatterns = [
    path('parks/', AdaptiveSearchView.as_view(), name='search'),
    path('parks/filters/', FilterFormView.as_view(), name='filter_form'),
    path('rides/', RideSearchView.as_view(), name='ride_search'),
    path('rides/results/', RideSearchView.as_view(), name='ride_search_results'),

    path("parks/", AdaptiveSearchView.as_view(), name="search"),
    path("parks/filters/", FilterFormView.as_view(), name="filter_form"),
    path("rides/", RideSearchView.as_view(), name="ride_search"),
    path("rides/results/", RideSearchView.as_view(), name="ride_search_results"),
    # Location-aware search
    path('location/', LocationSearchView.as_view(), name='location_search'),
    path('location/suggestions/', LocationSuggestionsView.as_view(), name='location_suggestions'),
]
    path("location/", LocationSearchView.as_view(), name="location_search"),
    path(
        "location/suggestions/",
        LocationSuggestionsView.as_view(),
        name="location_suggestions",
    ),
]
@@ -7,18 +7,20 @@ import logging

from contextlib import contextmanager
from typing import Optional, Dict, Any, List, Type
from django.db import connection, models
from django.db.models import QuerySet, Prefetch, Count, Avg, Max, Min
from django.db.models import QuerySet, Prefetch, Count, Avg, Max
from django.conf import settings
from django.core.cache import cache

logger = logging.getLogger('query_optimization')
logger = logging.getLogger("query_optimization")


@contextmanager
def track_queries(operation_name: str, warn_threshold: int = 10, time_threshold: float = 1.0):
def track_queries(
    operation_name: str, warn_threshold: int = 10, time_threshold: float = 1.0
):
    """
    Context manager to track database queries for specific operations

    Args:
        operation_name: Name of the operation being tracked
        warn_threshold: Number of queries that triggers a warning
@@ -27,136 +29,140 @@ def track_queries(operation_name: str, warn_threshold: int = 10, time_threshold:
    if not settings.DEBUG:
        yield
        return

    initial_queries = len(connection.queries)
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        total_queries = len(connection.queries) - initial_queries
        execution_time = end_time - start_time

        # Collect query details
        query_details = []
        if hasattr(connection, 'queries') and total_queries > 0:
        if hasattr(connection, "queries") and total_queries > 0:
            recent_queries = connection.queries[-total_queries:]
            query_details = [
                {
                    'sql': query['sql'][:500] + '...' if len(query['sql']) > 500 else query['sql'],
                    'time': float(query['time']),
                    'duplicate_count': sum(1 for q in recent_queries if q['sql'] == query['sql'])
                    "sql": (
                        query["sql"][:500] + "..."
                        if len(query["sql"]) > 500
                        else query["sql"]
                    ),
                    "time": float(query["time"]),
                    "duplicate_count": sum(
                        1 for q in recent_queries if q["sql"] == query["sql"]
                    ),
                }
                for query in recent_queries
            ]

        performance_data = {
            'operation': operation_name,
            'query_count': total_queries,
            'execution_time': execution_time,
            'queries': query_details if settings.DEBUG else [],
            'slow_queries': [q for q in query_details if q['time'] > 0.1],  # Queries slower than 100ms
            "operation": operation_name,
            "query_count": total_queries,
            "execution_time": execution_time,
            "queries": query_details if settings.DEBUG else [],
            "slow_queries": [
                q for q in query_details if q["time"] > 0.1
            ],  # Queries slower than 100ms
        }

        # Log warnings for performance issues
        if total_queries > warn_threshold or execution_time > time_threshold:
            logger.warning(
                f"Performance concern in {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
                extra=performance_data,
            )
        else:
            logger.debug(
                f"Query tracking for {operation_name}: "
                f"{total_queries} queries, {execution_time:.2f}s",
                extra=performance_data
                extra=performance_data,
            )

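A minimal usage sketch for the context manager above; the operation name and the Park model import are assumptions for illustration (outside DEBUG the manager yields immediately and tracks nothing):

# Illustrative usage, not part of the commit.
from parks.models import Park  # assumed model path

def list_parks_for_debugging():
    # Queries issued inside the block are counted and logged on exit.
    with track_queries("park_listing", warn_threshold=5, time_threshold=0.5):
        return list(Park.objects.all()[:20])
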
class QueryOptimizer:
    """Utility class for common query optimization patterns"""

    @staticmethod
    def optimize_park_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize Park queryset with proper select_related and prefetch_related
        """
        return queryset.select_related(
            'location',
            'operator',
            'created_by'
        ).prefetch_related(
            'areas',
            'rides__manufacturer',
            'reviews__user'
        ).annotate(
            ride_count=Count('rides'),
            average_rating=Avg('reviews__rating'),
            latest_review_date=Max('reviews__created_at')
        return (
            queryset.select_related("location", "operator", "created_by")
            .prefetch_related("areas", "rides__manufacturer", "reviews__user")
            .annotate(
                ride_count=Count("rides"),
                average_rating=Avg("reviews__rating"),
                latest_review_date=Max("reviews__created_at"),
            )
        )

    @staticmethod
    def optimize_ride_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize Ride queryset with proper relationships
        """
        return queryset.select_related(
            'park',
            'park__location',
            'manufacturer',
            'created_by'
        ).prefetch_related(
            'reviews__user',
            'media_items'
        ).annotate(
            review_count=Count('reviews'),
            average_rating=Avg('reviews__rating'),
            latest_review_date=Max('reviews__created_at')
        return (
            queryset.select_related(
                "park", "park__location", "manufacturer", "created_by"
            )
            .prefetch_related("reviews__user", "media_items")
            .annotate(
                review_count=Count("reviews"),
                average_rating=Avg("reviews__rating"),
                latest_review_date=Max("reviews__created_at"),
            )
        )

    @staticmethod
    def optimize_user_queryset(queryset: QuerySet) -> QuerySet:
        """
        Optimize User queryset for profile views
        """
        return queryset.prefetch_related(
            Prefetch('park_reviews', to_attr='cached_park_reviews'),
            Prefetch('ride_reviews', to_attr='cached_ride_reviews'),
            'authored_parks',
            'authored_rides'
            Prefetch("park_reviews", to_attr="cached_park_reviews"),
            Prefetch("ride_reviews", to_attr="cached_ride_reviews"),
            "authored_parks",
            "authored_rides",
        ).annotate(
            total_reviews=Count('park_reviews') + Count('ride_reviews'),
            parks_authored=Count('authored_parks'),
            rides_authored=Count('authored_rides')
            total_reviews=Count("park_reviews") + Count("ride_reviews"),
            parks_authored=Count("authored_parks"),
            rides_authored=Count("authored_rides"),
        )

    @staticmethod
    def create_bulk_queryset(model: Type[models.Model], ids: List[int]) -> QuerySet:
        """
        Create an optimized queryset for bulk operations
        """
        queryset = model.objects.filter(id__in=ids)

        # Apply model-specific optimizations
        if hasattr(model, '_meta') and model._meta.model_name == 'park':
        if hasattr(model, "_meta") and model._meta.model_name == "park":
            return QueryOptimizer.optimize_park_queryset(queryset)
        elif hasattr(model, '_meta') and model._meta.model_name == 'ride':
        elif hasattr(model, "_meta") and model._meta.model_name == "ride":
            return QueryOptimizer.optimize_ride_queryset(queryset)
        elif hasattr(model, '_meta') and model._meta.model_name == 'user':
        elif hasattr(model, "_meta") and model._meta.model_name == "user":
            return QueryOptimizer.optimize_user_queryset(queryset)

        return queryset

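For illustration, a bulk fetch routed through the model-name dispatch above might look like this (the Park import and IDs are assumptions; unrecognized models fall back to the plain queryset):

# Illustrative usage, not part of the commit.
parks = QueryOptimizer.create_bulk_queryset(Park, ids=[1, 2, 3])
for park in parks:
    # ride_count and average_rating come from the annotate() calls above,
    # so no per-object query is issued here.
    print(park.name, park.ride_count, park.average_rating)
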
class QueryCache:
    """Caching utilities for expensive queries"""

    @staticmethod
    def cache_queryset_result(cache_key: str, queryset_func, timeout: int = 3600, **kwargs):
    def cache_queryset_result(
        cache_key: str, queryset_func, timeout: int = 3600, **kwargs
    ):
        """
        Cache the result of an expensive queryset operation

        Args:
            cache_key: Unique key for caching
            queryset_func: Function that returns the queryset result
@@ -168,22 +174,22 @@ class QueryCache:
        if cached_result is not None:
            logger.debug(f"Cache hit for queryset: {cache_key}")
            return cached_result

        # Execute the expensive operation
        with track_queries(f"cache_miss_{cache_key}"):
            result = queryset_func(**kwargs)

        # Cache the result
        cache.set(cache_key, result, timeout)
        logger.debug(f"Cached queryset result: {cache_key}")

        return result

    @staticmethod
    def invalidate_model_cache(model_name: str, instance_id: Optional[int] = None):
        """
        Invalidate cache keys related to a specific model

        Args:
            model_name: Name of the model (e.g., 'park', 'ride')
            instance_id: Specific instance ID, if applicable
@@ -193,44 +199,50 @@ class QueryCache:
            pattern = f"*{model_name}_{instance_id}*"
        else:
            pattern = f"*{model_name}*"

        try:
            # For Redis cache backends that support pattern deletion
            if hasattr(cache, 'delete_pattern'):
            if hasattr(cache, "delete_pattern"):
                deleted_count = cache.delete_pattern(pattern)
                logger.info(f"Invalidated {deleted_count} cache keys for pattern: {pattern}")
                logger.info(
                    f"Invalidated {deleted_count} cache keys for pattern: {pattern}"
                )
            else:
                logger.warning(f"Cache backend does not support pattern deletion: {pattern}")
                logger.warning(
                    f"Cache backend does not support pattern deletion: {pattern}"
                )
        except Exception as e:
            logger.error(f"Error invalidating cache pattern {pattern}: {e}")

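A hedged sketch of how the two helpers above combine (the key name and Park queryset are illustrative; the callable runs only on a cache miss and kwargs are forwarded to it):

# Illustrative usage, not part of the commit.
top_parks = QueryCache.cache_queryset_result(
    "top_parks_v1",
    lambda limit: list(Park.objects.order_by("-id")[:limit]),
    timeout=600,
    limit=10,
)
# Pattern deletion only takes effect on backends exposing delete_pattern (e.g. django-redis).
QueryCache.invalidate_model_cache("park", instance_id=1)
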
class IndexAnalyzer:
    """Analyze and suggest database indexes"""

    @staticmethod
    def analyze_slow_queries(min_time: float = 0.1) -> List[Dict[str, Any]]:
        """
        Analyze slow queries from the current request

        Args:
            min_time: Minimum query time in seconds to consider "slow"
        """
        if not hasattr(connection, 'queries'):
        if not hasattr(connection, "queries"):
            return []

        slow_queries = []
        for query in connection.queries:
            query_time = float(query.get('time', 0))
            query_time = float(query.get("time", 0))
            if query_time >= min_time:
                slow_queries.append({
                    'sql': query['sql'],
                    'time': query_time,
                    'analysis': IndexAnalyzer._analyze_query_sql(query['sql'])
                })

                slow_queries.append(
                    {
                        "sql": query["sql"],
                        "time": query_time,
                        "analysis": IndexAnalyzer._analyze_query_sql(query["sql"]),
                    }
                )

        return slow_queries

    @staticmethod
    def _analyze_query_sql(sql: str) -> Dict[str, Any]:
        """
@@ -238,31 +250,40 @@ class IndexAnalyzer:
        """
        sql_upper = sql.upper()
        analysis = {
            'has_where_clause': 'WHERE' in sql_upper,
            'has_join': any(join in sql_upper for join in ['JOIN', 'INNER JOIN', 'LEFT JOIN', 'RIGHT JOIN']),
            'has_order_by': 'ORDER BY' in sql_upper,
            'has_group_by': 'GROUP BY' in sql_upper,
            'has_like': 'LIKE' in sql_upper,
            'table_scans': [],
            'suggestions': []
            "has_where_clause": "WHERE" in sql_upper,
            "has_join": any(
                join in sql_upper
                for join in ["JOIN", "INNER JOIN", "LEFT JOIN", "RIGHT JOIN"]
            ),
            "has_order_by": "ORDER BY" in sql_upper,
            "has_group_by": "GROUP BY" in sql_upper,
            "has_like": "LIKE" in sql_upper,
            "table_scans": [],
            "suggestions": [],
        }

        # Detect potential table scans
        if 'WHERE' not in sql_upper and 'SELECT COUNT(*) FROM' not in sql_upper:
            analysis['table_scans'].append("Query may be doing a full table scan")

        if "WHERE" not in sql_upper and "SELECT COUNT(*) FROM" not in sql_upper:
            analysis["table_scans"].append("Query may be doing a full table scan")

        # Suggest indexes based on patterns
        if analysis['has_where_clause'] and not analysis['has_join']:
            analysis['suggestions'].append("Consider adding indexes on WHERE clause columns")

        if analysis['has_order_by']:
            analysis['suggestions'].append("Consider adding indexes on ORDER BY columns")

        if analysis['has_like'] and '%' not in sql[:sql.find('LIKE') + 10]:
            analysis['suggestions'].append("LIKE queries with leading wildcards cannot use indexes efficiently")

        if analysis["has_where_clause"] and not analysis["has_join"]:
            analysis["suggestions"].append(
                "Consider adding indexes on WHERE clause columns"
            )

        if analysis["has_order_by"]:
            analysis["suggestions"].append(
                "Consider adding indexes on ORDER BY columns"
            )

        if analysis["has_like"] and "%" not in sql[: sql.find("LIKE") + 10]:
            analysis["suggestions"].append(
                "LIKE queries with leading wildcards cannot use indexes efficiently"
            )

        return analysis

    @staticmethod
    def suggest_model_indexes(model: Type[models.Model]) -> List[str]:
        """
@@ -270,45 +291,66 @@ class IndexAnalyzer:
        """
        suggestions = []
        opts = model._meta

        # Foreign key fields should have indexes (Django adds these automatically)

        # Foreign key fields should have indexes (Django adds these
        # automatically)
        for field in opts.fields:
            if isinstance(field, models.ForeignKey):
                suggestions.append(f"Index on {field.name} (automatically created by Django)")

                suggestions.append(
                    f"Index on {field.name} (automatically created by Django)"
                )

        # Suggest composite indexes for common query patterns
        date_fields = [f.name for f in opts.fields if isinstance(f, (models.DateField, models.DateTimeField))]
        status_fields = [f.name for f in opts.fields if f.name in ['status', 'is_active', 'is_published']]

        date_fields = [
            f.name
            for f in opts.fields
            if isinstance(f, (models.DateField, models.DateTimeField))
        ]
        status_fields = [
            f.name
            for f in opts.fields
            if f.name in ["status", "is_active", "is_published"]
        ]

        if date_fields and status_fields:
            for date_field in date_fields:
                for status_field in status_fields:
                    suggestions.append(f"Composite index on ({status_field}, {date_field}) for filtered date queries")

                    suggestions.append(
                        f"Composite index on ({status_field}, {date_field}) for filtered date queries"
                    )

        # Suggest indexes for fields commonly used in WHERE clauses
        common_filter_fields = ['slug', 'name', 'created_at', 'updated_at']
        common_filter_fields = ["slug", "name", "created_at", "updated_at"]
        for field in opts.fields:
            if field.name in common_filter_fields and not field.db_index:
                suggestions.append(f"Consider adding db_index=True to {field.name}")

                suggestions.append(
                    f"Consider adding db_index=True to {field.name}"
                )

        return suggestions


def log_query_performance():
    """Decorator to log query performance for a function"""

    def decorator(func):
        def wrapper(*args, **kwargs):
            operation_name = f"{func.__module__}.{func.__name__}"
            with track_queries(operation_name):
                return func(*args, **kwargs)

        return wrapper

    return decorator

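Usage of the decorator factory above is a one-liner; note the trailing parentheses, since log_query_performance() returns the decorator (the function body is illustrative):

# Illustrative usage, not part of the commit.
@log_query_performance()
def expensive_report():
    # All queries here are tracked under "<module>.expensive_report".
    return list(Park.objects.select_related("location")[:100])
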
def optimize_queryset_for_serialization(queryset: QuerySet, fields: List[str]) -> QuerySet:
def optimize_queryset_for_serialization(
    queryset: QuerySet, fields: List[str]
) -> QuerySet:
    """
    Optimize a queryset for API serialization by only selecting needed fields

    Args:
        queryset: The queryset to optimize
        fields: List of field names that will be serialized
@@ -316,28 +358,30 @@ def optimize_queryset_for_serialization(queryset: QuerySet, fields: List[str]) -
    # Extract foreign key fields that need select_related
    model = queryset.model
    opts = model._meta

    select_related_fields = []
    prefetch_related_fields = []

    for field_name in fields:
        try:
            field = opts.get_field(field_name)
            if isinstance(field, models.ForeignKey):
                select_related_fields.append(field_name)
            elif isinstance(field, (models.ManyToManyField, models.reverse.ManyToManyRel)):
            elif isinstance(
                field, (models.ManyToManyField, models.reverse.ManyToManyRel)
            ):
                prefetch_related_fields.append(field_name)
        except models.FieldDoesNotExist:
            # Field might be a property or method, skip optimization
            continue

    # Apply optimizations
    if select_related_fields:
        queryset = queryset.select_related(*select_related_fields)

    if prefetch_related_fields:
        queryset = queryset.prefetch_related(*prefetch_related_fields)

    return queryset

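A minimal sketch of the serializer-driven optimization above, assuming Park field names for illustration (FK names feed select_related, many-to-many and reverse relations feed prefetch_related, anything else is skipped):

# Illustrative usage, not part of the commit.
queryset = optimize_queryset_for_serialization(
    Park.objects.all(),
    fields=["name", "location", "operator", "areas"],
)
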
@@ -347,39 +391,42 @@ def monitor_db_performance(operation_name: str):
    """
    Context manager that monitors database performance for an operation
    """
    initial_queries = len(connection.queries) if hasattr(connection, 'queries') else 0
    initial_queries = len(connection.queries) if hasattr(connection, "queries") else 0
    start_time = time.time()

    try:
        yield
    finally:
        end_time = time.time()
        duration = end_time - start_time

        if hasattr(connection, 'queries'):
        if hasattr(connection, "queries"):
            total_queries = len(connection.queries) - initial_queries

            # Analyze queries for performance issues
            slow_queries = IndexAnalyzer.analyze_slow_queries(0.05)  # 50ms threshold

            performance_data = {
                'operation': operation_name,
                'duration': duration,
                'query_count': total_queries,
                'slow_query_count': len(slow_queries),
                'slow_queries': slow_queries[:5]  # Limit to top 5 slow queries
                "operation": operation_name,
                "duration": duration,
                "query_count": total_queries,
                "slow_query_count": len(slow_queries),
                # Limit to top 5 slow queries
                "slow_queries": slow_queries[:5],
            }

            # Log performance data
            if duration > 1.0 or total_queries > 15 or slow_queries:
                logger.warning(
                    f"Performance issue in {operation_name}: "
                    f"{duration:.3f}s, {total_queries} queries, {len(slow_queries)} slow",
                    extra=performance_data
                    extra=performance_data,
                )
            else:
                logger.debug(
                    f"DB performance for {operation_name}: "
                    f"{duration:.3f}s, {total_queries} queries",
                    extra=performance_data
                    extra=performance_data,
                )

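The monitor composes with any block of ORM work; a hedged sketch (the function and row shape are assumptions):

# Illustrative usage, not part of the commit.
def import_parks(rows):
    with monitor_db_performance("park_bulk_import"):
        for row in rows:
            Park.objects.update_or_create(slug=row["slug"], defaults=row)
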
@@ -1 +1 @@
# Core views
# Core views

@@ -19,157 +19,165 @@ class HealthCheckAPIView(APIView):
    """
    Enhanced API endpoint for health checks with detailed JSON response
    """

    permission_classes = [AllowAny]  # Public endpoint

    def get(self, request):
        """Return comprehensive health check information"""
        start_time = time.time()

        # Get basic health check results
        main_view = MainView()
        main_view.request = request

        plugins = main_view.plugins
        errors = main_view.errors

        # Collect additional performance metrics
        cache_monitor = CacheMonitor()
        cache_stats = cache_monitor.get_cache_stats()

        # Build comprehensive health data
        health_data = {
            'status': 'healthy' if not errors else 'unhealthy',
            'timestamp': timezone.now().isoformat(),
            'version': getattr(settings, 'VERSION', '1.0.0'),
            'environment': getattr(settings, 'ENVIRONMENT', 'development'),
            'response_time_ms': 0,  # Will be calculated at the end
            'checks': {},
            'metrics': {
                'cache': cache_stats,
                'database': self._get_database_metrics(),
                'system': self._get_system_metrics(),
            }
            "status": "healthy" if not errors else "unhealthy",
            "timestamp": timezone.now().isoformat(),
            "version": getattr(settings, "VERSION", "1.0.0"),
            "environment": getattr(settings, "ENVIRONMENT", "development"),
            "response_time_ms": 0,  # Will be calculated at the end
            "checks": {},
            "metrics": {
                "cache": cache_stats,
                "database": self._get_database_metrics(),
                "system": self._get_system_metrics(),
            },
        }

        # Process individual health checks
        for plugin in plugins:
            plugin_name = plugin.identifier()
            plugin_errors = errors.get(plugin.__class__.__name__, [])

            health_data['checks'][plugin_name] = {
                'status': 'healthy' if not plugin_errors else 'unhealthy',
                'critical': getattr(plugin, 'critical_service', False),
                'errors': [str(error) for error in plugin_errors],
                'response_time_ms': getattr(plugin, '_response_time', None)

            health_data["checks"][plugin_name] = {
                "status": "healthy" if not plugin_errors else "unhealthy",
                "critical": getattr(plugin, "critical_service", False),
                "errors": [str(error) for error in plugin_errors],
                "response_time_ms": getattr(plugin, "_response_time", None),
            }

        # Calculate total response time
        health_data['response_time_ms'] = round((time.time() - start_time) * 1000, 2)

        health_data["response_time_ms"] = round((time.time() - start_time) * 1000, 2)

        # Determine HTTP status code
        status_code = 200
        if errors:
            # Check if any critical services are failing
            critical_errors = any(
                getattr(plugin, 'critical_service', False)
                for plugin in plugins
                getattr(plugin, "critical_service", False)
                for plugin in plugins
                if errors.get(plugin.__class__.__name__)
            )
            status_code = 503 if critical_errors else 200

        return Response(health_data, status=status_code)

    def _get_database_metrics(self):
        """Get database performance metrics"""
        try:
            from django.db import connection

            # Get basic connection info
            metrics = {
                'vendor': connection.vendor,
                'connection_status': 'connected',
                "vendor": connection.vendor,
                "connection_status": "connected",
            }

            # Test query performance
            start_time = time.time()
            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()
            query_time = (time.time() - start_time) * 1000

            metrics['test_query_time_ms'] = round(query_time, 2)

            metrics["test_query_time_ms"] = round(query_time, 2)

            # PostgreSQL specific metrics
            if connection.vendor == 'postgresql':
            if connection.vendor == "postgresql":
                try:
                    with connection.cursor() as cursor:
                        cursor.execute("""
                            SELECT
                        cursor.execute(
                            """
                            SELECT
                                numbackends as active_connections,
                                xact_commit as transactions_committed,
                                xact_rollback as transactions_rolled_back,
                                blks_read as blocks_read,
                                blks_hit as blocks_hit
                            FROM pg_stat_database
                            FROM pg_stat_database
                            WHERE datname = current_database()
                        """)
                            """
                        )
                        row = cursor.fetchone()
                        if row:
                            metrics.update({
                                'active_connections': row[0],
                                'transactions_committed': row[1],
                                'transactions_rolled_back': row[2],
                                'cache_hit_ratio': round((row[4] / (row[3] + row[4])) * 100, 2) if (row[3] + row[4]) > 0 else 0
                            })
                            metrics.update(
                                {
                                    "active_connections": row[0],
                                    "transactions_committed": row[1],
                                    "transactions_rolled_back": row[2],
                                    "cache_hit_ratio": (
                                        round(
                                            (row[4] / (row[3] + row[4])) * 100,
                                            2,
                                        )
                                        if (row[3] + row[4]) > 0
                                        else 0
                                    ),
                                }
                            )
                except Exception:
                    pass  # Skip advanced metrics if not available

            return metrics

        except Exception as e:
            return {
                'connection_status': 'error',
                'error': str(e)
            }

            return {"connection_status": "error", "error": str(e)}

    def _get_system_metrics(self):
        """Get system performance metrics"""
        metrics = {
            'debug_mode': settings.DEBUG,
            'allowed_hosts': settings.ALLOWED_HOSTS if settings.DEBUG else ['hidden'],
            "debug_mode": settings.DEBUG,
            "allowed_hosts": (settings.ALLOWED_HOSTS if settings.DEBUG else ["hidden"]),
        }

        try:
            import psutil

            # Memory metrics
            memory = psutil.virtual_memory()
            metrics['memory'] = {
                'total_mb': round(memory.total / 1024 / 1024, 2),
                'available_mb': round(memory.available / 1024 / 1024, 2),
                'percent_used': memory.percent,
            metrics["memory"] = {
                "total_mb": round(memory.total / 1024 / 1024, 2),
                "available_mb": round(memory.available / 1024 / 1024, 2),
                "percent_used": memory.percent,
            }

            # CPU metrics
            metrics['cpu'] = {
                'percent_used': psutil.cpu_percent(interval=0.1),
                'core_count': psutil.cpu_count(),
            metrics["cpu"] = {
                "percent_used": psutil.cpu_percent(interval=0.1),
                "core_count": psutil.cpu_count(),
            }

            # Disk metrics
            disk = psutil.disk_usage('/')
            metrics['disk'] = {
                'total_gb': round(disk.total / 1024 / 1024 / 1024, 2),
                'free_gb': round(disk.free / 1024 / 1024 / 1024, 2),
                'percent_used': round((disk.used / disk.total) * 100, 2),
            disk = psutil.disk_usage("/")
            metrics["disk"] = {
                "total_gb": round(disk.total / 1024 / 1024 / 1024, 2),
                "free_gb": round(disk.free / 1024 / 1024 / 1024, 2),
                "percent_used": round((disk.used / disk.total) * 100, 2),
            }

        except ImportError:
            metrics['system_monitoring'] = 'psutil not available'
            metrics["system_monitoring"] = "psutil not available"
        except Exception as e:
            metrics['system_error'] = str(e)

            metrics["system_error"] = str(e)

        return metrics

@@ -177,80 +185,89 @@ class PerformanceMetricsView(APIView):
    """
    API view for performance metrics and database analysis
    """

    permission_classes = [AllowAny] if settings.DEBUG else []

    def get(self, request):
        """Return performance metrics and analysis"""
        if not settings.DEBUG:
            return Response({'error': 'Only available in debug mode'}, status=403)

            return Response({"error": "Only available in debug mode"}, status=403)

        metrics = {
            'timestamp': timezone.now().isoformat(),
            'database_analysis': self._get_database_analysis(),
            'cache_performance': self._get_cache_performance(),
            'recent_slow_queries': self._get_slow_queries(),
            "timestamp": timezone.now().isoformat(),
            "database_analysis": self._get_database_analysis(),
            "cache_performance": self._get_cache_performance(),
            "recent_slow_queries": self._get_slow_queries(),
        }

        return Response(metrics)

    def _get_database_analysis(self):
        """Analyze database performance"""
        try:
            from django.db import connection

            analysis = {
                'total_queries': len(connection.queries),
                'query_analysis': IndexAnalyzer.analyze_slow_queries(0.05),
                "total_queries": len(connection.queries),
                "query_analysis": IndexAnalyzer.analyze_slow_queries(0.05),
            }

            if connection.queries:
                query_times = [float(q.get('time', 0)) for q in connection.queries]
                analysis.update({
                    'total_query_time': sum(query_times),
                    'average_query_time': sum(query_times) / len(query_times),
                    'slowest_query_time': max(query_times),
                    'fastest_query_time': min(query_times),
                })

                query_times = [float(q.get("time", 0)) for q in connection.queries]
                analysis.update(
                    {
                        "total_query_time": sum(query_times),
                        "average_query_time": sum(query_times) / len(query_times),
                        "slowest_query_time": max(query_times),
                        "fastest_query_time": min(query_times),
                    }
                )

            return analysis

        except Exception as e:
            return {'error': str(e)}

            return {"error": str(e)}

    def _get_cache_performance(self):
        """Get cache performance metrics"""
        try:
            cache_monitor = CacheMonitor()
            return cache_monitor.get_cache_stats()
        except Exception as e:
            return {'error': str(e)}

            return {"error": str(e)}

    def _get_slow_queries(self):
        """Get recent slow queries"""
        try:
            return IndexAnalyzer.analyze_slow_queries(0.1)  # 100ms threshold
        except Exception as e:
            return {'error': str(e)}
            return {"error": str(e)}


class SimpleHealthView(View):
    """
    Simple health check endpoint for load balancers
    """

    def get(self, request):
        """Return simple OK status"""
        try:
            # Basic database connectivity test
            from django.db import connection

            with connection.cursor() as cursor:
                cursor.execute("SELECT 1")
                cursor.fetchone()

            return JsonResponse({'status': 'ok', 'timestamp': timezone.now().isoformat()})

            return JsonResponse(
                {"status": "ok", "timestamp": timezone.now().isoformat()}
            )
        except Exception as e:
            return JsonResponse(
                {'status': 'error', 'error': str(e), 'timestamp': timezone.now().isoformat()},
                status=503
                {
                    "status": "error",
                    "error": str(e),
                    "timestamp": timezone.now().isoformat(),
                },
                status=503,
            )

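Both endpoints can be probed with Django's test client; the URL paths below are assumptions (the commit does not show the URL wiring), but the response shapes match the views above:

# Illustrative usage, not part of the commit.
from django.test import Client

client = Client()
simple = client.get("/health/")        # SimpleHealthView: {"status": "ok", ...} or 503
detailed = client.get("/api/health/")  # HealthCheckAPIView: checks + metrics payload
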
@@ -5,15 +5,13 @@ Enhanced with proper error handling, pagination, and performance optimizations.

import json
import logging
from typing import Dict, Any, Optional, Set
from django.http import JsonResponse, HttpRequest, Http404
from django.views.decorators.http import require_http_methods
from typing import Dict, Any, Optional
from django.http import JsonResponse, HttpRequest
from django.views.decorators.cache import cache_page
from django.views.decorators.gzip import gzip_page
from django.utils.decorators import method_decorator
from django.views import View
from django.core.exceptions import ValidationError
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.conf import settings
import time

@@ -25,250 +23,289 @@ logger = logging.getLogger(__name__)

class MapAPIView(View):
    """Base view for map API endpoints with common functionality."""

    # Pagination settings
    DEFAULT_PAGE_SIZE = 50
    MAX_PAGE_SIZE = 200

    def dispatch(self, request, *args, **kwargs):
        """Add CORS headers, compression, and handle preflight requests."""
        start_time = time.time()

        try:
            response = super().dispatch(request, *args, **kwargs)

            # Add CORS headers for API access
            response['Access-Control-Allow-Origin'] = '*'
            response['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS'
            response['Access-Control-Allow-Headers'] = 'Content-Type, Authorization'

            response["Access-Control-Allow-Origin"] = "*"
            response["Access-Control-Allow-Methods"] = "GET, POST, OPTIONS"
            response["Access-Control-Allow-Headers"] = "Content-Type, Authorization"

            # Add performance headers
            response['X-Response-Time'] = f"{(time.time() - start_time) * 1000:.2f}ms"

            # Add compression hint for large responses
            if hasattr(response, 'content') and len(response.content) > 1024:
                response['Content-Encoding'] = 'gzip'

            return response

        except Exception as e:
            logger.error(f"API error in {request.path}: {str(e)}", exc_info=True)
            return self._error_response(
                "An internal server error occurred",
                status=500
            )
            response["X-Response-Time"] = f"{(time.time() - start_time) * 1000:.2f}ms"

            # Add compression hint for large responses
            if hasattr(response, "content") and len(response.content) > 1024:
                response["Content-Encoding"] = "gzip"

            return response

        except Exception as e:
            logger.error(
                f"API error in {request.path}: {str(e)}",
                exc_info=True,
            )
            return self._error_response("An internal server error occurred", status=500)

    def options(self, request, *args, **kwargs):
        """Handle preflight CORS requests."""
        return JsonResponse({}, status=200)

    def _parse_bounds(self, request: HttpRequest) -> Optional[GeoBounds]:
        """Parse geographic bounds from request parameters."""
        try:
            north = request.GET.get('north')
            south = request.GET.get('south')
            east = request.GET.get('east')
            west = request.GET.get('west')

            north = request.GET.get("north")
            south = request.GET.get("south")
            east = request.GET.get("east")
            west = request.GET.get("west")

            if all(param is not None for param in [north, south, east, west]):
                bounds = GeoBounds(
                    north=float(north),
                    south=float(south),
                    east=float(east),
                    west=float(west)
                    west=float(west),
                )

                # Validate bounds
                if not (-90 <= bounds.south <= bounds.north <= 90):
                    raise ValidationError("Invalid latitude bounds")
                if not (-180 <= bounds.west <= bounds.east <= 180):
                    raise ValidationError("Invalid longitude bounds")

                return bounds
            return None
        except (ValueError, TypeError) as e:
            raise ValidationError(f"Invalid bounds parameters: {e}")

    def _parse_pagination(self, request: HttpRequest) -> Dict[str, int]:
        """Parse pagination parameters from request."""
        try:
            page = max(1, int(request.GET.get('page', 1)))
            page = max(1, int(request.GET.get("page", 1)))
            page_size = min(
                self.MAX_PAGE_SIZE,
                max(1, int(request.GET.get('page_size', self.DEFAULT_PAGE_SIZE)))
                max(
                    1,
                    int(request.GET.get("page_size", self.DEFAULT_PAGE_SIZE)),
                ),
            )
            offset = (page - 1) * page_size

            return {
                'page': page,
                'page_size': page_size,
                'offset': offset,
                'limit': page_size
                "page": page,
                "page_size": page_size,
                "offset": offset,
                "limit": page_size,
            }
        except (ValueError, TypeError):
            return {
                'page': 1,
                'page_size': self.DEFAULT_PAGE_SIZE,
                'offset': 0,
                'limit': self.DEFAULT_PAGE_SIZE
                "page": 1,
                "page_size": self.DEFAULT_PAGE_SIZE,
                "offset": 0,
                "limit": self.DEFAULT_PAGE_SIZE,
            }

    def _parse_filters(self, request: HttpRequest) -> Optional[MapFilters]:
        """Parse filtering parameters from request."""
        try:
            filters = MapFilters()

            # Location types
            location_types_param = request.GET.get('types')
            location_types_param = request.GET.get("types")
            if location_types_param:
                type_strings = location_types_param.split(',')
                type_strings = location_types_param.split(",")
                valid_types = {lt.value for lt in LocationType}
                filters.location_types = {
                    LocationType(t.strip()) for t in type_strings
                    LocationType(t.strip())
                    for t in type_strings
                    if t.strip() in valid_types
                }

            # Park status
            park_status_param = request.GET.get('park_status')
            park_status_param = request.GET.get("park_status")
            if park_status_param:
                filters.park_status = set(park_status_param.split(','))
                filters.park_status = set(park_status_param.split(","))

            # Ride types
            ride_types_param = request.GET.get('ride_types')
            ride_types_param = request.GET.get("ride_types")
            if ride_types_param:
                filters.ride_types = set(ride_types_param.split(','))
                filters.ride_types = set(ride_types_param.split(","))

            # Company roles
            company_roles_param = request.GET.get('company_roles')
            company_roles_param = request.GET.get("company_roles")
            if company_roles_param:
                filters.company_roles = set(company_roles_param.split(','))
                filters.company_roles = set(company_roles_param.split(","))

            # Search query with length validation
            search_query = request.GET.get('q') or request.GET.get('search')
            search_query = request.GET.get("q") or request.GET.get("search")
            if search_query and len(search_query.strip()) >= 2:
                filters.search_query = search_query.strip()

            # Rating filter with validation
            min_rating_param = request.GET.get('min_rating')
            min_rating_param = request.GET.get("min_rating")
            if min_rating_param:
                min_rating = float(min_rating_param)
                if 0 <= min_rating <= 10:
                    filters.min_rating = min_rating

            # Geographic filters with validation
            country = request.GET.get('country', '').strip()
            country = request.GET.get("country", "").strip()
            if country and len(country) >= 2:
                filters.country = country

            state = request.GET.get('state', '').strip()
            state = request.GET.get("state", "").strip()
            if state and len(state) >= 2:
                filters.state = state

            city = request.GET.get('city', '').strip()
            city = request.GET.get("city", "").strip()
            if city and len(city) >= 2:
                filters.city = city

            # Coordinates requirement
            has_coordinates_param = request.GET.get('has_coordinates')
            has_coordinates_param = request.GET.get("has_coordinates")
            if has_coordinates_param is not None:
                filters.has_coordinates = has_coordinates_param.lower() in ['true', '1', 'yes']

            return filters if any([
                filters.location_types, filters.park_status, filters.ride_types,
                filters.company_roles, filters.search_query, filters.min_rating,
                filters.country, filters.state, filters.city
            ]) else None

                filters.has_coordinates = has_coordinates_param.lower() in [
                    "true",
                    "1",
                    "yes",
                ]

            return (
                filters
                if any(
                    [
                        filters.location_types,
                        filters.park_status,
                        filters.ride_types,
                        filters.company_roles,
                        filters.search_query,
                        filters.min_rating,
                        filters.country,
                        filters.state,
                        filters.city,
                    ]
                )
                else None
            )

        except (ValueError, TypeError) as e:
            raise ValidationError(f"Invalid filter parameters: {e}")

    def _parse_zoom_level(self, request: HttpRequest) -> int:
        """Parse zoom level from request with default."""
        try:
            zoom_param = request.GET.get('zoom', '10')
            zoom_param = request.GET.get("zoom", "10")
            zoom_level = int(zoom_param)
            return max(1, min(20, zoom_level))  # Clamp between 1 and 20
        except (ValueError, TypeError):
            return 10  # Default zoom level

    def _create_paginated_response(self, data: list, total_count: int,
                                   pagination: Dict[str, int], request: HttpRequest) -> Dict[str, Any]:
    def _create_paginated_response(
        self,
        data: list,
        total_count: int,
        pagination: Dict[str, int],
        request: HttpRequest,
    ) -> Dict[str, Any]:
        """Create paginated response with metadata."""
        total_pages = (total_count + pagination['page_size'] - 1) // pagination['page_size']

        total_pages = (total_count + pagination["page_size"] - 1) // pagination[
            "page_size"
        ]

        # Build pagination URLs
        base_url = request.build_absolute_uri(request.path)
        query_params = request.GET.copy()

        next_url = None
        if pagination['page'] < total_pages:
            query_params['page'] = pagination['page'] + 1
        if pagination["page"] < total_pages:
            query_params["page"] = pagination["page"] + 1
            next_url = f"{base_url}?{query_params.urlencode()}"

        prev_url = None
        if pagination['page'] > 1:
            query_params['page'] = pagination['page'] - 1
        if pagination["page"] > 1:
            query_params["page"] = pagination["page"] - 1
            prev_url = f"{base_url}?{query_params.urlencode()}"

        return {
            'status': 'success',
            'data': data,
            'pagination': {
                'page': pagination['page'],
                'page_size': pagination['page_size'],
                'total_pages': total_pages,
                'total_count': total_count,
                'has_next': pagination['page'] < total_pages,
                'has_previous': pagination['page'] > 1,
                'next_url': next_url,
                'previous_url': prev_url,
            }
            "status": "success",
            "data": data,
            "pagination": {
                "page": pagination["page"],
                "page_size": pagination["page_size"],
                "total_pages": total_pages,
                "total_count": total_count,
                "has_next": pagination["page"] < total_pages,
                "has_previous": pagination["page"] > 1,
                "next_url": next_url,
                "previous_url": prev_url,
            },
        }

    def _error_response(self, message: str, status: int = 400,
                        error_code: str = None, details: Dict[str, Any] = None) -> JsonResponse:
    def _error_response(
        self,
        message: str,
        status: int = 400,
        error_code: str = None,
        details: Dict[str, Any] = None,
    ) -> JsonResponse:
        """Return standardized error response with enhanced information."""
        response_data = {
            'status': 'error',
            'message': message,
            'timestamp': time.time(),
            'data': None
            "status": "error",
            "message": message,
            "timestamp": time.time(),
            "data": None,
        }

        if error_code:
            response_data['error_code'] = error_code
            response_data["error_code"] = error_code

        if details:
            response_data['details'] = details
            response_data["details"] = details

        # Add request ID for debugging in production
        if hasattr(settings, 'DEBUG') and not settings.DEBUG:
            response_data['request_id'] = getattr(self.request, 'id', None)

        if hasattr(settings, "DEBUG") and not settings.DEBUG:
            response_data["request_id"] = getattr(self.request, "id", None)

        return JsonResponse(response_data, status=status)

    def _success_response(self, data: Any, message: str = None,
                          metadata: Dict[str, Any] = None) -> JsonResponse:
    def _success_response(
        self, data: Any, message: str = None, metadata: Dict[str, Any] = None
    ) -> JsonResponse:
        """Return standardized success response."""
        response_data = {
            'status': 'success',
            'data': data,
            'timestamp': time.time(),
            "status": "success",
            "data": data,
            "timestamp": time.time(),
        }

        if message:
            response_data['message'] = message
            response_data["message"] = message

        if metadata:
            response_data['metadata'] = metadata
            response_data["metadata"] = metadata

        return JsonResponse(response_data)

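The parsing helpers above are easy to exercise in isolation with a fabricated request; a minimal sketch (parameter values are illustrative):

# Illustrative usage, not part of the commit.
from django.test import RequestFactory

request = RequestFactory().get(
    "/api/map/locations/",
    {"north": "49.0", "south": "47.0", "east": "12.0", "west": "8.0",
     "page": "2", "page_size": "25", "zoom": "12"},
)
view = MapAPIView()
bounds = view._parse_bounds(request)          # GeoBounds(north=49.0, ...)
pagination = view._parse_pagination(request)  # {"page": 2, "page_size": 25, "offset": 25, "limit": 25}
zoom = view._parse_zoom_level(request)        # 12, clamped to 1..20
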
class MapLocationsView(MapAPIView):
    """
    API endpoint for getting map locations with optional clustering.

    GET /api/map/locations/
    Parameters:
    - north, south, east, west: Bounding box coordinates
@@ -281,7 +318,7 @@ class MapLocationsView(MapAPIView):
    - min_rating: Minimum rating filter
    - country, state, city: Geographic filters
    """

    @method_decorator(cache_page(300))  # Cache for 5 minutes
    @method_decorator(gzip_page)  # Compress large responses
    def get(self, request: HttpRequest) -> JsonResponse:
@@ -292,57 +329,59 @@ class MapLocationsView(MapAPIView):
            filters = self._parse_filters(request)
            zoom_level = self._parse_zoom_level(request)
            pagination = self._parse_pagination(request)

            # Clustering preference
            cluster_param = request.GET.get('cluster', 'true')
            enable_clustering = cluster_param.lower() in ['true', '1', 'yes']

            cluster_param = request.GET.get("cluster", "true")
            enable_clustering = cluster_param.lower() in ["true", "1", "yes"]

            # Cache preference
            use_cache_param = request.GET.get('cache', 'true')
            use_cache = use_cache_param.lower() in ['true', '1', 'yes']

            use_cache_param = request.GET.get("cache", "true")
            use_cache = use_cache_param.lower() in ["true", "1", "yes"]

            # Validate request
            if not enable_clustering and not bounds and not filters:
                return self._error_response(
                    "Either bounds, filters, or clustering must be specified for non-clustered requests",
                    error_code="MISSING_PARAMETERS"
                    error_code="MISSING_PARAMETERS",
                )

            # Get map data
            response = unified_map_service.get_map_data(
                bounds=bounds,
                filters=filters,
                zoom_level=zoom_level,
                cluster=enable_clustering,
                use_cache=use_cache
                use_cache=use_cache,
            )

            # Handle pagination for non-clustered results
            if not enable_clustering and response.locations:
                start_idx = pagination['offset']
                end_idx = start_idx + pagination['limit']
                start_idx = pagination["offset"]
                end_idx = start_idx + pagination["limit"]
                paginated_locations = response.locations[start_idx:end_idx]

                return JsonResponse(self._create_paginated_response(
                    [loc.to_dict() for loc in paginated_locations],
                    len(response.locations),
                    pagination,
                    request
                ))

                return JsonResponse(
                    self._create_paginated_response(
                        [loc.to_dict() for loc in paginated_locations],
                        len(response.locations),
                        pagination,
                        request,
                    )
                )

            # For clustered results, return as-is with metadata
            response_dict = response.to_dict()

            return self._success_response(
                response_dict,
                metadata={
                    'clustered': response.clustered,
                    'cache_hit': response.cache_hit,
                    'query_time_ms': response.query_time_ms,
                    'filters_applied': response.filters_applied
                }
                    "clustered": response.clustered,
                    "cache_hit": response.cache_hit,
                    "query_time_ms": response.query_time_ms,
                    "filters_applied": response.filters_applied,
                },
            )

        except ValidationError as e:
            logger.warning(f"Validation error in MapLocationsView: {str(e)}")
            return self._error_response(str(e), 400, error_code="VALIDATION_ERROR")
@@ -351,72 +390,81 @@ class MapLocationsView(MapAPIView):
            return self._error_response(
                "Failed to retrieve map locations",
                500,
                error_code="INTERNAL_ERROR"
                error_code="INTERNAL_ERROR",
            )


class MapLocationDetailView(MapAPIView):
    """
    API endpoint for getting detailed information about a specific location.

    GET /api/map/locations/<type>/<id>/
    """

    @method_decorator(cache_page(600))  # Cache for 10 minutes
    def get(self, request: HttpRequest, location_type: str, location_id: int) -> JsonResponse:
    def get(
        self, request: HttpRequest, location_type: str, location_id: int
    ) -> JsonResponse:
        """Get detailed information for a specific location."""
        try:
            # Validate location type
            valid_types = [lt.value for lt in LocationType]
            if location_type not in valid_types:
                return self._error_response(
                    f"Invalid location type: {location_type}. Valid types: {', '.join(valid_types)}",
                    400,
                    error_code="INVALID_LOCATION_TYPE"
                    error_code="INVALID_LOCATION_TYPE",
                )

            # Validate location ID
            if location_id <= 0:
                return self._error_response(
                    "Location ID must be a positive integer",
                    400,
                    error_code="INVALID_LOCATION_ID"
                    error_code="INVALID_LOCATION_ID",
                )

            # Get location details
            location = unified_map_service.get_location_details(location_type, location_id)

            location = unified_map_service.get_location_details(
                location_type, location_id
            )

            if not location:
                return self._error_response(
                    f"Location not found: {location_type}/{location_id}",
                    404,
                    error_code="LOCATION_NOT_FOUND"
                    error_code="LOCATION_NOT_FOUND",
                )

            return self._success_response(
                location.to_dict(),
                metadata={
                    'location_type': location_type,
                    'location_id': location_id
                }
                    "location_type": location_type,
                    "location_id": location_id,
                },
            )

        except ValueError as e:
            logger.warning(f"Value error in MapLocationDetailView: {str(e)}")
            return self._error_response(str(e), 400, error_code="INVALID_PARAMETER")
        except Exception as e:
            logger.error(f"Error in MapLocationDetailView: {str(e)}", exc_info=True)
            logger.error(
                f"Error in MapLocationDetailView: {str(e)}",
                exc_info=True,
            )
            return self._error_response(
                "Failed to retrieve location details",
                500,
                error_code="INTERNAL_ERROR"
                error_code="INTERNAL_ERROR",
            )


class MapSearchView(MapAPIView):
    """
    API endpoint for searching locations by text query.

    GET /api/map/search/
    Parameters:
    - q: Search query (required)
@@ -424,71 +472,75 @@ class MapSearchView(MapAPIView):
    - types: Comma-separated location types
    - limit: Maximum results (default 50)
    """

    @method_decorator(gzip_page)  # Compress responses
    def get(self, request: HttpRequest) -> JsonResponse:
        """Search locations by text query with pagination."""
        try:
            # Get and validate search query
            query = request.GET.get('q', '').strip()
            query = request.GET.get("q", "").strip()
            if not query:
                return self._error_response(
                    "Search query 'q' parameter is required",
                    400,
                    error_code="MISSING_QUERY"
                    error_code="MISSING_QUERY",
                )

            if len(query) < 2:
                return self._error_response(
                    "Search query must be at least 2 characters long",
                    400,
                    error_code="QUERY_TOO_SHORT"
                    error_code="QUERY_TOO_SHORT",
                )

            # Parse parameters
            bounds = self._parse_bounds(request)
            pagination = self._parse_pagination(request)

            # Parse location types
            location_types = None
            types_param = request.GET.get('types')
            types_param = request.GET.get("types")
            if types_param:
                try:
                    valid_types = {lt.value for lt in LocationType}
                    location_types = {
                        LocationType(t.strip()) for t in types_param.split(',')
                        LocationType(t.strip())
                        for t in types_param.split(",")
                        if t.strip() in valid_types
                    }
                except ValueError:
                    return self._error_response(
                        "Invalid location types",
                        400,
                        error_code="INVALID_TYPES"
                        error_code="INVALID_TYPES",
                    )

            # Set reasonable search limit (higher for search than general listings)
            search_limit = min(500, pagination['page'] * pagination['page_size'])

            # Set reasonable search limit (higher for search than general
            # listings)
            search_limit = min(500, pagination["page"] * pagination["page_size"])

            # Perform search
            locations = unified_map_service.search_locations(
                query=query,
                bounds=bounds,
                location_types=location_types,
                limit=search_limit
                limit=search_limit,
            )

            # Apply pagination
            start_idx = pagination['offset']
            end_idx = start_idx + pagination['limit']
            start_idx = pagination["offset"]
            end_idx = start_idx + pagination["limit"]
            paginated_locations = locations[start_idx:end_idx]

            return JsonResponse(self._create_paginated_response(
                [loc.to_dict() for loc in paginated_locations],
                len(locations),
                pagination,
                request
            ))

            return JsonResponse(
                self._create_paginated_response(
                    [loc.to_dict() for loc in paginated_locations],
                    len(locations),
                    pagination,
                    request,
                )
            )

        except ValidationError as e:
            logger.warning(f"Validation error in MapSearchView: {str(e)}")
            return self._error_response(str(e), 400, error_code="VALIDATION_ERROR")
@@ -500,21 +552,21 @@ class MapSearchView(MapAPIView):
            return self._error_response(
                "Search failed due to internal error",
                500,
                error_code="SEARCH_FAILED"
                error_code="SEARCH_FAILED",
            )


class MapBoundsView(MapAPIView):
    """
    API endpoint for getting locations within specific bounds.

    GET /api/map/bounds/
    Parameters:
    - north, south, east, west: Bounding box coordinates (required)
    - types: Comma-separated location types
    - zoom: Zoom level
    """

    @method_decorator(cache_page(300))  # Cache for 5 minutes
    def get(self, request: HttpRequest) -> JsonResponse:
        """Get locations within specific geographic bounds."""
@@ -525,18 +577,19 @@ class MapBoundsView(MapAPIView):
                return self._error_response(
                    "Bounds parameters required: north, south, east, west", 400
                )

            # Parse optional filters
            location_types = None
            types_param = request.GET.get('types')
            types_param = request.GET.get("types")
            if types_param:
                location_types = {
                    LocationType(t.strip()) for t in types_param.split(',')
                    LocationType(t.strip())
                    for t in types_param.split(",")
                    if t.strip() in [lt.value for lt in LocationType]
                }

            zoom_level = self._parse_zoom_level(request)

            # Get locations within bounds
            response = unified_map_service.get_locations_by_bounds(
                north=bounds.north,
@@ -544,86 +597,103 @@ class MapBoundsView(MapAPIView):
                east=bounds.east,
                west=bounds.west,
                location_types=location_types,
                zoom_level=zoom_level
                zoom_level=zoom_level,
            )

            return JsonResponse(response.to_dict())

        except ValidationError as e:
            return self._error_response(str(e), 400)
        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )


class MapStatsView(MapAPIView):
    """
    API endpoint for getting map service statistics and health information.

    GET /api/map/stats/
    """

    def get(self, request: HttpRequest) -> JsonResponse:
        """Get map service statistics and performance metrics."""
        try:
            stats = unified_map_service.get_service_stats()

            return JsonResponse({
                'status': 'success',
                'data': stats
            })

            return JsonResponse({"status": "success", "data": stats})

        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )


class MapCacheView(MapAPIView):
    """
    API endpoint for cache management (admin only).

    DELETE /api/map/cache/
    POST /api/map/cache/invalidate/
    """

    def delete(self, request: HttpRequest) -> JsonResponse:
        """Clear all map cache (admin only)."""
        # TODO: Add admin permission check
        try:
            unified_map_service.invalidate_cache()

            return JsonResponse({
                'status': 'success',
                'message': 'Map cache cleared successfully'
            })

            return JsonResponse(
                {
                    "status": "success",
                    "message": "Map cache cleared successfully",
                }
            )

        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)

            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )

    def post(self, request: HttpRequest) -> JsonResponse:
        """Invalidate specific cache entries."""
        # TODO: Add admin permission check
        try:
            data = json.loads(request.body)

            location_type = data.get('location_type')
            location_id = data.get('location_id')
            bounds_data = data.get('bounds')

            location_type = data.get("location_type")
            location_id = data.get("location_id")
            bounds_data = data.get("bounds")

            bounds = None
            if bounds_data:
                bounds = GeoBounds(**bounds_data)

            unified_map_service.invalidate_cache(
                location_type=location_type,
                location_id=location_id,
                bounds=bounds
                bounds=bounds,
            )

            return JsonResponse({
                'status': 'success',
                'message': 'Cache invalidated successfully'
            })

            return JsonResponse(
                {
                    "status": "success",
                    "message": "Cache invalidated successfully",
                }
            )

        except (json.JSONDecodeError, TypeError, ValueError) as e:
            return self._error_response(f"Invalid request data: {str(e)}", 400)
        except Exception as e:
            return self._error_response(f"Internal server error: {str(e)}", 500)
            return self._error_response(
                f"Internal server error: {str(e)}",
                500,
            )

||||
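A hedged sketch of a targeted invalidation request; the field names mirror the data.get(...) reads above, and the bounds keys mirror the GeoBounds(**bounds_data) expansion (the admin permission check is still a TODO):

import json
from django.test import Client

body = {
    "location_type": "park",  # all three fields are optional in the view
    "location_id": 42,
    "bounds": {"north": 42.1, "south": 41.9, "east": -83.0, "west": -83.3},
}
resp = Client().post(
    "/api/map/cache/invalidate/",
    data=json.dumps(body),
    content_type="application/json",
)
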
@@ -5,15 +5,10 @@ Provides web interfaces for map functionality with HTMX integration.

import json
from typing import Dict, Any, Optional, Set
from django.shortcuts import render, get_object_or_404
from django.shortcuts import render
from django.http import JsonResponse, HttpRequest, HttpResponse
from django.views.generic import TemplateView, View
from django.views.decorators.http import require_http_methods
from django.utils.decorators import method_decorator
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.paginator import Paginator
from django.core.exceptions import ValidationError
from django.db.models import Q

from ..services.map_service import unified_map_service
from ..services.data_structures import GeoBounds, MapFilters, LocationType
@@ -21,29 +16,30 @@ from ..services.data_structures import GeoBounds, MapFilters, LocationType

class MapViewMixin:
    """Mixin providing common functionality for map views."""

    def get_map_context(self, request: HttpRequest) -> Dict[str, Any]:
        """Get common context data for map views."""
        return {
            'map_api_urls': {
                'locations': '/api/map/locations/',
                'search': '/api/map/search/',
                'bounds': '/api/map/bounds/',
                'location_detail': '/api/map/locations/',
            "map_api_urls": {
                "locations": "/api/map/locations/",
                "search": "/api/map/search/",
                "bounds": "/api/map/bounds/",
                "location_detail": "/api/map/locations/",
            },
            'location_types': [lt.value for lt in LocationType],
            'default_zoom': 10,
            'enable_clustering': True,
            'enable_search': True,
            "location_types": [lt.value for lt in LocationType],
            "default_zoom": 10,
            "enable_clustering": True,
            "enable_search": True,
        }

    def parse_location_types(self, request: HttpRequest) -> Optional[Set[LocationType]]:
        """Parse location types from request parameters."""
        types_param = request.GET.get('types')
        types_param = request.GET.get("types")
        if types_param:
            try:
                return {
                    LocationType(t.strip()) for t in types_param.split(',')
                    LocationType(t.strip())
                    for t in types_param.split(",")
                    if t.strip() in [lt.value for lt in LocationType]
                }
            except ValueError:

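The comprehension above silently drops tokens that are not valid LocationType values; a small sketch of that contract using Django's RequestFactory:

from django.test import RequestFactory

request = RequestFactory().get("/maps/", {"types": "park,ride,bogus"})
types = MapViewMixin().parse_location_types(request)
# "bogus" is filtered out; an absent ?types= parameter falls through
# and, per the Optional[...] signature, yields None instead.
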
@@ -54,122 +50,141 @@ class MapViewMixin:
class UniversalMapView(MapViewMixin, TemplateView):
    """
    Main universal map view showing all location types.

    URL: /maps/
    """
    template_name = 'maps/universal_map.html'

    template_name = "maps/universal_map.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Additional context for universal map
        context.update({
            'page_title': 'Interactive Map - All Locations',
            'map_type': 'universal',
            'show_all_types': True,
            'initial_location_types': [lt.value for lt in LocationType],
            'filters_enabled': True,
        })

        context.update(
            {
                "page_title": "Interactive Map - All Locations",
                "map_type": "universal",
                "show_all_types": True,
                "initial_location_types": [lt.value for lt in LocationType],
                "filters_enabled": True,
            }
        )

        # Handle initial bounds from query parameters
        if all(param in self.request.GET for param in ['north', 'south', 'east', 'west']):
        if all(
            param in self.request.GET for param in ["north", "south", "east", "west"]
        ):
            try:
                context['initial_bounds'] = {
                    'north': float(self.request.GET['north']),
                    'south': float(self.request.GET['south']),
                    'east': float(self.request.GET['east']),
                    'west': float(self.request.GET['west']),
                context["initial_bounds"] = {
                    "north": float(self.request.GET["north"]),
                    "south": float(self.request.GET["south"]),
                    "east": float(self.request.GET["east"]),
                    "west": float(self.request.GET["west"]),
                }
            except (ValueError, TypeError):
                pass

        return context

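The initial-bounds contract above can be driven straight from the query string; a sketch, using the route named in the docstring:

from django.test import Client

# All four corners must be present and float-parseable, otherwise the
# view silently omits initial_bounds (see the except/pass above).
resp = Client().get(
    "/maps/",
    {"north": "42.1", "south": "41.9", "east": "-83.0", "west": "-83.3"},
)
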
class ParkMapView(MapViewMixin, TemplateView):
    """
    Map view focused specifically on parks.

    URL: /maps/parks/
    """
    template_name = 'maps/park_map.html'

    template_name = "maps/park_map.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Park-specific context
        context.update({
            'page_title': 'Theme Parks Map',
            'map_type': 'parks',
            'show_all_types': False,
            'initial_location_types': [LocationType.PARK.value],
            'filters_enabled': True,
            'park_specific_filters': True,
        })

        context.update(
            {
                "page_title": "Theme Parks Map",
                "map_type": "parks",
                "show_all_types": False,
                "initial_location_types": [LocationType.PARK.value],
                "filters_enabled": True,
                "park_specific_filters": True,
            }
        )

        return context

class NearbyLocationsView(MapViewMixin, TemplateView):
    """
    View for showing locations near a specific point.

    URL: /maps/nearby/
    """
    template_name = 'maps/nearby_locations.html'

    template_name = "maps/nearby_locations.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update(self.get_map_context(self.request))

        # Parse coordinates from query parameters
        lat = self.request.GET.get('lat')
        lng = self.request.GET.get('lng')
        radius = self.request.GET.get('radius', '50')  # Default 50km radius

        lat = self.request.GET.get("lat")
        lng = self.request.GET.get("lng")
        radius = self.request.GET.get("radius", "50")  # Default 50km radius

        if lat and lng:
            try:
                center_lat = float(lat)
                center_lng = float(lng)
                search_radius = min(200, max(1, float(radius)))  # Clamp between 1-200km

                context.update({
                    'page_title': f'Locations Near {center_lat:.4f}, {center_lng:.4f}',
                    'map_type': 'nearby',
                    'center_coordinates': {'lat': center_lat, 'lng': center_lng},
                    'search_radius': search_radius,
                    'show_radius_circle': True,
                })
                # Clamp between 1-200km
                search_radius = min(200, max(1, float(radius)))

                context.update(
                    {
                        "page_title": f"Locations Near {
                            center_lat:.4f}, {
                            center_lng:.4f}",
                        "map_type": "nearby",
                        "center_coordinates": {
                            "lat": center_lat,
                            "lng": center_lng,
                        },
                        "search_radius": search_radius,
                        "show_radius_circle": True,
                    }
                )
            except (ValueError, TypeError):
                context['error'] = 'Invalid coordinates provided'
                context["error"] = "Invalid coordinates provided"
        else:
            context.update({
                'page_title': 'Nearby Locations',
                'map_type': 'nearby',
                'prompt_for_location': True,
            })

            context.update(
                {
                    "page_title": "Nearby Locations",
                    "map_type": "nearby",
                    "prompt_for_location": True,
                }
            )

        return context

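A sketch of the nearby view's clamping behaviour, assuming the /maps/nearby/ route from the docstring:

from django.test import Client

# radius is clamped to the 1-200 km window shown above, so "500" becomes 200;
# missing or malformed lat/lng sets context["error"] instead.
resp = Client().get("/maps/nearby/", {"lat": "42.0", "lng": "-83.1", "radius": "500"})
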
class LocationFilterView(MapViewMixin, View):
    """
    HTMX endpoint for updating map when filters change.

    URL: /maps/htmx/filter/
    """

    def get(self, request: HttpRequest) -> HttpResponse:
        """Return filtered location data for HTMX updates."""
        try:
            # Parse filter parameters
            location_types = self.parse_location_types(request)
            search_query = request.GET.get('q', '').strip()
            country = request.GET.get('country', '').strip()
            state = request.GET.get('state', '').strip()

            search_query = request.GET.get("q", "").strip()
            country = request.GET.get("country", "").strip()
            state = request.GET.get("state", "").strip()

            # Create filters
            filters = None
            if any([location_types, search_query, country, state]):
@@ -178,108 +193,107 @@ class LocationFilterView(MapViewMixin, View):
                search_query=search_query or None,
                country=country or None,
                state=state or None,
                has_coordinates=True
                has_coordinates=True,
            )

            # Get filtered locations
            map_response = unified_map_service.get_map_data(
                filters=filters,
                zoom_level=int(request.GET.get('zoom', '10')),
                cluster=request.GET.get('cluster', 'true').lower() == 'true'
                zoom_level=int(request.GET.get("zoom", "10")),
                cluster=request.GET.get("cluster", "true").lower() == "true",
            )

            # Return JSON response for HTMX
            return JsonResponse({
                'status': 'success',
                'data': map_response.to_dict(),
                'filters_applied': map_response.filters_applied
            })

            return JsonResponse(
                {
                    "status": "success",
                    "data": map_response.to_dict(),
                    "filters_applied": map_response.filters_applied,
                }
            )

        except Exception as e:
            return JsonResponse({
                'status': 'error',
                'message': str(e)
            }, status=400)
            return JsonResponse({"status": "error", "message": str(e)}, status=400)

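A sketch of what an hx-get from the filter form would send, under the same test-client assumptions:

from django.test import Client

resp = Client().get(
    "/maps/htmx/filter/",
    {"types": "park", "q": "cedar", "zoom": "8", "cluster": "false"},
)
data = resp.json()  # {"status": ..., "data": ..., "filters_applied": ...}
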
class LocationSearchView(MapViewMixin, View):
    """
    HTMX endpoint for real-time location search.

    URL: /maps/htmx/search/
    """

    def get(self, request: HttpRequest) -> HttpResponse:
        """Return search results for HTMX updates."""
        query = request.GET.get('q', '').strip()

        query = request.GET.get("q", "").strip()

        if not query or len(query) < 3:
            return render(request, 'maps/partials/search_results.html', {
                'results': [],
                'query': query,
                'message': 'Enter at least 3 characters to search'
            })

            return render(
                request,
                "maps/partials/search_results.html",
                {
                    "results": [],
                    "query": query,
                    "message": "Enter at least 3 characters to search",
                },
            )

        try:
            # Parse optional location types
            location_types = self.parse_location_types(request)
            limit = min(20, max(5, int(request.GET.get('limit', '10'))))

            limit = min(20, max(5, int(request.GET.get("limit", "10"))))

            # Perform search
            results = unified_map_service.search_locations(
                query=query,
                location_types=location_types,
                limit=limit
                query=query, location_types=location_types, limit=limit
            )

            return render(request, 'maps/partials/search_results.html', {
                'results': results,
                'query': query,
                'count': len(results)
            })

            return render(
                request,
                "maps/partials/search_results.html",
                {"results": results, "query": query, "count": len(results)},
            )

        except Exception as e:
            return render(request, 'maps/partials/search_results.html', {
                'results': [],
                'query': query,
                'error': str(e)
            })
            return render(
                request,
                "maps/partials/search_results.html",
                {"results": [], "query": query, "error": str(e)},
            )

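Likewise for the search partial; the three-character minimum and the 5-20 limit clamp are both visible above:

from django.test import Client

Client().get("/maps/htmx/search/", {"q": "ce"})                   # renders the hint partial
Client().get("/maps/htmx/search/", {"q": "cedar", "limit": "3"})  # limit is clamped up to 5
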
class MapBoundsUpdateView(MapViewMixin, View):
    """
    HTMX endpoint for updating locations when map bounds change.

    URL: /maps/htmx/bounds/
    """

    def post(self, request: HttpRequest) -> HttpResponse:
        """Update map data when bounds change."""
        try:
            data = json.loads(request.body)

            # Parse bounds
            bounds = GeoBounds(
                north=float(data['north']),
                south=float(data['south']),
                east=float(data['east']),
                west=float(data['west'])
                north=float(data["north"]),
                south=float(data["south"]),
                east=float(data["east"]),
                west=float(data["west"]),
            )

            # Parse additional parameters
            zoom_level = int(data.get('zoom', 10))
            zoom_level = int(data.get("zoom", 10))
            location_types = None
            if 'types' in data:
            if "types" in data:
                location_types = {
                    LocationType(t) for t in data['types']
                    LocationType(t)
                    for t in data["types"]
                    if t in [lt.value for lt in LocationType]
                }

            # Create filters if needed
            filters = None
            if location_types:
                filters = MapFilters(location_types=location_types)

            # Location types are used directly in the service call

            # Get updated map data
            map_response = unified_map_service.get_locations_by_bounds(
                north=bounds.north,
@@ -287,79 +301,86 @@ class MapBoundsUpdateView(MapViewMixin, View):
                east=bounds.east,
                west=bounds.west,
                location_types=location_types,
                zoom_level=zoom_level
                zoom_level=zoom_level,
            )

            return JsonResponse({
                'status': 'success',
                'data': map_response.to_dict()
            })

            return JsonResponse({"status": "success", "data": map_response.to_dict()})

        except (json.JSONDecodeError, ValueError, KeyError) as e:
            return JsonResponse({
                'status': 'error',
                'message': f'Invalid request data: {str(e)}'
            }, status=400)
            return JsonResponse(
                {
                    "status": "error",
                    "message": f"Invalid request data: {str(e)}",
                },
                status=400,
            )
        except Exception as e:
            return JsonResponse({
                'status': 'error',
                'message': str(e)
            }, status=500)
            return JsonResponse({"status": "error", "message": str(e)}, status=500)

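The bounds-update endpoint reads a JSON body rather than form data; a sketch (Django's test client skips CSRF enforcement by default):

import json
from django.test import Client

payload = {
    "north": 42.1, "south": 41.9, "east": -83.0, "west": -83.3,
    "zoom": 11,
    "types": ["park"],  # invalid entries are filtered, as in the set comprehension above
}
resp = Client().post(
    "/maps/htmx/bounds/",
    data=json.dumps(payload),
    content_type="application/json",
)
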
class LocationDetailModalView(MapViewMixin, View):
    """
    HTMX endpoint for showing location details in modal.

    URL: /maps/htmx/location/<type>/<id>/
    """

    def get(self, request: HttpRequest, location_type: str, location_id: int) -> HttpResponse:

    def get(
        self, request: HttpRequest, location_type: str, location_id: int
    ) -> HttpResponse:
        """Return location detail modal content."""
        try:
            # Validate location type
            if location_type not in [lt.value for lt in LocationType]:
                return render(request, 'maps/partials/location_modal.html', {
                    'error': f'Invalid location type: {location_type}'
                })

                return render(
                    request,
                    "maps/partials/location_modal.html",
                    {"error": f"Invalid location type: {location_type}"},
                )

            # Get location details
            location = unified_map_service.get_location_details(location_type, location_id)

            location = unified_map_service.get_location_details(
                location_type, location_id
            )

            if not location:
                return render(request, 'maps/partials/location_modal.html', {
                    'error': 'Location not found'
                })

            return render(request, 'maps/partials/location_modal.html', {
                'location': location,
                'location_type': location_type
            })

                return render(
                    request,
                    "maps/partials/location_modal.html",
                    {"error": "Location not found"},
                )

            return render(
                request,
                "maps/partials/location_modal.html",
                {"location": location, "location_type": location_type},
            )

        except Exception as e:
            return render(request, 'maps/partials/location_modal.html', {
                'error': str(e)
            })
            return render(
                request, "maps/partials/location_modal.html", {"error": str(e)}
            )

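The modal endpoint validates the type segment before touching the service; a sketch:

from django.test import Client

Client().get("/maps/htmx/location/park/42/")   # renders the modal partial
Client().get("/maps/htmx/location/bogus/42/")  # rejected before the lookup
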
class LocationListView(MapViewMixin, TemplateView):
    """
    View for listing locations with pagination (non-map view).

    URL: /maps/list/
    """
    template_name = 'maps/location_list.html'

    template_name = "maps/location_list.html"
    paginate_by = 20

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Parse filters
        location_types = self.parse_location_types(self.request)
        search_query = self.request.GET.get('q', '').strip()
        country = self.request.GET.get('country', '').strip()
        state = self.request.GET.get('state', '').strip()

        search_query = self.request.GET.get("q", "").strip()
        country = self.request.GET.get("country", "").strip()
        state = self.request.GET.get("state", "").strip()

        # Create filters
        filters = None
        if any([location_types, search_query, country, state]):
@@ -368,33 +389,33 @@ class LocationListView(MapViewMixin, TemplateView):
            search_query=search_query or None,
            country=country or None,
            state=state or None,
            has_coordinates=True
            has_coordinates=True,
        )

        # Get locations without clustering
        map_response = unified_map_service.get_map_data(
            filters=filters,
            cluster=False,
            use_cache=True
            filters=filters, cluster=False, use_cache=True
        )

        # Paginate results
        paginator = Paginator(map_response.locations, self.paginate_by)
        page_number = self.request.GET.get('page')
        page_number = self.request.GET.get("page")
        page_obj = paginator.get_page(page_number)

        context.update({
            'page_title': 'All Locations',
            'locations': page_obj,
            'total_count': map_response.total_count,
            'applied_filters': filters,
            'location_types': [lt.value for lt in LocationType],
            'current_filters': {
                'types': self.request.GET.getlist('types'),
                'q': search_query,
                'country': country,
                'state': state,

        context.update(
            {
                "page_title": "All Locations",
                "locations": page_obj,
                "total_count": map_response.total_count,
                "applied_filters": filters,
                "location_types": [lt.value for lt in LocationType],
                "current_filters": {
                    "types": self.request.GET.getlist("types"),
                    "q": search_query,
                    "country": country,
                    "state": state,
                },
            }
        })

        return context
        )

        return context

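The list view accepts the same filter vocabulary as the map endpoints plus standard ?page= handling; a sketch with illustrative filter values:

from django.test import Client

resp = Client().get("/maps/list/", {"types": "park", "country": "USA", "page": "2"})
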
@@ -1,23 +1,27 @@
from django.views.generic import TemplateView
from django.http import JsonResponse
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import Distance
from parks.models import Park
from parks.filters import ParkFilter
from core.services.location_search import location_search_service, LocationSearchFilters
from core.services.location_search import (
    location_search_service,
    LocationSearchFilters,
)
from core.forms.search import LocationSearchForm


class AdaptiveSearchView(TemplateView):
    template_name = "core/search/results.html"

    def get_queryset(self):
        """
        Get the base queryset, optimized with select_related and prefetch_related
        """
        return Park.objects.select_related('operator', 'property_owner').prefetch_related(
            'location',
            'photos'
        ).all()
        return (
            Park.objects.select_related("operator", "property_owner")
            .prefetch_related("location", "photos")
            .all()
        )

    def get_filterset(self):
        """
@@ -31,32 +35,38 @@ class AdaptiveSearchView(TemplateView):
        """
        context = super().get_context_data(**kwargs)
        filterset = self.get_filterset()

        # Check if location-based search is being used
        location_search = self.request.GET.get('location_search', '').strip()
        near_location = self.request.GET.get('near_location', '').strip()

        location_search = self.request.GET.get("location_search", "").strip()
        near_location = self.request.GET.get("near_location", "").strip()

        # Add location search context
        context.update({
            'results': filterset.qs,
            'filters': filterset,
            'applied_filters': bool(self.request.GET),  # Check if any filters are applied
            'is_location_search': bool(location_search or near_location),
            'location_search_query': location_search or near_location,
        })

        context.update(
            {
                "results": filterset.qs,
                "filters": filterset,
                "applied_filters": bool(
                    self.request.GET
                ),  # Check if any filters are applied
                "is_location_search": bool(location_search or near_location),
                "location_search_query": location_search or near_location,
            }
        )

        return context


class FilterFormView(TemplateView):
    """
    View for rendering just the filter form for HTMX updates
    """

    template_name = "core/search/filters.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        filterset = ParkFilter(self.request.GET, queryset=Park.objects.all())
        context['filters'] = filterset
        context["filters"] = filterset
        return context

@@ -64,84 +74,88 @@ class LocationSearchView(TemplateView):
    """
    Enhanced search view with comprehensive location search capabilities.
    """

    template_name = "core/search/location_results.html"

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)

        # Build search filters from request parameters
        filters = self._build_search_filters()

        # Perform search
        results = location_search_service.search(filters)

        # Group results by type for better presentation
        grouped_results = {
            'parks': [r for r in results if r.content_type == 'park'],
            'rides': [r for r in results if r.content_type == 'ride'],
            'companies': [r for r in results if r.content_type == 'company'],
            "parks": [r for r in results if r.content_type == "park"],
            "rides": [r for r in results if r.content_type == "ride"],
            "companies": [r for r in results if r.content_type == "company"],
        }

        context.update({
            'results': results,
            'grouped_results': grouped_results,
            'total_results': len(results),
            'search_filters': filters,
            'has_location_filter': bool(filters.location_point),
            'search_form': LocationSearchForm(self.request.GET),
        })

        context.update(
            {
                "results": results,
                "grouped_results": grouped_results,
                "total_results": len(results),
                "search_filters": filters,
                "has_location_filter": bool(filters.location_point),
                "search_form": LocationSearchForm(self.request.GET),
            }
        )

        return context

    def _build_search_filters(self) -> LocationSearchFilters:
        """Build LocationSearchFilters from request parameters."""
        form = LocationSearchForm(self.request.GET)
        form.is_valid()  # Populate cleaned_data

        # Parse location coordinates if provided
        location_point = None
        lat = form.cleaned_data.get('lat')
        lng = form.cleaned_data.get('lng')
        lat = form.cleaned_data.get("lat")
        lng = form.cleaned_data.get("lng")
        if lat and lng:
            try:
                location_point = Point(float(lng), float(lat), srid=4326)
            except (ValueError, TypeError):
                location_point = None

        # Parse location types
        location_types = set()
        if form.cleaned_data.get('search_parks'):
            location_types.add('park')
        if form.cleaned_data.get('search_rides'):
            location_types.add('ride')
        if form.cleaned_data.get('search_companies'):
            location_types.add('company')

        if form.cleaned_data.get("search_parks"):
            location_types.add("park")
        if form.cleaned_data.get("search_rides"):
            location_types.add("ride")
        if form.cleaned_data.get("search_companies"):
            location_types.add("company")

        # If no specific types selected, search all
        if not location_types:
            location_types = {'park', 'ride', 'company'}

            location_types = {"park", "ride", "company"}

        # Parse radius
        radius_km = None
        radius_str = form.cleaned_data.get('radius_km', '').strip()
        radius_str = form.cleaned_data.get("radius_km", "").strip()
        if radius_str:
            try:
                radius_km = float(radius_str)
                radius_km = max(1, min(500, radius_km))  # Clamp between 1-500km
                # Clamp between 1-500km
                radius_km = max(1, min(500, radius_km))
            except (ValueError, TypeError):
                radius_km = None

        return LocationSearchFilters(
            search_query=form.cleaned_data.get('q', '').strip() or None,
            search_query=form.cleaned_data.get("q", "").strip() or None,
            location_point=location_point,
            radius_km=radius_km,
            location_types=location_types if location_types else None,
            country=form.cleaned_data.get('country', '').strip() or None,
            state=form.cleaned_data.get('state', '').strip() or None,
            city=form.cleaned_data.get('city', '').strip() or None,
            park_status=self.request.GET.getlist('park_status') or None,
            country=form.cleaned_data.get("country", "").strip() or None,
            state=form.cleaned_data.get("state", "").strip() or None,
            city=form.cleaned_data.get("city", "").strip() or None,
            park_status=self.request.GET.getlist("park_status") or None,
            include_distance=True,
            max_results=int(self.request.GET.get('limit', 100))
            max_results=int(self.request.GET.get("limit", 100)),
        )

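A sketch of the filter-building path in isolation; the URL is a stand-in, since only the GET parameters matter to _build_search_filters:

from django.test import RequestFactory

view = LocationSearchView()
view.request = RequestFactory().get(
    "/search/",  # hypothetical path; only request.GET is consumed here
    {"q": "cedar", "lat": "42.0", "lng": "-83.1", "radius_km": "900", "search_parks": "on"},
)
filters = view._build_search_filters()
# radius_km is clamped to 500.0 and lat/lng become Point(lng, lat, srid=4326).
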
@@ -149,16 +163,16 @@ class LocationSuggestionsView(TemplateView):
    """
    AJAX endpoint for location search suggestions.
    """

    def get(self, request, *args, **kwargs):
        query = request.GET.get('q', '').strip()
        limit = int(request.GET.get('limit', 10))

        query = request.GET.get("q", "").strip()
        limit = int(request.GET.get("limit", 10))

        if len(query) < 2:
            return JsonResponse({'suggestions': []})

            return JsonResponse({"suggestions": []})

        try:
            suggestions = location_search_service.suggest_locations(query, limit)
            return JsonResponse({'suggestions': suggestions})
            return JsonResponse({"suggestions": suggestions})
        except Exception as e:
            return JsonResponse({'error': str(e)}, status=500)
            return JsonResponse({"error": str(e)}, status=500)

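And the suggestions endpoint's short-query guard, with a hypothetical route:

from django.test import Client

resp = Client().get("/search/suggestions/", {"q": "c"})  # route path is assumed
resp.json()  # {"suggestions": []} without hitting the service for 1-char queries
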
@@ -1,4 +1,4 @@
from typing import Any, Dict, Optional, Type, cast
from typing import Any, Dict, Optional, Type
from django.shortcuts import redirect
from django.urls import reverse
from django.views.generic import DetailView
@@ -6,13 +6,15 @@ from django.views import View
from django.http import HttpRequest, HttpResponse
from django.db.models import Model


class SlugRedirectMixin(View):
    """
    Mixin that handles redirects for old slugs.
    Requires the model to inherit from SluggedModel and view to inherit from DetailView.
    """

    model: Optional[Type[Model]] = None
    slug_url_kwarg: str = 'slug'
    slug_url_kwarg: str = "slug"
    object: Optional[Model] = None

    def dispatch(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
@@ -25,19 +27,18 @@ class SlugRedirectMixin(View):
            self.object = self.get_object()  # type: ignore
            # Check if we used an old slug
            current_slug = kwargs.get(self.slug_url_kwarg)
            if current_slug and current_slug != getattr(self.object, 'slug', None):
            if current_slug and current_slug != getattr(self.object, "slug", None):
                # Get the URL pattern name from the view
                url_pattern = self.get_redirect_url_pattern()
                # Build kwargs for reverse()
                reverse_kwargs = self.get_redirect_url_kwargs()
                # Redirect to the current slug URL
                return redirect(
                    reverse(url_pattern, kwargs=reverse_kwargs),
                    permanent=True
                    reverse(url_pattern, kwargs=reverse_kwargs), permanent=True
                )
            return super().dispatch(request, *args, **kwargs)
        except (AttributeError, Exception) as e:  # type: ignore
            if self.model and hasattr(self.model, 'DoesNotExist'):
            if self.model and hasattr(self.model, "DoesNotExist"):
                if isinstance(e, self.model.DoesNotExist):  # type: ignore
                    return super().dispatch(request, *args, **kwargs)
            return super().dispatch(request, *args, **kwargs)
@@ -58,4 +59,4 @@
        """
        if not self.object:
            return {}
        return {self.slug_url_kwarg: getattr(self.object, 'slug', '')}
        return {self.slug_url_kwarg: getattr(self.object, "slug", "")}

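A minimal sketch of wiring the mixin into a DetailView; the model and URL pattern name are hypothetical stand-ins for any SluggedModel-backed detail page:

from django.views.generic import DetailView

from parks.models import Park  # hypothetical consumer of the mixin


class ParkDetailView(SlugRedirectMixin, DetailView):
    model = Park
    slug_url_kwarg = "slug"

    def get_redirect_url_pattern(self) -> str:
        return "parks:detail"  # assumed URL pattern name for reverse()
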