Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2025-12-20 14:11:09 -05:00
feat: Add PrimeProgress, PrimeSelect, and PrimeSkeleton components with customizable styles and props
- Implemented PrimeProgress component with support for labels, helper text, and various styles (size, variant, color).
- Created PrimeSelect component with dropdown functionality, custom templates, and validation states.
- Developed PrimeSkeleton component for loading placeholders with different shapes and animations.
- Updated index.ts to export new components for easy import.
- Enhanced PrimeVueTest.vue to include tests for new components and their functionalities.
- Introduced a custom ThrillWiki theme for PrimeVue with tailored color schemes and component styles.
- Added ambient type declarations for various components to improve TypeScript support.
@@ -142,8 +142,10 @@ def custom_exception_handler(
 def _get_error_code(exc: Exception) -> str:
     """Extract or determine error code from exception."""
-    if hasattr(exc, "default_code"):
-        return exc.default_code.upper()
+    # Use getattr + isinstance to avoid static type checker errors
+    default_code = getattr(exc, "default_code", None)
+    if isinstance(default_code, str):
+        return default_code.upper()

     if isinstance(exc, DRFValidationError):
         return "VALIDATION_ERROR"
@@ -179,8 +181,10 @@ def _get_error_details(exc: Exception, response_data: Any) -> Optional[Dict[str,
     if isinstance(response_data, dict) and len(response_data) > 1:
         return response_data

-    if hasattr(exc, "detail") and isinstance(exc.detail, dict):
-        return exc.detail
+    # Use getattr to avoid static type-checker errors when Exception doesn't define `detail`
+    detail = getattr(exc, "detail", None)
+    if isinstance(detail, dict):
+        return detail

     return None
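
For reference, the getattr-plus-isinstance pattern used in both hunks, as a standalone sketch (the function name is illustrative, not from the project):

from typing import Optional


def safe_error_code(exc: Exception) -> Optional[str]:
    # getattr with a default never raises, and isinstance narrows the type,
    # which keeps static checkers such as mypy or pyright satisfied.
    default_code = getattr(exc, "default_code", None)
    if isinstance(default_code, str):
        return default_code.upper()
    return None
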
@@ -2,17 +2,27 @@
 Common mixins for API views following Django styleguide patterns.
 """

-from typing import Dict, Any, Optional
+from typing import Dict, Any, Optional, Type
 from rest_framework.request import Request
 from rest_framework.response import Response
 from rest_framework import status

+# Constants for error messages
+_MISSING_INPUT_SERIALIZER_MSG = "Subclasses must set input_serializer class attribute"
+_MISSING_OUTPUT_SERIALIZER_MSG = "Subclasses must set output_serializer class attribute"
+_MISSING_GET_OBJECT_MSG = "Subclasses must implement get_object using selectors"
+

 class ApiMixin:
     """
     Base mixin for API views providing standardized response formatting.
     """

+    # Expose expected attributes so static type checkers know they exist on subclasses.
+    # Subclasses or other bases (e.g. DRF GenericAPIView) will actually provide these.
+    input_serializer: Optional[Type[Any]] = None
+    output_serializer: Optional[Type[Any]] = None
+
     def create_response(
         self,
         *,
@@ -71,7 +81,8 @@ class ApiMixin:
         Returns:
             Standardized error Response object
         """
-        error_data = {
+        # explicitly allow any-shaped values in the error_data dict
+        error_data: Dict[str, Any] = {
             "code": error_code or "GENERIC_ERROR",
             "message": message,
         }
@@ -87,15 +98,33 @@ class ApiMixin:

         return Response(response_data, status=status_code)

+    # Provide lightweight stubs for methods commonly supplied by other bases (DRF GenericAPIView, etc.)
+    # These will raise if not implemented; they also inform static analyzers about their existence.
+    def paginate_queryset(self, queryset):
+        """Override / implement in subclass or provided base if pagination is needed."""
+        raise NotImplementedError(
+            "Subclasses must implement paginate_queryset to enable pagination"
+        )
+
+    def get_paginated_response(self, data):
+        """Override / implement in subclass or provided base to return paginated responses."""
+        raise NotImplementedError(
+            "Subclasses must implement get_paginated_response to enable pagination"
+        )
+
+    def get_object(self):
+        """Default placeholder; subclasses should implement this."""
+        raise NotImplementedError(_MISSING_GET_OBJECT_MSG)
+

 class CreateApiMixin(ApiMixin):
     """
     Mixin for create API endpoints with standardized input/output handling.
     """

-    def create(self, request: Request, *args, **kwargs) -> Response:
+    def create(self, _request: Request, *_args, **_kwargs) -> Response:
         """Handle POST requests for creating resources."""
-        serializer = self.get_input_serializer(data=request.data)
+        serializer = self.get_input_serializer(data=_request.data)
         serializer.is_valid(raise_exception=True)

         # Create the object using the service layer
@@ -119,11 +148,15 @@ class CreateApiMixin(ApiMixin):

     def get_input_serializer(self, *args, **kwargs):
         """Get the input serializer for validation."""
-        return self.InputSerializer(*args, **kwargs)
+        if self.input_serializer is None:
+            raise NotImplementedError(_MISSING_INPUT_SERIALIZER_MSG)
+        return self.input_serializer(*args, **kwargs)

     def get_output_serializer(self, *args, **kwargs):
         """Get the output serializer for response."""
-        return self.OutputSerializer(*args, **kwargs)
+        if self.output_serializer is None:
+            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
+        return self.output_serializer(*args, **kwargs)


 class UpdateApiMixin(ApiMixin):
@@ -131,11 +164,11 @@ class UpdateApiMixin(ApiMixin):
     Mixin for update API endpoints with standardized input/output handling.
     """

-    def update(self, request: Request, *args, **kwargs) -> Response:
+    def update(self, _request: Request, *_args, **_kwargs) -> Response:
         """Handle PUT/PATCH requests for updating resources."""
         instance = self.get_object()
         serializer = self.get_input_serializer(
-            data=request.data, partial=kwargs.get("partial", False)
+            data=_request.data, partial=_kwargs.get("partial", False)
         )
         serializer.is_valid(raise_exception=True)

@@ -159,11 +192,15 @@ class UpdateApiMixin(ApiMixin):

     def get_input_serializer(self, *args, **kwargs):
         """Get the input serializer for validation."""
-        return self.InputSerializer(*args, **kwargs)
+        if self.input_serializer is None:
+            raise NotImplementedError(_MISSING_INPUT_SERIALIZER_MSG)
+        return self.input_serializer(*args, **kwargs)

     def get_output_serializer(self, *args, **kwargs):
         """Get the output serializer for response."""
-        return self.OutputSerializer(*args, **kwargs)
+        if self.output_serializer is None:
+            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
+        return self.output_serializer(*args, **kwargs)


 class ListApiMixin(ApiMixin):
@@ -171,7 +208,7 @@ class ListApiMixin(ApiMixin):
     Mixin for list API endpoints with pagination and filtering.
     """

-    def list(self, request: Request, *args, **kwargs) -> Response:
+    def list(self, _request: Request, *_args, **_kwargs) -> Response:
         """Handle GET requests for listing resources."""
         # Use selector to get filtered queryset
         queryset = self.get_queryset()
@@ -197,7 +234,9 @@ class ListApiMixin(ApiMixin):

     def get_output_serializer(self, *args, **kwargs):
         """Get the output serializer for response."""
-        return self.OutputSerializer(*args, **kwargs)
+        if self.output_serializer is None:
+            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
+        return self.output_serializer(*args, **kwargs)


 class RetrieveApiMixin(ApiMixin):
@@ -205,7 +244,7 @@ class RetrieveApiMixin(ApiMixin):
     Mixin for retrieve API endpoints.
     """

-    def retrieve(self, request: Request, *args, **kwargs) -> Response:
+    def retrieve(self, _request: Request, *_args, **_kwargs) -> Response:
         """Handle GET requests for retrieving a single resource."""
         instance = self.get_object()
         serializer = self.get_output_serializer(instance)
@@ -217,13 +256,13 @@ class RetrieveApiMixin(ApiMixin):
         Override this method to use selector patterns.
         Should call selector functions for optimized queries.
         """
-        raise NotImplementedError(
-            "Subclasses must implement get_object using selectors"
-        )
+        raise NotImplementedError(_MISSING_GET_OBJECT_MSG)

     def get_output_serializer(self, *args, **kwargs):
         """Get the output serializer for response."""
-        return self.OutputSerializer(*args, **kwargs)
+        if self.output_serializer is None:
+            raise NotImplementedError(_MISSING_OUTPUT_SERIALIZER_MSG)
+        return self.output_serializer(*args, **kwargs)


 class DestroyApiMixin(ApiMixin):
@@ -231,7 +270,7 @@ class DestroyApiMixin(ApiMixin):
     Mixin for delete API endpoints.
     """

-    def destroy(self, request: Request, *args, **kwargs) -> Response:
+    def destroy(self, _request: Request, *_args, **_kwargs) -> Response:
         """Handle DELETE requests for destroying resources."""
         instance = self.get_object()

@@ -255,6 +294,4 @@ class DestroyApiMixin(ApiMixin):
         Override this method to use selector patterns.
         Should call selector functions for optimized queries.
         """
-        raise NotImplementedError(
-            "Subclasses must implement get_object using selectors"
-        )
+        raise NotImplementedError(_MISSING_GET_OBJECT_MSG)
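
As a usage sketch of the new class attributes (the serializer classes, import path, and view name are illustrative, and the service call inside create() is elided from this hunk):

from rest_framework import serializers
from rest_framework.views import APIView

from apps.api.mixins import CreateApiMixin  # hypothetical import path


class RideInputSerializer(serializers.Serializer):
    name = serializers.CharField(max_length=255)


class RideOutputSerializer(serializers.Serializer):
    id = serializers.IntegerField()
    name = serializers.CharField()


class RideCreateApi(CreateApiMixin, APIView):
    # Setting the class attributes satisfies get_input_serializer /
    # get_output_serializer; leaving either as None raises NotImplementedError
    # with the message constants defined above.
    input_serializer = RideInputSerializer
    output_serializer = RideOutputSerializer

    def post(self, request, *args, **kwargs):
        return self.create(request, *args, **kwargs)
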
@@ -6,9 +6,11 @@ import hashlib
 import json
 import time
 from functools import wraps
-from typing import Optional, List, Callable
+from typing import Optional, List, Callable, Any, Dict
+from django.http import HttpRequest, HttpResponseBase
 from django.utils.decorators import method_decorator
 from django.views.decorators.vary import vary_on_headers
+from django.views import View
 from apps.core.services.enhanced_cache_service import EnhancedCacheService
 import logging

@@ -16,8 +18,11 @@ logger = logging.getLogger(__name__)


 def cache_api_response(
-    timeout=1800, vary_on=None, key_prefix="api", cache_backend="api"
-):
+    timeout: int = 1800,
+    vary_on: Optional[List[str]] = None,
+    key_prefix: str = "api",
+    cache_backend: str = "api",
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
     """
     Advanced decorator for caching API responses with flexible configuration

@@ -40,7 +45,7 @@ def cache_api_response(
                 key_prefix,
                 view_func.__name__,
                 (
-                    str(request.user.id)
+                    str(getattr(request.user, "id", "anonymous"))
                     if request.user.is_authenticated
                     else "anonymous"
                 ),
@@ -113,8 +118,8 @@ def cache_api_response(


 def cache_queryset_result(
-    cache_key_template: str, timeout: int = 3600, cache_backend="default"
-):
+    cache_key_template: str, timeout: int = 3600, cache_backend: str = "default"
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
     """
     Decorator for caching expensive queryset operations

@@ -168,7 +173,9 @@ def cache_queryset_result(
     return decorator


-def invalidate_cache_on_save(model_name: str, cache_patterns: List[str] = None):
+def invalidate_cache_on_save(
+    model_name: str, cache_patterns: Optional[List[str]] = None
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
     """
     Decorator to invalidate cache when model instances are saved

@@ -212,7 +219,7 @@ def invalidate_cache_on_save(model_name: str, cache_patterns: List[str] = None):
     return decorator


-class CachedAPIViewMixin:
+class CachedAPIViewMixin(View):
     """Mixin to add caching capabilities to API views"""

     cache_timeout = 1800  # 30 minutes default
@@ -221,13 +228,17 @@ class CachedAPIViewMixin:
     cache_backend = "api"

     @method_decorator(vary_on_headers("User-Agent", "Accept-Language"))
-    def dispatch(self, request, *args, **kwargs):
+    def dispatch(
+        self, request: HttpRequest, *args: Any, **kwargs: Any
+    ) -> HttpResponseBase:
         """Add caching to the dispatch method"""
         if request.method == "GET" and getattr(self, "enable_caching", True):
             return self._cached_dispatch(request, *args, **kwargs)
         return super().dispatch(request, *args, **kwargs)

-    def _cached_dispatch(self, request, *args, **kwargs):
+    def _cached_dispatch(
+        self, request: HttpRequest, *args: Any, **kwargs: Any
+    ) -> HttpResponseBase:
         """Handle cached dispatch for GET requests"""
         cache_key = self._generate_cache_key(request, *args, **kwargs)

@@ -252,13 +263,19 @@ class CachedAPIViewMixin:

         return response

-    def _generate_cache_key(self, request, *args, **kwargs):
+    def _generate_cache_key(
+        self, request: HttpRequest, *args: Any, **kwargs: Any
+    ) -> str:
         """Generate cache key for the request"""
         key_parts = [
            self.cache_key_prefix,
            self.__class__.__name__,
            request.method,
-            (str(request.user.id) if request.user.is_authenticated else "anonymous"),
+            (
+                str(getattr(request.user, "id", "anonymous"))
+                if request.user.is_authenticated
+                else "anonymous"
+            ),
            str(hash(frozenset(request.GET.items()))),
         ]

@@ -277,10 +294,10 @@ class CachedAPIViewMixin:

 def smart_cache(
     timeout: int = 3600,
-    key_func: Optional[Callable] = None,
+    key_func: Optional[Callable[..., str]] = None,
     invalidate_on: Optional[List[str]] = None,
     cache_backend: str = "default",
-):
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
     """
     Smart caching decorator that adapts to function arguments

@@ -342,15 +359,17 @@ def smart_cache(

         # Add cache invalidation if specified
         if invalidate_on:
-            wrapper._cache_invalidate_on = invalidate_on
-            wrapper._cache_backend = cache_backend
+            setattr(wrapper, "_cache_invalidate_on", invalidate_on)
+            setattr(wrapper, "_cache_backend", cache_backend)

         return wrapper

     return decorator


-def conditional_cache(condition_func: Callable, **cache_kwargs):
+def conditional_cache(
+    condition_func: Callable[..., bool], **cache_kwargs: Any
+) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
     """
     Cache decorator that only caches when condition is met

@@ -375,13 +394,13 @@ def conditional_cache(condition_func: Callable, **cache_kwargs):


 # Utility functions for cache key generation
-def generate_user_cache_key(user, suffix: str = ""):
+def generate_user_cache_key(user: Any, suffix: str = "") -> str:
     """Generate cache key based on user"""
     user_id = user.id if user.is_authenticated else "anonymous"
     return f"user:{user_id}:{suffix}" if suffix else f"user:{user_id}"


-def generate_model_cache_key(model_instance, suffix: str = ""):
+def generate_model_cache_key(model_instance: Any, suffix: str = "") -> str:
     """Generate cache key based on model instance"""
     model_name = model_instance._meta.model_name
     instance_id = model_instance.id
@@ -392,7 +411,9 @@ def generate_model_cache_key(model_instance, suffix: str = ""):
     )


-def generate_queryset_cache_key(queryset, params: dict = None):
+def generate_queryset_cache_key(
+    queryset: Any, params: Optional[Dict[str, Any]] = None
+) -> str:
     """Generate cache key for queryset with parameters"""
     model_name = queryset.model._meta.model_name
     params_str = json.dumps(params or {}, sort_keys=True, default=str)
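
A brief usage sketch of the now fully typed decorator and key helpers (the view function and import path are illustrative):

from apps.core.decorators.cache import cache_api_response, generate_user_cache_key  # hypothetical path


@cache_api_response(timeout=600, vary_on=["Accept-Language"], key_prefix="parks")
def park_list_view(request):
    ...  # build and return the response as usual; GET responses are cached for 10 minutes


def favorites_cache_key(request) -> str:
    # Falls back to "anonymous" for unauthenticated users.
    return generate_user_cache_key(request.user, suffix="favorites")
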
@@ -31,8 +31,8 @@ class BaseAutocomplete(Autocomplete):
     # Project-wide component settings
     placeholder = _("Search...")

-    @staticmethod
-    def auth_check(request):
+    @classmethod
+    def auth_check(cls, request):
         """Enforce authentication by default.

         This can be overridden in subclasses if public access is needed.

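
With auth_check now a classmethod, a subclass that needs public access would override it roughly like this (sketch; the component name is illustrative and the exact return contract follows the base implementation):

class ParkAutocomplete(BaseAutocomplete):
    @classmethod
    def auth_check(cls, request):
        # Assumption: the base hook treats a truthy return as "request allowed".
        return True
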
@@ -156,6 +156,10 @@ class LocationSearchForm(forms.Form):
     def clean(self):
         cleaned_data = super().clean()

+        # Handle case where super().clean() returns None due to validation errors
+        if cleaned_data is None:
+            return None
+
         # If lat/lng are provided, ensure location field is populated for
         # display
         lat = cleaned_data.get("lat")
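
The same None guard in isolation, as a minimal Django form (field names are illustrative):

from django import forms


class CoordinateForm(forms.Form):
    lat = forms.FloatField(required=False)
    lng = forms.FloatField(required=False)

    def clean(self):
        cleaned_data = super().clean()
        # super().clean() can come back as None when earlier validation failed,
        # so guard before calling .get() on it.
        if cleaned_data is None:
            return None
        if (cleaned_data.get("lat") is None) != (cleaned_data.get("lng") is None):
            raise forms.ValidationError("Provide both lat and lng, or neither.")
        return cleaned_data
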
@@ -4,6 +4,7 @@ Custom health checks for ThrillWiki application.

 import time
 import logging
+from pathlib import Path
 from django.core.cache import cache
 from django.db import connection
 from health_check.backends import BaseHealthCheckBackend
@@ -285,7 +286,7 @@ class DiskSpaceHealthCheck(BaseHealthCheckBackend):
             media_free_percent = (media_usage.free / media_usage.total) * 100

             # Check disk space for logs directory if it exists
-            logs_dir = getattr(settings, "BASE_DIR", "/tmp") / "logs"
+            logs_dir = Path(getattr(settings, "BASE_DIR", "/tmp")) / "logs"
             if logs_dir.exists():
                 logs_usage = shutil.disk_usage(logs_dir)
                 logs_free_percent = (logs_usage.free / logs_usage.total) * 100
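
The Path() wrapper matters because the getattr fallback is a plain string, and the "/" join only works when the left operand is a Path; a standalone illustration:

import shutil
from pathlib import Path

# "/tmp" / "logs" raises TypeError, while Path("/tmp") / "logs" is a valid path.
logs_dir = Path("/tmp") / "logs"
if logs_dir.exists():
    usage = shutil.disk_usage(logs_dir)
    free_percent = (usage.free / usage.total) * 100
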
@@ -2,21 +2,34 @@ from django.db import models
 from django.contrib.contenttypes.models import ContentType
 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.conf import settings
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, TYPE_CHECKING
 from django.db.models import QuerySet

+if TYPE_CHECKING:
+    pass
+

 class DiffMixin:
-    """Mixin to add diffing capabilities to models"""
+    """Mixin to add diffing capabilities to models with pghistory"""

     def get_prev_record(self) -> Optional[Any]:
         """Get the previous record for this instance"""
         try:
+            # Use getattr to safely access objects manager and pghistory fields
+            manager = getattr(type(self), "objects", None)
+            if manager is None:
+                return None
+
+            pgh_created_at = getattr(self, "pgh_created_at", None)
+            pgh_obj_id = getattr(self, "pgh_obj_id", None)
+
+            if pgh_created_at is None or pgh_obj_id is None:
+                return None
+
             return (
-                type(self)
-                .objects.filter(
-                    pgh_created_at__lt=self.pgh_created_at,
-                    pgh_obj_id=self.pgh_obj_id,
+                manager.filter(
+                    pgh_created_at__lt=pgh_created_at,
+                    pgh_obj_id=pgh_obj_id,
                 )
                 .order_by("-pgh_created_at")
                 .first()
@@ -72,11 +85,19 @@ class TrackedModel(models.Model):

     def get_history(self) -> QuerySet:
         """Get all history records for this instance in chronological order"""
-        event_model = self.events.model  # pghistory provides this automatically
-        if event_model:
-            return event_model.objects.filter(pgh_obj_id=self.pk).order_by(
-                "-pgh_created_at"
-            )
+        try:
+            # Use getattr to safely access pghistory events
+            events = getattr(self, "events", None)
+            if events is None:
+                return self.__class__.objects.none()
+
+            event_model = getattr(events, "model", None)
+            if event_model:
+                return event_model.objects.filter(pgh_obj_id=self.pk).order_by(
+                    "-pgh_created_at"
+                )
+        except (AttributeError, TypeError):
+            pass
+        return self.__class__.objects.none()


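
A call-site sketch for the hardened history API (assumes an instance of a model that mixes in TrackedModel and is tracked by pghistory):

def latest_change_summary(instance) -> str:
    # get_history() now returns an empty queryset instead of raising when the
    # pghistory `events` relation is missing, so this is safe on any TrackedModel.
    latest = instance.get_history().first()
    if latest is None:
        return "no recorded history"
    return f"last change at {latest.pgh_created_at}"
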
@@ -3,7 +3,7 @@ Clustering service for map locations to improve performance and user experience.
 """

 import math
-from typing import List, Tuple, Dict, Any, Optional
+from typing import List, Dict, Any, Optional
 from dataclasses import dataclass
 from collections import defaultdict

@@ -73,7 +73,7 @@ class ClusteringService:
         locations: List[UnifiedLocation],
         zoom_level: int,
         bounds: Optional[GeoBounds] = None,
-    ) -> Tuple[List[UnifiedLocation], List[ClusterData]]:
+    ) -> tuple[List[UnifiedLocation], List[ClusterData]]:
         """
         Cluster locations based on zoom level and density.
         Returns (unclustered_locations, clusters).
@@ -216,7 +216,7 @@ class ClusteringService:

         return ClusterData(
             id=cluster_id,
-            coordinates=(avg_lat, avg_lng),
+            coordinates=[avg_lat, avg_lng],
             count=len(locations),
             types=types,
             bounds=cluster_bounds,
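
The switch from typing.Tuple to the builtin tuple generic relies on PEP 585, so it needs no typing import on Python 3.9+; a minimal standalone illustration:

def split_by_threshold(values: list[float], threshold: float) -> tuple[list[float], list[float]]:
    below = [v for v in values if v < threshold]
    above = [v for v in values if v >= threshold]
    return below, above
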
@@ -4,7 +4,7 @@ Data structures for the unified map service.

 from dataclasses import dataclass, field
 from enum import Enum
-from typing import Dict, List, Optional, Set, Tuple, Any
+from typing import Dict, List, Optional, Set, Any
 from django.contrib.gis.geos import Polygon


@@ -110,7 +110,7 @@ class UnifiedLocation:
     id: str  # Composite: f"{type}_{id}"
     type: LocationType
     name: str
-    coordinates: Tuple[float, float]  # (lat, lng)
+    coordinates: List[float]  # [lat, lng]
     address: Optional[str] = None
     metadata: Dict[str, Any] = field(default_factory=dict)
     type_data: Dict[str, Any] = field(default_factory=dict)
@@ -168,7 +168,7 @@ class ClusterData:
     """Represents a cluster of locations for map display."""

     id: str
-    coordinates: Tuple[float, float]  # (lat, lng)
+    coordinates: List[float]  # [lat, lng]
     count: int
     types: Set[LocationType]
     bounds: GeoBounds

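
One practical reason for List[float] coordinates: JSON has no tuple type, so any cache round-trip turns tuples into lists anyway. A standalone illustration (the numbers are arbitrary):

import json

fresh = {"coordinates": [28.4195, -81.5812]}  # built in memory as a list
cached = json.loads(json.dumps({"coordinates": (28.4195, -81.5812)}))  # tuple comes back as a list

# With list coordinates, fresh and cache-loaded objects compare equal.
assert fresh["coordinates"] == cached["coordinates"]
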
@@ -14,7 +14,7 @@ Features:

 import re
 from difflib import SequenceMatcher
-from typing import List, Dict, Any, Optional, Tuple
+from typing import List, Dict, Any, Optional
 from dataclasses import dataclass
 from enum import Enum

@@ -181,7 +181,7 @@ class EntityFuzzyMatcher:

     def find_entity(
         self, query: str, entity_types: Optional[List[EntityType]] = None, user=None
-    ) -> Tuple[List[FuzzyMatchResult], Optional[EntitySuggestion]]:
+    ) -> tuple[List[FuzzyMatchResult], Optional[EntitySuggestion]]:
         """
         Find entities matching the query with fuzzy matching.

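
Call-site sketch for the updated return annotation (constructing EntityFuzzyMatcher with no arguments is an assumption):

matcher = EntityFuzzyMatcher()  # assumption: default constructor
matches, suggestion = matcher.find_entity("cedar pointe")

for match in matches:
    ...  # each entry is a FuzzyMatchResult
if suggestion is not None:
    ...  # an EntitySuggestion, e.g. a "did you mean" candidate
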
@@ -45,20 +45,20 @@ class ParkLocationAdapter(BaseLocationAdapter):
     """Converts Park/ParkLocation to UnifiedLocation."""

     def to_unified_location(
-        self, park_location: ParkLocation
+        self, location_obj: ParkLocation
     ) -> Optional[UnifiedLocation]:
         """Convert ParkLocation to UnifiedLocation."""
-        if not park_location.point:
+        if not location_obj.point or location_obj.latitude is None or location_obj.longitude is None:
             return None

-        park = park_location.park
+        park = location_obj.park

         return UnifiedLocation(
             id=f"park_{park.id}",
             type=LocationType.PARK,
             name=park.name,
-            coordinates=(park_location.latitude, park_location.longitude),
-            address=park_location.formatted_address,
+            coordinates=[float(location_obj.latitude), float(location_obj.longitude)],
+            address=location_obj.formatted_address,
             metadata={
                 "status": getattr(park, "status", "UNKNOWN"),
                 "rating": (
@@ -73,9 +73,9 @@ class ParkLocationAdapter(BaseLocationAdapter):
                     if hasattr(park, "operator") and park.operator
                     else None
                 ),
-                "city": park_location.city,
-                "state": park_location.state,
-                "country": park_location.country,
+                "city": location_obj.city,
+                "state": location_obj.state,
+                "country": location_obj.country,
             },
             type_data={
                 "slug": park.slug,
@@ -86,14 +86,14 @@ class ParkLocationAdapter(BaseLocationAdapter):
                 ),
                 "website": getattr(park, "website", ""),
                 "operating_season": getattr(park, "operating_season", ""),
-                "highway_exit": park_location.highway_exit,
-                "parking_notes": park_location.parking_notes,
+                "highway_exit": location_obj.highway_exit,
+                "parking_notes": location_obj.parking_notes,
                 "best_arrival_time": (
-                    park_location.best_arrival_time.strftime("%H:%M")
-                    if park_location.best_arrival_time
+                    location_obj.best_arrival_time.strftime("%H:%M")
+                    if location_obj.best_arrival_time
                     else None
                 ),
-                "seasonal_notes": park_location.seasonal_notes,
+                "seasonal_notes": location_obj.seasonal_notes,
                 "url": self._get_park_url(park),
             },
             cluster_weight=self._calculate_park_weight(park),
@@ -172,28 +172,28 @@ class RideLocationAdapter(BaseLocationAdapter):
     """Converts Ride/RideLocation to UnifiedLocation."""

     def to_unified_location(
-        self, ride_location: RideLocation
+        self, location_obj: RideLocation
     ) -> Optional[UnifiedLocation]:
         """Convert RideLocation to UnifiedLocation."""
-        if not ride_location.point:
+        if not location_obj.point or location_obj.latitude is None or location_obj.longitude is None:
             return None

-        ride = ride_location.ride
+        ride = location_obj.ride

         return UnifiedLocation(
             id=f"ride_{ride.id}",
             type=LocationType.RIDE,
             name=ride.name,
-            coordinates=(ride_location.latitude, ride_location.longitude),
+            coordinates=[float(location_obj.latitude), float(location_obj.longitude)],
             address=(
-                f"{ride_location.park_area}, {ride.park.name}"
-                if ride_location.park_area
+                f"{location_obj.park_area}, {ride.park.name}"
+                if location_obj.park_area
                 else ride.park.name
             ),
             metadata={
                 "park_id": ride.park.id,
                 "park_name": ride.park.name,
-                "park_area": ride_location.park_area,
+                "park_area": location_obj.park_area,
                 "ride_type": getattr(ride, "ride_type", "Unknown"),
                 "status": getattr(ride, "status", "UNKNOWN"),
                 "rating": (
@@ -217,8 +217,8 @@ class RideLocationAdapter(BaseLocationAdapter):
                 "height_requirement": getattr(ride, "height_requirement", ""),
                 "duration_minutes": getattr(ride, "duration_minutes", None),
                 "max_speed_mph": getattr(ride, "max_speed_mph", None),
-                "entrance_notes": ride_location.entrance_notes,
-                "accessibility_notes": ride_location.accessibility_notes,
+                "entrance_notes": location_obj.entrance_notes,
+                "accessibility_notes": location_obj.accessibility_notes,
                 "url": self._get_ride_url(ride),
             },
             cluster_weight=self._calculate_ride_weight(ride),
@@ -284,7 +284,7 @@ class CompanyLocationAdapter(BaseLocationAdapter):
     """Converts Company/CompanyHeadquarters to UnifiedLocation."""

     def to_unified_location(
-        self, company_headquarters: CompanyHeadquarters
+        self, location_obj: CompanyHeadquarters
     ) -> Optional[UnifiedLocation]:
         """Convert CompanyHeadquarters to UnifiedLocation."""
         # Note: CompanyHeadquarters doesn't have coordinates, so we need to geocode

@@ -378,7 +378,7 @@ class MapCacheService:
             id=data["id"],
             type=LocationType(data["type"]),
             name=data["name"],
-            coordinates=tuple(data["coordinates"]),
+            coordinates=list(data["coordinates"]),
             address=data.get("address"),
             metadata=data.get("metadata", {}),
             type_data=data.get("type_data", {}),
@@ -399,7 +399,7 @@ class MapCacheService:

         return ClusterData(
             id=data["id"],
-            coordinates=tuple(data["coordinates"]),
+            coordinates=list(data["coordinates"]),
             count=data["count"],
             types=types,
             bounds=bounds,

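
Because every adapter now takes a uniformly named location_obj and returns None for rows it cannot map, callers can treat them interchangeably (sketch; the no-argument adapter constructors are an assumption):

def collect_unified_locations(park_locations, ride_locations):
    adapters_and_rows = [
        (ParkLocationAdapter(), park_locations),
        (RideLocationAdapter(), ride_locations),
    ]
    unified = []
    for adapter, rows in adapters_and_rows:
        for row in rows:
            location = adapter.to_unified_location(row)
            if location is not None:  # rows without a point or lat/lng are skipped
                unified.append(location)
    return unified
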
@@ -6,7 +6,7 @@ that can be used across all domain-specific media implementations.
 """

 import logging
-from typing import Any, Optional, Dict, Tuple
+from typing import Any, Optional, Dict
 from datetime import datetime
 from django.core.files.uploadedfile import UploadedFile
 from django.conf import settings
@@ -71,7 +71,7 @@ class MediaService:
         return None

     @staticmethod
-    def validate_image_file(image_file: UploadedFile) -> Tuple[bool, Optional[str]]:
+    def validate_image_file(image_file: UploadedFile) -> tuple[bool, Optional[str]]:
         """
         Validate uploaded image file.

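
Call-site sketch for the validator's (ok, error) contract (the upload handler is illustrative):

from django.core.files.uploadedfile import UploadedFile

from apps.media.services import MediaService  # hypothetical import path


def handle_upload(uploaded_file: UploadedFile) -> None:
    is_valid, error_message = MediaService.validate_image_file(uploaded_file)
    if not is_valid:
        # error_message carries the human-readable reason when validation fails
        raise ValueError(error_message or "invalid image file")
    ...  # continue with the domain-specific save logic
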