mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2026-02-05 07:25:19 -05:00
feat: Implement centralized error capture and handling with new middleware, services, and API endpoints, and add new admin and statistics API views.
This commit is contained in:
@@ -43,6 +43,7 @@ from apps.api.v1.serializers.accounts import (
|
||||
UserPreferencesSerializer,
|
||||
UserStatisticsSerializer,
|
||||
)
|
||||
from apps.core.utils import capture_and_log
|
||||
from apps.lists.models import UserList
|
||||
|
||||
# Set up logging
|
||||
@@ -198,16 +199,13 @@ def delete_user_preserve_submissions(request, user_id):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
# Log the error for debugging
|
||||
logger.error(
|
||||
f"Error deleting user {user_id} by admin {request.user.username}: {str(e)}",
|
||||
extra={
|
||||
"admin_user": request.user.username,
|
||||
"target_user_id": user_id,
|
||||
"detail": str(e),
|
||||
"action": "user_deletion_error",
|
||||
},
|
||||
exc_info=True,
|
||||
# Capture error to dashboard
|
||||
capture_and_log(
|
||||
e,
|
||||
f'Delete user {user_id} by admin {request.user.username}',
|
||||
source='api',
|
||||
request=request,
|
||||
severity='high',
|
||||
)
|
||||
|
||||
return Response(
|
||||
@@ -333,7 +331,7 @@ def save_avatar_image(request):
|
||||
)
|
||||
|
||||
except Exception as api_error:
|
||||
logger.error(f"Error fetching image from Cloudflare API: {str(api_error)}", exc_info=True)
|
||||
capture_and_log(api_error, 'Fetch image from Cloudflare API', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to fetch image from Cloudflare: {str(api_error)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -357,7 +355,7 @@ def save_avatar_image(request):
|
||||
service.delete_image(old_avatar)
|
||||
logger.info(f"Successfully deleted old avatar from Cloudflare: {old_avatar.cloudflare_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete old avatar from Cloudflare: {str(e)}")
|
||||
capture_and_log(e, 'Delete old avatar from Cloudflare', source='api', request=request, severity='low')
|
||||
# Continue with database deletion even if Cloudflare deletion fails
|
||||
|
||||
old_avatar.delete()
|
||||
@@ -390,7 +388,7 @@ def save_avatar_image(request):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving avatar image: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Save avatar image', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to save avatar: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -441,7 +439,7 @@ def delete_avatar(request):
|
||||
service.delete_image(avatar_to_delete)
|
||||
logger.info(f"Successfully deleted avatar from Cloudflare: {avatar_to_delete.cloudflare_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete avatar from Cloudflare: {str(e)}")
|
||||
capture_and_log(e, 'Delete avatar from Cloudflare', source='api', request=request, severity='low')
|
||||
# Continue with database deletion even if Cloudflare deletion fails
|
||||
|
||||
avatar_to_delete.delete()
|
||||
@@ -550,16 +548,13 @@ def request_account_deletion(request):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
# Log the error for debugging
|
||||
logger.error(
|
||||
f"Error creating deletion request for user {request.user.username} (ID: {request.user.user_id}): {str(e)}",
|
||||
extra={
|
||||
"user": request.user.username,
|
||||
"user_id": request.user.user_id,
|
||||
"detail": str(e),
|
||||
"action": "self_deletion_error",
|
||||
},
|
||||
exc_info=True,
|
||||
# Capture error to dashboard
|
||||
capture_and_log(
|
||||
e,
|
||||
f'Create deletion request for user {request.user.username}',
|
||||
source='api',
|
||||
request=request,
|
||||
severity='high',
|
||||
)
|
||||
|
||||
return Response(
|
||||
@@ -1547,7 +1542,7 @@ def export_user_data(request):
|
||||
export_data = UserExportService.export_user_data(request.user)
|
||||
return Response(export_data, status=status.HTTP_200_OK)
|
||||
except Exception as e:
|
||||
logger.error(f"Error exporting data for user {request.user.id}: {e}", exc_info=True)
|
||||
capture_and_log(e, 'Export user data', source='api', request=request)
|
||||
return Response({"detail": "Failed to generate data export"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
|
||||
|
||||
@@ -20,6 +20,7 @@ from drf_spectacular.utils import (
|
||||
from rest_framework import serializers
|
||||
|
||||
from apps.accounts.models import PasswordReset
|
||||
from apps.core.utils import capture_and_log
|
||||
|
||||
UserModel = get_user_model()
|
||||
|
||||
@@ -64,6 +65,7 @@ class UserOutputSerializer(serializers.ModelSerializer):
|
||||
|
||||
avatar_url = serializers.SerializerMethodField()
|
||||
display_name = serializers.SerializerMethodField()
|
||||
role = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = UserModel
|
||||
@@ -74,9 +76,12 @@ class UserOutputSerializer(serializers.ModelSerializer):
|
||||
"display_name",
|
||||
"date_joined",
|
||||
"is_active",
|
||||
"is_staff",
|
||||
"is_superuser",
|
||||
"role",
|
||||
"avatar_url",
|
||||
]
|
||||
read_only_fields = ["id", "date_joined", "is_active"]
|
||||
read_only_fields = ["id", "date_joined", "is_active", "is_staff", "is_superuser", "role"]
|
||||
|
||||
def get_display_name(self, obj):
|
||||
"""Get the user's display name."""
|
||||
@@ -89,6 +94,15 @@ class UserOutputSerializer(serializers.ModelSerializer):
|
||||
return obj.profile.get_avatar_url()
|
||||
return None
|
||||
|
||||
@extend_schema_field(serializers.CharField())
|
||||
def get_role(self, obj) -> str:
|
||||
"""Compute effective role based on permissions."""
|
||||
if obj.is_superuser:
|
||||
return "SUPERUSER"
|
||||
if obj.is_staff:
|
||||
return "ADMIN"
|
||||
return "USER"
|
||||
|
||||
|
||||
class LoginInputSerializer(serializers.Serializer):
|
||||
"""Input serializer for user login."""
|
||||
@@ -235,8 +249,8 @@ The ThrillWiki Team
|
||||
logger.info(f"Verification email sent successfully to {user.email}. No email ID in response.")
|
||||
|
||||
except Exception as e:
|
||||
# Log the error but don't fail registration
|
||||
logger.error(f"Failed to send verification email to {user.email}: {e}")
|
||||
# Capture error but don't fail registration
|
||||
capture_and_log(e, f'Send verification email to {user.email}', source='api', severity='low')
|
||||
|
||||
|
||||
class SignupOutputSerializer(serializers.Serializer):
|
||||
|
||||
@@ -21,6 +21,7 @@ from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.accounts.services.social_provider_service import SocialProviderService
|
||||
from apps.core.utils import capture_and_log
|
||||
|
||||
# Import directly from the auth serializers.py file (not the serializers package)
|
||||
from .serializers import (
|
||||
@@ -188,7 +189,7 @@ class LoginAPIView(APIView):
|
||||
"access": str(access_token),
|
||||
"refresh": str(refresh),
|
||||
"user": user,
|
||||
"detail": "Login successful",
|
||||
"message": "Login successful",
|
||||
}
|
||||
)
|
||||
return Response(response_serializer.data)
|
||||
@@ -820,10 +821,7 @@ The ThrillWiki Team
|
||||
return Response({"detail": "Verification email sent successfully", "success": True})
|
||||
|
||||
except Exception as e:
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.error(f"Failed to send verification email to {user.email}: {e}")
|
||||
capture_and_log(e, 'Send verification email', source='api')
|
||||
|
||||
return Response(
|
||||
{"detail": "Failed to send verification email"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR
|
||||
|
||||
@@ -7,6 +7,7 @@ from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.core.utils import capture_and_log
|
||||
from apps.core.utils.cloudflare import get_direct_upload_url
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -21,11 +22,11 @@ class GenerateUploadURLView(APIView):
|
||||
result = get_direct_upload_url(user_id=str(request.user.id))
|
||||
return Response(result, status=status.HTTP_200_OK)
|
||||
except ImproperlyConfigured as e:
|
||||
logger.error(f"Configuration Error: {e}")
|
||||
capture_and_log(e, 'Generate upload URL - configuration error', source='api')
|
||||
return Response({"detail": "Server configuration error."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
except requests.RequestException as e:
|
||||
logger.error(f"Cloudflare API Error: {e}")
|
||||
capture_and_log(e, 'Generate upload URL - Cloudflare API error', source='api')
|
||||
return Response({"detail": "Failed to generate upload URL."}, status=status.HTTP_502_BAD_GATEWAY)
|
||||
except Exception:
|
||||
logger.exception("Unexpected error generating upload URL")
|
||||
except Exception as e:
|
||||
capture_and_log(e, 'Generate upload URL - unexpected error', source='api')
|
||||
return Response({"detail": "An unexpected error occurred."}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
@@ -38,6 +38,7 @@ from ..serializers.maps import (
|
||||
MapLocationsResponseSerializer,
|
||||
MapSearchResponseSerializer,
|
||||
)
|
||||
from apps.core.utils import capture_and_log
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -332,7 +333,7 @@ class MapLocationsAPIView(APIView):
|
||||
return Response(result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapLocationsAPIView: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Get map locations', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Failed to retrieve map locations"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -489,7 +490,7 @@ class MapLocationDetailAPIView(APIView):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapLocationDetailAPIView: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Get map location detail', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Failed to retrieve location details"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -674,7 +675,7 @@ class MapSearchAPIView(APIView):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapSearchAPIView: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Map search', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Search failed due to internal error"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -851,7 +852,7 @@ class MapBoundsAPIView(APIView):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapBoundsAPIView: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Get map bounds', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Failed to retrieve locations within bounds"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -904,7 +905,7 @@ class MapStatsAPIView(APIView):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapStatsAPIView: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Get map stats', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Failed to retrieve map statistics"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -956,7 +957,7 @@ class MapCacheAPIView(APIView):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapCacheAPIView.delete: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Clear map cache', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Failed to clear map cache"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -984,7 +985,7 @@ class MapCacheAPIView(APIView):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in MapCacheAPIView.post: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Invalidate map cache', source='api')
|
||||
return Response(
|
||||
{"status": "error", "detail": "Failed to invalidate cache"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
|
||||
@@ -14,6 +14,8 @@ from django.http import JsonResponse
|
||||
from django.utils.deprecation import MiddlewareMixin
|
||||
from rest_framework.response import Response
|
||||
|
||||
from apps.core.utils import capture_and_log
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -261,7 +263,13 @@ class ContractValidationMiddleware(MiddlewareMixin):
|
||||
}
|
||||
|
||||
if severity == "ERROR":
|
||||
logger.error(f"CONTRACT VIOLATION [{violation_type}]: {message}", extra=log_data)
|
||||
# Contract violations are development issues - capture for visibility
|
||||
capture_and_log(
|
||||
ValueError(message),
|
||||
f'Contract violation [{violation_type}] on {path}',
|
||||
source='middleware',
|
||||
severity='medium',
|
||||
)
|
||||
else:
|
||||
logger.warning(f"CONTRACT VIOLATION [{violation_type}]: {message}", extra=log_data)
|
||||
|
||||
|
||||
@@ -30,6 +30,7 @@ from apps.api.v1.rides.serializers import (
|
||||
RidePhotoStatsOutputSerializer,
|
||||
RidePhotoUpdateInputSerializer,
|
||||
)
|
||||
from apps.core.utils import capture_and_log
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
from apps.rides.models.media import RidePhoto
|
||||
@@ -184,7 +185,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
logger.info(f"Created ride photo {photo.id} for ride {ride.name} by user {self.request.user.username}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating ride photo: {e}")
|
||||
capture_and_log(e, 'Create ride photo', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to create photo: {str(e)}") from None
|
||||
|
||||
def perform_update(self, serializer):
|
||||
@@ -203,14 +204,14 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
if "is_primary" in serializer.validated_data:
|
||||
del serializer.validated_data["is_primary"]
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary photo', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to set primary photo: {str(e)}") from None
|
||||
|
||||
try:
|
||||
serializer.save()
|
||||
logger.info(f"Updated ride photo {instance.id} by user {self.request.user.username}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating ride photo: {e}")
|
||||
capture_and_log(e, 'Update ride photo', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to update photo: {str(e)}") from None
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
@@ -229,14 +230,14 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
service.delete_image(instance.image)
|
||||
logger.info(f"Successfully deleted ride photo from Cloudflare: {instance.image.cloudflare_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete ride photo from Cloudflare: {str(e)}")
|
||||
capture_and_log(e, 'Delete ride photo from Cloudflare', source='api', request=self.request, severity='low')
|
||||
# Continue with database deletion even if Cloudflare deletion fails
|
||||
|
||||
RideMediaService.delete_photo(instance, deleted_by=self.request.user)
|
||||
|
||||
logger.info(f"Deleted ride photo {instance.id} by user {self.request.user.username}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting ride photo: {e}")
|
||||
capture_and_log(e, 'Delete ride photo', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to delete photo: {str(e)}") from None
|
||||
|
||||
@extend_schema(
|
||||
@@ -281,7 +282,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary photo', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to set primary photo: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -339,7 +340,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in bulk photo approval: {e}")
|
||||
capture_and_log(e, 'Bulk photo approval', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to update photos: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -387,7 +388,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting ride photo stats: {e}")
|
||||
capture_and_log(e, 'Get ride photo stats', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to get photo statistics: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -491,7 +492,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as api_error:
|
||||
logger.error(f"Error fetching image from Cloudflare API: {str(api_error)}", exc_info=True)
|
||||
capture_and_log(api_error, 'Fetch image from Cloudflare API', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to fetch image from Cloudflare: {str(api_error)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -514,14 +515,14 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
try:
|
||||
RideMediaService.set_primary_photo(ride=ride, photo=photo)
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary photo for saved image', source='api', request=request, severity='low')
|
||||
# Don't fail the entire operation, just log the error
|
||||
|
||||
serializer = RidePhotoOutputSerializer(photo, context={"request": request})
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving ride photo: {e}")
|
||||
capture_and_log(e, 'Save ride photo', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to save photo: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
|
||||
@@ -31,6 +31,7 @@ from apps.api.v1.serializers.ride_reviews import (
|
||||
RideReviewStatsOutputSerializer,
|
||||
RideReviewUpdateInputSerializer,
|
||||
)
|
||||
from apps.core.utils import capture_and_log
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
from apps.rides.models.reviews import RideReview
|
||||
@@ -181,7 +182,7 @@ class RideReviewViewSet(ModelViewSet):
|
||||
logger.info(f"Created ride review {review.id} for ride {ride.name} by user {self.request.user.username}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating ride review: {e}")
|
||||
capture_and_log(e, 'Create ride review', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to create review: {str(e)}") from None
|
||||
|
||||
def perform_update(self, serializer):
|
||||
@@ -196,7 +197,7 @@ class RideReviewViewSet(ModelViewSet):
|
||||
serializer.save()
|
||||
logger.info(f"Updated ride review {instance.id} by user {self.request.user.username}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating ride review: {e}")
|
||||
capture_and_log(e, 'Update ride review', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to update review: {str(e)}") from None
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
@@ -209,7 +210,7 @@ class RideReviewViewSet(ModelViewSet):
|
||||
logger.info(f"Deleting ride review {instance.id} by user {self.request.user.username}")
|
||||
instance.delete()
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting ride review: {e}")
|
||||
capture_and_log(e, 'Delete ride review', source='api', request=self.request)
|
||||
raise ValidationError(f"Failed to delete review: {str(e)}") from None
|
||||
|
||||
@extend_schema(
|
||||
@@ -283,7 +284,7 @@ class RideReviewViewSet(ModelViewSet):
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting ride review stats: {e}")
|
||||
capture_and_log(e, 'Get ride review stats', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to get review statistics: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -360,7 +361,7 @@ class RideReviewViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in bulk review moderation: {e}")
|
||||
capture_and_log(e, 'Bulk review moderation', source='api', request=request)
|
||||
return Response(
|
||||
{"detail": f"Failed to moderate reviews: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
|
||||
@@ -32,6 +32,7 @@ from apps.core.exceptions import (
|
||||
ServiceError,
|
||||
ValidationException,
|
||||
)
|
||||
from apps.core.utils import capture_and_log
|
||||
from apps.core.utils.error_handling import ErrorHandler
|
||||
from apps.parks.models import Park, ParkPhoto
|
||||
from apps.parks.services import ParkMediaService
|
||||
@@ -188,7 +189,7 @@ class ParkPhotoViewSet(ModelViewSet):
|
||||
logger.warning(f"Validation error creating park photo: {e}")
|
||||
raise ValidationError(str(e)) from None
|
||||
except ServiceError as e:
|
||||
logger.error(f"Service error creating park photo: {e}")
|
||||
capture_and_log(e, 'Create park photo', source='api')
|
||||
raise ValidationError(f"Failed to create photo: {str(e)}") from None
|
||||
|
||||
def perform_update(self, serializer):
|
||||
@@ -210,7 +211,7 @@ class ParkPhotoViewSet(ModelViewSet):
|
||||
logger.warning(f"Validation error setting primary photo: {e}")
|
||||
raise ValidationError(str(e)) from None
|
||||
except ServiceError as e:
|
||||
logger.error(f"Service error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary park photo', source='api')
|
||||
raise ValidationError(f"Failed to set primary photo: {str(e)}") from None
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
@@ -232,13 +233,13 @@ class ParkPhotoViewSet(ModelViewSet):
|
||||
except ImportError:
|
||||
logger.warning("CloudflareImagesService not available")
|
||||
except ServiceError as e:
|
||||
logger.error(f"Service error deleting from Cloudflare: {str(e)}")
|
||||
capture_and_log(e, 'Delete park photo from Cloudflare', source='api', severity='low')
|
||||
# Continue with database deletion even if Cloudflare deletion fails
|
||||
|
||||
try:
|
||||
ParkMediaService().delete_photo(instance.id, deleted_by=cast(UserModel, self.request.user))
|
||||
except ServiceError as e:
|
||||
logger.error(f"Service error deleting park photo: {e}")
|
||||
capture_and_log(e, 'Delete park photo', source='api')
|
||||
raise ValidationError(f"Failed to delete photo: {str(e)}") from None
|
||||
|
||||
@extend_schema(
|
||||
@@ -539,14 +540,14 @@ class ParkPhotoViewSet(ModelViewSet):
|
||||
try:
|
||||
ParkMediaService().set_primary_photo(park_id=park.id, photo_id=photo.id)
|
||||
except ServiceError as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary park photo for saved image', source='api', severity='low')
|
||||
# Don't fail the entire operation, just log the error
|
||||
|
||||
serializer = ParkPhotoOutputSerializer(photo, context={"request": request})
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
except ImportError:
|
||||
logger.error("CloudflareImagesService not available")
|
||||
except ImportError as e:
|
||||
capture_and_log(e, 'Cloudflare service import', source='api')
|
||||
return ErrorHandler.handle_api_error(
|
||||
ServiceError("Cloudflare Images service not available"),
|
||||
user_message="Image upload service not available",
|
||||
|
||||
@@ -31,6 +31,7 @@ from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
|
||||
from apps.core.utils import capture_and_log, capture_errors
|
||||
from apps.rides.models import Ride, RidePhoto
|
||||
from apps.rides.services.media_service import RideMediaService
|
||||
|
||||
@@ -39,6 +40,7 @@ UserModel = get_user_model()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(
|
||||
summary="List ride photos",
|
||||
@@ -166,7 +168,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
serializer.instance = photo
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating ride photo: {e}")
|
||||
capture_and_log(e, 'Creating ride photo', source='api', severity='high', entity_type='RidePhoto')
|
||||
raise ValidationError(f"Failed to create photo: {str(e)}") from None
|
||||
|
||||
def perform_update(self, serializer):
|
||||
@@ -185,7 +187,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
if "is_primary" in serializer.validated_data:
|
||||
del serializer.validated_data["is_primary"]
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Setting primary photo', source='api', severity='medium', entity_type='RidePhoto')
|
||||
raise ValidationError(f"Failed to set primary photo: {str(e)}") from None
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
@@ -204,12 +206,12 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
service.delete_image(instance.image)
|
||||
logger.info(f"Successfully deleted ride photo from Cloudflare: {instance.image.cloudflare_id}")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete ride photo from Cloudflare: {str(e)}")
|
||||
capture_and_log(e, 'Delete ride photo from Cloudflare', source='api', severity='low')
|
||||
# Continue with database deletion even if Cloudflare deletion fails
|
||||
|
||||
RideMediaService.delete_photo(instance, deleted_by=self.request.user) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting ride photo: {e}")
|
||||
capture_and_log(e, 'Deleting ride photo', source='api', severity='high', entity_type='RidePhoto')
|
||||
raise ValidationError(f"Failed to delete photo: {str(e)}") from None
|
||||
|
||||
@extend_schema(
|
||||
@@ -254,7 +256,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary photo', source='api', severity='medium', entity_type='RidePhoto')
|
||||
return Response(
|
||||
{"detail": f"Failed to set primary photo: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -308,7 +310,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in bulk photo approval: {e}")
|
||||
capture_and_log(e, 'Bulk photo approval', source='api', severity='medium', entity_type='RidePhoto')
|
||||
return Response(
|
||||
{"detail": f"Failed to update photos: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -356,7 +358,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting ride photo stats: {e}")
|
||||
capture_and_log(e, 'Getting ride photo stats', source='api', severity='low', entity_type='RidePhoto')
|
||||
return Response(
|
||||
{"detail": f"Failed to get photo statistics: {str(e)}"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -392,7 +394,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error in set_primary_photo: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, 'Set primary photo', source='api')
|
||||
return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@extend_schema(
|
||||
@@ -486,7 +488,7 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
)
|
||||
|
||||
except Exception as api_error:
|
||||
logger.error(f"Error fetching image from Cloudflare API: {str(api_error)}", exc_info=True)
|
||||
capture_and_log(api_error, 'Fetch image from Cloudflare API', source='api')
|
||||
return Response(
|
||||
{"detail": f"Failed to fetch image from Cloudflare: {str(api_error)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@@ -509,14 +511,14 @@ class RidePhotoViewSet(ModelViewSet):
|
||||
try:
|
||||
RideMediaService.set_primary_photo(ride=ride, photo=photo)
|
||||
except Exception as e:
|
||||
logger.error(f"Error setting primary photo: {e}")
|
||||
capture_and_log(e, 'Set primary photo for saved image', source='api', severity='low')
|
||||
# Don't fail the entire operation, just log the error
|
||||
|
||||
serializer = RidePhotoOutputSerializer(photo, context={"request": request})
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error saving ride photo: {e}")
|
||||
capture_and_log(e, 'Save ride photo', source='api')
|
||||
return Response(
|
||||
{"detail": f"Failed to save photo: {str(e)}"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
|
||||
@@ -46,6 +46,7 @@ from apps.api.v1.serializers.rides import (
|
||||
RideUpdateInputSerializer,
|
||||
)
|
||||
from apps.core.decorators.cache_decorators import cache_api_response
|
||||
from apps.core.utils import capture_and_log
|
||||
from apps.rides.services.hybrid_loader import SmartRideLoader
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -2059,7 +2060,7 @@ class HybridRideAPIView(APIView):
|
||||
return Response(response_data, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in HybridRideAPIView: {e}")
|
||||
capture_and_log(e, 'Get hybrid rides', source='api')
|
||||
return Response(
|
||||
{"detail": "Internal server error"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@@ -2358,7 +2359,7 @@ class RideFilterMetadataAPIView(APIView):
|
||||
return Response(metadata, status=status.HTTP_200_OK)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in RideFilterMetadataAPIView: {e}")
|
||||
capture_and_log(e, 'Get ride filter metadata', source='api')
|
||||
return Response(
|
||||
{"detail": "Internal server error"},
|
||||
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
|
||||
@@ -10,10 +10,13 @@ from rest_framework.routers import DefaultRouter
|
||||
|
||||
# Import other views from the views directory
|
||||
from .views import (
|
||||
CoasterStatisticsAPIView,
|
||||
DataCompletenessAPIView,
|
||||
HealthCheckAPIView,
|
||||
NewContentAPIView,
|
||||
PerformanceMetricsAPIView,
|
||||
SimpleHealthAPIView,
|
||||
TechnicalSpecificationsAPIView,
|
||||
# Trending system views
|
||||
TrendingAPIView,
|
||||
TriggerTrendingCalculationAPIView,
|
||||
@@ -71,6 +74,23 @@ urlpatterns = [
|
||||
TriggerRankingCalculationView.as_view(),
|
||||
name="trigger-ranking-calculation",
|
||||
),
|
||||
# Admin endpoints
|
||||
path(
|
||||
"admin/data-completeness/",
|
||||
DataCompletenessAPIView.as_view(),
|
||||
name="data-completeness",
|
||||
),
|
||||
# Ride search advanced endpoints (for useAdvancedRideSearch composable)
|
||||
path(
|
||||
"rides/technical-specifications/",
|
||||
TechnicalSpecificationsAPIView.as_view(),
|
||||
name="technical-specifications",
|
||||
),
|
||||
path(
|
||||
"rides/coaster-statistics/",
|
||||
CoasterStatisticsAPIView.as_view(),
|
||||
name="coaster-statistics",
|
||||
),
|
||||
# Domain-specific API endpoints
|
||||
path("parks/", include("apps.api.v1.parks.urls")),
|
||||
path("rides/", include("apps.api.v1.rides.urls")),
|
||||
@@ -86,9 +106,11 @@ urlpatterns = [
|
||||
path("media/", include("apps.media.urls")),
|
||||
path("blog/", include("apps.blog.urls")),
|
||||
path("support/", include("apps.support.urls")),
|
||||
path("errors/", include("apps.core.urls.errors")),
|
||||
path("images/", include("apps.api.v1.images.urls")),
|
||||
# Cloudflare Images Toolkit API endpoints
|
||||
path("cloudflare-images/", include("django_cloudflareimages_toolkit.urls")),
|
||||
# Include router URLs (for rankings and any other router-registered endpoints)
|
||||
path("", include(router.urls)),
|
||||
]
|
||||
|
||||
|
||||
@@ -5,9 +5,15 @@ This package contains all API view classes organized by functionality:
|
||||
- auth.py: Authentication and user management views
|
||||
- health.py: Health check and monitoring views
|
||||
- trending.py: Trending and new content discovery views
|
||||
- admin.py: Admin-only data completeness and system management views
|
||||
"""
|
||||
|
||||
# Import all view classes for easy access
|
||||
from .admin import (
|
||||
CoasterStatisticsAPIView,
|
||||
DataCompletenessAPIView,
|
||||
TechnicalSpecificationsAPIView,
|
||||
)
|
||||
from .auth import (
|
||||
AuthStatusAPIView,
|
||||
CurrentUserAPIView,
|
||||
@@ -31,6 +37,10 @@ from .trending import (
|
||||
|
||||
# Export all views for import convenience
|
||||
__all__ = [
|
||||
# Admin views
|
||||
"DataCompletenessAPIView",
|
||||
"TechnicalSpecificationsAPIView",
|
||||
"CoasterStatisticsAPIView",
|
||||
# Authentication views
|
||||
"LoginAPIView",
|
||||
"SignupAPIView",
|
||||
@@ -49,3 +59,4 @@ __all__ = [
|
||||
"NewContentAPIView",
|
||||
"TriggerTrendingCalculationAPIView",
|
||||
]
|
||||
|
||||
|
||||
382
backend/apps/api/v1/views/admin.py
Normal file
382
backend/apps/api/v1/views/admin.py
Normal file
@@ -0,0 +1,382 @@
|
||||
"""
|
||||
Admin API views for data completeness and system management.
|
||||
|
||||
These views provide admin-only endpoints for analyzing data quality,
|
||||
entity completeness, and system health.
|
||||
"""
|
||||
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import status
|
||||
from rest_framework.permissions import IsAdminUser
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.core.decorators.cache_decorators import cache_api_response
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Company, Ride
|
||||
|
||||
# Define field importance categories per entity type
|
||||
PARK_FIELDS = {
|
||||
"critical": ["name", "slug", "status"],
|
||||
"important": ["short_description", "park_type", "opening_date"],
|
||||
"valuable": ["banner_image_url", "card_image_url", "website_url", "phone", "email"],
|
||||
"supplementary": ["closing_date", "size_acres", "operating_season"],
|
||||
}
|
||||
|
||||
RIDE_FIELDS = {
|
||||
"critical": ["name", "slug", "status", "park_id"],
|
||||
"important": ["category", "opening_date", "manufacturer_id"],
|
||||
"valuable": [
|
||||
"max_speed_kmh",
|
||||
"height_meters",
|
||||
"track_length_meters",
|
||||
"inversions_count",
|
||||
"banner_image_url",
|
||||
"card_image_url",
|
||||
],
|
||||
"supplementary": [
|
||||
"min_height_cm",
|
||||
"max_height_cm",
|
||||
"duration_seconds",
|
||||
"capacity_per_hour",
|
||||
"designer_id",
|
||||
],
|
||||
}
|
||||
|
||||
COMPANY_FIELDS = {
|
||||
"critical": ["name", "slug", "company_type"],
|
||||
"important": ["description", "headquarters_location"],
|
||||
"valuable": ["logo_url", "website_url", "founded_year"],
|
||||
"supplementary": ["banner_image_url", "card_image_url"],
|
||||
}
|
||||
|
||||
|
||||
def calculate_completeness_score(obj, fields_config: dict) -> tuple[int, dict]:
    """
    Score an entity's data completeness, weighting fields by importance.

    Args:
        obj: Any object whose attributes correspond to the configured fields.
        fields_config: Mapping of importance category (critical/important/
            valuable/supplementary) to the list of field names in it.

    Returns:
        Tuple of (score, missing_fields) where score is an int capped at 100
        and missing_fields maps each category to the fields found empty.
    """
    # Each category contributes a fixed share of the 100-point budget,
    # split evenly across its fields.
    category_weights = {"critical": 40, "important": 30, "valuable": 20, "supplementary": 10}
    earned = 0.0
    missing_fields = {}

    for category, field_names in fields_config.items():
        per_field = category_weights[category] / len(field_names) if field_names else 0
        absent = []

        for name in field_names:
            value = getattr(obj, name, None)
            # A field counts as present unless it is None, "", or [].
            if value is not None and value != "" and value != []:
                earned += per_field
            else:
                absent.append(name)

        if absent:
            missing_fields[category] = absent

    # Cap at 100 to guard against floating-point drift in the weights.
    return min(round(earned), 100), missing_fields
|
||||
|
||||
|
||||
def _analyze_entities(objects, fields_config, entity_type, min_score, max_score, missing_category):
    """Score a batch of entities, apply query filters, and aggregate stats.

    Args:
        objects: Iterable of model instances (already sliced by the caller).
        fields_config: Field-importance mapping for this entity type.
        entity_type: String label written into each serialized row.
        min_score / max_score: Raw query-param strings; empty/None disables
            the bound (same truthiness convention as the endpoint itself).
        missing_category: If set, only keep entities missing a field in
            this category.

    Returns:
        Tuple of (rows, total_score, complete_count) for entities that
        survived the filters.
    """
    rows = []
    total_score = 0
    complete_count = 0

    for obj in objects:
        score, missing = calculate_completeness_score(obj, fields_config)

        if min_score and score < int(min_score):
            continue
        if max_score and score > int(max_score):
            continue
        if missing_category and missing_category not in missing:
            continue

        total_score += score
        if score == 100:
            complete_count += 1

        rows.append({
            "id": str(obj.id),
            "name": obj.name,
            "slug": obj.slug,
            "entity_type": entity_type,
            # Defensive hasattr: not every model is guaranteed a timestamp.
            "updated_at": obj.updated_at.isoformat() if hasattr(obj, "updated_at") else None,
            "completeness_score": score,
            "missing_fields": missing,
        })

    return rows, total_score, complete_count


class DataCompletenessAPIView(APIView):
    """
    Admin endpoint for analyzing data completeness across all entity types.

    Returns completeness scores and missing field analysis for parks, rides,
    companies, and ride models.
    """

    permission_classes = [IsAdminUser]

    @extend_schema(
        tags=["Admin"],
        summary="Get data completeness analysis",
        description="Analyze data completeness across all entity types with missing field breakdown",
    )
    @cache_api_response(timeout=300, key_prefix="data_completeness")
    def get(self, request):
        """
        Get data completeness analysis.

        Query parameters:
        - entity_type: Filter by entity type (park, ride, company, ride_model)
        - min_score: Minimum completeness score (0-100)
        - max_score: Maximum completeness score (0-100)
        - missing_category: Filter by missing field category (critical, important, valuable, supplementary)
        - limit: Max results per entity type (default 50)
        """
        try:
            entity_type = request.GET.get("entity_type")
            min_score = request.GET.get("min_score")
            max_score = request.GET.get("max_score")
            missing_category = request.GET.get("missing_category")
            # Hard cap of 200 per entity type to bound response size.
            limit = min(int(request.GET.get("limit", 50)), 200)

            results = {
                "summary": {},
                "parks": [],
                "rides": [],
                "companies": [],
                "ride_models": [],
            }

            if not entity_type or entity_type == "park":
                rows, total, complete = _analyze_entities(
                    Park.objects.all()[:limit],
                    PARK_FIELDS,
                    "park",
                    min_score, max_score, missing_category,
                )
                results["parks"] = rows
                results["summary"]["total_parks"] = len(rows)
                results["summary"]["avg_park_score"] = round(total / len(rows)) if rows else 0
                results["summary"]["parks_complete"] = complete

            if not entity_type or entity_type == "ride":
                rows, total, complete = _analyze_entities(
                    # select_related avoids an extra query per ride for park.
                    Ride.objects.select_related("park").all()[:limit],
                    RIDE_FIELDS,
                    "ride",
                    min_score, max_score, missing_category,
                )
                results["rides"] = rows
                results["summary"]["total_rides"] = len(rows)
                results["summary"]["avg_ride_score"] = round(total / len(rows)) if rows else 0
                results["summary"]["rides_complete"] = complete

            if not entity_type or entity_type == "company":
                rows, total, complete = _analyze_entities(
                    Company.objects.all()[:limit],
                    COMPANY_FIELDS,
                    "company",
                    min_score, max_score, missing_category,
                )
                results["companies"] = rows
                results["summary"]["total_companies"] = len(rows)
                results["summary"]["avg_company_score"] = round(total / len(rows)) if rows else 0
                results["summary"]["companies_complete"] = complete

            # Ride models - placeholder (if model exists)
            results["summary"]["total_models"] = 0
            results["summary"]["avg_model_score"] = 0
            results["summary"]["models_complete"] = 0

            return Response(results, status=status.HTTP_200_OK)

        except Exception as e:
            return Response(
                {"detail": f"Error analyzing data completeness: {str(e)}"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
|
||||
|
||||
|
||||
class TechnicalSpecificationsAPIView(APIView):
    """
    Endpoint for querying ride technical specifications.

    Flattens per-ride spec fields into a list of
    {ride_id, spec_name, spec_value} records for the advanced ride search.
    """

    permission_classes = []  # Public endpoint

    @extend_schema(
        tags=["Rides"],
        summary="Get ride technical specifications",
        description="Query technical specifications across rides for advanced filtering",
    )
    def get(self, request):
        """
        Return flattened technical specification records.

        Query parameters:
        - spec_name: Filter by specification name
        - ride_id: Filter by specific ride
        """
        try:
            wanted_spec = request.GET.get("spec_name")
            wanted_ride = request.GET.get("ride_id")

            queryset = Ride.objects.all()
            if wanted_ride:
                queryset = queryset.filter(id=wanted_ride)

            # Specs currently live directly on the Ride model; a dedicated
            # specifications table could replace this list later.
            field_labels = [
                ("max_speed_kmh", "Max Speed (km/h)"),
                ("height_meters", "Height (m)"),
                ("track_length_meters", "Track Length (m)"),
                ("inversions_count", "Inversions"),
                ("duration_seconds", "Duration (s)"),
                ("g_force", "G-Force"),
            ]

            # Cap at 100 rides to keep response sizes bounded.
            records = [
                {
                    "ride_id": str(ride.id),
                    "spec_name": attr,
                    "spec_value": str(value),
                }
                for ride in queryset[:100]
                for attr, _label in field_labels
                if (value := getattr(ride, attr, None)) is not None
                and (not wanted_spec or wanted_spec == attr)
            ]

            return Response(records, status=status.HTTP_200_OK)

        except Exception as e:
            return Response(
                {"detail": f"Error fetching specifications: {str(e)}"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
|
||||
|
||||
|
||||
class CoasterStatisticsAPIView(APIView):
    """
    Endpoint for querying coaster statistics for advanced filtering.
    """

    permission_classes = []  # Public endpoint

    # Numeric Ride fields exposed as coaster statistics.
    STAT_FIELDS = (
        "max_speed_kmh",
        "height_meters",
        "track_length_meters",
        "inversions_count",
        "g_force",
        "drop_height_meters",
    )

    @extend_schema(
        tags=["Rides"],
        summary="Get coaster statistics",
        description="Query coaster statistics for advanced ride filtering",
    )
    def get(self, request):
        """
        Get coaster statistics.

        Query parameters:
        - stat_name: Filter by statistic name
        - stat_value__gte: Minimum value
        - stat_value__lte: Maximum value
        """
        stat_name = request.GET.get("stat_name")
        raw_min = request.GET.get("stat_value__gte")
        raw_max = request.GET.get("stat_value__lte")

        # Parse the numeric bounds once, up front. A malformed bound is a
        # client error (400) rather than an unhandled 500; previously
        # float() was re-run inside the loop and a bad param crashed into
        # the generic error handler.
        try:
            min_value = float(raw_min) if raw_min else None
            max_value = float(raw_max) if raw_max else None
        except ValueError:
            return Response(
                {"detail": "stat_value__gte and stat_value__lte must be numeric"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            # Query rides with coaster category and relevant stats
            rides = Ride.objects.filter(category="coaster")

            stats = []
            for ride in rides[:100]:  # cap to keep responses bounded
                for field in self.STAT_FIELDS:
                    if stat_name and stat_name != field:
                        continue

                    value = getattr(ride, field, None)
                    if value is None:
                        continue

                    numeric = float(value)
                    if min_value is not None and numeric < min_value:
                        continue
                    if max_value is not None and numeric > max_value:
                        continue

                    stats.append({
                        "ride_id": str(ride.id),
                        "stat_name": field,
                        "stat_value": numeric,
                    })

            return Response(stats, status=status.HTTP_200_OK)

        except Exception as e:
            return Response(
                {"detail": f"Error fetching statistics: {str(e)}"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
|
||||
@@ -15,6 +15,7 @@ from rest_framework.serializers import Serializer
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from apps.api.v1.serializers.shared import validate_filter_metadata_contract
|
||||
from apps.core.utils import capture_and_log
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -45,17 +46,12 @@ class ContractCompliantAPIView(APIView):
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
# Log the error with context
|
||||
logger.error(
|
||||
f"API error in {self.__class__.__name__}: {str(e)}",
|
||||
extra={
|
||||
"view_class": self.__class__.__name__,
|
||||
"request_path": request.path,
|
||||
"request_method": request.method,
|
||||
"user": getattr(request, "user", None),
|
||||
"detail": str(e),
|
||||
},
|
||||
exc_info=True,
|
||||
# Capture error to dashboard
|
||||
capture_and_log(
|
||||
e,
|
||||
f'API error in {self.__class__.__name__}',
|
||||
source='api',
|
||||
severity='high',
|
||||
)
|
||||
|
||||
# Return standardized error response
|
||||
@@ -194,10 +190,10 @@ class FilterMetadataAPIView(ContractCompliantAPIView):
|
||||
return self.success_response(validated_metadata)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error getting filter metadata in {self.__class__.__name__}: {str(e)}",
|
||||
extra={"view_class": self.__class__.__name__, "detail": str(e)},
|
||||
exc_info=True,
|
||||
capture_and_log(
|
||||
e,
|
||||
f'Get filter metadata in {self.__class__.__name__}',
|
||||
source='api',
|
||||
)
|
||||
|
||||
return self.error_response(message="Failed to retrieve filter metadata", error_code="FILTER_METADATA_ERROR")
|
||||
@@ -238,14 +234,10 @@ class HybridFilteringAPIView(ContractCompliantAPIView):
|
||||
return self.success_response(hybrid_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Error in hybrid filtering for {self.__class__.__name__}: {str(e)}",
|
||||
extra={
|
||||
"view_class": self.__class__.__name__,
|
||||
"filters": getattr(self, "_extracted_filters", {}),
|
||||
"detail": str(e),
|
||||
},
|
||||
exc_info=True,
|
||||
capture_and_log(
|
||||
e,
|
||||
f'Hybrid filtering for {self.__class__.__name__}',
|
||||
source='api',
|
||||
)
|
||||
|
||||
return self.error_response(message="Failed to retrieve filtered data", error_code="HYBRID_FILTERING_ERROR")
|
||||
@@ -392,7 +384,7 @@ def contract_compliant_view(view_class):
|
||||
return response
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in decorated view {view_class.__name__}: {str(e)}", exc_info=True)
|
||||
capture_and_log(e, f'Decorated view {view_class.__name__}', source='api')
|
||||
|
||||
# Return basic error response
|
||||
return Response(
|
||||
|
||||
Reference in New Issue
Block a user