feat: Implement initial schema and add various API, service, and management command enhancements across the application.

pacnpal
2026-01-01 15:13:01 -05:00
parent c95f99ca10
commit b243b17af7
413 changed files with 11164 additions and 17433 deletions

View File

@@ -54,9 +54,8 @@ except ImportError:
# Type hint for the mixin
if TYPE_CHECKING:
from typing import Union
TurnstileMixinType = Union[type[FallbackTurnstileMixin], Any]
TurnstileMixinType = type[FallbackTurnstileMixin] | Any
else:
TurnstileMixinType = TurnstileMixin
@@ -87,11 +86,9 @@ class LoginAPIView(TurnstileMixin, APIView): # type: ignore[misc]
# Validate Turnstile if configured
self.validate_turnstile(request)
except ValidationError as e:
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST)
serializer = LoginInputSerializer(
data=request.data, context={"request": request}
)
serializer = LoginInputSerializer(data=request.data, context={"request": request})
if serializer.is_valid():
# The serializer handles authentication validation
user = serializer.validated_data["user"] # type: ignore[index]
@@ -106,7 +103,7 @@ class LoginAPIView(TurnstileMixin, APIView): # type: ignore[misc]
{
"token": token.key,
"user": user,
"message": "Login successful",
"detail": "Login successful",
}
)
return Response(response_serializer.data)
@@ -138,7 +135,7 @@ class SignupAPIView(TurnstileMixin, APIView): # type: ignore[misc]
# Validate Turnstile if configured
self.validate_turnstile(request)
except ValidationError as e:
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
return Response({"detail": str(e)}, status=status.HTTP_400_BAD_REQUEST)
serializer = SignupInputSerializer(data=request.data)
if serializer.is_valid():
@@ -152,7 +149,7 @@ class SignupAPIView(TurnstileMixin, APIView): # type: ignore[misc]
{
"token": token.key,
"user": user,
"message": "Registration successful",
"detail": "Registration successful",
}
)
return Response(response_serializer.data, status=status.HTTP_201_CREATED)
@@ -186,14 +183,10 @@ class LogoutAPIView(APIView):
# Logout from session
logout(request._request) # type: ignore[attr-defined]
response_serializer = LogoutOutputSerializer(
{"message": "Logout successful"}
)
response_serializer = LogoutOutputSerializer({"detail": "Logout successful"})
return Response(response_serializer.data)
except Exception:
return Response(
{"error": "Logout failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR
)
return Response({"detail": "Logout failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
@extend_schema_view(
@@ -237,15 +230,11 @@ class PasswordResetAPIView(APIView):
serializer_class = PasswordResetInputSerializer
def post(self, request: Request) -> Response:
serializer = PasswordResetInputSerializer(
data=request.data, context={"request": request}
)
serializer = PasswordResetInputSerializer(data=request.data, context={"request": request})
if serializer.is_valid():
serializer.save()
response_serializer = PasswordResetOutputSerializer(
{"detail": "Password reset email sent"}
)
response_serializer = PasswordResetOutputSerializer({"detail": "Password reset email sent"})
return Response(response_serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -271,15 +260,11 @@ class PasswordChangeAPIView(APIView):
serializer_class = PasswordChangeInputSerializer
def post(self, request: Request) -> Response:
serializer = PasswordChangeInputSerializer(
data=request.data, context={"request": request}
)
serializer = PasswordChangeInputSerializer(data=request.data, context={"request": request})
if serializer.is_valid():
serializer.save()
response_serializer = PasswordChangeOutputSerializer(
{"detail": "Password changed successfully"}
)
response_serializer = PasswordChangeOutputSerializer({"detail": "Password changed successfully"})
return Response(response_serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -338,9 +323,7 @@ class SocialProvidersAPIView(APIView):
provider_name = social_app.name or social_app.provider.title()
# Build auth URL efficiently
auth_url = request.build_absolute_uri(
f"/accounts/{social_app.provider}/login/"
)
auth_url = request.build_absolute_uri(f"/accounts/{social_app.provider}/login/")
providers_list.append(
{
@@ -370,13 +353,9 @@ class SocialProvidersAPIView(APIView):
"status": "error",
"error": {
"code": "SOCIAL_PROVIDERS_ERROR",
"message": "Unable to retrieve social providers",
"detail": "Unable to retrieve social providers",
"details": str(e) if str(e) else None,
"request_user": (
str(request.user)
if hasattr(request, "user")
else "AnonymousUser"
),
"request_user": (str(request.user) if hasattr(request, "user") else "AnonymousUser"),
},
"data": None,
},
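Note on the hunks above: the auth views now report validation and status text under a single "detail" key instead of "error"/"message". A minimal sketch of what a client or test might expect after this change, assuming a DRF test client and a hypothetical /api/auth/login/ route (neither the URL nor the credentials appear in this diff):

# Sketch only: the "detail" key comes from the hunks above; the URL and payload are assumptions.
from rest_framework import status
from rest_framework.test import APIClient

client = APIClient()
resp = client.post("/api/auth/login/", {"username": "demo", "password": "wrong"}, format="json")
if resp.status_code == status.HTTP_400_BAD_REQUEST:
    assert "detail" in resp.data  # previously reported under "error"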

View File

@@ -39,7 +39,7 @@ class ContractCompliantAPIView(APIView):
response = super().dispatch(request, *args, **kwargs)
# Validate contract in DEBUG mode
if settings.DEBUG and hasattr(response, 'data'):
if settings.DEBUG and hasattr(response, "data"):
self._validate_response_contract(response.data)
return response
@@ -49,19 +49,18 @@ class ContractCompliantAPIView(APIView):
logger.error(
f"API error in {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'request_path': request.path,
'request_method': request.method,
'user': getattr(request, 'user', None),
'error': str(e)
"view_class": self.__class__.__name__,
"request_path": request.path,
"request_method": request.method,
"user": getattr(request, "user", None),
"detail": str(e),
},
exc_info=True
exc_info=True,
)
# Return standardized error response
return self.error_response(
message="An internal error occurred",
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR
message="An internal error occurred", status_code=status.HTTP_500_INTERNAL_SERVER_ERROR
)
def success_response(
@@ -69,7 +68,7 @@ class ContractCompliantAPIView(APIView):
data: Any = None,
message: str = None,
status_code: int = status.HTTP_200_OK,
headers: dict[str, str] = None
headers: dict[str, str] = None,
) -> Response:
"""
Create a standardized success response.
@@ -83,21 +82,15 @@ class ContractCompliantAPIView(APIView):
Returns:
Response with standardized format
"""
response_data = {
'success': True
}
response_data = {"success": True}
if data is not None:
response_data['data'] = data
response_data["data"] = data
if message:
response_data['message'] = message
response_data["message"] = message
return Response(
response_data,
status=status_code,
headers=headers
)
return Response(response_data, status=status_code, headers=headers)
def error_response(
self,
@@ -105,7 +98,7 @@ class ContractCompliantAPIView(APIView):
status_code: int = status.HTTP_400_BAD_REQUEST,
error_code: str = None,
details: Any = None,
headers: dict[str, str] = None
headers: dict[str, str] = None,
) -> Response:
"""
Create a standardized error response.
@@ -120,37 +113,22 @@ class ContractCompliantAPIView(APIView):
Returns:
Response with standardized error format
"""
error_data = {
'code': error_code or 'API_ERROR',
'message': message
}
error_data = {"code": error_code or "API_ERROR", "message": message}
if details:
error_data['details'] = details
error_data["details"] = details
# Add user context if available
if hasattr(self, 'request') and hasattr(self.request, 'user'):
if hasattr(self, "request") and hasattr(self.request, "user"):
user = self.request.user
if user and user.is_authenticated:
error_data['request_user'] = user.username
error_data["request_user"] = user.username
response_data = {
'status': 'error',
'error': error_data,
'data': None
}
response_data = {"status": "error", "error": error_data, "data": None}
return Response(
response_data,
status=status_code,
headers=headers
)
return Response(response_data, status=status_code, headers=headers)
def validation_error_response(
self,
errors: dict[str, Any],
message: str = "Validation failed"
) -> Response:
def validation_error_response(self, errors: dict[str, Any], message: str = "Validation failed") -> Response:
"""
Create a standardized validation error response.
@@ -161,14 +139,7 @@ class ContractCompliantAPIView(APIView):
Returns:
Response with validation errors
"""
return Response(
{
'success': False,
'message': message,
'errors': errors
},
status=status.HTTP_400_BAD_REQUEST
)
return Response({"success": False, "message": message, "errors": errors}, status=status.HTTP_400_BAD_REQUEST)
def _validate_response_contract(self, data: Any) -> None:
"""
@@ -179,7 +150,7 @@ class ContractCompliantAPIView(APIView):
"""
try:
# Check if this looks like filter metadata
if isinstance(data, dict) and 'categorical' in data and 'ranges' in data:
if isinstance(data, dict) and "categorical" in data and "ranges" in data:
validate_filter_metadata_contract(data)
# Add more contract validations as needed
@@ -188,10 +159,10 @@ class ContractCompliantAPIView(APIView):
logger.warning(
f"Contract validation failed in {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'validation_error': str(e),
'response_data_type': type(data).__name__
}
"view_class": self.__class__.__name__,
"validation_error": str(e),
"response_data_type": type(data).__name__,
},
)
@@ -225,17 +196,11 @@ class FilterMetadataAPIView(ContractCompliantAPIView):
except Exception as e:
logger.error(
f"Error getting filter metadata in {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'error': str(e)
},
exc_info=True
extra={"view_class": self.__class__.__name__, "detail": str(e)},
exc_info=True,
)
return self.error_response(
message="Failed to retrieve filter metadata",
error_code="FILTER_METADATA_ERROR"
)
return self.error_response(message="Failed to retrieve filter metadata", error_code="FILTER_METADATA_ERROR")
class HybridFilteringAPIView(ContractCompliantAPIView):
@@ -276,17 +241,14 @@ class HybridFilteringAPIView(ContractCompliantAPIView):
logger.error(
f"Error in hybrid filtering for {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'filters': getattr(self, '_extracted_filters', {}),
'error': str(e)
"view_class": self.__class__.__name__,
"filters": getattr(self, "_extracted_filters", {}),
"detail": str(e),
},
exc_info=True
exc_info=True,
)
return self.error_response(
message="Failed to retrieve filtered data",
error_code="HYBRID_FILTERING_ERROR"
)
return self.error_response(message="Failed to retrieve filtered data", error_code="HYBRID_FILTERING_ERROR")
def extract_filters(self, request) -> dict[str, Any]:
"""
@@ -313,19 +275,19 @@ class HybridFilteringAPIView(ContractCompliantAPIView):
def _validate_hybrid_response(self, data: dict[str, Any]) -> None:
"""Validate hybrid response structure."""
required_fields = ['strategy', 'total_count']
required_fields = ["strategy", "total_count"]
for field in required_fields:
if field not in data:
raise ValueError(f"Hybrid response missing required field: {field}")
# Validate strategy value
if data['strategy'] not in ['client_side', 'server_side']:
if data["strategy"] not in ["client_side", "server_side"]:
raise ValueError(f"Invalid strategy value: {data['strategy']}")
# Validate filter metadata if present
if 'filter_metadata' in data:
validate_filter_metadata_contract(data['filter_metadata'])
if "filter_metadata" in data:
validate_filter_metadata_contract(data["filter_metadata"])
class PaginatedAPIView(ContractCompliantAPIView):
@@ -340,11 +302,7 @@ class PaginatedAPIView(ContractCompliantAPIView):
max_page_size = 100
def get_paginated_response(
self,
queryset,
serializer_class: type[Serializer],
request,
page_size: int = None
self, queryset, serializer_class: type[Serializer], request, page_size: int = None
) -> Response:
"""
Create a paginated response.
@@ -362,13 +320,10 @@ class PaginatedAPIView(ContractCompliantAPIView):
# Determine page size
if page_size is None:
page_size = min(
int(request.query_params.get('page_size', self.default_page_size)),
self.max_page_size
)
page_size = min(int(request.query_params.get("page_size", self.default_page_size)), self.max_page_size)
# Get page number
page_number = request.query_params.get('page', 1)
page_number = request.query_params.get("page", 1)
try:
page_number = int(page_number)
@@ -389,28 +344,28 @@ class PaginatedAPIView(ContractCompliantAPIView):
serializer = serializer_class(page.object_list, many=True)
# Build pagination URLs
request_url = request.build_absolute_uri().split('?')[0]
request_url = request.build_absolute_uri().split("?")[0]
query_params = request.query_params.copy()
next_url = None
if page.has_next():
query_params['page'] = page.next_page_number()
query_params["page"] = page.next_page_number()
next_url = f"{request_url}?{query_params.urlencode()}"
previous_url = None
if page.has_previous():
query_params['page'] = page.previous_page_number()
query_params["page"] = page.previous_page_number()
previous_url = f"{request_url}?{query_params.urlencode()}"
# Create response data
response_data = {
'count': paginator.count,
'next': next_url,
'previous': previous_url,
'results': serializer.data,
'page_size': page_size,
'current_page': page.number,
'total_pages': paginator.num_pages
"count": paginator.count,
"next": next_url,
"previous": previous_url,
"results": serializer.data,
"page_size": page_size,
"current_page": page.number,
"total_pages": paginator.num_pages,
}
return self.success_response(response_data)
@@ -430,29 +385,23 @@ def contract_compliant_view(view_class):
response = original_dispatch(self, request, *args, **kwargs)
# Add contract validation in DEBUG mode
if settings.DEBUG and hasattr(response, 'data'):
if settings.DEBUG and hasattr(response, "data"):
# Basic validation - can be extended
pass
return response
except Exception as e:
logger.error(
f"Error in decorated view {view_class.__name__}: {str(e)}",
exc_info=True
)
logger.error(f"Error in decorated view {view_class.__name__}: {str(e)}", exc_info=True)
# Return basic error response
return Response(
{
'status': 'error',
'error': {
'code': 'API_ERROR',
'message': 'An internal error occurred'
},
'data': None
"status": "error",
"error": {"code": "API_ERROR", "detail": "An internal error occurred"},
"data": None,
},
status=status.HTTP_500_INTERNAL_SERVER_ERROR
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
view_class.dispatch = new_dispatch
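The base class in this file standardizes success and error payloads. A hypothetical subclass might use the helpers like this (the view name and data are placeholders; only success_response/error_response and their keyword arguments are taken from the code above):

# Hypothetical usage of ContractCompliantAPIView; not part of this commit.
from rest_framework import status

class ExampleParkListAPIView(ContractCompliantAPIView):
    def get(self, request):
        try:
            parks = []  # placeholder for serialized queryset data
            return self.success_response(data={"parks": parks}, message="Parks retrieved")
        except Exception:
            return self.error_response(
                message="Failed to retrieve parks",
                error_code="PARK_LIST_ERROR",
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )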

View File

@@ -1,4 +1,3 @@
from django.utils import timezone
from drf_spectacular.utils import extend_schema
from rest_framework.permissions import AllowAny
@@ -13,6 +12,7 @@ class DiscoveryAPIView(APIView):
"""
API endpoint for discovery content (Top Lists, Opening/Closing Soon).
"""
permission_classes = [AllowAny]
@extend_schema(
@@ -68,7 +68,7 @@ class DiscoveryAPIView(APIView):
"recently_closed": {
"parks": self._serialize(recently_closed_parks, "park"),
"rides": self._serialize(recently_closed_rides, "ride"),
}
},
}
return Response(data)
@@ -83,14 +83,13 @@ class DiscoveryAPIView(APIView):
"average_rating": item.average_rating,
}
if type_ == "park":
data.update({
"city": item.location.city if item.location else None,
"state": item.location.state if item.location else None,
})
data.update(
{
"city": item.location.city if item.location else None,
"state": item.location.state if item.location else None,
}
)
elif type_ == "ride":
data.update({
"park_name": item.park.name,
"park_slug": item.park.slug
})
data.update({"park_name": item.park.name, "park_slug": item.park.slug})
results.append(data)
return results

View File

@@ -30,7 +30,7 @@ class FallbackCacheMonitor:
"""Fallback class if CacheMonitor is not available."""
def get_cache_stats(self):
return {"error": "Cache monitoring not available"}
return {"detail": "Cache monitoring not available"}
class FallbackIndexAnalyzer:
@@ -38,7 +38,7 @@ class FallbackIndexAnalyzer:
@staticmethod
def analyze_slow_queries(threshold):
return {"error": "Query analysis not available"}
return {"detail": "Query analysis not available"}
# Try to import the real classes, use fallbacks if not available
@@ -56,9 +56,7 @@ except ImportError:
@extend_schema_view(
get=extend_schema(
summary="Health check",
description=(
"Get comprehensive health check information including system metrics."
),
description=("Get comprehensive health check information including system metrics."),
responses={
200: HealthCheckOutputSerializer,
503: HealthCheckOutputSerializer,
@@ -88,7 +86,7 @@ class HealthCheckAPIView(APIView):
cache_monitor = CacheMonitor()
cache_stats = cache_monitor.get_cache_stats()
except Exception:
cache_stats = {"error": "Cache monitoring unavailable"}
cache_stats = {"detail": "Cache monitoring unavailable"}
# Build comprehensive health data
health_data = {
@@ -120,9 +118,7 @@ class HealthCheckAPIView(APIView):
critical_service = False
response_time = None
plugin_errors = (
errors.get(plugin_class_name, []) if isinstance(errors, dict) else []
)
plugin_errors = errors.get(plugin_class_name, []) if isinstance(errors, dict) else []
health_data["checks"][plugin_name] = {
"status": "healthy" if not plugin_errors else "unhealthy",
@@ -194,9 +190,7 @@ class HealthCheckAPIView(APIView):
"transactions_committed": row[1],
"transactions_rolled_back": row[2],
"cache_hit_ratio": (
round((row[4] / (row[3] + row[4])) * 100, 2)
if (row[3] + row[4]) > 0
else 0
round((row[4] / (row[3] + row[4])) * 100, 2) if (row[3] + row[4]) > 0 else 0
),
}
)
@@ -206,7 +200,7 @@ class HealthCheckAPIView(APIView):
return metrics
except Exception as e:
return {"connection_status": "error", "error": str(e)}
return {"connection_status": "error", "detail": str(e)}
def _get_system_metrics(self) -> dict:
"""Get system performance metrics."""
@@ -270,7 +264,7 @@ class PerformanceMetricsAPIView(APIView):
def get(self, request: Request) -> Response:
"""Return performance metrics and analysis."""
if not settings.DEBUG:
return Response({"error": "Only available in debug mode"}, status=403)
return Response({"detail": "Only available in debug mode"}, status=403)
metrics = {
"timestamp": timezone.now(),
@@ -306,7 +300,7 @@ class PerformanceMetricsAPIView(APIView):
return analysis
except Exception as e:
return {"error": str(e)}
return {"detail": str(e)}
def _get_cache_performance(self):
"""Get cache performance metrics."""
@@ -314,14 +308,14 @@ class PerformanceMetricsAPIView(APIView):
cache_monitor = CacheMonitor()
return cache_monitor.get_cache_stats()
except Exception as e:
return {"error": str(e)}
return {"detail": str(e)}
def _get_slow_queries(self):
"""Get recent slow queries."""
try:
return IndexAnalyzer.analyze_slow_queries(0.1) # 100ms threshold
except Exception as e:
return {"error": str(e)}
return {"detail": str(e)}
@extend_schema_view(
@@ -336,9 +330,7 @@ class PerformanceMetricsAPIView(APIView):
),
options=extend_schema(
summary="CORS preflight for simple health check",
description=(
"Handle CORS preflight requests for the simple health check endpoint."
),
description=("Handle CORS preflight requests for the simple health check endpoint."),
responses={
200: SimpleHealthOutputSerializer,
},
@@ -370,7 +362,7 @@ class SimpleHealthAPIView(APIView):
except Exception as e:
response_data = {
"status": "error",
"error": str(e),
"detail": str(e),
"timestamp": timezone.now(),
}
serializer = SimpleHealthOutputSerializer(response_data)
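One note on the database metrics hunk earlier in this file: the cache hit ratio is buffer-cache hits divided by total block reads, expressed as a percentage. row[3] and row[4] presumably map to PostgreSQL's blks_read and blks_hit counters; the underlying query is not shown here, and the numbers below are made up:

# Worked example of the cache_hit_ratio expression above, with invented counters.
blks_read, blks_hit = 1_200, 58_800  # row[3], row[4] in the view
cache_hit_ratio = (
    round((blks_hit / (blks_read + blks_hit)) * 100, 2)
    if (blks_read + blks_hit) > 0
    else 0
)
print(cache_hit_ratio)  # 98.0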

View File

@@ -1,6 +1,7 @@
"""
Leaderboard views for user rankings
"""
from datetime import timedelta
from django.db.models import Count, Sum
@@ -15,7 +16,7 @@ from apps.reviews.models import Review
from apps.rides.models import RideCredit
@api_view(['GET'])
@api_view(["GET"])
@permission_classes([AllowAny])
def leaderboard(request):
"""
@@ -26,25 +27,25 @@ def leaderboard(request):
- period: 'all' | 'monthly' | 'weekly' (default: all)
- limit: int (default: 25, max: 100)
"""
category = request.query_params.get('category', 'credits')
period = request.query_params.get('period', 'all')
limit = min(int(request.query_params.get('limit', 25)), 100)
category = request.query_params.get("category", "credits")
period = request.query_params.get("period", "all")
limit = min(int(request.query_params.get("limit", 25)), 100)
# Calculate date filter based on period
date_filter = None
if period == 'weekly':
if period == "weekly":
date_filter = timezone.now() - timedelta(days=7)
elif period == 'monthly':
elif period == "monthly":
date_filter = timezone.now() - timedelta(days=30)
if category == 'credits':
if category == "credits":
return _get_credits_leaderboard(date_filter, limit)
elif category == 'reviews':
elif category == "reviews":
return _get_reviews_leaderboard(date_filter, limit)
elif category == 'contributions':
elif category == "contributions":
return _get_contributions_leaderboard(date_filter, limit)
else:
return Response({'error': 'Invalid category'}, status=400)
return Response({"detail": "Invalid category"}, status=400)
def _get_credits_leaderboard(date_filter, limit):
@@ -55,26 +56,34 @@ def _get_credits_leaderboard(date_filter, limit):
queryset = queryset.filter(created_at__gte=date_filter)
# Aggregate credits per user
users_data = queryset.values('user_id', 'user__username', 'user__display_name').annotate(
total_credits=Coalesce(Sum('count'), 0),
unique_rides=Count('ride', distinct=True),
).order_by('-total_credits')[:limit]
users_data = (
queryset.values("user_id", "user__username", "user__display_name")
.annotate(
total_credits=Coalesce(Sum("count"), 0),
unique_rides=Count("ride", distinct=True),
)
.order_by("-total_credits")[:limit]
)
results = []
for rank, entry in enumerate(users_data, 1):
results.append({
'rank': rank,
'user_id': entry['user_id'],
'username': entry['user__username'],
'display_name': entry['user__display_name'] or entry['user__username'],
'total_credits': entry['total_credits'],
'unique_rides': entry['unique_rides'],
})
results.append(
{
"rank": rank,
"user_id": entry["user_id"],
"username": entry["user__username"],
"display_name": entry["user__display_name"] or entry["user__username"],
"total_credits": entry["total_credits"],
"unique_rides": entry["unique_rides"],
}
)
return Response({
'category': 'credits',
'results': results,
})
return Response(
{
"category": "credits",
"results": results,
}
)
def _get_reviews_leaderboard(date_filter, limit):
@@ -85,49 +94,65 @@ def _get_reviews_leaderboard(date_filter, limit):
queryset = queryset.filter(created_at__gte=date_filter)
# Count reviews per user
users_data = queryset.values('user_id', 'user__username', 'user__display_name').annotate(
review_count=Count('id'),
).order_by('-review_count')[:limit]
users_data = (
queryset.values("user_id", "user__username", "user__display_name")
.annotate(
review_count=Count("id"),
)
.order_by("-review_count")[:limit]
)
results = []
for rank, entry in enumerate(users_data, 1):
results.append({
'rank': rank,
'user_id': entry['user_id'],
'username': entry['user__username'],
'display_name': entry['user__display_name'] or entry['user__username'],
'review_count': entry['review_count'],
})
results.append(
{
"rank": rank,
"user_id": entry["user_id"],
"username": entry["user__username"],
"display_name": entry["user__display_name"] or entry["user__username"],
"review_count": entry["review_count"],
}
)
return Response({
'category': 'reviews',
'results': results,
})
return Response(
{
"category": "reviews",
"results": results,
}
)
def _get_contributions_leaderboard(date_filter, limit):
"""Top users by approved contributions."""
queryset = EditSubmission.objects.filter(status='approved')
queryset = EditSubmission.objects.filter(status="approved")
if date_filter:
queryset = queryset.filter(created_at__gte=date_filter)
# Count contributions per user
users_data = queryset.values('submitted_by_id', 'submitted_by__username', 'submitted_by__display_name').annotate(
contribution_count=Count('id'),
).order_by('-contribution_count')[:limit]
users_data = (
queryset.values("submitted_by_id", "submitted_by__username", "submitted_by__display_name")
.annotate(
contribution_count=Count("id"),
)
.order_by("-contribution_count")[:limit]
)
results = []
for rank, entry in enumerate(users_data, 1):
results.append({
'rank': rank,
'user_id': entry['submitted_by_id'],
'username': entry['submitted_by__username'],
'display_name': entry['submitted_by__display_name'] or entry['submitted_by__username'],
'contribution_count': entry['contribution_count'],
})
results.append(
{
"rank": rank,
"user_id": entry["submitted_by_id"],
"username": entry["submitted_by__username"],
"display_name": entry["submitted_by__display_name"] or entry["submitted_by__username"],
"contribution_count": entry["contribution_count"],
}
)
return Response({
'category': 'contributions',
'results': results,
})
return Response(
{
"category": "contributions",
"results": results,
}
)
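The leaderboard view above accepts category, period, and limit query parameters and returns a ranked "results" list. A rough client-side sketch, where the route path is an assumption and only the parameters and response keys come from the code above:

# Hypothetical request against the leaderboard endpoint; the URL is assumed.
import requests

resp = requests.get(
    "https://example.com/api/leaderboard/",
    params={"category": "credits", "period": "monthly", "limit": 10},
    timeout=10,
)
resp.raise_for_status()
for entry in resp.json()["results"]:
    print(entry["rank"], entry["display_name"], entry["total_credits"])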

View File

@@ -186,21 +186,13 @@ class StatsAPIView(APIView):
total_rides = Ride.objects.count()
# Company counts by role
total_manufacturers = RideCompany.objects.filter(
roles__contains=["MANUFACTURER"]
).count()
total_manufacturers = RideCompany.objects.filter(roles__contains=["MANUFACTURER"]).count()
total_operators = ParkCompany.objects.filter(
roles__contains=["OPERATOR"]
).count()
total_operators = ParkCompany.objects.filter(roles__contains=["OPERATOR"]).count()
total_designers = RideCompany.objects.filter(
roles__contains=["DESIGNER"]
).count()
total_designers = RideCompany.objects.filter(roles__contains=["DESIGNER"]).count()
total_property_owners = ParkCompany.objects.filter(
roles__contains=["PROPERTY_OWNER"]
).count()
total_property_owners = ParkCompany.objects.filter(roles__contains=["PROPERTY_OWNER"]).count()
# Photo counts (combined)
total_park_photos = ParkPhoto.objects.count()
@@ -211,11 +203,7 @@ class StatsAPIView(APIView):
total_roller_coasters = RollerCoasterStats.objects.count()
# Ride category counts
ride_categories = (
Ride.objects.values("category")
.annotate(count=Count("id"))
.exclude(category="")
)
ride_categories = Ride.objects.values("category").annotate(count=Count("id")).exclude(category="")
category_stats = {}
for category in ride_categories:
@@ -232,9 +220,7 @@ class StatsAPIView(APIView):
"OT": "other_rides",
}
category_name = category_names.get(
category_code, f"category_{category_code.lower()}"
)
category_name = category_names.get(category_code, f"category_{category_code.lower()}")
category_stats[category_name] = category_count
# Park status counts
@@ -281,9 +267,7 @@ class StatsAPIView(APIView):
"RELOCATED": "relocated_rides",
}
status_name = status_names.get(
status_code, f"ride_status_{status_code.lower()}"
)
status_name = status_names.get(status_code, f"ride_status_{status_code.lower()}")
ride_status_stats[status_name] = status_count
# Review counts
@@ -365,7 +349,7 @@ class StatsRecalculateAPIView(APIView):
# Return success response with the fresh stats
return Response(
{
"message": "Platform statistics have been successfully recalculated",
"detail": "Platform statistics have been successfully recalculated",
"stats": fresh_stats,
"recalculated_at": timezone.now().isoformat(),
},

View File

@@ -127,18 +127,14 @@ class TriggerTrendingCalculationAPIView(APIView):
try:
# Run trending calculation command
with redirect_stdout(trending_output), redirect_stderr(trending_output):
call_command(
"calculate_trending", "--content-type=all", "--limit=50"
)
call_command("calculate_trending", "--content-type=all", "--limit=50")
trending_completed = True
except Exception as e:
trending_output.write(f"Error: {str(e)}")
try:
# Run new content calculation command
with redirect_stdout(new_content_output), redirect_stderr(
new_content_output
):
with redirect_stdout(new_content_output), redirect_stderr(new_content_output):
call_command(
"calculate_new_content",
"--content-type=all",
@@ -153,7 +149,7 @@ class TriggerTrendingCalculationAPIView(APIView):
return Response(
{
"message": "Trending content calculation completed",
"detail": "Trending content calculation completed",
"trending_completed": trending_completed,
"new_content_completed": new_content_completed,
"completion_time": completion_time,
@@ -166,7 +162,7 @@ class TriggerTrendingCalculationAPIView(APIView):
except Exception as e:
return Response(
{
"error": "Failed to trigger trending content calculation",
"detail": "Failed to trigger trending content calculation",
"details": str(e),
},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@@ -213,9 +209,7 @@ class NewContentAPIView(APIView):
days_back = min(int(request.query_params.get("days", 30)), 365)
# Get new content using direct calculation service
all_new_content = trending_service.get_new_content(
limit=limit * 2, days_back=days_back
)
all_new_content = trending_service.get_new_content(limit=limit * 2, days_back=days_back)
recently_added = []
newly_opened = []