remove backend

Author: pacnpal
Date: 2025-09-21 20:19:12 -04:00
Parent: 9e724bd795
Commit: f3c59ad6ff
557 changed files with 1739 additions and 4836 deletions


@@ -0,0 +1,53 @@
"""
API v1 Views Package
This package contains all API view classes organized by functionality:
- auth.py: Authentication and user management views
- health.py: Health check and monitoring views
- trending.py: Trending and new content discovery views
"""
# Import all view classes for easy access
from .auth import (
LoginAPIView,
SignupAPIView,
LogoutAPIView,
CurrentUserAPIView,
PasswordResetAPIView,
PasswordChangeAPIView,
SocialProvidersAPIView,
AuthStatusAPIView,
)
from .health import (
HealthCheckAPIView,
PerformanceMetricsAPIView,
SimpleHealthAPIView,
)
from .trending import (
TrendingAPIView,
NewContentAPIView,
TriggerTrendingCalculationAPIView,
)
# Export all views for import convenience
__all__ = [
# Authentication views
"LoginAPIView",
"SignupAPIView",
"LogoutAPIView",
"CurrentUserAPIView",
"PasswordResetAPIView",
"PasswordChangeAPIView",
"SocialProvidersAPIView",
"AuthStatusAPIView",
# Health check views
"HealthCheckAPIView",
"PerformanceMetricsAPIView",
"SimpleHealthAPIView",
# Trending views
"TrendingAPIView",
"NewContentAPIView",
"TriggerTrendingCalculationAPIView",
]

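A minimal wiring sketch showing how these re-exports are typically consumed from a URLconf. The route paths and names below are assumptions for illustration; the actual URL configuration is not part of this file.

# Hypothetical URLconf; paths and route names are placeholders.
from django.urls import path

from apps.api.v1.views import HealthCheckAPIView, LoginAPIView, TrendingAPIView

urlpatterns = [
    path("auth/login/", LoginAPIView.as_view(), name="api-login"),
    path("health/", HealthCheckAPIView.as_view(), name="api-health"),
    path("trending/", TrendingAPIView.as_view(), name="api-trending"),
]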
apps/api/v1/views/auth.py Normal file

@@ -0,0 +1,414 @@
"""
Authentication API views for ThrillWiki API v1.
This module contains all authentication-related API endpoints including
login, signup, logout, password management, and social authentication.
"""
# type: ignore[misc,attr-defined,arg-type,call-arg,index,assignment]
from typing import TYPE_CHECKING, Type, Any
from django.contrib.auth import login, logout, get_user_model
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ValidationError
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.permissions import AllowAny, IsAuthenticated
from drf_spectacular.utils import extend_schema, extend_schema_view
# Import serializers from the auth serializers module
from ..serializers.auth import (
LoginInputSerializer,
LoginOutputSerializer,
SignupInputSerializer,
SignupOutputSerializer,
LogoutOutputSerializer,
UserOutputSerializer,
PasswordResetInputSerializer,
PasswordResetOutputSerializer,
PasswordChangeInputSerializer,
PasswordChangeOutputSerializer,
SocialProviderOutputSerializer,
AuthStatusOutputSerializer,
)
# Handle optional dependencies with fallback classes
class FallbackTurnstileMixin:
"""Fallback mixin if TurnstileMixin is not available."""
def validate_turnstile(self, request: Any) -> None:
"""Fallback validation method that does nothing."""
pass
# Try to import the real class, use fallback if not available
try:
from apps.accounts.mixins import TurnstileMixin
except ImportError:
TurnstileMixin = FallbackTurnstileMixin
# Type hint for the mixin
if TYPE_CHECKING:
from typing import Union
TurnstileMixinType = Union[Type[FallbackTurnstileMixin], Any]
else:
TurnstileMixinType = TurnstileMixin
UserModel = get_user_model()
@extend_schema_view(
post=extend_schema(
summary="User login",
description="Authenticate user with username/email and password.",
request=LoginInputSerializer,
responses={
200: LoginOutputSerializer,
400: "Bad Request",
},
tags=["Authentication"],
),
)
class LoginAPIView(TurnstileMixin, APIView): # type: ignore[misc]
"""API endpoint for user login."""
permission_classes = [AllowAny]
authentication_classes = []
serializer_class = LoginInputSerializer
def post(self, request: Request) -> Response:
try:
# Validate Turnstile if configured
self.validate_turnstile(request)
except ValidationError as e:
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
serializer = LoginInputSerializer(
data=request.data, context={"request": request}
)
if serializer.is_valid():
# The serializer handles authentication validation
user = serializer.validated_data["user"] # type: ignore[index]
login(request._request, user) # type: ignore[attr-defined]
# Optimized token creation - get_or_create is atomic
from rest_framework.authtoken.models import Token
token, created = Token.objects.get_or_create(user=user)
response_serializer = LoginOutputSerializer(
{
"token": token.key,
"user": user,
"message": "Login successful",
}
)
return Response(response_serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@extend_schema_view(
post=extend_schema(
summary="User registration",
description="Register a new user account.",
request=SignupInputSerializer,
responses={
201: SignupOutputSerializer,
400: "Bad Request",
},
tags=["Authentication"],
),
)
class SignupAPIView(TurnstileMixin, APIView): # type: ignore[misc]
"""API endpoint for user registration."""
permission_classes = [AllowAny]
authentication_classes = []
serializer_class = SignupInputSerializer
def post(self, request: Request) -> Response:
try:
# Validate Turnstile if configured
self.validate_turnstile(request)
except ValidationError as e:
return Response({"error": str(e)}, status=status.HTTP_400_BAD_REQUEST)
serializer = SignupInputSerializer(data=request.data)
if serializer.is_valid():
user = serializer.save()
login(request._request, user) # type: ignore[attr-defined]
from rest_framework.authtoken.models import Token
token, created = Token.objects.get_or_create(user=user)
response_serializer = SignupOutputSerializer(
{
"token": token.key,
"user": user,
"message": "Registration successful",
}
)
return Response(response_serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@extend_schema_view(
post=extend_schema(
summary="User logout",
description="Logout the current user and invalidate their token.",
responses={
200: LogoutOutputSerializer,
401: "Unauthorized",
},
tags=["Authentication"],
),
)
class LogoutAPIView(APIView):
"""API endpoint for user logout."""
permission_classes = [IsAuthenticated]
serializer_class = LogoutOutputSerializer
def post(self, request: Request) -> Response:
try:
# Delete the token for token-based auth
if hasattr(request.user, "auth_token"):
request.user.auth_token.delete()
# Logout from session
logout(request._request) # type: ignore[attr-defined]
response_serializer = LogoutOutputSerializer(
{"message": "Logout successful"}
)
return Response(response_serializer.data)
except Exception:
return Response(
{"error": "Logout failed"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR
)
@extend_schema_view(
get=extend_schema(
summary="Get current user",
description="Retrieve information about the currently authenticated user.",
responses={
200: UserOutputSerializer,
401: "Unauthorized",
},
tags=["Authentication"],
),
)
class CurrentUserAPIView(APIView):
"""API endpoint to get current user information."""
permission_classes = [IsAuthenticated]
serializer_class = UserOutputSerializer
def get(self, request: Request) -> Response:
serializer = UserOutputSerializer(request.user)
return Response(serializer.data)
@extend_schema_view(
post=extend_schema(
summary="Request password reset",
description="Send a password reset email to the user.",
request=PasswordResetInputSerializer,
responses={
200: PasswordResetOutputSerializer,
400: "Bad Request",
},
tags=["Authentication"],
),
)
class PasswordResetAPIView(APIView):
"""API endpoint to request password reset."""
permission_classes = [AllowAny]
serializer_class = PasswordResetInputSerializer
def post(self, request: Request) -> Response:
serializer = PasswordResetInputSerializer(
data=request.data, context={"request": request}
)
if serializer.is_valid():
serializer.save()
response_serializer = PasswordResetOutputSerializer(
{"detail": "Password reset email sent"}
)
return Response(response_serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@extend_schema_view(
post=extend_schema(
summary="Change password",
description="Change the current user's password.",
request=PasswordChangeInputSerializer,
responses={
200: PasswordChangeOutputSerializer,
400: "Bad Request",
401: "Unauthorized",
},
tags=["Authentication"],
),
)
class PasswordChangeAPIView(APIView):
"""API endpoint to change password."""
permission_classes = [IsAuthenticated]
serializer_class = PasswordChangeInputSerializer
def post(self, request: Request) -> Response:
serializer = PasswordChangeInputSerializer(
data=request.data, context={"request": request}
)
if serializer.is_valid():
serializer.save()
response_serializer = PasswordChangeOutputSerializer(
{"detail": "Password changed successfully"}
)
return Response(response_serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@extend_schema_view(
get=extend_schema(
summary="Get social providers",
description="Retrieve available social authentication providers.",
responses={200: "List of social providers"},
tags=["Authentication"],
),
)
class SocialProvidersAPIView(APIView):
"""API endpoint to get available social authentication providers."""
permission_classes = [AllowAny]
serializer_class = SocialProviderOutputSerializer
def get(self, request: Request) -> Response:
from django.core.cache import cache
try:
# Check if django-allauth is available
try:
from allauth.socialaccount.models import SocialApp
except ImportError:
# django-allauth is not installed, return empty list
serializer = SocialProviderOutputSerializer([], many=True)
return Response(serializer.data)
site = get_current_site(request._request) # type: ignore[attr-defined]
# Cache key based on site and request host
# Use pk for Site objects, domain for RequestSite objects
site_identifier = getattr(site, "pk", site.domain)
cache_key = f"social_providers:{site_identifier}:{request.get_host()}"
# Try to get from cache first (cache for 15 minutes)
cached_providers = cache.get(cache_key)
if cached_providers is not None:
return Response(cached_providers)
providers_list = []
# Optimized query: filter by site and order by provider name
try:
social_apps = SocialApp.objects.filter(sites=site).order_by("provider")
except Exception:
# If query fails (table doesn't exist, etc.), return empty list
social_apps = []
for social_app in social_apps:
try:
# Simplified provider name resolution - avoid expensive provider class loading
provider_name = social_app.name or social_app.provider.title()
# Build auth URL efficiently
auth_url = request.build_absolute_uri(
f"/accounts/{social_app.provider}/login/"
)
providers_list.append(
{
"id": social_app.provider,
"name": provider_name,
"authUrl": auth_url,
}
)
except Exception:
# Skip if provider can't be loaded
continue
# Serialize and cache the result
serializer = SocialProviderOutputSerializer(providers_list, many=True)
response_data = serializer.data
# Cache for 15 minutes (900 seconds)
cache.set(cache_key, response_data, 900)
return Response(response_data)
except Exception as e:
# Return a proper JSON error response instead of letting it bubble up
return Response(
{
"status": "error",
"error": {
"code": "SOCIAL_PROVIDERS_ERROR",
"message": "Unable to retrieve social providers",
"details": str(e) if str(e) else None,
"request_user": (
str(request.user)
if hasattr(request, "user")
else "AnonymousUser"
),
},
"data": None,
},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@extend_schema_view(
post=extend_schema(
summary="Check authentication status",
description="Check if user is authenticated and return user data.",
responses={200: AuthStatusOutputSerializer},
tags=["Authentication"],
),
)
class AuthStatusAPIView(APIView):
"""API endpoint to check authentication status."""
permission_classes = [AllowAny]
serializer_class = AuthStatusOutputSerializer
def post(self, request: Request) -> Response:
if request.user.is_authenticated:
response_data = {
"authenticated": True,
"user": request.user,
}
else:
response_data = {
"authenticated": False,
"user": None,
}
serializer = AuthStatusOutputSerializer(response_data)
return Response(serializer.data)

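A client-side sketch of the token flow implemented by LoginAPIView and CurrentUserAPIView: log in, read the DRF token from the response, and send it on subsequent requests. The base URL and route paths are assumptions, since URL wiring is not shown in this file.

import requests

BASE = "https://example.com/api/v1"  # placeholder host and prefix

# LoginAPIView responds with LoginOutputSerializer data: token, user, message.
resp = requests.post(
    f"{BASE}/auth/login/",
    json={"username": "demo", "password": "secret"},
    timeout=10,
)
resp.raise_for_status()
token = resp.json()["token"]

# DRF TokenAuthentication expects an "Authorization: Token <key>" header.
me = requests.get(
    f"{BASE}/auth/user/",  # hypothetical route for CurrentUserAPIView
    headers={"Authorization": f"Token {token}"},
    timeout=10,
)
print(me.json())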
apps/api/v1/views/base.py Normal file

@@ -0,0 +1,460 @@
"""
Base Views for Contract-Compliant API Responses
This module provides base view classes that ensure all API responses follow
consistent formats that match frontend TypeScript interfaces exactly.
"""
import logging
from typing import Dict, Any, Optional, Type
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.serializers import Serializer
from django.conf import settings
from apps.api.v1.serializers.shared import (
validate_filter_metadata_contract
)
logger = logging.getLogger(__name__)
class ContractCompliantAPIView(APIView):
"""
Base API view that ensures all responses are contract-compliant.
This view provides:
- Standardized success response format
- Consistent error response format
- Automatic contract validation in DEBUG mode
- Proper error logging with context
"""
# Override in subclasses to specify response serializer
response_serializer_class: Optional[Type[Serializer]] = None
def dispatch(self, request, *args, **kwargs):
"""Override dispatch to add contract validation."""
try:
response = super().dispatch(request, *args, **kwargs)
# Validate contract in DEBUG mode
if settings.DEBUG and hasattr(response, 'data'):
self._validate_response_contract(response.data)
return response
except Exception as e:
# Log the error with context
logger.error(
f"API error in {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'request_path': request.path,
'request_method': request.method,
'user': getattr(request, 'user', None),
'error': str(e)
},
exc_info=True
)
# Return standardized error response
return self.error_response(
message="An internal error occurred",
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR
)
def success_response(
self,
data: Any = None,
message: Optional[str] = None,
status_code: int = status.HTTP_200_OK,
headers: Optional[Dict[str, str]] = None,
) -> Response:
"""
Create a standardized success response.
Args:
data: Response data
message: Optional success message
status_code: HTTP status code
headers: Optional response headers
Returns:
Response with standardized format
"""
response_data = {
'success': True
}
if data is not None:
response_data['data'] = data
if message:
response_data['message'] = message
return Response(
response_data,
status=status_code,
headers=headers
)
def error_response(
self,
message: str,
status_code: int = status.HTTP_400_BAD_REQUEST,
error_code: Optional[str] = None,
details: Any = None,
headers: Optional[Dict[str, str]] = None,
) -> Response:
"""
Create a standardized error response.
Args:
message: Error message
status_code: HTTP status code
error_code: Optional error code
details: Optional error details
headers: Optional response headers
Returns:
Response with standardized error format
"""
error_data = {
'code': error_code or 'API_ERROR',
'message': message
}
if details:
error_data['details'] = details
# Add user context if available
if hasattr(self, 'request') and hasattr(self.request, 'user'):
user = self.request.user
if user and user.is_authenticated:
error_data['request_user'] = user.username
response_data = {
'status': 'error',
'error': error_data,
'data': None
}
return Response(
response_data,
status=status_code,
headers=headers
)
def validation_error_response(
self,
errors: Dict[str, Any],
message: str = "Validation failed"
) -> Response:
"""
Create a standardized validation error response.
Args:
errors: Validation errors dictionary
message: Error message
Returns:
Response with validation errors
"""
return Response(
{
'success': False,
'message': message,
'errors': errors
},
status=status.HTTP_400_BAD_REQUEST
)
def _validate_response_contract(self, data: Any) -> None:
"""
Validate response data against expected contracts.
This method is called automatically in DEBUG mode to catch
contract violations during development.
"""
try:
# Check if this looks like filter metadata
if isinstance(data, dict) and 'categorical' in data and 'ranges' in data:
validate_filter_metadata_contract(data)
# Add more contract validations as needed
except Exception as e:
logger.warning(
f"Contract validation failed in {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'validation_error': str(e),
'response_data_type': type(data).__name__
}
)
class FilterMetadataAPIView(ContractCompliantAPIView):
"""
Base view for filter metadata endpoints.
This view ensures filter metadata responses always follow the correct
contract that matches frontend TypeScript interfaces.
"""
def get_filter_metadata(self) -> Dict[str, Any]:
"""
Override this method in subclasses to provide filter metadata.
Returns:
Filter metadata dictionary
"""
raise NotImplementedError("Subclasses must implement get_filter_metadata()")
def get(self, request, *args, **kwargs):
"""Handle GET requests for filter metadata."""
try:
metadata = self.get_filter_metadata()
# Validate the metadata contract
validated_metadata = validate_filter_metadata_contract(metadata)
return self.success_response(validated_metadata)
except Exception as e:
logger.error(
f"Error getting filter metadata in {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'error': str(e)
},
exc_info=True
)
return self.error_response(
message="Failed to retrieve filter metadata",
error_code="FILTER_METADATA_ERROR"
)
class HybridFilteringAPIView(ContractCompliantAPIView):
"""
Base view for hybrid filtering endpoints.
This view provides common functionality for hybrid filtering responses
and ensures they follow the correct contract.
"""
def get_hybrid_data(self, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
"""
Override this method in subclasses to provide hybrid data.
Args:
filters: Filter parameters
Returns:
Hybrid response dictionary
"""
raise NotImplementedError("Subclasses must implement get_hybrid_data()")
def get(self, request, *args, **kwargs):
"""Handle GET requests for hybrid filtering."""
try:
# Extract filters from request parameters
filters = self.extract_filters(request)
# Get hybrid data
hybrid_data = self.get_hybrid_data(filters)
# Validate hybrid response structure
self._validate_hybrid_response(hybrid_data)
return self.success_response(hybrid_data)
except Exception as e:
logger.error(
f"Error in hybrid filtering for {self.__class__.__name__}: {str(e)}",
extra={
'view_class': self.__class__.__name__,
'filters': getattr(self, '_extracted_filters', {}),
'error': str(e)
},
exc_info=True
)
return self.error_response(
message="Failed to retrieve filtered data",
error_code="HYBRID_FILTERING_ERROR"
)
def extract_filters(self, request) -> Dict[str, Any]:
"""
Extract filter parameters from request.
Override this method in subclasses to customize filter extraction.
Args:
request: HTTP request object
Returns:
Dictionary of filter parameters
"""
# Basic implementation - extract all query parameters
filters = {}
for key, value in request.query_params.items():
if value: # Only include non-empty values
filters[key] = value
# Store for error logging
self._extracted_filters = filters
return filters
def _validate_hybrid_response(self, data: Dict[str, Any]) -> None:
"""Validate hybrid response structure."""
required_fields = ['strategy', 'total_count']
for field in required_fields:
if field not in data:
raise ValueError(f"Hybrid response missing required field: {field}")
# Validate strategy value
if data['strategy'] not in ['client_side', 'server_side']:
raise ValueError(f"Invalid strategy value: {data['strategy']}")
# Validate filter metadata if present
if 'filter_metadata' in data:
validate_filter_metadata_contract(data['filter_metadata'])
class PaginatedAPIView(ContractCompliantAPIView):
"""
Base view for paginated responses.
This view ensures paginated responses follow the correct contract
with consistent pagination metadata.
"""
default_page_size = 20
max_page_size = 100
def get_paginated_response(
self,
queryset,
serializer_class: Type[Serializer],
request,
page_size: Optional[int] = None,
) -> Response:
"""
Create a paginated response.
Args:
queryset: Django queryset to paginate
serializer_class: Serializer class for items
request: HTTP request object
page_size: Optional page size override
Returns:
Paginated response
"""
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
# Determine page size
if page_size is None:
page_size = min(
int(request.query_params.get('page_size', self.default_page_size)),
self.max_page_size
)
# Get page number
page_number = request.query_params.get('page', 1)
try:
page_number = int(page_number)
except (ValueError, TypeError):
page_number = 1
# Create paginator
paginator = Paginator(queryset, page_size)
try:
page = paginator.page(page_number)
except PageNotAnInteger:
page = paginator.page(1)
except EmptyPage:
page = paginator.page(paginator.num_pages)
# Serialize data
serializer = serializer_class(page.object_list, many=True)
# Build pagination URLs
request_url = request.build_absolute_uri().split('?')[0]
query_params = request.query_params.copy()
next_url = None
if page.has_next():
query_params['page'] = page.next_page_number()
next_url = f"{request_url}?{query_params.urlencode()}"
previous_url = None
if page.has_previous():
query_params['page'] = page.previous_page_number()
previous_url = f"{request_url}?{query_params.urlencode()}"
# Create response data
response_data = {
'count': paginator.count,
'next': next_url,
'previous': previous_url,
'results': serializer.data,
'page_size': page_size,
'current_page': page.number,
'total_pages': paginator.num_pages
}
return self.success_response(response_data)
def contract_compliant_view(view_class):
"""
Decorator to make any view contract-compliant.
This decorator can be applied to existing views to add contract
validation without changing the base class.
"""
original_dispatch = view_class.dispatch
def new_dispatch(self, request, *args, **kwargs):
try:
response = original_dispatch(self, request, *args, **kwargs)
# Add contract validation in DEBUG mode
if settings.DEBUG and hasattr(response, 'data'):
# Basic validation - can be extended
pass
return response
except Exception as e:
logger.error(
f"Error in decorated view {view_class.__name__}: {str(e)}",
exc_info=True
)
# Return basic error response
return Response(
{
'status': 'error',
'error': {
'code': 'API_ERROR',
'message': 'An internal error occurred'
},
'data': None
},
status=status.HTTP_500_INTERNAL_SERVER_ERROR
)
view_class.dispatch = new_dispatch
return view_class

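A minimal subclass sketch showing how success_response and error_response are meant to be used; the view name and in-memory data store are placeholders, not part of the commit.

from rest_framework import status

from apps.api.v1.views.base import ContractCompliantAPIView

ITEMS = {1: {"id": 1, "name": "Example"}}  # placeholder data store


class ExampleDetailAPIView(ContractCompliantAPIView):
    """Illustrative only: demonstrates the standardized response helpers."""

    def get(self, request, pk, *args, **kwargs):
        item = ITEMS.get(int(pk))
        if item is None:
            return self.error_response(
                message="Item not found",
                error_code="NOT_FOUND",
                status_code=status.HTTP_404_NOT_FOUND,
            )
        return self.success_response(data=item, message="OK")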
apps/api/v1/views/health.py Normal file

@@ -0,0 +1,384 @@
"""
Health check API views for ThrillWiki API v1.
This module contains health check and monitoring endpoints for system status,
performance metrics, and database analysis.
"""
import time
from django.utils import timezone
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from health_check.views import MainView
from drf_spectacular.utils import extend_schema, extend_schema_view
# Import serializers
from ..serializers import (
HealthCheckOutputSerializer,
PerformanceMetricsOutputSerializer,
SimpleHealthOutputSerializer,
)
# Handle optional dependencies with fallback classes
class FallbackCacheMonitor:
"""Fallback class if CacheMonitor is not available."""
def get_cache_stats(self):
return {"error": "Cache monitoring not available"}
class FallbackIndexAnalyzer:
"""Fallback class if IndexAnalyzer is not available."""
@staticmethod
def analyze_slow_queries(threshold):
return {"error": "Query analysis not available"}
# Try to import the real classes, use fallbacks if not available
try:
from apps.core.services.enhanced_cache_service import CacheMonitor
except ImportError:
CacheMonitor = FallbackCacheMonitor
try:
from apps.core.utils.query_optimization import IndexAnalyzer
except ImportError:
IndexAnalyzer = FallbackIndexAnalyzer
@extend_schema_view(
get=extend_schema(
summary="Health check",
description=(
"Get comprehensive health check information including system metrics."
),
responses={
200: HealthCheckOutputSerializer,
503: HealthCheckOutputSerializer,
},
tags=["Health"],
),
)
class HealthCheckAPIView(APIView):
"""Enhanced API endpoint for health checks with detailed JSON response."""
permission_classes = [AllowAny]
serializer_class = HealthCheckOutputSerializer
def get(self, request: Request) -> Response:
"""Return comprehensive health check information."""
start_time = time.time()
# Get basic health check results
main_view = MainView()
main_view.request = request._request # type: ignore[attr-defined]
plugins = main_view.plugins
errors = main_view.errors
# Collect additional performance metrics
try:
cache_monitor = CacheMonitor()
cache_stats = cache_monitor.get_cache_stats()
except Exception:
cache_stats = {"error": "Cache monitoring unavailable"}
# Build comprehensive health data
health_data = {
"status": "healthy" if not errors else "unhealthy",
"timestamp": timezone.now(),
"version": getattr(settings, "VERSION", "1.0.0"),
"environment": getattr(settings, "ENVIRONMENT", "development"),
"response_time_ms": 0, # Will be calculated at the end
"checks": {},
"metrics": {
"cache": cache_stats,
"database": self._get_database_metrics(),
"system": self._get_system_metrics(),
},
}
# Process individual health checks
for plugin in plugins:
# Handle both plugin objects and strings
if hasattr(plugin, "identifier"):
plugin_name = plugin.identifier()
plugin_class_name = plugin.__class__.__name__
critical_service = getattr(plugin, "critical_service", False)
response_time = getattr(plugin, "_response_time", None)
else:
# If plugin is a string, use it directly
plugin_name = str(plugin)
plugin_class_name = plugin_name
critical_service = False
response_time = None
plugin_errors = (
errors.get(plugin_class_name, []) if isinstance(errors, dict) else []
)
health_data["checks"][plugin_name] = {
"status": "healthy" if not plugin_errors else "unhealthy",
"critical": critical_service,
"errors": [str(error) for error in plugin_errors],
"response_time_ms": response_time,
}
# Calculate total response time
health_data["response_time_ms"] = round((time.time() - start_time) * 1000, 2)
# Determine HTTP status code
status_code = 200
if errors:
# Check if any critical services are failing
critical_errors = any(
getattr(plugin, "critical_service", False)
for plugin in plugins
if isinstance(errors, dict) and errors.get(plugin.__class__.__name__)
)
status_code = 503 if critical_errors else 200
serializer = HealthCheckOutputSerializer(health_data)
return Response(serializer.data, status=status_code)
def _get_database_metrics(self) -> dict:
"""Get database performance metrics."""
try:
from django.db import connection
from typing import Any
# Get basic connection info
metrics: dict[str, Any] = {
"vendor": connection.vendor,
"connection_status": "connected",
}
# Test query performance
start_time = time.time()
with connection.cursor() as cursor:
cursor.execute("SELECT 1")
cursor.fetchone()
query_time = (time.time() - start_time) * 1000
metrics["test_query_time_ms"] = round(query_time, 2)
# PostgreSQL specific metrics
if connection.vendor == "postgresql":
try:
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT
numbackends as active_connections,
xact_commit as transactions_committed,
xact_rollback as transactions_rolled_back,
blks_read as blocks_read,
blks_hit as blocks_hit
FROM pg_stat_database
WHERE datname = current_database()
"""
)
row = cursor.fetchone()
if row:
metrics.update(
{ # type: ignore[arg-type]
"active_connections": row[0],
"transactions_committed": row[1],
"transactions_rolled_back": row[2],
"cache_hit_ratio": (
round((row[4] / (row[3] + row[4])) * 100, 2)
if (row[3] + row[4]) > 0
else 0
),
}
)
except Exception:
pass # Skip advanced metrics if not available
return metrics
except Exception as e:
return {"connection_status": "error", "error": str(e)}
def _get_system_metrics(self) -> dict:
"""Get system performance metrics."""
from typing import Any
metrics: dict[str, Any] = {
"debug_mode": settings.DEBUG,
"allowed_hosts": (settings.ALLOWED_HOSTS if settings.DEBUG else ["hidden"]),
}
try:
import psutil
# Memory metrics
memory = psutil.virtual_memory()
metrics["memory"] = {
"total_mb": round(memory.total / 1024 / 1024, 2),
"available_mb": round(memory.available / 1024 / 1024, 2),
"percent_used": memory.percent,
}
# CPU metrics
metrics["cpu"] = {
"percent_used": psutil.cpu_percent(interval=0.1),
"core_count": psutil.cpu_count(),
}
# Disk metrics
disk = psutil.disk_usage("/")
metrics["disk"] = {
"total_gb": round(disk.total / 1024 / 1024 / 1024, 2),
"free_gb": round(disk.free / 1024 / 1024 / 1024, 2),
"percent_used": round((disk.used / disk.total) * 100, 2),
}
except ImportError:
metrics["system_monitoring"] = "psutil not available"
except Exception as e:
metrics["system_error"] = str(e)
return metrics
@extend_schema_view(
get=extend_schema(
summary="Performance metrics",
description="Get performance metrics and database analysis (debug mode only).",
responses={
200: PerformanceMetricsOutputSerializer,
403: "Forbidden",
},
tags=["Health"],
),
)
class PerformanceMetricsAPIView(APIView):
"""API view for performance metrics and database analysis."""
permission_classes = [AllowAny] if settings.DEBUG else []
serializer_class = PerformanceMetricsOutputSerializer
def get(self, request: Request) -> Response:
"""Return performance metrics and analysis."""
if not settings.DEBUG:
return Response({"error": "Only available in debug mode"}, status=403)
metrics = {
"timestamp": timezone.now(),
"database_analysis": self._get_database_analysis(),
"cache_performance": self._get_cache_performance(),
"recent_slow_queries": self._get_slow_queries(),
}
serializer = PerformanceMetricsOutputSerializer(metrics)
return Response(serializer.data)
def _get_database_analysis(self):
"""Analyze database performance."""
try:
from django.db import connection
analysis = {
"total_queries": len(connection.queries),
"query_analysis": IndexAnalyzer.analyze_slow_queries(0.05),
}
if connection.queries:
query_times = [float(q.get("time", 0)) for q in connection.queries]
analysis.update(
{
"total_query_time": sum(query_times),
"average_query_time": sum(query_times) / len(query_times),
"slowest_query_time": max(query_times),
"fastest_query_time": min(query_times),
}
)
return analysis
except Exception as e:
return {"error": str(e)}
def _get_cache_performance(self):
"""Get cache performance metrics."""
try:
cache_monitor = CacheMonitor()
return cache_monitor.get_cache_stats()
except Exception as e:
return {"error": str(e)}
def _get_slow_queries(self):
"""Get recent slow queries."""
try:
return IndexAnalyzer.analyze_slow_queries(0.1) # 100ms threshold
except Exception as e:
return {"error": str(e)}
@extend_schema_view(
get=extend_schema(
summary="Simple health check",
description="Simple health check endpoint for load balancers.",
responses={
200: SimpleHealthOutputSerializer,
503: SimpleHealthOutputSerializer,
},
tags=["Health"],
),
options=extend_schema(
summary="CORS preflight for simple health check",
description=(
"Handle CORS preflight requests for the simple health check endpoint."
),
responses={
200: SimpleHealthOutputSerializer,
},
tags=["Health"],
),
)
class SimpleHealthAPIView(APIView):
"""Simple health check endpoint for load balancers."""
permission_classes = [AllowAny]
serializer_class = SimpleHealthOutputSerializer
def get(self, request: Request) -> Response:
"""Return simple OK status."""
try:
# Basic database connectivity test
from django.db import connection
with connection.cursor() as cursor:
cursor.execute("SELECT 1")
cursor.fetchone()
response_data = {
"status": "ok",
"timestamp": timezone.now(),
}
serializer = SimpleHealthOutputSerializer(response_data)
return Response(serializer.data, status=200)
except Exception as e:
response_data = {
"status": "error",
"error": str(e),
"timestamp": timezone.now(),
}
serializer = SimpleHealthOutputSerializer(response_data)
return Response(serializer.data, status=503)
def options(self, request: Request) -> Response:
"""Handle OPTIONS requests for CORS preflight."""
response_data = {
"status": "ok",
"timestamp": timezone.now(),
}
serializer = SimpleHealthOutputSerializer(response_data)
return Response(serializer.data)

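A hedged sketch of how a deployment or load balancer could probe SimpleHealthAPIView; the URL is an assumption, since routing is not defined in this file. It exits 0 only when the endpoint returns HTTP 200 with a "status" of "ok", matching the view's contract above.

import sys

import requests


def probe(url: str = "https://example.com/api/v1/health/simple/") -> int:
    """Return 0 when the simple health endpoint reports ok, 1 otherwise."""
    try:
        resp = requests.get(url, timeout=5)
    except requests.RequestException:
        return 1
    healthy = resp.status_code == 200 and resp.json().get("status") == "ok"
    return 0 if healthy else 1


if __name__ == "__main__":
    sys.exit(probe())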

@@ -0,0 +1,85 @@
"""
Views for review-related API endpoints.
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from rest_framework import status
from drf_spectacular.utils import extend_schema, OpenApiParameter
from drf_spectacular.types import OpenApiTypes
from itertools import chain
from operator import attrgetter
from apps.parks.models.reviews import ParkReview
from apps.rides.models.reviews import RideReview
from ..serializers.reviews import LatestReviewSerializer
class LatestReviewsAPIView(APIView):
"""
API endpoint to get the latest reviews from both parks and rides.
Returns a combined list of the most recent reviews across the platform,
including username, user avatar, date, score, and review snippet.
"""
permission_classes = [AllowAny]
@extend_schema(
summary="Get Latest Reviews",
description=(
"Retrieve the latest reviews from both parks and rides. "
"Returns a combined list sorted by creation date, including "
"user information, ratings, and content snippets."
),
parameters=[
OpenApiParameter(
name="limit",
type=OpenApiTypes.INT,
location=OpenApiParameter.QUERY,
description="Number of reviews to return (default: 20, max: 100)",
default=20,
),
],
responses={
200: LatestReviewSerializer(many=True),
},
tags=["Reviews"],
)
def get(self, request):
"""Get the latest reviews from both parks and rides."""
# Get limit parameter with validation
try:
limit = int(request.query_params.get("limit", 20))
limit = min(max(limit, 1), 100) # Clamp between 1 and 100
except (ValueError, TypeError):
limit = 20
# Get published reviews from both models
park_reviews = (
ParkReview.objects.filter(is_published=True)
.select_related("user", "user__profile", "park")
.order_by("-created_at")[:limit]
)
ride_reviews = (
RideReview.objects.filter(is_published=True)
.select_related("user", "user__profile", "ride", "ride__park")
.order_by("-created_at")[:limit]
)
# Combine and sort by created_at
all_reviews = sorted(
chain(park_reviews, ride_reviews),
key=attrgetter("created_at"),
reverse=True,
)[:limit]
# Serialize the combined results
serializer = LatestReviewSerializer(all_reviews, many=True)
return Response(
{"count": len(all_reviews), "results": serializer.data},
status=status.HTTP_200_OK,
)

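A standalone sketch of the merge pattern used above: take the newest N from each source, then interleave by timestamp with chain and sorted. Plain dataclass objects stand in for the ParkReview and RideReview querysets.

from dataclasses import dataclass
from datetime import datetime, timedelta
from itertools import chain
from operator import attrgetter


@dataclass
class FakeReview:
    created_at: datetime
    text: str


now = datetime.now()
park_reviews = [FakeReview(now - timedelta(hours=i), f"park {i}") for i in range(5)]
ride_reviews = [FakeReview(now - timedelta(hours=i, minutes=30), f"ride {i}") for i in range(5)]

limit = 4
latest = sorted(
    chain(park_reviews[:limit], ride_reviews[:limit]),
    key=attrgetter("created_at"),
    reverse=True,
)[:limit]
print([r.text for r in latest])  # the four most recent across both sources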
apps/api/v1/views/stats.py Normal file

@@ -0,0 +1,368 @@
"""
Statistics API views for ThrillWiki.
Provides aggregate statistics about the platform's content including
counts of parks, rides, manufacturers, and other entities.
"""
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework.permissions import AllowAny, IsAdminUser
from django.db.models import Count
from django.core.cache import cache
from django.utils import timezone
from drf_spectacular.utils import extend_schema, OpenApiExample
from datetime import datetime
from apps.parks.models import Park, ParkReview, ParkPhoto, Company as ParkCompany
from apps.rides.models import (
Ride,
RollerCoasterStats,
RideReview,
RidePhoto,
Company as RideCompany,
)
from ..serializers.stats import StatsSerializer
class StatsAPIView(APIView):
"""
API endpoint that returns aggregate statistics about the platform.
Returns counts of various entities like parks, rides, manufacturers, etc.
Results are cached for performance.
"""
permission_classes = [AllowAny]
def _get_relative_time(self, timestamp_str):
"""
Convert an ISO timestamp to a human-readable relative time.
Args:
timestamp_str: ISO format timestamp string
Returns:
str: Human-readable relative time (e.g., "2 days, 3 hours, and 15 minutes ago", "just now")
"""
if not timestamp_str or timestamp_str == "just_now":
return "just now"
try:
# Parse the ISO timestamp
if isinstance(timestamp_str, str):
timestamp = datetime.fromisoformat(timestamp_str.replace("Z", "+00:00"))
else:
timestamp = timestamp_str
# Make timezone-aware if needed
if timestamp.tzinfo is None:
timestamp = timezone.make_aware(timestamp)
now = timezone.now()
diff = now - timestamp
total_seconds = int(diff.total_seconds())
# If less than a minute, return "just now"
if total_seconds < 60:
return "just now"
# Calculate time components
days = diff.days
hours = (total_seconds % 86400) // 3600
minutes = (total_seconds % 3600) // 60
# Build the relative time string
parts = []
if days > 0:
parts.append(f'{days} day{"s" if days != 1 else ""}')
if hours > 0:
parts.append(f'{hours} hour{"s" if hours != 1 else ""}')
if minutes > 0:
parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}')
# Join parts with commas and add "ago"
if len(parts) == 0:
return "just now"
elif len(parts) == 1:
return f"{parts[0]} ago"
elif len(parts) == 2:
return f"{parts[0]} and {parts[1]} ago"
else:
return f'{", ".join(parts[:-1])}, and {parts[-1]} ago'
except (ValueError, TypeError):
return "unknown"
@extend_schema(
operation_id="get_platform_stats",
summary="Get platform statistics",
description="""
Returns comprehensive aggregate statistics about the ThrillWiki platform.
This endpoint provides detailed counts and breakdowns of all major entities including:
- Parks, rides, and roller coasters
- Companies (manufacturers, operators, designers, property owners)
- Photos and reviews
- Ride categories (roller coasters, dark rides, flat rides, etc.)
- Status breakdowns (operating, closed, under construction, etc.)
Results are cached for 5 minutes for optimal performance and automatically
invalidated when relevant data changes.
**No authentication required** - this is a public endpoint.
""".strip(),
responses={
200: StatsSerializer,
500: {
"type": "object",
"properties": {
"error": {
"type": "string",
"description": "Error message if statistics calculation fails",
}
},
},
},
tags=["Statistics"],
examples=[
OpenApiExample(
name="Sample Response",
description="Example of platform statistics response",
value={
"total_parks": 7,
"total_rides": 10,
"total_manufacturers": 6,
"total_operators": 7,
"total_designers": 4,
"total_property_owners": 0,
"total_roller_coasters": 8,
"total_photos": 0,
"total_park_photos": 0,
"total_ride_photos": 0,
"total_reviews": 8,
"total_park_reviews": 4,
"total_ride_reviews": 4,
"roller_coasters": 10,
"operating_parks": 7,
"operating_rides": 10,
"last_updated": "2025-08-28T17:34:59.677143+00:00",
"relative_last_updated": "just now",
},
)
],
)
def get(self, request):
"""Get platform statistics."""
# Try to get cached stats first
cache_key = "platform_stats"
cached_stats = cache.get(cache_key)
if cached_stats:
return Response(cached_stats, status=status.HTTP_200_OK)
# Calculate fresh stats
stats = self._calculate_stats()
# Cache for 5 minutes
cache.set(cache_key, stats, 300)
return Response(stats, status=status.HTTP_200_OK)
def _calculate_stats(self):
"""Calculate all platform statistics."""
# Basic entity counts
total_parks = Park.objects.count()
total_rides = Ride.objects.count()
# Company counts by role
total_manufacturers = RideCompany.objects.filter(
roles__contains=["MANUFACTURER"]
).count()
total_operators = ParkCompany.objects.filter(
roles__contains=["OPERATOR"]
).count()
total_designers = RideCompany.objects.filter(
roles__contains=["DESIGNER"]
).count()
total_property_owners = ParkCompany.objects.filter(
roles__contains=["PROPERTY_OWNER"]
).count()
# Photo counts (combined)
total_park_photos = ParkPhoto.objects.count()
total_ride_photos = RidePhoto.objects.count()
total_photos = total_park_photos + total_ride_photos
# Ride type counts
total_roller_coasters = RollerCoasterStats.objects.count()
# Ride category counts
ride_categories = (
Ride.objects.values("category")
.annotate(count=Count("id"))
.exclude(category="")
)
category_stats = {}
for category in ride_categories:
category_code = category["category"]
category_count = category["count"]
# Convert category codes to readable names
category_names = {
"RC": "roller_coasters",
"DR": "dark_rides",
"FR": "flat_rides",
"WR": "water_rides",
"TR": "transport_rides",
"OT": "other_rides",
}
category_name = category_names.get(
category_code, f"category_{category_code.lower()}"
)
category_stats[category_name] = category_count
# Park status counts
park_statuses = Park.objects.values("status").annotate(count=Count("id"))
park_status_stats = {}
for status_item in park_statuses:
status_code = status_item["status"]
status_count = status_item["count"]
# Convert status codes to readable names
status_names = {
"OPERATING": "operating_parks",
"CLOSED_TEMP": "temporarily_closed_parks",
"CLOSED_PERM": "permanently_closed_parks",
"UNDER_CONSTRUCTION": "under_construction_parks",
"DEMOLISHED": "demolished_parks",
"RELOCATED": "relocated_parks",
}
if status_code in status_names:
status_name = status_names[status_code]
else:
raise ValueError(f"Unknown park status: {status_code}")
park_status_stats[status_name] = status_count
# Ride status counts
ride_statuses = Ride.objects.values("status").annotate(count=Count("id"))
ride_status_stats = {}
for status_item in ride_statuses:
status_code = status_item["status"]
status_count = status_item["count"]
# Convert status codes to readable names
status_names = {
"OPERATING": "operating_rides",
"CLOSED_TEMP": "temporarily_closed_rides",
"SBNO": "sbno_rides",
"CLOSING": "closing_rides",
"CLOSED_PERM": "permanently_closed_rides",
"UNDER_CONSTRUCTION": "under_construction_rides",
"DEMOLISHED": "demolished_rides",
"RELOCATED": "relocated_rides",
}
status_name = status_names.get(
status_code, f"ride_status_{status_code.lower()}"
)
ride_status_stats[status_name] = status_count
# Review counts
total_park_reviews = ParkReview.objects.count()
total_ride_reviews = RideReview.objects.count()
total_reviews = total_park_reviews + total_ride_reviews
# Timestamp handling
now = timezone.now()
last_updated_iso = now.isoformat()
# Get cached timestamp or use current time
cached_timestamp = cache.get("platform_stats_timestamp")
if cached_timestamp and cached_timestamp != "just_now":
# Use cached timestamp for consistency
last_updated_iso = cached_timestamp
else:
# Set new timestamp in cache
cache.set("platform_stats_timestamp", last_updated_iso, 300)
# Calculate relative time
relative_last_updated = self._get_relative_time(last_updated_iso)
# Combine all stats
stats = {
# Core entity counts
"total_parks": total_parks,
"total_rides": total_rides,
"total_manufacturers": total_manufacturers,
"total_operators": total_operators,
"total_designers": total_designers,
"total_property_owners": total_property_owners,
"total_roller_coasters": total_roller_coasters,
# Photo counts
"total_photos": total_photos,
"total_park_photos": total_park_photos,
"total_ride_photos": total_ride_photos,
# Review counts
"total_reviews": total_reviews,
"total_park_reviews": total_park_reviews,
"total_ride_reviews": total_ride_reviews,
# Category breakdowns
**category_stats,
# Status breakdowns
**park_status_stats,
**ride_status_stats,
# Metadata
"last_updated": last_updated_iso,
"relative_last_updated": relative_last_updated,
}
return stats
class StatsRecalculateAPIView(APIView):
"""
Admin-only API endpoint to force recalculation of platform statistics.
This endpoint clears the cache and forces a fresh calculation of all statistics.
Only accessible to admin users.
"""
permission_classes = [IsAdminUser]
@extend_schema(exclude=True)
def post(self, request):
"""Force recalculation of platform statistics."""
# Clear the cache
cache.delete("platform_stats")
cache.delete("platform_stats_timestamp")
# Create a new StatsAPIView instance to reuse the calculation logic
stats_view = StatsAPIView()
fresh_stats = stats_view._calculate_stats()
# Cache the fresh stats
cache.set("platform_stats", fresh_stats, 300)
# Return success response with the fresh stats
return Response(
{
"message": "Platform statistics have been successfully recalculated",
"stats": fresh_stats,
"recalculated_at": timezone.now().isoformat(),
},
status=status.HTTP_200_OK,
)

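A standalone restatement of the relative-time arithmetic in _get_relative_time above, with worked examples of the output format (no Django timezone handling; pure timedelta math).

from datetime import timedelta


def humanize(delta: timedelta) -> str:
    total_seconds = int(delta.total_seconds())
    if total_seconds < 60:
        return "just now"
    days = delta.days
    hours = (total_seconds % 86400) // 3600
    minutes = (total_seconds % 3600) // 60
    parts = []
    if days:
        parts.append(f'{days} day{"s" if days != 1 else ""}')
    if hours:
        parts.append(f'{hours} hour{"s" if hours != 1 else ""}')
    if minutes:
        parts.append(f'{minutes} minute{"s" if minutes != 1 else ""}')
    if len(parts) == 1:
        return f"{parts[0]} ago"
    if len(parts) == 2:
        return f"{parts[0]} and {parts[1]} ago"
    return f'{", ".join(parts[:-1])}, and {parts[-1]} ago'


print(humanize(timedelta(seconds=45)))                   # just now
print(humanize(timedelta(hours=3, minutes=15)))          # 3 hours and 15 minutes ago
print(humanize(timedelta(days=2, hours=3, minutes=15)))  # 2 days, 3 hours, and 15 minutes ago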

@@ -0,0 +1,265 @@
"""
Trending content API views for ThrillWiki API v1.
This module contains endpoints for trending and new content discovery
including trending parks, rides, and recently added content.
"""
from datetime import datetime, date
from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response
from rest_framework.permissions import AllowAny, IsAdminUser
from rest_framework import status
from drf_spectacular.utils import extend_schema, extend_schema_view, OpenApiParameter
from drf_spectacular.types import OpenApiTypes
@extend_schema_view(
get=extend_schema(
summary="Get trending content",
description="Retrieve trending parks and rides based on view counts, ratings, and recency.",
parameters=[
OpenApiParameter(
name="limit",
location=OpenApiParameter.QUERY,
description="Number of trending items to return (default: 20, max: 100)",
required=False,
type=OpenApiTypes.INT,
default=20,
),
OpenApiParameter(
name="timeframe",
location=OpenApiParameter.QUERY,
description="Timeframe for trending calculation (day, week, month) - default: week",
required=False,
type=OpenApiTypes.STR,
enum=["day", "week", "month"],
default="week",
),
],
responses={200: OpenApiTypes.OBJECT},
tags=["Trending"],
),
)
class TrendingAPIView(APIView):
"""API endpoint for trending content."""
permission_classes = [AllowAny]
def get(self, request: Request) -> Response:
"""Get trending parks and rides."""
from apps.core.services.trending_service import trending_service
# Parse parameters
limit = min(int(request.query_params.get("limit", 20)), 100)
# Get trending content using direct calculation service
all_trending = trending_service.get_trending_content(limit=limit * 2)
# Separate by content type
trending_rides = []
trending_parks = []
for item in all_trending:
if item.get("category") == "ride":
trending_rides.append(item)
elif item.get("category") == "park":
trending_parks.append(item)
# Limit each category
trending_rides = trending_rides[: limit // 3] if trending_rides else []
trending_parks = trending_parks[: limit // 3] if trending_parks else []
# Latest reviews will be empty until review system is implemented
latest_reviews = []
# Return in expected frontend format
response_data = {
"trending_rides": trending_rides,
"trending_parks": trending_parks,
"latest_reviews": latest_reviews,
}
return Response(response_data)
@extend_schema_view(
post=extend_schema(
summary="Trigger trending content calculation",
description="Manually trigger the calculation of trending content using Django management commands. Admin access required.",
responses={
202: {
"type": "object",
"properties": {
"message": {"type": "string"},
"trending_completed": {"type": "boolean"},
"new_content_completed": {"type": "boolean"},
"completion_time": {"type": "string"},
},
},
403: {"description": "Admin access required"},
},
tags=["Trending"],
),
)
class TriggerTrendingCalculationAPIView(APIView):
"""API endpoint to manually trigger trending content calculation."""
permission_classes = [IsAdminUser]
def post(self, request: Request) -> Response:
"""Trigger trending content calculation using management commands."""
try:
from django.core.management import call_command
import io
from contextlib import redirect_stdout, redirect_stderr
# Capture command output
trending_output = io.StringIO()
new_content_output = io.StringIO()
trending_completed = False
new_content_completed = False
try:
# Run trending calculation command
with redirect_stdout(trending_output), redirect_stderr(trending_output):
call_command(
"calculate_trending", "--content-type=all", "--limit=50"
)
trending_completed = True
except Exception as e:
trending_output.write(f"Error: {str(e)}")
try:
# Run new content calculation command
with redirect_stdout(new_content_output), redirect_stderr(
new_content_output
):
call_command(
"calculate_new_content",
"--content-type=all",
"--days-back=30",
"--limit=50",
)
new_content_completed = True
except Exception as e:
new_content_output.write(f"Error: {str(e)}")
completion_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
return Response(
{
"message": "Trending content calculation completed",
"trending_completed": trending_completed,
"new_content_completed": new_content_completed,
"completion_time": completion_time,
"trending_output": trending_output.getvalue(),
"new_content_output": new_content_output.getvalue(),
},
status=status.HTTP_202_ACCEPTED,
)
except Exception as e:
return Response(
{
"error": "Failed to trigger trending content calculation",
"details": str(e),
},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@extend_schema_view(
get=extend_schema(
summary="Get new content",
description="Retrieve recently added parks and rides.",
parameters=[
OpenApiParameter(
name="limit",
location=OpenApiParameter.QUERY,
description="Number of new items to return (default: 20, max: 100)",
required=False,
type=OpenApiTypes.INT,
default=20,
),
OpenApiParameter(
name="days",
location=OpenApiParameter.QUERY,
description="Number of days to look back for new content (default: 30, max: 365)",
required=False,
type=OpenApiTypes.INT,
default=30,
),
],
responses={200: OpenApiTypes.OBJECT},
tags=["Trending"],
),
)
class NewContentAPIView(APIView):
"""API endpoint for new content."""
permission_classes = [AllowAny]
def get(self, request: Request) -> Response:
"""Get new parks and rides."""
from apps.core.services.trending_service import trending_service
# Parse parameters
limit = min(int(request.query_params.get("limit", 20)), 100)
days_back = min(int(request.query_params.get("days", 30)), 365)
# Get new content using direct calculation service
all_new_content = trending_service.get_new_content(
limit=limit * 2, days_back=days_back
)
recently_added = []
newly_opened = []
upcoming = []
# Categorize items based on date
today = date.today()
for item in all_new_content:
date_added = item.get("date_added", "")
if date_added:
try:
# Parse the date string
if isinstance(date_added, str):
item_date = datetime.fromisoformat(date_added).date()
else:
item_date = date_added
# Calculate days difference
days_diff = (today - item_date).days
if days_diff <= 30: # Recently added (last 30 days)
recently_added.append(item)
elif days_diff <= 365: # Newly opened (last year)
newly_opened.append(item)
else: # Older items
newly_opened.append(item)
except (ValueError, TypeError):
# If date parsing fails, add to recently added
recently_added.append(item)
else:
recently_added.append(item)
# Upcoming items will be empty until future content system is implemented
upcoming = []
# Limit each category
recently_added = recently_added[: limit // 3] if recently_added else []
newly_opened = newly_opened[: limit // 3] if newly_opened else []
# Return in expected frontend format
response_data = {
"recently_added": recently_added,
"newly_opened": newly_opened,
"upcoming": upcoming,
}
return Response(response_data)
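
A standalone sketch of the date-bucketing rule in NewContentAPIView: anything added in the last 30 days counts as recently added, older items fall into newly opened, and upcoming stays empty until a future-content system exists. The input dicts mirror the date_added field used above.

from datetime import date, datetime


def bucket(items, today=None):
    today = today or date.today()
    recently_added, newly_opened = [], []
    for item in items:
        raw = item.get("date_added", "")
        if not raw:
            recently_added.append(item)
            continue
        try:
            item_date = datetime.fromisoformat(raw).date() if isinstance(raw, str) else raw
        except (ValueError, TypeError):
            recently_added.append(item)
            continue
        target = recently_added if (today - item_date).days <= 30 else newly_opened
        target.append(item)
    return recently_added, newly_opened


recent, opened = bucket(
    [{"date_added": "2025-09-01"}, {"date_added": "2024-05-01"}, {"date_added": ""}],
    today=date(2025, 9, 21),
)
print(len(recent), len(opened))  # 2 1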