Add comprehensive tests for Parks API and models

- Implemented extensive test cases for the Parks API, covering endpoints for listing, retrieving, creating, updating, and deleting parks.
- Added tests for filtering, searching, and ordering parks in the API.
- Created tests for error handling in the API, including malformed JSON and unsupported methods.
- Developed model tests for Park, ParkArea, Company, and ParkReview models, ensuring validation and constraints are enforced.
- Introduced utility mixins for API and model testing to streamline assertions and enhance test readability (an illustrative sketch of one such mixin follows this list).
- Included integration tests to validate complete workflows involving park creation, retrieval, updating, and deletion.
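
The mixins themselves are not shown in this excerpt. As a rough illustration only, a helper of this kind might look like the following; the class and method names are hypothetical, and it assumes Django REST Framework test responses:

from rest_framework import status


class APIAssertionsMixin:
    """Illustrative helper mixin; intended to be combined with APITestCase."""

    def assertOK(self, response):
        # Expect any successful 2xx response and return the parsed JSON body.
        self.assertIn(
            response.status_code,
            (status.HTTP_200_OK, status.HTTP_201_CREATED, status.HTTP_204_NO_CONTENT),
        )
        return response.json() if response.content else None

    def assertValidationError(self, response, field):
        # Expect a 400 response that reports an error for the given field.
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertIn(field, response.json())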
pacnpal
2025-08-17 19:36:20 -04:00
parent 17228e9935
commit c26414ff74
210 changed files with 24155 additions and 833 deletions

moderation/selectors.py Normal file

@@ -0,0 +1,305 @@
"""
Selectors for moderation-related data retrieval.
Following Django styleguide pattern for separating data access from business logic.
"""
from typing import Optional, Dict, Any
from django.db.models import QuerySet, Q, Count
from django.utils import timezone
from datetime import timedelta
from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from .models import EditSubmission
def pending_submissions_for_review(
    *,
    content_type: Optional[str] = None,
    limit: int = 50
) -> QuerySet[EditSubmission]:
    """
    Get pending submissions that need moderation review.

    Args:
        content_type: Optional filter by content type name
        limit: Maximum number of submissions to return

    Returns:
        QuerySet of pending submissions ordered by submission date
    """
    queryset = EditSubmission.objects.filter(
        status='PENDING'
    ).select_related(
        'submitted_by',
        'content_type'
    ).prefetch_related(
        'content_object'
    )

    if content_type:
        queryset = queryset.filter(content_type__model=content_type.lower())

    return queryset.order_by('submitted_at')[:limit]


def submissions_by_user(
    *,
    user_id: int,
    status: Optional[str] = None
) -> QuerySet[EditSubmission]:
    """
    Get submissions created by a specific user.

    Args:
        user_id: ID of the user who submitted
        status: Optional filter by submission status

    Returns:
        QuerySet of the user's submissions
    """
    queryset = EditSubmission.objects.filter(
        submitted_by_id=user_id
    ).select_related(
        'content_type',
        'handled_by'
    )

    if status:
        queryset = queryset.filter(status=status)

    return queryset.order_by('-submitted_at')


def submissions_handled_by_moderator(
    *,
    moderator_id: int,
    days: int = 30
) -> QuerySet[EditSubmission]:
    """
    Get submissions handled by a specific moderator in the last N days.

    Args:
        moderator_id: ID of the moderator
        days: Number of days to look back

    Returns:
        QuerySet of submissions handled by the moderator
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    return EditSubmission.objects.filter(
        handled_by_id=moderator_id,
        handled_at__gte=cutoff_date
    ).select_related(
        'submitted_by',
        'content_type'
    ).order_by('-handled_at')


def recent_submissions(*, days: int = 7) -> QuerySet[EditSubmission]:
    """
    Get recent submissions from the last N days.

    Args:
        days: Number of days to look back

    Returns:
        QuerySet of recent submissions
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    return EditSubmission.objects.filter(
        submitted_at__gte=cutoff_date
    ).select_related(
        'submitted_by',
        'content_type',
        'handled_by'
    ).order_by('-submitted_at')


def submissions_by_content_type(
    *,
    content_type: str,
    status: Optional[str] = None
) -> QuerySet[EditSubmission]:
    """
    Get submissions for a specific content type.

    Args:
        content_type: Name of the content type (e.g., 'park', 'ride')
        status: Optional filter by submission status

    Returns:
        QuerySet of submissions for the content type
    """
    queryset = EditSubmission.objects.filter(
        content_type__model=content_type.lower()
    ).select_related(
        'submitted_by',
        'handled_by'
    )

    if status:
        queryset = queryset.filter(status=status)

    return queryset.order_by('-submitted_at')


def moderation_queue_summary() -> Dict[str, Any]:
    """
    Get summary statistics for the moderation queue.

    Returns:
        Dictionary containing queue statistics
    """
    pending_count = EditSubmission.objects.filter(status='PENDING').count()
    approved_today = EditSubmission.objects.filter(
        status='APPROVED',
        handled_at__date=timezone.now().date()
    ).count()
    rejected_today = EditSubmission.objects.filter(
        status='REJECTED',
        handled_at__date=timezone.now().date()
    ).count()

    # Submissions by content type
    submissions_by_type = EditSubmission.objects.filter(
        status='PENDING'
    ).values('content_type__model').annotate(
        count=Count('id')
    ).order_by('-count')

    return {
        'pending_count': pending_count,
        'approved_today': approved_today,
        'rejected_today': rejected_today,
        'submissions_by_type': list(submissions_by_type)
    }


def moderation_statistics_summary(
    *,
    days: int = 30,
    moderator: Optional[User] = None
) -> Dict[str, Any]:
    """
    Get comprehensive moderation statistics for a time period.

    Args:
        days: Number of days to analyze
        moderator: Optional filter by specific moderator

    Returns:
        Dictionary containing detailed moderation statistics
    """
    cutoff_date = timezone.now() - timedelta(days=days)
    base_queryset = EditSubmission.objects.filter(
        submitted_at__gte=cutoff_date
    )

    if moderator:
        handled_queryset = base_queryset.filter(handled_by=moderator)
    else:
        handled_queryset = base_queryset

    total_submissions = base_queryset.count()
    pending_submissions = base_queryset.filter(status='PENDING').count()
    approved_submissions = handled_queryset.filter(status='APPROVED').count()
    rejected_submissions = handled_queryset.filter(status='REJECTED').count()

    # Response time analysis (only for handled submissions).
    # Note: EXTRACT(EPOCH FROM ...) is PostgreSQL-specific SQL.
    handled_with_times = handled_queryset.exclude(
        handled_at__isnull=True
    ).extra(
        select={
            'response_hours': 'EXTRACT(EPOCH FROM (handled_at - submitted_at)) / 3600'
        }
    ).values_list('response_hours', flat=True)

    avg_response_time = None
    if handled_with_times:
        avg_response_time = sum(handled_with_times) / len(handled_with_times)

    handled_total = approved_submissions + rejected_submissions
    approval_rate = (
        approved_submissions / handled_total * 100 if handled_total > 0 else 0
    )

    return {
        'period_days': days,
        'total_submissions': total_submissions,
        'pending_submissions': pending_submissions,
        'approved_submissions': approved_submissions,
        'rejected_submissions': rejected_submissions,
        'approval_rate': approval_rate,
        'average_response_time_hours': avg_response_time,
        'moderator': moderator.username if moderator else None
    }


def submissions_needing_attention(*, hours: int = 24) -> QuerySet[EditSubmission]:
    """
    Get pending submissions that have been waiting for more than N hours.

    Args:
        hours: Number of hours threshold for attention

    Returns:
        QuerySet of submissions needing attention
    """
    cutoff_time = timezone.now() - timedelta(hours=hours)

    return EditSubmission.objects.filter(
        status='PENDING',
        submitted_at__lte=cutoff_time
    ).select_related(
        'submitted_by',
        'content_type'
    ).order_by('submitted_at')


def top_contributors(*, days: int = 30, limit: int = 10) -> QuerySet[User]:
    """
    Get users who have submitted the most content in the last N days.

    Args:
        days: Number of days to analyze
        limit: Maximum number of users to return

    Returns:
        QuerySet of top contributing users
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    return User.objects.filter(
        edit_submissions__submitted_at__gte=cutoff_date
    ).annotate(
        submission_count=Count('edit_submissions')
    ).filter(
        submission_count__gt=0
    ).order_by('-submission_count')[:limit]


def moderator_workload_summary(*, days: int = 30) -> Dict[str, Any]:
    """
    Get workload distribution among moderators.

    Args:
        days: Number of days to analyze

    Returns:
        Dictionary containing moderator workload statistics
    """
    cutoff_date = timezone.now() - timedelta(days=days)

    moderator_stats = User.objects.filter(
        handled_submissions__handled_at__gte=cutoff_date
    ).annotate(
        handled_count=Count('handled_submissions')
    ).filter(
        handled_count__gt=0
    ).order_by('-handled_count').values(
        'username', 'handled_count'
    )

    return {
        'period_days': days,
        'moderator_stats': list(moderator_stats)
    }
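
As a usage illustration (not part of this commit's diff), the selectors above could be consumed from a view, management command, or shell session along the following lines. The wrapper function is hypothetical, and the import path assumes the app is installed as "moderation":

from moderation.selectors import (
    moderation_queue_summary,
    pending_submissions_for_review,
)


def print_queue_overview():
    # Summarise the queue, then list the oldest pending 'park' submissions.
    summary = moderation_queue_summary()
    print(f"Pending: {summary['pending_count']} (approved today: {summary['approved_today']})")
    for submission in pending_submissions_for_review(content_type='park', limit=10):
        print(submission.pk, submission.status, submission.submitted_at)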