Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git, synced 2026-02-05 13:15:17 -05:00
feat: Implement a new notifications application, add admin API views for dashboard metrics, introduce scheduled tasks, and update API routing and project configurations.
backend/apps/api/v1/admin/views.py (new file, 710 lines)
@@ -0,0 +1,710 @@
"""
Admin API views for dashboard functionality.

These views provide endpoints for:
- OSM cache statistics
- Rate limiting metrics
- Database manager operations
- Celery task status
"""

import logging
from datetime import timedelta
from typing import Any

from django.apps import apps
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.db import transaction
from django.db.models import Count, Q
from django.utils import timezone
from rest_framework import status
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.views import APIView

from apps.core.utils import capture_and_log

logger = logging.getLogger(__name__)
User = get_user_model()


class OSMUsageStatsView(APIView):
    """
    GET /admin/osm-usage-stats/

    Return OSM cache statistics for the admin dashboard.
    """

    permission_classes = [IsAdminUser]

    def get(self, request):
        """Return OSM/location cache usage statistics."""
        try:
            # Serve cached stats when available
            cached_stats = cache.get("osm_usage_stats")
            if cached_stats:
                return Response(cached_stats)

            # Calculate fresh stats
            now = timezone.now()
            last_24h = now - timedelta(hours=24)

            # Use the location query cache model if the maps app provides one
            try:
                LocationQueryCache = apps.get_model("maps", "LocationQueryCache")
                has_cache_model = True
            except LookupError:
                has_cache_model = False

            if has_cache_model:
                total_queries = LocationQueryCache.objects.count()
                recent_queries = LocationQueryCache.objects.filter(
                    created_at__gte=last_24h
                ).count()
                cache_hits = LocationQueryCache.objects.filter(
                    access_count__gt=1
                ).count()

                stats = {
                    "timeWindow": "24h",
                    "totalSearches": recent_queries,
                    "cacheHits": cache_hits,
                    "cacheMisses": max(0, recent_queries - cache_hits),
                    "apiCalls": max(0, recent_queries - cache_hits),
                    "errors": 0,
                    "cacheHitRate": (
                        round(cache_hits / total_queries * 100, 2)
                        if total_queries > 0
                        else 0
                    ),
                    "avgResponseTime": 0,  # Would need request logging
                    "totalCachedQueries": total_queries,
                    "totalCacheAccesses": cache_hits,
                    "hourlyData": [],
                    "apiCallsSaved": cache_hits,
                    "estimatedCost": {
                        "callsMade": max(0, recent_queries - cache_hits),
                        "callsSaved": cache_hits,
                        "savings": f"${cache_hits * 0.001:.2f}",  # Estimated
                    },
                }
            else:
                # Return empty stats if no cache model is installed
                stats = {
                    "timeWindow": "24h",
                    "totalSearches": 0,
                    "cacheHits": 0,
                    "cacheMisses": 0,
                    "apiCalls": 0,
                    "errors": 0,
                    "cacheHitRate": 0,
                    "avgResponseTime": 0,
                    "totalCachedQueries": 0,
                    "totalCacheAccesses": 0,
                    "hourlyData": [],
                    "apiCallsSaved": 0,
                    "estimatedCost": {
                        "callsMade": 0,
                        "callsSaved": 0,
                        "savings": "$0.00",
                    },
                }

            # Cache for 5 minutes
            cache.set("osm_usage_stats", stats, 300)
            return Response(stats)

        except Exception as e:
            capture_and_log(e, "OSM usage stats - error", source="api")
            return Response(
                {"detail": "Failed to fetch OSM usage stats"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
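
# A hypothetical client call (the exact prefix depends on how the API urlconf
# mounts these views; the docstring path is relative to the admin API root):
#
#   curl -H "Authorization: Token <admin-token>" \
#        https://example.com/api/v1/admin/osm-usage-stats/
#
# Responses are served from the "osm_usage_stats" cache key for up to 5 minutes.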


class RateLimitMetricsView(APIView):
    """
    POST /admin/rate-limit-metrics/

    Return rate limiting metrics for the admin dashboard.
    """

    permission_classes = [IsAdminUser]

    def post(self, request):
        """Return rate limit metrics based on the requested action."""
        try:
            action = request.data.get("action", "stats")
            time_window = request.data.get("timeWindow", 60000)  # ms
            limit = request.data.get("limit", 100)  # reserved for log queries

            # Convert time_window from ms to seconds
            time_window_seconds = time_window / 1000 if time_window else 60
            cutoff = timezone.now() - timedelta(seconds=time_window_seconds)

            if action == "stats":
                # Return aggregate statistics. A real implementation would
                # aggregate a rate-limit log table filtered by `cutoff` and
                # capped at `limit` rows; until then this is a zeroed shape.
                stats = {
                    "totalRequests": 0,
                    "allowedRequests": 0,
                    "blockedRequests": 0,
                    "blockRate": 0,
                    "uniqueIPs": 0,
                    "uniqueUsers": 0,
                    "topBlockedIPs": [],
                    "topBlockedUsers": [],
                    "tierDistribution": {
                        "anonymous": 0,
                        "authenticated": 0,
                        "premium": 0,
                        "admin": 0,
                    },
                }
                return Response(stats)

            elif action == "recent":
                # Return recent rate limit events (placeholder)
                return Response([])

            elif action == "function":
                # Return metrics for a specific function (placeholder)
                function_name = request.data.get("functionName", "")
                return Response([])

            elif action == "user":
                # Return metrics for a specific user (placeholder)
                user_id = request.data.get("userId", "")
                return Response([])

            elif action == "ip":
                # Return metrics for a specific IP (placeholder)
                client_ip = request.data.get("clientIP", "")
                return Response([])

            return Response(
                {"detail": f"Unknown action: {action}"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        except Exception as e:
            capture_and_log(e, "Rate limit metrics - error", source="api")
            return Response(
                {"detail": "Failed to fetch rate limit metrics"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
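
# Request sketch (illustrative values; keys mirror the parsing above):
#
#   POST /api/v1/admin/rate-limit-metrics/
#   {"action": "stats", "timeWindow": 60000, "limit": 100}
#
# Actions "recent", "function", "user", and "ip" are accepted but return empty
# lists until a rate-limit log table exists to back them.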


class DatabaseManagerView(APIView):
    """
    POST /admin/database-manager/

    Handle admin CRUD operations for entities.
    """

    permission_classes = [IsAdminUser]

    # Map entity types to Django models
    ENTITY_MODEL_MAP = {
        "parks": ("parks", "Park"),
        "rides": ("rides", "Ride"),
        "companies": ("companies", "Company"),
        "reviews": ("reviews", "Review"),
        "blog_posts": ("blog", "BlogPost"),
        "photos": ("media", "Photo"),
        "lists": ("lists", "List"),
        "profiles": ("accounts", "UserProfile"),
    }

    def post(self, request):
        """Dispatch to the appropriate handler based on operation."""
        operation = None  # bound early so the except clause can report it
        try:
            operation = request.data.get("operation")
            entity_type = request.data.get("entityType")
            entity_id = request.data.get("entityId")
            data = request.data.get("data", {})
            change_reason = request.data.get("changeReason", "Admin operation")

            if not operation:
                return Response(
                    {"detail": "operation is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if not entity_type:
                return Response(
                    {"detail": "entityType is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Get the model class
            model_info = self.ENTITY_MODEL_MAP.get(entity_type)
            if not model_info:
                return Response(
                    {"detail": f"Unknown entity type: {entity_type}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            try:
                Model = apps.get_model(model_info[0], model_info[1])
            except LookupError:
                return Response(
                    {"detail": f"Model not found for {entity_type}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # Dispatch to handler
            handlers = {
                "create": self._handle_create,
                "update": self._handle_update,
                "delete": self._handle_delete,
                "restore": self._handle_restore,
                "permanent-delete": self._handle_permanent_delete,
                "bulk-update-status": self._handle_bulk_update_status,
                "bulk-delete": self._handle_bulk_delete,
                "bulk-restore": self._handle_bulk_restore,
                "bulk-permanent-delete": self._handle_bulk_permanent_delete,
                "get-dependencies": self._handle_get_dependencies,
            }

            handler = handlers.get(operation)
            if not handler:
                return Response(
                    {"detail": f"Unknown operation: {operation}"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            return handler(Model, entity_type, entity_id, data, change_reason, request)

        except Exception as e:
            capture_and_log(
                e, f"Database manager - {operation} error", source="api"
            )
            return Response(
                {"detail": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
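
    # Example dispatch payload (illustrative values; the keys match post() above):
    #
    #   POST /api/v1/admin/database-manager/
    #   {
    #       "operation": "update",
    #       "entityType": "parks",
    #       "entityId": "123",
    #       "data": {"name": "Corrected Park Name"},
    #       "changeReason": "Fix typo in park name"
    #   }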

    def _handle_create(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Create a new entity."""
        with transaction.atomic():
            instance = Model.objects.create(**data)
            return Response(
                {
                    "success": True,
                    "data": {"id": str(instance.pk)},
                    "message": f"{entity_type} created successfully",
                }
            )

    def _handle_update(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Update an existing entity."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for update"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            try:
                instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            for key, value in data.items():
                if hasattr(instance, key):
                    setattr(instance, key, value)
            instance.save()

            return Response(
                {
                    "success": True,
                    "data": {"id": str(instance.pk)},
                    "message": f"{entity_type} updated successfully",
                }
            )

    def _handle_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Soft delete an entity (set status to deleted)."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for delete"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            try:
                instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            # Try soft delete first (set status)
            if hasattr(instance, "status"):
                instance.status = "deleted"
                instance.save()
            elif hasattr(instance, "is_deleted"):
                instance.is_deleted = True
                instance.save()
            elif hasattr(instance, "deleted_at"):
                instance.deleted_at = timezone.now()
                instance.save()
            else:
                # Hard delete if no soft delete field exists
                instance.delete()

            return Response(
                {
                    "success": True,
                    "data": {"id": str(entity_id)},
                    "message": f"{entity_type} deleted successfully",
                }
            )
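
    # Soft-delete precedence above: a `status` field wins, then `is_deleted`,
    # then `deleted_at`; models with none of these fall through to a hard
    # delete. A hypothetical model exposing only `deleted_at` would therefore
    # be stamped with timezone.now() instead of losing its row.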

    def _handle_restore(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Restore a soft-deleted entity."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for restore"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        new_status = data.get("status", "draft")

        with transaction.atomic():
            try:
                # Try to fetch even soft-deleted entities
                try:
                    instance = Model.all_objects.get(pk=entity_id)
                except AttributeError:
                    # Model doesn't have an all_objects manager
                    instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            if hasattr(instance, "status"):
                instance.status = new_status
                instance.save()
            elif hasattr(instance, "is_deleted"):
                instance.is_deleted = False
                instance.save()
            elif hasattr(instance, "deleted_at"):
                instance.deleted_at = None
                instance.save()

            return Response(
                {
                    "success": True,
                    "data": {"id": str(entity_id)},
                    "message": f"{entity_type} restored successfully",
                }
            )

    def _handle_permanent_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Permanently delete an entity."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required for permanent-delete"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            try:
                # Try to fetch even soft-deleted entities
                try:
                    instance = Model.all_objects.get(pk=entity_id)
                except AttributeError:
                    instance = Model.objects.get(pk=entity_id)
            except Model.DoesNotExist:
                return Response(
                    {"detail": f"{entity_type} not found"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            instance.delete()

            return Response(
                {
                    "success": True,
                    "data": {"id": str(entity_id)},
                    "message": f"{entity_type} permanently deleted",
                }
            )

    def _handle_bulk_update_status(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk update the status of multiple entities."""
        entity_ids = data.get("entityIds", [])
        new_status = data.get("status")

        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if not new_status:
            return Response(
                {"detail": "status is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            updated = Model.objects.filter(pk__in=entity_ids).update(status=new_status)

            return Response(
                {
                    "success": True,
                    "bulk": {
                        "successCount": updated,
                        "failedCount": len(entity_ids) - updated,
                    },
                    "message": f"Updated {updated} {entity_type}",
                }
            )
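
    # Example bulk payload (illustrative): bulk operations read their targets
    # from data["entityIds"] rather than the top-level entityId:
    #
    #   {"operation": "bulk-update-status", "entityType": "rides",
    #    "data": {"entityIds": ["1", "2", "3"], "status": "published"}}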

    def _handle_bulk_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk soft delete multiple entities."""
        entity_ids = data.get("entityIds", [])

        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            if hasattr(Model, "status"):
                updated = Model.objects.filter(pk__in=entity_ids).update(
                    status="deleted"
                )
            else:
                # Models with neither field raise FieldError, surfaced by post()
                updated = Model.objects.filter(pk__in=entity_ids).update(
                    is_deleted=True
                )

            return Response(
                {
                    "success": True,
                    "bulk": {
                        "successCount": updated,
                        "failedCount": len(entity_ids) - updated,
                    },
                    "message": f"Deleted {updated} {entity_type}",
                }
            )

    def _handle_bulk_restore(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk restore soft-deleted entities."""
        entity_ids = data.get("entityIds", [])
        new_status = data.get("status", "draft")

        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            try:
                updated = Model.all_objects.filter(pk__in=entity_ids).update(
                    status=new_status
                )
            except AttributeError:
                updated = Model.objects.filter(pk__in=entity_ids).update(
                    status=new_status
                )

            return Response(
                {
                    "success": True,
                    "bulk": {
                        "successCount": updated,
                        "failedCount": len(entity_ids) - updated,
                    },
                    "message": f"Restored {updated} {entity_type}",
                }
            )

    def _handle_bulk_permanent_delete(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Bulk permanently delete entities."""
        entity_ids = data.get("entityIds", [])

        if not entity_ids:
            return Response(
                {"detail": "entityIds is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        with transaction.atomic():
            try:
                deleted, _ = Model.all_objects.filter(pk__in=entity_ids).delete()
            except AttributeError:
                deleted, _ = Model.objects.filter(pk__in=entity_ids).delete()

            return Response(
                {
                    "success": True,
                    "bulk": {
                        "successCount": deleted,
                        # delete() counts cascaded rows too, so clamp at zero
                        "failedCount": max(0, len(entity_ids) - deleted),
                    },
                    "message": f"Permanently deleted {deleted} {entity_type}",
                }
            )

    def _handle_get_dependencies(
        self, Model, entity_type, entity_id, data, change_reason, request
    ):
        """Get dependencies for an entity before deletion."""
        if not entity_id:
            return Response(
                {"detail": "entityId is required"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        try:
            instance = Model.objects.get(pk=entity_id)
        except Model.DoesNotExist:
            return Response(
                {"detail": f"{entity_type} not found"},
                status=status.HTTP_404_NOT_FOUND,
            )

        # Count related objects for each reverse or many-to-many relation
        dependencies = []
        for rel in instance._meta.get_fields():
            if rel.one_to_many or rel.one_to_one or rel.many_to_many:
                try:
                    related_name = rel.get_accessor_name()
                    related_manager = getattr(instance, related_name, None)
                    if related_manager and hasattr(related_manager, "count"):
                        count = related_manager.count()
                        if count > 0:
                            dependencies.append(
                                {
                                    "type": rel.related_model._meta.verbose_name_plural,
                                    "count": count,
                                }
                            )
                except Exception:
                    pass

        return Response(
            {
                "success": True,
                "dependencies": dependencies,
                "hasDependencies": len(dependencies) > 0,
            }
        )
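
# Example get-dependencies response (values illustrative):
#
#   {"success": true,
#    "dependencies": [{"type": "reviews", "count": 12}],
#    "hasDependencies": true}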


class CeleryTaskStatusView(APIView):
    """
    GET /admin/tasks/status/

    Return Celery task status (read-only).
    """

    permission_classes = [IsAdminUser]

    # List of known scheduled tasks
    SCHEDULED_TASKS = [
        {
            "name": "process_scheduled_deletions",
            "display_name": "Process Scheduled Deletions",
            "schedule": "daily at midnight",
        },
        {
            "name": "process_closing_entities",
            "display_name": "Process Closing Entities",
            "schedule": "daily at midnight",
        },
        {
            "name": "process_expired_bans",
            "display_name": "Process Expired Bans",
            "schedule": "every 15 minutes",
        },
        {
            "name": "cleanup_orphaned_images",
            "display_name": "Cleanup Orphaned Images",
            "schedule": "weekly on Sunday",
        },
        {
            "name": "cleanup_old_versions",
            "display_name": "Cleanup Old Versions",
            "schedule": "weekly on Sunday",
        },
        {
            "name": "data_retention_cleanup",
            "display_name": "Data Retention Cleanup",
            "schedule": "daily at 3 AM",
        },
    ]
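
    # get() below reads last-run metadata from cache. A task wanting to show
    # up here would record itself on completion, e.g. (hypothetical sketch;
    # `task_name` and `elapsed_seconds` are assumed names):
    #
    #   cache.set(
    #       f"task_last_run_{task_name}",
    #       {
    #           "timestamp": timezone.now().isoformat(),
    #           "result": "success",
    #           "duration": elapsed_seconds,
    #       },
    #       timeout=None,  # keep until the next run overwrites it
    #   )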

    def get(self, request):
        """Return status of all scheduled tasks."""
        try:
            task_name = request.query_params.get("task")

            tasks_status = []
            for task_info in self.SCHEDULED_TASKS:
                # Get last-run info from cache
                cache_key = f"task_last_run_{task_info['name']}"
                last_run_info = cache.get(cache_key, {})

                task_status = {
                    "name": task_info["name"],
                    "displayName": task_info["display_name"],
                    "schedule": task_info["schedule"],
                    "lastRun": last_run_info.get("timestamp"),
                    "lastResult": last_run_info.get("result", "unknown"),
                    "lastDuration": last_run_info.get("duration"),
                    "status": "scheduled",
                }

                if task_name and task_name == task_info["name"]:
                    return Response(task_status)

                tasks_status.append(task_status)

            if task_name:
                return Response(
                    {"detail": f"Unknown task: {task_name}"},
                    status=status.HTTP_404_NOT_FOUND,
                )

            return Response(
                {
                    "tasks": tasks_status,
                    "totalTasks": len(tasks_status),
                }
            )

        except Exception as e:
            capture_and_log(e, "Celery task status - error", source="api")
            return Response(
                {"detail": "Failed to fetch task status"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )
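

# Routing sketch (hedged): the commit message says API routing was updated;
# the registrations likely resemble the following, with the module layout and
# exact prefix being assumptions, not confirmed by this diff:
#
#   from django.urls import path
#   from .views import (
#       CeleryTaskStatusView,
#       DatabaseManagerView,
#       OSMUsageStatsView,
#       RateLimitMetricsView,
#   )
#
#   urlpatterns = [
#       path("admin/osm-usage-stats/", OSMUsageStatsView.as_view()),
#       path("admin/rate-limit-metrics/", RateLimitMetricsView.as_view()),
#       path("admin/database-manager/", DatabaseManagerView.as_view()),
#       path("admin/tasks/status/", CeleryTaskStatusView.as_view()),
#   ]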