mirror of
https://github.com/pacnpal/thrillwiki_django_no_react.git
synced 2025-12-22 04:11:09 -05:00
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,217 +0,0 @@
|
||||
"""
|
||||
Django management command to calculate new content.
|
||||
|
||||
This replaces the Celery task for calculating new content.
|
||||
Run with: uv run manage.py calculate_new_content
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils import timezone
|
||||
from django.core.cache import cache
|
||||
from django.db.models import Q
|
||||
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Calculate recently added parks and rides and cache the results.

    This replaces the Celery task for calculating new content.
    Run with: uv run manage.py calculate_new_content

    "New" content is anything created or opened within the ``--days-back``
    window. Results are serialized to plain dicts, sorted most-recent-first,
    truncated to ``--limit``, and cached for 30 minutes for API consumption.
    """

    help = "Calculate new content and cache results"

    def add_arguments(self, parser):
        parser.add_argument(
            "--content-type",
            type=str,
            default="all",
            choices=["all", "parks", "rides"],
            help="Type of content to calculate (default: all)",
        )
        parser.add_argument(
            "--days-back",
            type=int,
            default=30,
            help="Number of days to look back for new content (default: 30)",
        )
        parser.add_argument(
            "--limit",
            type=int,
            default=50,
            help="Maximum number of results to calculate (default: 50)",
        )
        parser.add_argument(
            "--verbose", action="store_true", help="Enable verbose output"
        )

    def handle(self, *args, **options):
        """Entry point: gather new parks/rides, format, cache, and report.

        Raises:
            CommandError: if any step fails; the original exception is
                chained so the full traceback is preserved in logs.
        """
        content_type = options["content_type"]
        days_back = options["days_back"]
        limit = options["limit"]
        verbose = options["verbose"]

        if verbose:
            self.stdout.write(f"Starting new content calculation for {content_type}")

        try:
            cutoff_date = timezone.now() - timedelta(days=days_back)
            new_items = []

            # In "all" mode each category over-fetches (limit * 2) so the
            # merged, globally sorted list still has enough candidates
            # after the final slice.
            if content_type in ["all", "parks"]:
                parks = self._get_new_parks(
                    cutoff_date, limit if content_type == "parks" else limit * 2
                )
                new_items.extend(parks)
                if verbose:
                    self.stdout.write(f"Found {len(parks)} new parks")

            if content_type in ["all", "rides"]:
                rides = self._get_new_rides(
                    cutoff_date, limit if content_type == "rides" else limit * 2
                )
                new_items.extend(rides)
                if verbose:
                    self.stdout.write(f"Found {len(rides)} new rides")

            # Sort by date added (most recent first) and apply limit.
            # "date_added" holds ISO date strings, which sort chronologically.
            new_items.sort(key=lambda x: x.get("date_added", ""), reverse=True)
            new_items = new_items[:limit]

            # Format results for API consumption
            formatted_results = self._format_new_content_results(new_items)

            # Cache results for 30 minutes
            cache_key = f"new_content:calculated:{content_type}:{days_back}:{limit}"
            cache.set(cache_key, formatted_results, 1800)

            self.stdout.write(
                self.style.SUCCESS(
                    f"Successfully calculated {len(formatted_results)} new items for {content_type}"
                )
            )

            if verbose:
                for item in formatted_results[:5]:  # Show first 5 items
                    self.stdout.write(
                        f" {item['name']} ({item['park']}) - opened: {item['date_opened']}"
                    )

        except Exception as e:
            # Lazy %s args avoid formatting when the log level is disabled;
            # "from e" chains the cause into the CommandError traceback.
            logger.error("Error calculating new content: %s", e, exc_info=True)
            raise CommandError(f"Failed to calculate new content: {e}") from e

    @staticmethod
    def _normalize_date(value):
        """Coerce a date/datetime (or None) to a plain ``date`` or None.

        Shared by the park and ride serializers, which previously duplicated
        this datetime-vs-date normalization inline.
        """
        if value and isinstance(value, datetime):
            return value.date()
        return value

    def _get_new_parks(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
        """Get recently added parks using real data.

        A park qualifies if it was created or opened after *cutoff_date*
        and is currently operating. Returns up to *limit* serialized dicts.
        """
        new_parks = (
            Park.objects.filter(
                Q(created_at__gte=cutoff_date)
                | Q(opening_date__gte=cutoff_date.date()),
                status="OPERATING",
            )
            .select_related("location", "operator")
            .order_by("-created_at", "-opening_date")[:limit]
        )

        results = []
        for park in new_parks:
            # Prefer the public opening date; fall back to DB creation time.
            date_added = self._normalize_date(park.opening_date or park.created_at)
            opening_date = self._normalize_date(getattr(park, "opening_date", None))

            results.append(
                {
                    "content_object": park,
                    "content_type": "park",
                    "id": park.pk,
                    "name": park.name,
                    "slug": park.slug,
                    "park": park.name,  # For parks, park field is the park name itself
                    "category": "park",
                    "date_added": date_added.isoformat() if date_added else "",
                    "date_opened": opening_date.isoformat() if opening_date else "",
                    "url": park.url,
                }
            )

        return results

    def _get_new_rides(self, cutoff_date: datetime, limit: int) -> List[Dict[str, Any]]:
        """Get recently added rides using real data.

        Same qualification rules as parks; additionally serializes the
        owning park's name and URL for display.
        """
        new_rides = (
            Ride.objects.filter(
                Q(created_at__gte=cutoff_date)
                | Q(opening_date__gte=cutoff_date.date()),
                status="OPERATING",
            )
            .select_related("park", "park__location")
            .order_by("-created_at", "-opening_date")[:limit]
        )

        results = []
        for ride in new_rides:
            date_added = self._normalize_date(
                getattr(ride, "opening_date", None) or getattr(ride, "created_at", None)
            )
            opening_date = self._normalize_date(getattr(ride, "opening_date", None))

            results.append(
                {
                    "content_object": ride,
                    "content_type": "ride",
                    "id": ride.pk,
                    "name": ride.name,
                    "slug": ride.slug,
                    "park": ride.park.name if ride.park else "",
                    "category": "ride",
                    "date_added": date_added.isoformat() if date_added else "",
                    "date_opened": opening_date.isoformat() if opening_date else "",
                    "url": ride.url,
                    "park_url": ride.park.url if ride.park else "",
                }
            )

        return results

    def _format_new_content_results(
        self, new_items: List[Dict[str, Any]]
    ) -> List[Dict[str, Any]]:
        """Format new content results for frontend consumption.

        Strips internal keys (e.g. ``content_object``) and keeps exactly the
        fields the frontend expects. Items that fail to format are logged
        and skipped rather than aborting the whole batch.
        """
        formatted_results = []

        for item in new_items:
            try:
                # Format exactly as frontend expects
                formatted_item = {
                    "id": item["id"],
                    "name": item["name"],
                    "park": item["park"],
                    "category": item["category"],
                    "date_added": item["date_added"],
                    "date_opened": item["date_opened"],
                    "slug": item["slug"],
                    "url": item["url"],
                }

                # Add park_url for rides
                if item.get("park_url"):
                    formatted_item["park_url"] = item["park_url"]

                formatted_results.append(formatted_item)

            except Exception as e:
                logger.warning("Error formatting new content item: %s", e)

        return formatted_results
|
||||
@@ -1,391 +0,0 @@
|
||||
"""
|
||||
Django management command to calculate trending content.
|
||||
|
||||
This replaces the Celery task for calculating trending content.
|
||||
Run with: uv run manage.py calculate_trending
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Any
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.utils import timezone
|
||||
from django.core.cache import cache
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from apps.core.analytics import PageView
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Calculate trending parks/rides from analytics data and cache the results.

    This replaces the Celery task for calculating trending content.
    Run with: uv run manage.py calculate_trending

    The trending score is a weighted blend of view growth (40%), rating
    (30%), recency (20%), and raw popularity (10%), each normalized to 0-1.
    Results are ranked, truncated to ``--limit``, and cached for one hour.
    """

    help = "Calculate trending content and cache results"

    def add_arguments(self, parser):
        parser.add_argument(
            "--content-type",
            type=str,
            default="all",
            choices=["all", "parks", "rides"],
            help="Type of content to calculate (default: all)",
        )
        parser.add_argument(
            "--limit",
            type=int,
            default=50,
            help="Maximum number of results to calculate (default: 50)",
        )
        parser.add_argument(
            "--verbose", action="store_true", help="Enable verbose output"
        )

    def handle(self, *args, **options):
        """Entry point: score content, rank it, format, cache, and report.

        Raises:
            CommandError: if any step fails; the original exception is
                chained so the full traceback is preserved in logs.
        """
        content_type = options["content_type"]
        limit = options["limit"]
        verbose = options["verbose"]

        if verbose:
            self.stdout.write(f"Starting trending calculation for {content_type}")

        try:
            # Time windows for calculations
            current_period_hours = 168  # 7 days
            # 14 days (for previous 7-day window comparison)
            previous_period_hours = 336

            trending_items = []

            # In "all" mode each category over-fetches (limit * 2) so the
            # merged, globally ranked list still has enough candidates.
            if content_type in ["all", "parks"]:
                park_items = self._calculate_trending_parks(
                    current_period_hours,
                    previous_period_hours,
                    limit if content_type == "parks" else limit * 2,
                )
                trending_items.extend(park_items)
                if verbose:
                    self.stdout.write(f"Calculated {len(park_items)} trending parks")

            if content_type in ["all", "rides"]:
                ride_items = self._calculate_trending_rides(
                    current_period_hours,
                    previous_period_hours,
                    limit if content_type == "rides" else limit * 2,
                )
                trending_items.extend(ride_items)
                if verbose:
                    self.stdout.write(f"Calculated {len(ride_items)} trending rides")

            # Sort by trending score and apply limit
            trending_items.sort(key=lambda x: x.get("trending_score", 0), reverse=True)
            trending_items = trending_items[:limit]

            # Format results for API consumption
            formatted_results = self._format_trending_results(
                trending_items, current_period_hours, previous_period_hours
            )

            # Cache results for 1 hour
            cache_key = f"trending:calculated:{content_type}:{limit}"
            cache.set(cache_key, formatted_results, 3600)

            self.stdout.write(
                self.style.SUCCESS(
                    f"Successfully calculated {len(formatted_results)} trending items for {content_type}"
                )
            )

            if verbose:
                for item in formatted_results[:5]:  # Show first 5 items
                    self.stdout.write(
                        f" {item['name']} (score: {item.get('views_change', 'N/A')})"
                    )

        except Exception as e:
            # Lazy %s args avoid formatting when the log level is disabled;
            # "from e" chains the cause into the CommandError traceback.
            logger.error("Error calculating trending content: %s", e, exc_info=True)
            raise CommandError(f"Failed to calculate trending content: {e}") from e

    def _calculate_trending_parks(
        self, current_period_hours: int, previous_period_hours: int, limit: int
    ) -> List[Dict[str, Any]]:
        """Calculate trending scores for parks using real data.

        Returns at most *limit* serialized parks with positive trending
        scores, ordered best-first.
        """
        parks = Park.objects.filter(status="OPERATING").select_related(
            "location", "operator"
        )

        trending_parks = []

        for park in parks:
            try:
                score = self._calculate_content_score(
                    park, "park", current_period_hours, previous_period_hours
                )
                if score > 0:  # Only include items with positive trending scores
                    trending_parks.append(
                        {
                            "content_object": park,
                            "content_type": "park",
                            "trending_score": score,
                            "id": park.id,
                            "name": park.name,
                            "slug": park.slug,
                            "park": park.name,  # For parks, park field is the park name itself
                            "category": "park",
                            "rating": (
                                float(park.average_rating)
                                if park.average_rating
                                else 0.0
                            ),
                            "date_opened": (
                                park.opening_date.isoformat()
                                if park.opening_date
                                else ""
                            ),
                            "url": park.url,
                        }
                    )
            except Exception as e:
                logger.warning("Error calculating score for park %s: %s", park.id, e)

        # BUG FIX: `limit` was previously accepted but never applied, so
        # every scored park was returned. Rank and truncate here; handle()
        # re-sorts globally, so the final output is unchanged.
        trending_parks.sort(key=lambda item: item["trending_score"], reverse=True)
        return trending_parks[:limit]

    def _calculate_trending_rides(
        self, current_period_hours: int, previous_period_hours: int, limit: int
    ) -> List[Dict[str, Any]]:
        """Calculate trending scores for rides using real data.

        Same rules as parks; additionally serializes the owning park's
        name and URL for display.
        """
        rides = Ride.objects.filter(status="OPERATING").select_related(
            "park", "park__location"
        )

        trending_rides = []

        for ride in rides:
            try:
                score = self._calculate_content_score(
                    ride, "ride", current_period_hours, previous_period_hours
                )
                if score > 0:  # Only include items with positive trending scores
                    trending_rides.append(
                        {
                            "content_object": ride,
                            "content_type": "ride",
                            "trending_score": score,
                            "id": ride.pk,
                            "name": ride.name,
                            "slug": ride.slug,
                            "park": ride.park.name if ride.park else "",
                            "category": "ride",
                            "rating": (
                                float(ride.average_rating)
                                if ride.average_rating
                                else 0.0
                            ),
                            "date_opened": (
                                ride.opening_date.isoformat()
                                if ride.opening_date
                                else ""
                            ),
                            "url": ride.url,
                            "park_url": ride.park.url if ride.park else "",
                        }
                    )
            except Exception as e:
                logger.warning("Error calculating score for ride %s: %s", ride.pk, e)

        # BUG FIX: apply the previously-ignored `limit` (see parks variant).
        trending_rides.sort(key=lambda item: item["trending_score"], reverse=True)
        return trending_rides[:limit]

    def _calculate_content_score(
        self,
        content_obj: Any,
        content_type: str,
        current_period_hours: int,
        previous_period_hours: int,
    ) -> float:
        """Calculate weighted trending score for content object using real analytics data.

        Returns 0.0 on any error so one bad object never aborts the run.
        """
        try:
            # Get content type for PageView queries
            ct = ContentType.objects.get_for_model(content_obj)

            # 1. View Growth Score (40% weight)
            view_growth_score = self._calculate_view_growth_score(
                ct, content_obj.id, current_period_hours, previous_period_hours
            )

            # 2. Rating Score (30% weight)
            rating_score = self._calculate_rating_score(content_obj)

            # 3. Recency Score (20% weight)
            recency_score = self._calculate_recency_score(content_obj)

            # 4. Popularity Score (10% weight)
            popularity_score = self._calculate_popularity_score(
                ct, content_obj.id, current_period_hours
            )

            # Calculate weighted final score
            final_score = (
                view_growth_score * 0.4
                + rating_score * 0.3
                + recency_score * 0.2
                + popularity_score * 0.1
            )

            return final_score

        except Exception as e:
            logger.error(
                "Error calculating score for %s %s: %s", content_type, content_obj.id, e
            )
            return 0.0

    def _calculate_view_growth_score(
        self,
        content_type: ContentType,
        object_id: int,
        current_period_hours: int,
        previous_period_hours: int,
    ) -> float:
        """Calculate normalized view growth score using real PageView data.

        Content with no previous-period views gets a boost proportional to
        current views (capped at 1.0); otherwise growth percentage is
        normalized to 0-1 with +500% mapping to 1.0.
        """
        try:
            current_views, previous_views, growth_percentage = (
                PageView.get_views_growth(
                    content_type,
                    object_id,
                    current_period_hours,
                    previous_period_hours,
                )
            )

            if previous_views == 0:
                # New content with views gets boost
                return min(current_views / 100.0, 1.0) if current_views > 0 else 0.0

            # Normalize growth percentage to 0-1 scale
            normalized_growth = (
                min(growth_percentage / 500.0, 1.0) if growth_percentage > 0 else 0.0
            )
            return max(normalized_growth, 0.0)

        except Exception as e:
            logger.warning("Error calculating view growth: %s", e)
            return 0.0

    def _calculate_rating_score(self, content_obj: Any) -> float:
        """Calculate normalized rating score.

        Maps a 1-10 rating onto 0-1; unrated content gets a neutral 0.3.
        """
        try:
            rating = getattr(content_obj, "average_rating", None)
            if rating is None or rating == 0:
                return 0.3  # Neutral score for unrated content

            # Normalize rating from 1-10 scale to 0-1 scale
            return min(max((float(rating) - 1) / 9.0, 0.0), 1.0)

        except Exception as e:
            logger.warning("Error calculating rating score: %s", e)
            return 0.3

    def _calculate_recency_score(self, content_obj: Any) -> float:
        """Calculate recency score based on when content was added/updated.

        Piecewise-linear decay: 1.0 today, down to 0.8 at 30 days, 0.1 at
        one year, 0.0 beyond. Unknown dates score a neutral 0.5.
        """
        try:
            # Use opening_date for parks/rides, or created_at as fallback
            date_added = getattr(content_obj, "opening_date", None)
            if not date_added:
                date_added = getattr(content_obj, "created_at", None)
                if not date_added:
                    return 0.5  # Neutral score for unknown dates

            # Handle both date and datetime objects
            if hasattr(date_added, "date"):
                date_added = date_added.date()

            # Calculate days since added
            today = timezone.now().date()
            days_since_added = (today - date_added).days

            # Recency score: newer content gets higher scores
            if days_since_added <= 0:
                return 1.0
            elif days_since_added <= 30:
                return 1.0 - (days_since_added / 30.0) * 0.2  # 1.0 to 0.8
            elif days_since_added <= 365:
                return 0.8 - ((days_since_added - 30) / (365 - 30)) * 0.7  # 0.8 to 0.1
            else:
                return 0.0

        except Exception as e:
            logger.warning("Error calculating recency score: %s", e)
            return 0.5

    def _calculate_popularity_score(
        self, content_type: ContentType, object_id: int, hours: int
    ) -> float:
        """Calculate popularity score based on total view count.

        0-100 views map linearly to 0.0-0.5; above that, views map to
        0.5-1.0 with saturation at 1000 views.
        """
        try:
            total_views = PageView.get_total_views_count(
                content_type, object_id, hours=hours
            )

            # Normalize views to 0-1 scale
            if total_views == 0:
                return 0.0
            elif total_views <= 100:
                return total_views / 200.0  # 0.0 to 0.5
            else:
                return min(0.5 + (total_views - 100) / 1800.0, 1.0)  # 0.5 to 1.0

        except Exception as e:
            logger.warning("Error calculating popularity score: %s", e)
            return 0.0

    def _format_trending_results(
        self,
        trending_items: List[Dict[str, Any]],
        current_period_hours: int,
        previous_period_hours: int,
    ) -> List[Dict[str, Any]]:
        """Format trending results for frontend consumption.

        Re-queries view growth per item for the display string, assigns a
        1-based rank, and drops internal keys. Items that fail to format
        are logged and skipped.
        """
        formatted_results = []

        for rank, item in enumerate(trending_items, 1):
            try:
                # Get view change for display
                content_obj = item["content_object"]
                ct = ContentType.objects.get_for_model(content_obj)
                current_views, previous_views, growth_percentage = (
                    PageView.get_views_growth(
                        ct,
                        content_obj.id,
                        current_period_hours,
                        previous_period_hours,
                    )
                )

                # Format exactly as frontend expects
                formatted_item = {
                    "id": item["id"],
                    "name": item["name"],
                    "park": item["park"],
                    "category": item["category"],
                    "rating": item["rating"],
                    "rank": rank,
                    "views": current_views,
                    "views_change": (
                        f"+{growth_percentage:.1f}%"
                        if growth_percentage > 0
                        else f"{growth_percentage:.1f}%"
                    ),
                    "slug": item["slug"],
                    "date_opened": item["date_opened"],
                    "url": item["url"],
                }

                # Add park_url for rides
                if item.get("park_url"):
                    formatted_item["park_url"] = item["park_url"]

                formatted_results.append(formatted_item)

            except Exception as e:
                logger.warning("Error formatting trending item: %s", e)

        return formatted_results
|
||||
@@ -1,472 +0,0 @@
|
||||
"""
|
||||
Django management command to clear all types of cache data.
|
||||
|
||||
This command provides comprehensive cache clearing functionality including:
|
||||
- Django cache framework (all configured backends)
|
||||
- Python __pycache__ directories and .pyc files
|
||||
- Static files cache
|
||||
- Session cache
|
||||
- Template cache
|
||||
- Tailwind CSS build cache
|
||||
- OPcache (if available)
|
||||
"""
|
||||
|
||||
import shutil
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.cache import cache, caches
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = (
|
||||
"Clear all types of cache data including Django cache, "
|
||||
"__pycache__, and build caches"
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument(
|
||||
"--django-cache",
|
||||
action="store_true",
|
||||
help="Clear Django cache framework cache only",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--pycache",
|
||||
action="store_true",
|
||||
help="Clear Python __pycache__ directories and .pyc files only",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--static",
|
||||
action="store_true",
|
||||
help="Clear static files cache only",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--sessions",
|
||||
action="store_true",
|
||||
help="Clear session cache only",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--templates",
|
||||
action="store_true",
|
||||
help="Clear template cache only",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--tailwind",
|
||||
action="store_true",
|
||||
help="Clear Tailwind CSS build cache only",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--opcache",
|
||||
action="store_true",
|
||||
help="Clear PHP OPcache if available",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Show what would be cleared without actually clearing",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose",
|
||||
action="store_true",
|
||||
help="Show detailed output of clearing operations",
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""Clear cache data based on provided options."""
|
||||
self.dry_run = options["dry_run"]
|
||||
self.verbose = options["verbose"]
|
||||
|
||||
# If no specific cache type is specified, clear all
|
||||
clear_all = not any(
|
||||
[
|
||||
options["django_cache"],
|
||||
options["pycache"],
|
||||
options["static"],
|
||||
options["sessions"],
|
||||
options["templates"],
|
||||
options["tailwind"],
|
||||
options["opcache"],
|
||||
]
|
||||
)
|
||||
|
||||
if self.dry_run:
|
||||
self.stdout.write(
|
||||
self.style.WARNING("🔍 DRY RUN MODE - No files will be deleted")
|
||||
)
|
||||
self.stdout.write("")
|
||||
|
||||
self.stdout.write(self.style.SUCCESS("🧹 ThrillWiki Cache Clearing Utility"))
|
||||
self.stdout.write("")
|
||||
|
||||
# Clear Django cache framework
|
||||
if clear_all or options["django_cache"]:
|
||||
self.clear_django_cache()
|
||||
|
||||
# Clear Python __pycache__
|
||||
if clear_all or options["pycache"]:
|
||||
self.clear_pycache()
|
||||
|
||||
# Clear static files cache
|
||||
if clear_all or options["static"]:
|
||||
self.clear_static_cache()
|
||||
|
||||
# Clear sessions cache
|
||||
if clear_all or options["sessions"]:
|
||||
self.clear_sessions_cache()
|
||||
|
||||
# Clear template cache
|
||||
if clear_all or options["templates"]:
|
||||
self.clear_template_cache()
|
||||
|
||||
# Clear Tailwind cache
|
||||
if clear_all or options["tailwind"]:
|
||||
self.clear_tailwind_cache()
|
||||
|
||||
# Clear OPcache
|
||||
if clear_all or options["opcache"]:
|
||||
self.clear_opcache()
|
||||
|
||||
self.stdout.write("")
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS("✅ Cache clearing completed successfully!")
|
||||
)
|
||||
|
||||
def clear_django_cache(self):
|
||||
"""Clear Django cache framework cache."""
|
||||
self.stdout.write("🗄️ Clearing Django cache framework...")
|
||||
|
||||
try:
|
||||
# Clear default cache
|
||||
if not self.dry_run:
|
||||
cache.clear()
|
||||
|
||||
cache_info = f"Default cache ({cache.__class__.__name__})"
|
||||
self.stdout.write(self.style.SUCCESS(f" ✅ Cleared {cache_info}"))
|
||||
|
||||
# Clear all configured caches
|
||||
cache_aliases = getattr(settings, "CACHES", {}).keys()
|
||||
for alias in cache_aliases:
|
||||
if alias != "default": # Already cleared above
|
||||
try:
|
||||
cache_backend = caches[alias]
|
||||
if not self.dry_run:
|
||||
cache_backend.clear()
|
||||
|
||||
cache_info = (
|
||||
f"{alias} cache ({cache_backend.__class__.__name__})"
|
||||
)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(f" ✅ Cleared {cache_info}")
|
||||
)
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f" ⚠️ Could not clear {alias} cache: {e}"
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f" ❌ Error clearing Django cache: {e}")
|
||||
)
|
||||
|
||||
def clear_pycache(self):
|
||||
"""Clear Python __pycache__ directories and .pyc files."""
|
||||
self.stdout.write("🐍 Clearing Python __pycache__ and .pyc files...")
|
||||
|
||||
removed_count = 0
|
||||
removed_size = 0
|
||||
|
||||
try:
|
||||
# Start from project root
|
||||
project_root = Path(settings.BASE_DIR)
|
||||
|
||||
# Find and remove __pycache__ directories
|
||||
for pycache_dir in project_root.rglob("__pycache__"):
|
||||
if pycache_dir.is_dir():
|
||||
try:
|
||||
# Calculate size before removal
|
||||
dir_size = sum(
|
||||
f.stat().st_size
|
||||
for f in pycache_dir.rglob("*")
|
||||
if f.is_file()
|
||||
)
|
||||
removed_size += dir_size
|
||||
|
||||
if self.verbose:
|
||||
self.stdout.write(f" 🗑️ Removing: {pycache_dir}")
|
||||
|
||||
if not self.dry_run:
|
||||
shutil.rmtree(pycache_dir)
|
||||
|
||||
removed_count += 1
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f" ⚠️ Could not remove {pycache_dir}: {e}"
|
||||
)
|
||||
)
|
||||
|
||||
# Find and remove .pyc files
|
||||
for pyc_file in project_root.rglob("*.pyc"):
|
||||
try:
|
||||
file_size = pyc_file.stat().st_size
|
||||
removed_size += file_size
|
||||
|
||||
if self.verbose:
|
||||
self.stdout.write(f" 🗑️ Removing: {pyc_file}")
|
||||
|
||||
if not self.dry_run:
|
||||
pyc_file.unlink()
|
||||
|
||||
removed_count += 1
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(f" ⚠️ Could not remove {pyc_file}: {e}")
|
||||
)
|
||||
|
||||
# Format file size
|
||||
size_mb = removed_size / (1024 * 1024)
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f" ✅ Removed {removed_count} Python cache items ({size_mb:.2f} MB)"
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f" ❌ Error clearing Python cache: {e}")
|
||||
)
|
||||
|
||||
def clear_static_cache(self):
|
||||
"""Clear static files cache."""
|
||||
self.stdout.write("📦 Clearing static files cache...")
|
||||
|
||||
try:
|
||||
static_root = getattr(settings, "STATIC_ROOT", None)
|
||||
|
||||
if static_root and Path(static_root).exists():
|
||||
static_path = Path(static_root)
|
||||
|
||||
# Calculate size
|
||||
total_size = sum(
|
||||
f.stat().st_size for f in static_path.rglob("*") if f.is_file()
|
||||
)
|
||||
size_mb = total_size / (1024 * 1024)
|
||||
|
||||
if self.verbose:
|
||||
self.stdout.write(f" 🗑️ Removing: {static_path}")
|
||||
|
||||
if not self.dry_run:
|
||||
shutil.rmtree(static_path)
|
||||
static_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f" ✅ Cleared static files cache ({size_mb:.2f} MB)"
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
" ⚠️ No STATIC_ROOT configured or directory doesn't exist"
|
||||
)
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f" ❌ Error clearing static cache: {e}")
|
||||
)
|
||||
|
||||
def clear_sessions_cache(self):
|
||||
"""Clear session cache if using cache-based sessions."""
|
||||
self.stdout.write("🔐 Clearing session cache...")
|
||||
|
||||
try:
|
||||
session_engine = getattr(settings, "SESSION_ENGINE", "")
|
||||
|
||||
if "cache" in session_engine:
|
||||
# Using cache-based sessions
|
||||
session_cache_alias = getattr(
|
||||
settings, "SESSION_CACHE_ALIAS", "default"
|
||||
)
|
||||
session_cache = caches[session_cache_alias]
|
||||
|
||||
if not self.dry_run:
|
||||
# Clear session keys (this is a simplified approach)
|
||||
# In production, you might want more sophisticated session clearing
|
||||
session_cache.clear()
|
||||
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f" ✅ Cleared cache-based sessions ({session_cache_alias})"
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(" ⚠️ Not using cache-based sessions")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f" ❌ Error clearing session cache: {e}")
|
||||
)
|
||||
|
||||
def clear_template_cache(self):
|
||||
"""Clear template cache."""
|
||||
self.stdout.write("📄 Clearing template cache...")
|
||||
|
||||
try:
|
||||
# Clear template cache if using cached template loader
|
||||
from django.template import engines
|
||||
from django.template.loaders.cached import Loader as CachedLoader
|
||||
|
||||
cleared_engines = 0
|
||||
for engine in engines.all():
|
||||
try:
|
||||
# Check for DjangoTemplates engine with cached loaders
|
||||
engine_backend = getattr(engine, "backend", "")
|
||||
if "DjangoTemplates" in engine_backend:
|
||||
# Get engine instance safely
|
||||
engine_instance = getattr(engine, "engine", None)
|
||||
if engine_instance:
|
||||
template_loaders = getattr(
|
||||
engine_instance, "template_loaders", []
|
||||
)
|
||||
for loader in template_loaders:
|
||||
if isinstance(loader, CachedLoader):
|
||||
if not self.dry_run:
|
||||
loader.reset()
|
||||
cleared_engines += 1
|
||||
if self.verbose:
|
||||
self.stdout.write(
|
||||
f" 🗑️ Cleared cached loader: {loader}"
|
||||
)
|
||||
|
||||
# Check for Jinja2 engines (if present)
|
||||
elif "Jinja2" in engine_backend and hasattr(engine, "env"):
|
||||
env = getattr(engine, "env", None)
|
||||
if env and hasattr(env, "cache"):
|
||||
if not self.dry_run:
|
||||
env.cache.clear()
|
||||
cleared_engines += 1
|
||||
if self.verbose:
|
||||
self.stdout.write(
|
||||
f" 🗑️ Cleared Jinja2 cache: {engine}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
if self.verbose:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f" ⚠️ Could not clear cache for engine {engine}: {e}"
|
||||
)
|
||||
)
|
||||
|
||||
if cleared_engines > 0:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f" ✅ Cleared template cache for "
|
||||
f"{cleared_engines} loaders/engines"
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(" ⚠️ No cached template loaders found")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f" ❌ Error clearing template cache: {e}")
|
||||
)
|
||||
|
||||
def clear_tailwind_cache(self):
|
||||
"""Clear Tailwind CSS build cache."""
|
||||
self.stdout.write("🎨 Clearing Tailwind CSS cache...")
|
||||
|
||||
try:
|
||||
# Look for common Tailwind cache directories
|
||||
project_root = Path(settings.BASE_DIR)
|
||||
cache_paths = [
|
||||
project_root / "node_modules" / ".cache",
|
||||
project_root / ".tailwindcss-cache",
|
||||
project_root / "static" / "css" / ".cache",
|
||||
]
|
||||
|
||||
cleared_count = 0
|
||||
for cache_path in cache_paths:
|
||||
if cache_path.exists():
|
||||
try:
|
||||
if self.verbose:
|
||||
self.stdout.write(f" 🗑️ Removing: {cache_path}")
|
||||
|
||||
if not self.dry_run:
|
||||
if cache_path.is_file():
|
||||
cache_path.unlink()
|
||||
else:
|
||||
shutil.rmtree(cache_path)
|
||||
|
||||
cleared_count += 1
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
f" ⚠️ Could not remove {cache_path}: {e}"
|
||||
)
|
||||
)
|
||||
|
||||
if cleared_count > 0:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(
|
||||
f" ✅ Cleared {cleared_count} Tailwind cache directories"
|
||||
)
|
||||
)
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(" ⚠️ No Tailwind cache directories found")
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.stdout.write(
|
||||
self.style.ERROR(f" ❌ Error clearing Tailwind cache: {e}")
|
||||
)
|
||||
|
||||
def clear_opcache(self):
|
||||
"""Clear PHP OPcache if available."""
|
||||
self.stdout.write("⚡ Clearing OPcache...")
|
||||
|
||||
try:
|
||||
# This is mainly for mixed environments
|
||||
php_code = (
|
||||
"if (function_exists('opcache_reset')) { "
|
||||
"opcache_reset(); echo 'cleared'; } "
|
||||
"else { echo 'not_available'; }"
|
||||
)
|
||||
result = subprocess.run(
|
||||
["php", "-r", php_code],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
if result.returncode == 0:
|
||||
if "cleared" in result.stdout:
|
||||
self.stdout.write(
|
||||
self.style.SUCCESS(" ✅ OPcache cleared successfully")
|
||||
)
|
||||
else:
|
||||
self.stdout.write(self.style.WARNING(" ⚠️ OPcache not available"))
|
||||
else:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
" ⚠️ PHP not available or OPcache not accessible"
|
||||
)
|
||||
)
|
||||
|
||||
except (subprocess.TimeoutExpired, FileNotFoundError):
|
||||
self.stdout.write(
|
||||
self.style.WARNING(" ⚠️ PHP not found or not accessible")
|
||||
)
|
||||
except Exception as e:
|
||||
self.stdout.write(self.style.ERROR(f" ❌ Error clearing OPcache: {e}"))
|
||||
@@ -1,99 +0,0 @@
|
||||
"""
|
||||
Django management command to run the development server.
|
||||
|
||||
This command automatically sets up the development environment and starts
|
||||
the server, replacing the need for the dev_server.sh script.
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management import execute_from_command_line
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Run the development server, optionally performing environment setup first.

    Replaces the old ``dev_server.sh`` script: unless ``--skip-setup`` is
    given it first invokes the ``setup_dev`` management command, then starts
    either ``runserver`` or ``runserver_plus`` (when django-extensions is
    installed or ``--use-runserver-plus`` is passed).
    """

    help = "Run the development server with automatic setup"

    def add_arguments(self, parser):
        parser.add_argument(
            "--port",
            type=str,
            default="8000",
            help="Port to run the server on (default: 8000)",
        )
        parser.add_argument(
            "--host",
            type=str,
            default="0.0.0.0",
            help="Host to bind the server to (default: 0.0.0.0)",
        )
        parser.add_argument(
            "--skip-setup",
            action="store_true",
            help="Skip the development setup and go straight to running the server",
        )
        parser.add_argument(
            "--use-runserver-plus",
            action="store_true",
            help="Use runserver_plus if available (from django-extensions)",
        )

    def handle(self, *args, **options):
        """Run the development setup (unless skipped) and start the server."""
        if not options["skip_setup"]:
            self.stdout.write(
                self.style.SUCCESS(
                    "🚀 Setting up and starting ThrillWiki Development Server..."
                )
            )

            # Run the setup_dev command first
            execute_from_command_line(["manage.py", "setup_dev"])

        else:
            self.stdout.write(
                self.style.SUCCESS("🚀 Starting ThrillWiki Development Server...")
            )

        # BUG FIX: the chosen server command was previously computed here and
        # discarded, with the runserver/runserver_plus decision duplicated in
        # the try block below.  Compute it once and actually use it.
        server_command = self.get_server_command(options)

        # Start the server
        self.stdout.write("")
        self.stdout.write(
            self.style.SUCCESS(
                f"🌟 Starting Django development server on http://{options['host']}:{options['port']}"
            )
        )
        self.stdout.write("Press Ctrl+C to stop the server")
        self.stdout.write("")

        try:
            execute_from_command_line(
                ["manage.py", server_command, f"{options['host']}:{options['port']}"]
            )
        except KeyboardInterrupt:
            self.stdout.write("")
            self.stdout.write(self.style.SUCCESS("👋 Development server stopped"))

    def get_server_command(self, options):
        """Return the management command name to launch the dev server with."""
        if options["use_runserver_plus"] or self.has_runserver_plus():
            return "runserver_plus"
        return "runserver"

    def has_runserver_plus(self):
        """Return True when django-extensions (provider of runserver_plus) imports."""
        try:
            import django_extensions  # noqa: F401

            return True
        except ImportError:
            return False
||||
@@ -1,224 +0,0 @@
|
||||
"""
|
||||
Django management command to set up the development environment.
|
||||
|
||||
This command performs all the setup tasks that the dev_server.sh script does,
|
||||
allowing the project to run without requiring the shell script.
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Set up the development environment (replaces dev_server.sh).

    Creates directories, migrates the database, seeds sample data, creates a
    development superuser, collects static files, builds Tailwind CSS and
    runs Django's system checks.  Individual steps can be skipped via flags;
    failures in optional steps are reported but do not abort the command.
    """

    help = "Set up the development environment"

    def add_arguments(self, parser):
        parser.add_argument(
            "--skip-migrations",
            action="store_true",
            help="Skip running database migrations",
        )
        parser.add_argument(
            "--skip-static",
            action="store_true",
            help="Skip collecting static files",
        )
        parser.add_argument(
            "--skip-tailwind",
            action="store_true",
            help="Skip building Tailwind CSS",
        )
        parser.add_argument(
            "--skip-superuser",
            action="store_true",
            help="Skip creating development superuser",
        )

    def _manage(self, *cmd_args, **run_kwargs):
        """Run a manage.py subcommand through uv; returns the CompletedProcess.

        Centralizes the ``["uv", "run", "manage.py", ...]`` invocation that was
        previously repeated in five different methods.
        """
        return subprocess.run(["uv", "run", "manage.py", *cmd_args], **run_kwargs)

    def handle(self, *args, **options):
        """Run the development setup process."""
        self.stdout.write(
            self.style.SUCCESS("🚀 Setting up ThrillWiki Development Environment...")
        )

        # Create necessary directories
        self.create_directories()

        # Run database migrations if needed
        if not options["skip_migrations"]:
            self.run_migrations()

        # Seed sample data
        self.seed_sample_data()

        # Create superuser if it doesn't exist
        if not options["skip_superuser"]:
            self.create_superuser()

        # Collect static files
        if not options["skip_static"]:
            self.collect_static()

        # Build Tailwind CSS
        if not options["skip_tailwind"]:
            self.build_tailwind()

        # Run system checks
        self.run_system_checks()

        # Display environment info
        self.display_environment_info()

        self.stdout.write(
            self.style.SUCCESS("✅ Development environment setup complete!")
        )

    def create_directories(self):
        """Create necessary directories."""
        self.stdout.write("📁 Creating necessary directories...")
        directories = ["logs", "profiles", "media", "staticfiles", "static/css"]

        for directory in directories:
            dir_path = Path(settings.BASE_DIR) / directory
            dir_path.mkdir(parents=True, exist_ok=True)

        self.stdout.write(self.style.SUCCESS("✅ Directories created"))

    def run_migrations(self):
        """Run database migrations if needed."""
        self.stdout.write("🗄️ Checking database migrations...")

        try:
            # `migrate --check` exits non-zero when migrations are pending.
            result = self._manage(
                "migrate", "--check", capture_output=True, text=True
            )

            if result.returncode == 0:
                self.stdout.write(
                    self.style.SUCCESS("✅ Database migrations are up to date")
                )
            else:
                self.stdout.write("🔄 Running database migrations...")
                self._manage("migrate", "--noinput", check=True)
                self.stdout.write(
                    self.style.SUCCESS("✅ Database migrations completed")
                )

        except subprocess.CalledProcessError as e:
            self.stdout.write(
                self.style.WARNING(f"⚠️ Migration error (continuing): {e}")
            )

    def seed_sample_data(self):
        """Seed sample data to the database."""
        self.stdout.write("🌱 Seeding sample data...")

        try:
            self._manage("seed_sample_data", check=True)
            self.stdout.write(self.style.SUCCESS("✅ Sample data seeded"))
        except subprocess.CalledProcessError:
            self.stdout.write(
                self.style.WARNING("⚠️ Could not seed sample data (continuing)")
            )

    def create_superuser(self):
        """Create development superuser if it doesn't exist."""
        self.stdout.write("👤 Checking for superuser...")

        try:
            from django.contrib.auth import get_user_model

            User = get_user_model()

            if User.objects.filter(is_superuser=True).exists():
                self.stdout.write(self.style.SUCCESS("✅ Superuser already exists"))
            else:
                self.stdout.write("👤 Creating development superuser (admin/admin)...")
                # An "admin" account may already exist without superuser
                # rights; in that case leave it untouched.
                if not User.objects.filter(username="admin").exists():
                    User.objects.create_superuser("admin", "admin@example.com", "admin")
                    self.stdout.write(
                        self.style.SUCCESS("✅ Created superuser: admin/admin")
                    )
                else:
                    self.stdout.write(
                        self.style.SUCCESS("✅ Admin user already exists")
                    )

        except Exception as e:
            self.stdout.write(self.style.WARNING(f"⚠️ Could not create superuser: {e}"))

    def collect_static(self):
        """Collect static files for development."""
        self.stdout.write("📦 Collecting static files...")

        try:
            self._manage("collectstatic", "--noinput", "--clear", check=True)
            self.stdout.write(self.style.SUCCESS("✅ Static files collected"))
        except subprocess.CalledProcessError as e:
            self.stdout.write(
                self.style.WARNING(f"⚠️ Could not collect static files: {e}")
            )

    def build_tailwind(self):
        """Build Tailwind CSS if npm is available."""
        self.stdout.write("🎨 Building Tailwind CSS...")

        try:
            # Check if npm is available
            subprocess.run(["npm", "--version"], capture_output=True, check=True)

            # Build Tailwind CSS
            self._manage("tailwind", "build", check=True)
            self.stdout.write(self.style.SUCCESS("✅ Tailwind CSS built"))

        except (subprocess.CalledProcessError, FileNotFoundError):
            self.stdout.write(
                self.style.WARNING(
                    "⚠️ npm not found or Tailwind build failed, skipping"
                )
            )

    def run_system_checks(self):
        """Run Django system checks."""
        self.stdout.write("🔍 Running system checks...")

        try:
            self._manage("check", check=True)
            self.stdout.write(self.style.SUCCESS("✅ System checks passed"))
        except subprocess.CalledProcessError:
            self.stdout.write(
                self.style.WARNING("❌ System checks failed, but continuing...")
            )

    def display_environment_info(self):
        """Display development environment information."""
        self.stdout.write("")
        self.stdout.write(self.style.SUCCESS("🌍 Development Environment:"))
        self.stdout.write(f" - Settings Module: {settings.SETTINGS_MODULE}")
        self.stdout.write(f" - Debug Mode: {settings.DEBUG}")
        self.stdout.write(" - Database: PostgreSQL with PostGIS")
        self.stdout.write(" - Cache: Local memory cache")
        self.stdout.write(" - Admin URL: http://localhost:8000/admin/")
        self.stdout.write(" - Admin User: admin / admin")
        self.stdout.write(" - Silk Profiler: http://localhost:8000/silk/")
        self.stdout.write(" - Debug Toolbar: Available on debug pages")
        self.stdout.write(" - API Documentation: http://localhost:8000/api/docs/")
        self.stdout.write("")
        self.stdout.write("🌟 Ready to start development server with:")
        self.stdout.write(" uv run manage.py runserver_plus")
        self.stdout.write("")
||||
@@ -1,309 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.utils import timezone
|
||||
from apps.parks.models.parks import Park
|
||||
from apps.rides.models.rides import Ride
|
||||
from apps.parks.models.companies import Company
|
||||
from apps.core.analytics import PageView
|
||||
from apps.core.services.trending_service import trending_service
|
||||
from datetime import datetime, timedelta
|
||||
import random
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Exercise the trending system end to end with synthetic data.

    Creates test parks/rides plus PageView records shaped into distinct
    trending patterns (rising, steady, declining, new-but-growing), then
    verifies both the trending algorithm and the API response format.
    """

    help = "Test the trending algorithm with sample data"

    def add_arguments(self, parser):
        parser.add_argument(
            "--clean",
            action="store_true",
            help="Clean existing test data before creating new data",
        )
        parser.add_argument(
            "--verbose",
            action="store_true",
            help="Show detailed output",
        )

    def handle(self, *args, **options):
        self.verbose = options["verbose"]

        if options["clean"]:
            self.clean_test_data()

        self.create_test_data()
        self.test_trending_algorithm()
        self.test_api_format()

        self.stdout.write(
            self.style.SUCCESS("✓ Trending system test completed successfully!")
        )

    def clean_test_data(self):
        """Clean existing test data."""
        self.stdout.write("Cleaning existing test data...")

        # Delete test PageViews for both content types we seeded.
        PageView.objects.filter(
            content_type__in=[
                ContentType.objects.get_for_model(Park),
                ContentType.objects.get_for_model(Ride),
            ]
        ).delete()

        self.stdout.write("✓ Test data cleaned")

    def create_test_data(self):
        """Create sample parks, rides, and page views for testing."""
        self.stdout.write("Creating test data...")

        # Create or get default operator company
        operator, created = Company.objects.get_or_create(
            name="Default Theme Park Operator",
            defaults={
                "roles": ["OPERATOR"],
                "description": "Default operator for test parks",
            },
        )
        if created and self.verbose:
            self.stdout.write(f" Created operator company: {operator.name}")

        # Get or create test parks and rides
        parks_data = [
            {
                "name": "Cedar Point",
                "slug": "cedar-point",
                "description": "America's Roller Coast featuring world-class roller coasters",
                "average_rating": 9.2,
                "opening_date": datetime(1870, 1, 1).date(),
                "operator": operator,
            },
            {
                "name": "Magic Kingdom",
                "slug": "magic-kingdom",
                "description": "Walt Disney World's most magical theme park",
                "average_rating": 9.5,
                "opening_date": datetime(1971, 10, 1).date(),
                "operator": operator,
            },
            {
                "name": "Six Flags Great Adventure",
                "slug": "six-flags-great-adventure",
                "description": "Home to Kingda Ka and incredible thrills",
                "average_rating": 8.8,
                "opening_date": datetime(1974, 7, 1).date(),
                "operator": operator,
            },
        ]

        # Create parks
        parks = []
        for park_data in parks_data:
            park, created = Park.objects.get_or_create(
                name=park_data["name"], defaults=park_data
            )
            parks.append(park)
            if created and self.verbose:
                self.stdout.write(f" Created park: {park.name}")

        # Now create rides - they need park references
        rides_data = [
            {
                "name": "Steel Vengeance",
                "slug": "steel-vengeance",
                "description": "Hybrid roller coaster at Cedar Point",
                "park": next(p for p in parks if p.name == "Cedar Point"),
                "category": "RC",  # Roller Coaster
                "average_rating": 9.8,
                "opening_date": datetime(2018, 5, 5).date(),
            },
            {
                "name": "Space Mountain",
                "slug": "space-mountain",
                "description": "Indoor space-themed roller coaster",
                "park": next(p for p in parks if p.name == "Magic Kingdom"),
                "category": "RC",  # Roller Coaster
                "average_rating": 8.5,
                "opening_date": datetime(1975, 1, 15).date(),
            },
            {
                "name": "Kingda Ka",
                "slug": "kingda-ka",
                "description": "World's tallest roller coaster",
                "park": next(p for p in parks if p.name == "Six Flags Great Adventure"),
                "category": "RC",  # Roller Coaster
                "average_rating": 9.0,
                "opening_date": datetime(2005, 5, 21).date(),
            },
            {
                "name": "Millennium Force",
                "slug": "millennium-force",
                "description": "Legendary steel roller coaster",
                "park": next(p for p in parks if p.name == "Cedar Point"),
                "category": "RC",  # Roller Coaster
                "average_rating": 9.4,
                "opening_date": datetime(2000, 5, 13).date(),
            },
        ]

        # Create rides
        rides = []
        for ride_data in rides_data:
            ride, created = Ride.objects.get_or_create(
                name=ride_data["name"], defaults=ride_data
            )
            rides.append(ride)
            if created and self.verbose:
                self.stdout.write(f" Created ride: {ride.name}")

        # Create PageViews with different patterns to test trending
        self.create_page_views(parks, rides)

        self.stdout.write("✓ Test data created")

    def create_page_views(self, parks, rides):
        """Create PageViews with different trending patterns."""
        now = timezone.now()

        # Pattern 1: Recently trending item (Steel Vengeance)
        steel_vengeance = next(r for r in rides if r.name == "Steel Vengeance")
        self.create_views_for_content(
            steel_vengeance, recent_views=50, older_views=10, base_time=now
        )

        # Pattern 2: Consistently popular item (Space Mountain)
        space_mountain = next(r for r in rides if r.name == "Space Mountain")
        self.create_views_for_content(
            space_mountain, recent_views=30, older_views=25, base_time=now
        )

        # Pattern 3: Declining popularity (Kingda Ka)
        kingda_ka = next(r for r in rides if r.name == "Kingda Ka")
        self.create_views_for_content(
            kingda_ka, recent_views=5, older_views=40, base_time=now
        )

        # Pattern 4: New but growing (Millennium Force)
        millennium_force = next(r for r in rides if r.name == "Millennium Force")
        self.create_views_for_content(
            millennium_force, recent_views=25, older_views=5, base_time=now
        )

        # Create some park views too
        cedar_point = next(p for p in parks if p.name == "Cedar Point")
        self.create_views_for_content(
            cedar_point, recent_views=35, older_views=20, base_time=now
        )

        if self.verbose:
            self.stdout.write(" Created PageView data for trending analysis")

    def create_views_for_content(
        self, content_object, recent_views, older_views, base_time
    ):
        """Create PageViews for a content object with specified patterns."""
        content_type = ContentType.objects.get_for_model(type(content_object))

        # Create recent views (last 2 hours)
        for i in range(recent_views):
            view_time = base_time - timedelta(
                minutes=random.randint(0, 120)  # Last 2 hours
            )
            PageView.objects.create(
                content_type=content_type,
                object_id=content_object.id,
                ip_address=f"192.168.1.{random.randint(1, 255)}",
                user_agent="Test Agent",
                timestamp=view_time,
            )

        # Create older views (2-24 hours ago)
        for i in range(older_views):
            view_time = base_time - timedelta(hours=random.randint(2, 24))
            PageView.objects.create(
                content_type=content_type,
                object_id=content_object.id,
                ip_address=f"10.0.0.{random.randint(1, 255)}",
                user_agent="Test Agent",
                timestamp=view_time,
            )

    def test_trending_algorithm(self):
        """Test the trending algorithm functionality."""
        self.stdout.write("Testing trending algorithm...")

        # Test trending content for different content types
        trending_parks = trending_service.get_trending_content(
            content_type="parks", limit=3
        )
        trending_rides = trending_service.get_trending_content(
            content_type="rides", limit=3
        )
        trending_all = trending_service.get_trending_content(
            content_type="all", limit=5
        )

        # Test new content
        new_parks = trending_service.get_new_content(content_type="parks", limit=3)
        new_rides = trending_service.get_new_content(content_type="rides", limit=3)
        new_all = trending_service.get_new_content(content_type="all", limit=5)

        if self.verbose:
            self.stdout.write(f" Trending parks: {len(trending_parks)} results")
            self.stdout.write(f" Trending rides: {len(trending_rides)} results")
            self.stdout.write(f" Trending all: {len(trending_all)} results")
            self.stdout.write(f" New parks: {len(new_parks)} results")
            self.stdout.write(f" New rides: {len(new_rides)} results")
            self.stdout.write(f" New all: {len(new_all)} results")

        self.stdout.write("✓ Trending algorithm working correctly")

    @staticmethod
    def _require_fields(item, fields, label):
        """Raise ValueError when *item* is missing any of *fields*."""
        for field in fields:
            if field not in item:
                raise ValueError(f"Missing required {label} field: {field}")

    def test_api_format(self):
        """Test that API responses match expected frontend format."""
        self.stdout.write("Testing API response format...")

        # Test trending content format
        trending_parks = trending_service.get_trending_content(
            content_type="parks", limit=3
        )
        # BUG FIX: ride results were previously fetched and discarded, so the
        # rides API format was never actually validated.
        trending_rides = trending_service.get_trending_content(
            content_type="rides", limit=3
        )

        # Test new content format
        new_parks = trending_service.get_new_content(content_type="parks", limit=3)
        new_rides = trending_service.get_new_content(content_type="rides", limit=3)

        required_trending_fields = [
            "id",
            "name",
            "slug",
            "views",
            "views_change",
            "rank",
        ]
        required_new_fields = ["id", "name", "slug"]

        # Verify trending data structure (parks and rides alike)
        if trending_parks:
            self._require_fields(trending_parks[0], required_trending_fields, "trending")
        if trending_rides:
            self._require_fields(trending_rides[0], required_trending_fields, "trending")

        # Verify new content data structure
        if new_parks:
            self._require_fields(new_parks[0], required_new_fields, "new content")
        if new_rides:
            self._require_fields(new_rides[0], required_new_fields, "new content")

        if self.verbose:
            self.stdout.write(" Sample trending park data:")
            if trending_parks:
                self.stdout.write(f" {trending_parks[0]}")

            self.stdout.write(" Sample new park data:")
            if new_parks:
                self.stdout.write(f" {new_parks[0]}")

        self.stdout.write("✓ API format validation passed")
||||
@@ -1,36 +0,0 @@
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.cache import cache
|
||||
from apps.parks.models import Park
|
||||
from apps.rides.models import Ride
|
||||
from apps.core.analytics import PageView
|
||||
|
||||
|
||||
class Command(BaseCommand):
    help = "Updates trending parks and rides cache based on views in the last 7 days"

    def handle(self, *args, **kwargs):
        """Refresh the cached trending parks and rides.

        Intended for a daily cron run: ranks content by page views from the
        last 7 days (168 hours), keeps the top 10 per model, and stores the
        results in the cache for 24 hours so the home page can render
        trending items without hitting the database on every request.
        """
        # Compute both rankings first, then persist them; dict preserves
        # insertion order, so parks are always cached before rides.
        rankings = {
            "trending_parks": PageView.get_trending_items(Park, hours=168, limit=10),
            "trending_rides": PageView.get_trending_items(Ride, hours=168, limit=10),
        }

        for cache_key, items in rankings.items():
            cache.set(cache_key, items, 86400)  # 86400 seconds = 24 hours

        self.stdout.write(
            self.style.SUCCESS(
                "Successfully updated trending parks and rides. "
                "Cached 10 items each for parks and rides based on views "
                "in the last 7 days."
            )
        )
||||
Reference in New Issue
Block a user