feat: major API restructure and Vue.js frontend integration

- Centralize API endpoints in dedicated api app with v1 versioning
- Remove individual API modules from parks and rides apps
- Add event tracking system with analytics functionality
- Integrate Vue.js frontend with Tailwind CSS v4 and TypeScript
- Add comprehensive database migrations for event tracking
- Implement user authentication and social provider setup
- Add API schema documentation and serializers
- Configure development environment with shared scripts
- Update project structure for monorepo with frontend/backend separation
pacnpal
2025-08-24 16:42:20 -04:00
parent 92f4104d7a
commit e62646bcf9
127 changed files with 27734 additions and 1867 deletions
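The first two bullets describe collapsing the per-app API modules into a single api app exposed under a versioned prefix. None of the new api app's files appear in the excerpted diffs below, so the following is only a minimal sketch of what a DRF v1 router module can look like; the module path, model import, and serializer/viewset names are assumptions, not the project's actual code.

# Hypothetical apps/api/v1/urls.py -- illustrative only, all names are assumptions.
from django.urls import include, path
from rest_framework import serializers, viewsets
from rest_framework.routers import DefaultRouter

from apps.parks.models import Park  # assumed model location


class ParkSerializer(serializers.ModelSerializer):
    class Meta:
        model = Park
        fields = ["id", "name", "slug"]


class ParkViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only endpoint registered once on the shared v1 router."""

    queryset = Park.objects.all()
    serializer_class = ParkSerializer


router = DefaultRouter()
router.register(r"parks", ParkViewSet, basename="park")

urlpatterns = [path("", include(router.urls))]

# The project-level urls.py would then mount everything under one versioned prefix:
#   path("api/v1/", include("apps.api.v1.urls"))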

View File

@@ -3,8 +3,11 @@ from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
 from django.utils import timezone
 from django.db.models import Count
+from datetime import timedelta
+import pghistory
+@pghistory.track()
 class PageView(models.Model):
     content_type = models.ForeignKey(
         ContentType, on_delete=models.CASCADE, related_name="page_views"
@@ -35,7 +38,7 @@ class PageView(models.Model):
         QuerySet: The trending items ordered by view count
         """
         content_type = ContentType.objects.get_for_model(model_class)
-        cutoff = timezone.now() - timezone.timedelta(hours=hours)
+        cutoff = timezone.now() - timedelta(hours=hours)
         # Query through the ContentType relationship
         item_ids = (
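The second hunk above swaps timezone.timedelta for the stdlib timedelta inside what looks like a trending-content helper, and the diff context cuts off at item_ids = (. Purely as an illustration of the pattern (the real method body is not shown), a ContentType-scoped aggregation along those lines could be written as:

# Sketch of a trending lookup over PageView (the model defined in this file);
# the exact queryset is an assumption since the diff truncates at `item_ids = (`.
from datetime import timedelta

from django.contrib.contenttypes.models import ContentType
from django.db.models import Count
from django.utils import timezone


def trending_object_ids(model_class, hours=24, limit=10):
    """Return ids of model_class rows with the most page views in the window."""
    content_type = ContentType.objects.get_for_model(model_class)
    cutoff = timezone.now() - timedelta(hours=hours)
    return (
        PageView.objects.filter(content_type=content_type, timestamp__gte=cutoff)
        .values("object_id")
        .annotate(view_count=Count("object_id"))
        .order_by("-view_count")
        .values_list("object_id", flat=True)[:limit]
    )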

View File

@@ -0,0 +1,171 @@
# Generated by Django 5.2.5 on 2025-08-24 19:25
import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0002_remove_content_type_name"),
("core", "0002_historicalslug_pageview"),
("pghistory", "0007_auto_20250421_0444"),
]
operations = [
migrations.CreateModel(
name="PageViewEvent",
fields=[
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
("pgh_label", models.TextField(help_text="The event label.")),
("id", models.BigIntegerField()),
("object_id", models.PositiveIntegerField()),
("timestamp", models.DateTimeField(auto_now_add=True)),
("ip_address", models.GenericIPAddressField()),
("user_agent", models.CharField(blank=True, max_length=512)),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="SlugHistoryEvent",
fields=[
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
("pgh_label", models.TextField(help_text="The event label.")),
("id", models.BigIntegerField()),
("object_id", models.CharField(max_length=50)),
("old_slug", models.SlugField(db_index=False, max_length=200)),
("created_at", models.DateTimeField(auto_now_add=True)),
],
options={
"abstract": False,
},
),
pgtrigger.migrations.AddTrigger(
model_name="pageview",
trigger=pgtrigger.compiler.Trigger(
name="insert_insert",
sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;',
hash="1682d124ea3ba215e630c7cfcde929f7444cf247",
operation="INSERT",
pgid="pgtrigger_insert_insert_ee1e1",
table="core_pageview",
when="AFTER",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="pageview",
trigger=pgtrigger.compiler.Trigger(
name="update_update",
sql=pgtrigger.compiler.UpsertTriggerSql(
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "core_pageviewevent" ("content_type_id", "id", "ip_address", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "timestamp", "user_agent") VALUES (NEW."content_type_id", NEW."id", NEW."ip_address", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."timestamp", NEW."user_agent"); RETURN NULL;',
hash="4221b2dd6636cae454f8d69c0c1841c40c47e6a6",
operation="UPDATE",
pgid="pgtrigger_update_update_3c505",
table="core_pageview",
when="AFTER",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="slughistory",
trigger=pgtrigger.compiler.Trigger(
name="insert_insert",
sql=pgtrigger.compiler.UpsertTriggerSql(
func='INSERT INTO "core_slughistoryevent" ("content_type_id", "created_at", "id", "object_id", "old_slug", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."object_id", NEW."old_slug", _pgh_attach_context(), NOW(), \'insert\', NEW."id"); RETURN NULL;',
hash="2a2a05025693c165b88e5eba7fcc23214749a78b",
operation="INSERT",
pgid="pgtrigger_insert_insert_3002a",
table="core_slughistory",
when="AFTER",
),
),
),
pgtrigger.migrations.AddTrigger(
model_name="slughistory",
trigger=pgtrigger.compiler.Trigger(
name="update_update",
sql=pgtrigger.compiler.UpsertTriggerSql(
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
func='INSERT INTO "core_slughistoryevent" ("content_type_id", "created_at", "id", "object_id", "old_slug", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id") VALUES (NEW."content_type_id", NEW."created_at", NEW."id", NEW."object_id", NEW."old_slug", _pgh_attach_context(), NOW(), \'update\', NEW."id"); RETURN NULL;',
hash="3ad197ccb6178668e762720341e45d3fd3216776",
operation="UPDATE",
pgid="pgtrigger_update_update_52030",
table="core_slughistory",
when="AFTER",
),
),
),
migrations.AddField(
model_name="pageviewevent",
name="content_type",
field=models.ForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="+",
related_query_name="+",
to="contenttypes.contenttype",
),
),
migrations.AddField(
model_name="pageviewevent",
name="pgh_context",
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="+",
to="pghistory.context",
),
),
migrations.AddField(
model_name="pageviewevent",
name="pgh_obj",
field=models.ForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="events",
to="core.pageview",
),
),
migrations.AddField(
model_name="slughistoryevent",
name="content_type",
field=models.ForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="+",
related_query_name="+",
to="contenttypes.contenttype",
),
),
migrations.AddField(
model_name="slughistoryevent",
name="pgh_context",
field=models.ForeignKey(
db_constraint=False,
null=True,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="+",
to="pghistory.context",
),
),
migrations.AddField(
model_name="slughistoryevent",
name="pgh_obj",
field=models.ForeignKey(
db_constraint=False,
on_delete=django.db.models.deletion.DO_NOTHING,
related_name="events",
to="core.slughistory",
),
),
]
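Everything in this migration is generated by @pghistory.track(): the *Event tables mirror the tracked models, and the Postgres triggers copy each insert or update into them with a pgh_label of 'insert' or 'update', so no Python-side signal handling is involved. A rough sketch of how that surfaces at runtime follows; the import path and the generic-relation field name are assumptions.

import pghistory

from apps.core.models import PageView, PageViewEvent  # assumed import path


def record_page_view(request, obj):
    # Values set in this block are attached to every event row written while it
    # is active, via the pgh_context_id foreign key added by this migration.
    with pghistory.context(user_id=getattr(request.user, "pk", None), url=request.path):
        # The AFTER INSERT trigger defined above copies this row into
        # core_pageviewevent with pgh_label='insert'.
        PageView.objects.create(
            content_object=obj,  # assumed GenericForeignKey name
            ip_address=request.META.get("REMOTE_ADDR", "0.0.0.0"),
            user_agent=request.META.get("HTTP_USER_AGENT", "")[:512],
        )


def recent_view_events(limit=20):
    # Reading the audit trail back is a plain queryset over the event model.
    return PageViewEvent.objects.order_by("-pgh_created_at")[:limit]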

View File

@@ -3,8 +3,10 @@ from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
 from django.utils.text import slugify
 from apps.core.history import TrackedModel
+import pghistory
+@pghistory.track()
 class SlugHistory(models.Model):
     """
     Model for tracking slug changes across all models that use slugs.
@@ -40,7 +42,7 @@ class SluggedModel(TrackedModel):
     name = models.CharField(max_length=200)
     slug = models.SlugField(max_length=200, unique=True)
-    class Meta:
+    class Meta(TrackedModel.Meta):
         abstract = True
     def save(self, *args, **kwargs):
@@ -83,16 +85,20 @@ class SluggedModel(TrackedModel):
             # Try to get by current slug first
             return cls.objects.get(slug=slug), False
         except cls.DoesNotExist:
-            # Check pghistory first
-            history_model = cls.get_history_model()
-            history_entry = (
-                history_model.objects.filter(slug=slug)
-                .order_by("-pgh_created_at")
-                .first()
-            )
-            if history_entry:
-                return cls.objects.get(id=history_entry.pgh_obj_id), True
+            # Check pghistory first if available
+            try:
+                import pghistory.models
+                history_entries = pghistory.models.Events.objects.filter(
+                    pgh_model=f"{cls._meta.app_label}.{cls._meta.model_name}", slug=slug
+                ).order_by("-pgh_created_at")
+                if history_entries:
+                    history_entry = history_entries.first()
+                    if history_entry:
+                        return cls.objects.get(id=history_entry.pgh_obj_id), True
+            except (ImportError, AttributeError):
+                pass
             # Try to find in manual slug history as fallback
             history = (
@@ -106,7 +112,7 @@ class SluggedModel(TrackedModel):
         if history:
             return (
-                cls.objects.get(**{cls.get_id_field_name(): history.object_id}),
+                cls.objects.get(**{cls().get_id_field_name(): history.object_id}),
                 True,
             )
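The rewritten lookup above falls back from the live slug field to pghistory's aggregate Events model and finally to the manual SlugHistory table, returning an (object, used_old_slug) pair. A typical consumer is a detail view that issues a permanent redirect when an old slug was used; the sketch below assumes a Park model and a find_by_slug classmethod name, neither of which is visible in this hunk.

# Hypothetical view built on the (object, used_old_slug) return shape shown above.
from django.http import Http404
from django.shortcuts import redirect, render

from apps.parks.models import Park  # assumed SluggedModel subclass


def park_detail(request, slug):
    try:
        park, used_old_slug = Park.find_by_slug(slug)  # assumed method name
    except Park.DoesNotExist:
        raise Http404("No park matches that slug")
    if used_old_slug:
        # Permanent redirect from the retired slug to the canonical URL.
        return redirect("park_detail", slug=park.slug, permanent=True)
    return render(request, "parks/park_detail.html", {"park": park})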

View File

@@ -1,273 +0,0 @@
"""
Enhanced health check views for API monitoring.
"""
import time
from django.http import JsonResponse
from django.utils import timezone
from django.views import View
from django.conf import settings
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import AllowAny
from health_check.views import MainView
from apps.core.services.enhanced_cache_service import CacheMonitor
from apps.core.utils.query_optimization import IndexAnalyzer
class HealthCheckAPIView(APIView):
"""
Enhanced API endpoint for health checks with detailed JSON response
"""
permission_classes = [AllowAny] # Public endpoint
def get(self, request):
"""Return comprehensive health check information"""
start_time = time.time()
# Get basic health check results
main_view = MainView()
main_view.request = request
plugins = main_view.plugins
errors = main_view.errors
# Collect additional performance metrics
cache_monitor = CacheMonitor()
cache_stats = cache_monitor.get_cache_stats()
# Build comprehensive health data
health_data = {
"status": "healthy" if not errors else "unhealthy",
"timestamp": timezone.now().isoformat(),
"version": getattr(settings, "VERSION", "1.0.0"),
"environment": getattr(settings, "ENVIRONMENT", "development"),
"response_time_ms": 0, # Will be calculated at the end
"checks": {},
"metrics": {
"cache": cache_stats,
"database": self._get_database_metrics(),
"system": self._get_system_metrics(),
},
}
# Process individual health checks
for plugin in plugins:
plugin_name = plugin.identifier()
plugin_errors = errors.get(plugin.__class__.__name__, [])
health_data["checks"][plugin_name] = {
"status": "healthy" if not plugin_errors else "unhealthy",
"critical": getattr(plugin, "critical_service", False),
"errors": [str(error) for error in plugin_errors],
"response_time_ms": getattr(plugin, "_response_time", None),
}
# Calculate total response time
health_data["response_time_ms"] = round((time.time() - start_time) * 1000, 2)
# Determine HTTP status code
status_code = 200
if errors:
# Check if any critical services are failing
critical_errors = any(
getattr(plugin, "critical_service", False)
for plugin in plugins
if errors.get(plugin.__class__.__name__)
)
status_code = 503 if critical_errors else 200
return Response(health_data, status=status_code)
def _get_database_metrics(self):
"""Get database performance metrics"""
try:
from django.db import connection
# Get basic connection info
metrics = {
"vendor": connection.vendor,
"connection_status": "connected",
}
# Test query performance
start_time = time.time()
with connection.cursor() as cursor:
cursor.execute("SELECT 1")
cursor.fetchone()
query_time = (time.time() - start_time) * 1000
metrics["test_query_time_ms"] = round(query_time, 2)
# PostgreSQL specific metrics
if connection.vendor == "postgresql":
try:
with connection.cursor() as cursor:
cursor.execute(
"""
SELECT
numbackends as active_connections,
xact_commit as transactions_committed,
xact_rollback as transactions_rolled_back,
blks_read as blocks_read,
blks_hit as blocks_hit
FROM pg_stat_database
WHERE datname = current_database()
"""
)
row = cursor.fetchone()
if row:
metrics.update(
{
"active_connections": row[0],
"transactions_committed": row[1],
"transactions_rolled_back": row[2],
"cache_hit_ratio": (
round(
(row[4] / (row[3] + row[4])) * 100,
2,
)
if (row[3] + row[4]) > 0
else 0
),
}
)
except Exception:
pass # Skip advanced metrics if not available
return metrics
except Exception as e:
return {"connection_status": "error", "error": str(e)}
def _get_system_metrics(self):
"""Get system performance metrics"""
metrics = {
"debug_mode": settings.DEBUG,
"allowed_hosts": (settings.ALLOWED_HOSTS if settings.DEBUG else ["hidden"]),
}
try:
import psutil
# Memory metrics
memory = psutil.virtual_memory()
metrics["memory"] = {
"total_mb": round(memory.total / 1024 / 1024, 2),
"available_mb": round(memory.available / 1024 / 1024, 2),
"percent_used": memory.percent,
}
# CPU metrics
metrics["cpu"] = {
"percent_used": psutil.cpu_percent(interval=0.1),
"core_count": psutil.cpu_count(),
}
# Disk metrics
disk = psutil.disk_usage("/")
metrics["disk"] = {
"total_gb": round(disk.total / 1024 / 1024 / 1024, 2),
"free_gb": round(disk.free / 1024 / 1024 / 1024, 2),
"percent_used": round((disk.used / disk.total) * 100, 2),
}
except ImportError:
metrics["system_monitoring"] = "psutil not available"
except Exception as e:
metrics["system_error"] = str(e)
return metrics
class PerformanceMetricsView(APIView):
"""
API view for performance metrics and database analysis
"""
permission_classes = [AllowAny] if settings.DEBUG else []
def get(self, request):
"""Return performance metrics and analysis"""
if not settings.DEBUG:
return Response({"error": "Only available in debug mode"}, status=403)
metrics = {
"timestamp": timezone.now().isoformat(),
"database_analysis": self._get_database_analysis(),
"cache_performance": self._get_cache_performance(),
"recent_slow_queries": self._get_slow_queries(),
}
return Response(metrics)
def _get_database_analysis(self):
"""Analyze database performance"""
try:
from django.db import connection
analysis = {
"total_queries": len(connection.queries),
"query_analysis": IndexAnalyzer.analyze_slow_queries(0.05),
}
if connection.queries:
query_times = [float(q.get("time", 0)) for q in connection.queries]
analysis.update(
{
"total_query_time": sum(query_times),
"average_query_time": sum(query_times) / len(query_times),
"slowest_query_time": max(query_times),
"fastest_query_time": min(query_times),
}
)
return analysis
except Exception as e:
return {"error": str(e)}
def _get_cache_performance(self):
"""Get cache performance metrics"""
try:
cache_monitor = CacheMonitor()
return cache_monitor.get_cache_stats()
except Exception as e:
return {"error": str(e)}
def _get_slow_queries(self):
"""Get recent slow queries"""
try:
return IndexAnalyzer.analyze_slow_queries(0.1) # 100ms threshold
except Exception as e:
return {"error": str(e)}
class SimpleHealthView(View):
"""
Simple health check endpoint for load balancers
"""
def get(self, request):
"""Return simple OK status"""
try:
# Basic database connectivity test
from django.db import connection
with connection.cursor() as cursor:
cursor.execute("SELECT 1")
cursor.fetchone()
return JsonResponse(
{"status": "ok", "timestamp": timezone.now().isoformat()}
)
except Exception as e:
return JsonResponse(
{
"status": "error",
"error": str(e),
"timestamp": timezone.now().isoformat(),
},
status=503,
)
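This whole module is removed by the commit, consistent with the bullet about centralizing endpoints in the dedicated api app. For reference, an endpoint like SimpleHealthView is usually exercised with Django's test client; the path below is an assumption, since no URL configuration appears in this excerpt.

# Hedged smoke test for a load-balancer style health endpoint; the route is assumed.
from django.test import Client


def test_simple_health_endpoint():
    client = Client()
    response = client.get("/health/")  # hypothetical path
    assert response.status_code in (200, 503)
    payload = response.json()
    assert "status" in payload and "timestamp" in payload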