Mirror of https://github.com/pacnpal/thrilltrack-explorer.git (synced 2025-12-21 21:31:12 -05:00)
Refactor code structure and remove redundant changes
django-backend/apps/__init__.py (new file, 0 lines)
django-backend/apps/contact/__init__.py (new file, 0 lines)
django-backend/apps/contact/admin.py (new file, 115 lines)
@@ -0,0 +1,115 @@
"""
Django admin interface for Contact submissions.
"""
from django.contrib import admin
from django.utils.html import format_html
from django.utils import timezone
from .models import ContactSubmission


@admin.register(ContactSubmission)
class ContactSubmissionAdmin(admin.ModelAdmin):
    """Admin interface for managing contact submissions."""

    list_display = ['ticket_number', 'name', 'email', 'category', 'status_badge', 'assigned_to', 'created_at']
    list_filter = ['status', 'category', 'created_at', 'assigned_to']
    search_fields = ['ticket_number', 'name', 'email', 'subject', 'message']
    readonly_fields = ['id', 'ticket_number', 'user', 'created_at', 'updated_at', 'resolved_at']

    fieldsets = (
        ('Contact Information', {'fields': ('ticket_number', 'name', 'email', 'user', 'category')}),
        ('Message', {'fields': ('subject', 'message')}),
        ('Status & Assignment', {'fields': ('status', 'assigned_to', 'admin_notes')}),
        ('Resolution', {'fields': ('resolved_at', 'resolved_by'), 'classes': ('collapse',)}),
        ('Metadata', {'fields': ('id', 'created_at', 'updated_at'), 'classes': ('collapse',)}),
    )

    def status_badge(self, obj):
        """Display status with colored badge."""
        colors = {
            'pending': '#ff9800',
            'in_progress': '#2196f3',
            'resolved': '#4caf50',
            'archived': '#9e9e9e',
        }
        color = colors.get(obj.status, '#9e9e9e')
        return format_html(
            '<span style="background-color: {}; color: white; padding: 3px 10px; '
            'border-radius: 3px; font-weight: bold;">{}</span>',
            color,
            obj.get_status_display()
        )
    status_badge.short_description = 'Status'

    def save_model(self, request, obj, form, change):
        """Auto-set resolved_by when status changes to resolved."""
        if change and 'status' in form.changed_data:
            if obj.status == 'resolved' and not obj.resolved_by:
                obj.resolved_by = request.user
                obj.resolved_at = timezone.now()
        super().save_model(request, obj, form, change)

    actions = ['mark_as_in_progress', 'mark_as_resolved', 'assign_to_me']

    def mark_as_in_progress(self, request, queryset):
        """Mark selected submissions as in progress."""
        updated = queryset.update(status='in_progress')
        self.message_user(request, f'{updated} submission(s) marked as in progress.')
    mark_as_in_progress.short_description = "Mark as In Progress"

    def mark_as_resolved(self, request, queryset):
        """Mark selected submissions as resolved."""
        updated = queryset.filter(status__in=['pending', 'in_progress']).update(
            status='resolved',
            resolved_at=timezone.now(),
            resolved_by=request.user
        )
        self.message_user(request, f'{updated} submission(s) marked as resolved.')
    mark_as_resolved.short_description = "Mark as Resolved"

    def assign_to_me(self, request, queryset):
        """Assign selected submissions to current user."""
        updated = queryset.update(assigned_to=request.user)
        self.message_user(request, f'{updated} submission(s) assigned to you.')
    assign_to_me.short_description = "Assign to Me"
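The bulk actions above use queryset.update(), which bypasses the model's save() override, so mark_as_resolved sets resolved_at and resolved_by explicitly and only touches rows still in 'pending' or 'in_progress'. A minimal pytest-django sketch of that behaviour (the rf and admin_user fixtures and the message_user stub are illustrative assumptions, not part of this commit):

# Hypothetical test sketch; assumes pytest-django and its rf/admin_user fixtures.
import pytest
from django.contrib.admin.sites import AdminSite
from apps.contact.admin import ContactSubmissionAdmin
from apps.contact.models import ContactSubmission

@pytest.mark.django_db
def test_mark_as_resolved_only_touches_open_tickets(rf, admin_user):
    pending = ContactSubmission.objects.create(
        name="A", email="a@example.com", subject="s", message="m")
    archived = ContactSubmission.objects.create(
        name="B", email="b@example.com", subject="s", message="m", status="archived")

    admin_obj = ContactSubmissionAdmin(ContactSubmission, AdminSite())
    request = rf.post("/")
    request.user = admin_user
    # message_user needs the messages framework; a no-op stub keeps the sketch small.
    admin_obj.message_user = lambda *args, **kwargs: None

    admin_obj.mark_as_resolved(request, ContactSubmission.objects.all())

    pending.refresh_from_db()
    archived.refresh_from_db()
    assert pending.status == "resolved" and pending.resolved_by == admin_user
    assert archived.status == "archived"  # excluded by the status__in filter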
django-backend/apps/contact/apps.py (new file, 7 lines)
@@ -0,0 +1,7 @@
from django.apps import AppConfig


class ContactConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.contact'
    verbose_name = 'Contact Management'
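For ContactConfig to load, the app still has to appear in the project settings. A minimal sketch, assuming the backend's settings module (the exact path and the surrounding entries are assumptions):

# settings.py sketch (module path is an assumption)
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    # third-party packages referenced by this commit
    "pgtrigger",
    "pghistory",
    # project apps, registered by their package paths
    "apps.core",
    "apps.contact",
    "apps.entities",
    "apps.users",
]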
django-backend/apps/contact/migrations/0001_initial.py (new file, 300 lines)
@@ -0,0 +1,300 @@
# Generated by Django 4.2.8 on 2025-11-09 17:45

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import pgtrigger.compiler
import pgtrigger.migrations
import uuid


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("pghistory", "0006_delete_aggregateevent"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name="ContactSubmission",
            fields=[
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("name", models.CharField(max_length=255)),
                ("email", models.EmailField(max_length=254)),
                ("subject", models.CharField(max_length=255)),
                ("message", models.TextField()),
                ("category", models.CharField(choices=[("general", "General Inquiry"), ("bug", "Bug Report"), ("feature", "Feature Request"), ("abuse", "Report Abuse"), ("data", "Data Correction"), ("account", "Account Issue"), ("other", "Other")], default="general", max_length=50)),
                ("status", models.CharField(choices=[("pending", "Pending Review"), ("in_progress", "In Progress"), ("resolved", "Resolved"), ("archived", "Archived")], db_index=True, default="pending", max_length=20)),
                ("ticket_number", models.CharField(blank=True, help_text="Auto-generated ticket number for tracking", max_length=20, null=True, unique=True)),
                ("admin_notes", models.TextField(blank=True, help_text="Internal notes for admin use only", null=True)),
                ("resolved_at", models.DateTimeField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("assigned_to", models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="assigned_contacts", to=settings.AUTH_USER_MODEL)),
                ("resolved_by", models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="resolved_contacts", to=settings.AUTH_USER_MODEL)),
                ("user", models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name="contact_submissions", to=settings.AUTH_USER_MODEL)),
            ],
            options={
                "verbose_name": "Contact Submission",
                "verbose_name_plural": "Contact Submissions",
                "ordering": ["-created_at"],
            },
        ),
        migrations.CreateModel(
            name="ContactSubmissionEvent",
            fields=[
                ("pgh_id", models.AutoField(primary_key=True, serialize=False)),
                ("pgh_created_at", models.DateTimeField(auto_now_add=True)),
                ("pgh_label", models.TextField(help_text="The event label.")),
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, serialize=False)),
                ("name", models.CharField(max_length=255)),
                ("email", models.EmailField(max_length=254)),
                ("subject", models.CharField(max_length=255)),
                ("message", models.TextField()),
                ("category", models.CharField(choices=[("general", "General Inquiry"), ("bug", "Bug Report"), ("feature", "Feature Request"), ("abuse", "Report Abuse"), ("data", "Data Correction"), ("account", "Account Issue"), ("other", "Other")], default="general", max_length=50)),
                ("status", models.CharField(choices=[("pending", "Pending Review"), ("in_progress", "In Progress"), ("resolved", "Resolved"), ("archived", "Archived")], default="pending", max_length=20)),
                ("ticket_number", models.CharField(blank=True, help_text="Auto-generated ticket number for tracking", max_length=20, null=True)),
                ("admin_notes", models.TextField(blank=True, help_text="Internal notes for admin use only", null=True)),
                ("resolved_at", models.DateTimeField(blank=True, null=True)),
                ("created_at", models.DateTimeField(auto_now_add=True)),
                ("updated_at", models.DateTimeField(auto_now=True)),
                ("assigned_to", models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name="+", related_query_name="+", to=settings.AUTH_USER_MODEL)),
                ("pgh_context", models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name="+", related_query_name="+", to="pghistory.context")),
                ("pgh_obj", models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name="events", related_query_name="+", to="contact.contactsubmission")),
                ("resolved_by", models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name="+", related_query_name="+", to=settings.AUTH_USER_MODEL)),
                ("user", models.ForeignKey(blank=True, db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name="+", related_query_name="+", to=settings.AUTH_USER_MODEL)),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.AddIndex(
            model_name="contactsubmission",
            index=models.Index(fields=["status", "-created_at"], name="contact_con_status_0384dd_idx"),
        ),
        migrations.AddIndex(
            model_name="contactsubmission",
            index=models.Index(fields=["category", "-created_at"], name="contact_con_categor_72d10a_idx"),
        ),
        migrations.AddIndex(
            model_name="contactsubmission",
            index=models.Index(fields=["ticket_number"], name="contact_con_ticket__fac4eb_idx"),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="contactsubmission",
            trigger=pgtrigger.compiler.Trigger(
                name="insert_insert",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func='INSERT INTO "contact_contactsubmissionevent" ("admin_notes", "assigned_to_id", "category", "created_at", "email", "id", "message", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "resolved_at", "resolved_by_id", "status", "subject", "ticket_number", "updated_at", "user_id") VALUES (NEW."admin_notes", NEW."assigned_to_id", NEW."category", NEW."created_at", NEW."email", NEW."id", NEW."message", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."resolved_at", NEW."resolved_by_id", NEW."status", NEW."subject", NEW."ticket_number", NEW."updated_at", NEW."user_id"); RETURN NULL;',
                    hash="cbbb92ce277f4fa1d4fe3dccd8e111b39c9bc9a6",
                    operation="INSERT",
                    pgid="pgtrigger_insert_insert_32905",
                    table="contact_contactsubmission",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="contactsubmission",
            trigger=pgtrigger.compiler.Trigger(
                name="update_update",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
                    func='INSERT INTO "contact_contactsubmissionevent" ("admin_notes", "assigned_to_id", "category", "created_at", "email", "id", "message", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "resolved_at", "resolved_by_id", "status", "subject", "ticket_number", "updated_at", "user_id") VALUES (NEW."admin_notes", NEW."assigned_to_id", NEW."category", NEW."created_at", NEW."email", NEW."id", NEW."message", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."resolved_at", NEW."resolved_by_id", NEW."status", NEW."subject", NEW."ticket_number", NEW."updated_at", NEW."user_id"); RETURN NULL;',
                    hash="ff38205a830f0b09c39d88d8bcce780f7c2fd2ab",
                    operation="UPDATE",
                    pgid="pgtrigger_update_update_a7348",
                    table="contact_contactsubmission",
                    when="AFTER",
                ),
            ),
        ),
    ]
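The ContactSubmissionEvent model plus the two AFTER INSERT/UPDATE triggers are what @pghistory.track() on the model expands to: every insert or update of a contact row is copied into the event table at the database level. A small sketch of reading that audit trail back (the events accessor follows from related_name="events" on pgh_obj above; values are illustrative):

# Sketch: inspecting the pghistory audit trail for one submission.
from apps.contact.models import ContactSubmission

submission = ContactSubmission.objects.order_by("-created_at").first()
for event in submission.events.order_by("pgh_created_at"):
    # pgh_label is 'insert' or 'update', matching the two triggers in this migration
    print(event.pgh_created_at, event.pgh_label, event.status, event.assigned_to_id)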
django-backend/apps/contact/migrations/__init__.py (new file, 0 lines)
django-backend/apps/contact/models.py (new file, 135 lines)
@@ -0,0 +1,135 @@
"""
Contact submission models for user inquiries and support tickets.
"""
import uuid
import pghistory
from django.db import models
from django.utils import timezone


@pghistory.track()
class ContactSubmission(models.Model):
    """
    User-submitted contact form messages and support tickets.
    Tracks all communication from users for admin follow-up.
    """
    STATUS_CHOICES = [
        ('pending', 'Pending Review'),
        ('in_progress', 'In Progress'),
        ('resolved', 'Resolved'),
        ('archived', 'Archived'),
    ]

    CATEGORY_CHOICES = [
        ('general', 'General Inquiry'),
        ('bug', 'Bug Report'),
        ('feature', 'Feature Request'),
        ('abuse', 'Report Abuse'),
        ('data', 'Data Correction'),
        ('account', 'Account Issue'),
        ('other', 'Other'),
    ]

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    # Contact Information
    name = models.CharField(max_length=255)
    email = models.EmailField()
    subject = models.CharField(max_length=255)
    message = models.TextField()
    category = models.CharField(max_length=50, choices=CATEGORY_CHOICES, default='general')

    # Status & Assignment
    status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='pending', db_index=True)
    ticket_number = models.CharField(max_length=20, unique=True, null=True, blank=True, help_text="Auto-generated ticket number for tracking")

    # User Association (if logged in when submitting)
    user = models.ForeignKey('users.User', null=True, blank=True, on_delete=models.SET_NULL, related_name='contact_submissions')

    # Assignment & Resolution
    assigned_to = models.ForeignKey('users.User', null=True, blank=True, on_delete=models.SET_NULL, related_name='assigned_contacts')
    admin_notes = models.TextField(null=True, blank=True, help_text="Internal notes for admin use only")
    resolved_at = models.DateTimeField(null=True, blank=True)
    resolved_by = models.ForeignKey('users.User', null=True, blank=True, on_delete=models.SET_NULL, related_name='resolved_contacts')

    # Timestamps
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Contact Submission'
        verbose_name_plural = 'Contact Submissions'
        ordering = ['-created_at']
        indexes = [
            models.Index(fields=['status', '-created_at']),
            models.Index(fields=['category', '-created_at']),
            models.Index(fields=['ticket_number']),
        ]

    def __str__(self):
        ticket = f" ({self.ticket_number})" if self.ticket_number else ""
        return f"{self.name} - {self.get_category_display()}{ticket}"

    def save(self, *args, **kwargs):
        # Auto-generate ticket number if not set
        if not self.ticket_number:
            # Format: CONT-YYYYMMDD-XXXX
            from django.db.models import Max
            today = timezone.now().strftime('%Y%m%d')
            prefix = f"CONT-{today}"

            # Get the highest ticket number for today
            last_ticket = ContactSubmission.objects.filter(
                ticket_number__startswith=prefix
            ).aggregate(Max('ticket_number'))['ticket_number__max']

            if last_ticket:
                # Extract the sequence number and increment
                seq = int(last_ticket.split('-')[-1]) + 1
            else:
                seq = 1

            self.ticket_number = f"{prefix}-{seq:04d}"

        # Set resolved_at when status changes to resolved
        if self.status == 'resolved' and not self.resolved_at:
            self.resolved_at = timezone.now()

        super().save(*args, **kwargs)
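A short usage sketch of the save() override above. The concrete values are illustrative, but the CONT-YYYYMMDD-NNNN shape and the per-day sequence follow directly from the code:

# Sketch: ticket numbers are assigned on first save and never regenerated.
from apps.contact.models import ContactSubmission

first = ContactSubmission.objects.create(
    name="Jane Rider", email="jane@example.com",
    subject="Broken photo upload", message="...", category="bug",
)
second = ContactSubmission.objects.create(
    name="Sam Coaster", email="sam@example.com",
    subject="Missing park", message="...", category="data",
)
print(first.ticket_number)   # e.g. CONT-20251109-0001 (date comes from timezone.now())
print(second.ticket_number)  # e.g. CONT-20251109-0002

# Note (observation, not in the source): the Max()-then-increment lookup is not
# serialized by a lock or transaction, so two truly concurrent saves could race
# for the same sequence; the unique=True constraint on ticket_number would then
# surface as an IntegrityError rather than a silent duplicate.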
django-backend/apps/contact/tasks.py (new file, 150 lines)
@@ -0,0 +1,150 @@
"""
Celery tasks for contact submission notifications.
"""
from celery import shared_task
from django.core.mail import send_mail
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.html import strip_tags


@shared_task
def send_contact_confirmation_email(contact_id):
    """
    Send confirmation email to user who submitted contact form.

    Args:
        contact_id: UUID of the ContactSubmission
    """
    from .models import ContactSubmission

    try:
        contact = ContactSubmission.objects.get(id=contact_id)

        # Render email template
        html_message = render_to_string('emails/contact_confirmation.html', {
            'name': contact.name,
            'ticket_number': contact.ticket_number,
            'subject': contact.subject,
            'category': contact.get_category_display(),
            'message': contact.message,
        })
        plain_message = strip_tags(html_message)

        # Send email
        send_mail(
            subject=f'Contact Form Received - Ticket #{contact.ticket_number}',
            message=plain_message,
            from_email=settings.DEFAULT_FROM_EMAIL,
            recipient_list=[contact.email],
            html_message=html_message,
            fail_silently=False,
        )

        return f"Confirmation email sent to {contact.email}"

    except ContactSubmission.DoesNotExist:
        return f"Contact submission {contact_id} not found"
    except Exception as e:
        # Log error but don't fail the task
        print(f"Error sending contact confirmation: {str(e)}")
        raise


@shared_task
def notify_admins_new_contact(contact_id):
    """
    Notify admin team of new contact submission.

    Args:
        contact_id: UUID of the ContactSubmission
    """
    from .models import ContactSubmission
    from apps.users.models import User

    try:
        contact = ContactSubmission.objects.get(id=contact_id)

        # Get all admin and moderator emails
        admin_emails = User.objects.filter(
            role__in=['admin', 'moderator']
        ).values_list('email', flat=True)

        if not admin_emails:
            return "No admin emails found"

        # Render email template
        html_message = render_to_string('emails/contact_admin_notification.html', {
            'ticket_number': contact.ticket_number,
            'name': contact.name,
            'email': contact.email,
            'subject': contact.subject,
            'category': contact.get_category_display(),
            'message': contact.message,
            'admin_url': f"{settings.SITE_URL}/admin/contact/contactsubmission/{contact.id}/change/",
        })
        plain_message = strip_tags(html_message)

        # Send email
        send_mail(
            subject=f'New Contact Submission - Ticket #{contact.ticket_number}',
            message=plain_message,
            from_email=settings.DEFAULT_FROM_EMAIL,
            recipient_list=list(admin_emails),
            html_message=html_message,
            fail_silently=False,
        )

        return f"Admin notification sent to {len(admin_emails)} admin(s)"

    except ContactSubmission.DoesNotExist:
        return f"Contact submission {contact_id} not found"
    except Exception as e:
        # Log error but don't fail the task
        print(f"Error sending admin notification: {str(e)}")
        raise


@shared_task
def send_contact_resolution_email(contact_id):
    """
    Send email to user when their contact submission is resolved.

    Args:
        contact_id: UUID of the ContactSubmission
    """
    from .models import ContactSubmission

    try:
        contact = ContactSubmission.objects.get(id=contact_id)

        if contact.status != 'resolved':
            return f"Contact {contact_id} is not resolved yet"

        # Render email template
        html_message = render_to_string('emails/contact_resolved.html', {
            'name': contact.name,
            'ticket_number': contact.ticket_number,
            'subject': contact.subject,
            'resolved_by': contact.resolved_by.username if contact.resolved_by else 'Support Team',
        })
        plain_message = strip_tags(html_message)

        # Send email
        send_mail(
            subject=f'Your Support Ticket Has Been Resolved - #{contact.ticket_number}',
            message=plain_message,
            from_email=settings.DEFAULT_FROM_EMAIL,
            recipient_list=[contact.email],
            html_message=html_message,
            fail_silently=False,
        )

        return f"Resolution email sent to {contact.email}"

    except ContactSubmission.DoesNotExist:
        return f"Contact submission {contact_id} not found"
    except Exception as e:
        # Log error but don't fail the task
        print(f"Error sending resolution email: {str(e)}")
        raise
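These are ordinary Celery shared tasks, so callers enqueue them by id rather than by model instance. A minimal sketch of how a contact-form view or serializer might fan them out (the helper function is hypothetical; .delay() is standard Celery API):

# Sketch: enqueue the notification tasks after a submission is created.
from apps.contact.models import ContactSubmission
from apps.contact.tasks import (
    send_contact_confirmation_email,
    notify_admins_new_contact,
)

def handle_contact_form(validated_data):
    """Hypothetical helper called from a DRF serializer or form view."""
    contact = ContactSubmission.objects.create(**validated_data)
    # Pass the UUID as a string so the task payload stays JSON-serializable.
    send_contact_confirmation_email.delay(str(contact.id))
    notify_admins_new_contact.delay(str(contact.id))
    return contact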
django-backend/apps/core/__init__.py (new file, 0 lines)
django-backend/apps/core/apps.py (new file, 11 lines)
@@ -0,0 +1,11 @@
"""
Core app configuration.
"""

from django.apps import AppConfig


class CoreConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.core'
    verbose_name = 'Core'
django-backend/apps/core/migrations/0001_initial.py (new file, 194 lines)
@@ -0,0 +1,194 @@
# Generated by Django 4.2.8 on 2025-11-08 16:35

from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import django_lifecycle.mixins
import model_utils.fields
import uuid


class Migration(migrations.Migration):

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name="Country",
            fields=[
                ("created", model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name="created")),
                ("modified", model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name="modified")),
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("name", models.CharField(max_length=255, unique=True)),
                ("code", models.CharField(help_text="ISO 3166-1 alpha-2 country code", max_length=2, unique=True)),
                ("code3", models.CharField(blank=True, help_text="ISO 3166-1 alpha-3 country code", max_length=3)),
            ],
            options={
                "verbose_name_plural": "countries",
                "db_table": "countries",
                "ordering": ["name"],
            },
            bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="Subdivision",
            fields=[
                ("created", model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name="created")),
                ("modified", model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name="modified")),
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("name", models.CharField(max_length=255)),
                ("code", models.CharField(help_text="ISO 3166-2 subdivision code (without country prefix)", max_length=10)),
                ("subdivision_type", models.CharField(blank=True, help_text="Type of subdivision (state, province, region, etc.)", max_length=50)),
                ("country", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name="subdivisions", to="core.country")),
            ],
            options={
                "db_table": "subdivisions",
                "ordering": ["country", "name"],
                "unique_together": {("country", "code")},
            },
            bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
        ),
        migrations.CreateModel(
            name="Locality",
            fields=[
                ("created", model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name="created")),
                ("modified", model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name="modified")),
                ("id", models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ("name", models.CharField(max_length=255)),
                ("latitude", models.DecimalField(blank=True, decimal_places=6, max_digits=9, null=True)),
                ("longitude", models.DecimalField(blank=True, decimal_places=6, max_digits=9, null=True)),
                ("subdivision", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name="localities", to="core.subdivision")),
            ],
            options={
                "verbose_name_plural": "localities",
                "db_table": "localities",
                "ordering": ["subdivision", "name"],
                "indexes": [models.Index(fields=["subdivision", "name"], name="localities_subdivi_675d5a_idx")],
            },
            bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
        ),
    ]
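A small shell sketch of how the three reference tables created by this migration chain together (the values are illustrative only):

# Sketch: seeding one locality through the Country -> Subdivision -> Locality chain.
from apps.core.models import Country, Subdivision, Locality

us = Country.objects.create(name="United States", code="US", code3="USA")
ca = Subdivision.objects.create(country=us, name="California", code="CA",
                                subdivision_type="state")
la = Locality.objects.create(subdivision=ca, name="Los Angeles",
                             latitude="34.052235", longitude="-118.243683")

# unique_together ("country", "code") means a second "CA" under the same country
# raises IntegrityError, while "CA" under a different country is allowed.
print(la.full_location)  # Los Angeles, California, United States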
django-backend/apps/core/migrations/__init__.py (new file, 0 lines)
django-backend/apps/core/models.py (new file, 240 lines)
@@ -0,0 +1,240 @@
"""
Core base models and utilities for ThrillWiki.
These abstract models provide common functionality for all entities.
"""

import uuid
from django.db import models
from model_utils.models import TimeStampedModel
from django_lifecycle import LifecycleModel, hook, AFTER_CREATE, AFTER_UPDATE
from dirtyfields import DirtyFieldsMixin


class BaseModel(LifecycleModel, TimeStampedModel):
    """
    Abstract base model for all entities.

    Provides:
    - UUID primary key
    - created_at and updated_at timestamps (from TimeStampedModel)
    - Lifecycle hooks for versioning
    """
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)

    class Meta:
        abstract = True

    def __str__(self):
        return f"{self.__class__.__name__}({self.id})"


class VersionedModel(DirtyFieldsMixin, BaseModel):
    """
    Abstract base model for entities that track field changes.

    Uses DirtyFieldsMixin to track which fields changed.
    History tracking is now handled automatically by pghistory decorators.

    Note: This class is kept for backwards compatibility and the DirtyFieldsMixin
    functionality, but no longer triggers custom versioning.
    """

    class Meta:
        abstract = True


# Location Models

class Country(BaseModel):
    """
    Country reference data (ISO 3166-1).

    Examples: United States, Canada, United Kingdom, etc.
    """
    name = models.CharField(max_length=255, unique=True)
    code = models.CharField(max_length=2, unique=True, help_text="ISO 3166-1 alpha-2 country code")
    code3 = models.CharField(max_length=3, blank=True, help_text="ISO 3166-1 alpha-3 country code")

    class Meta:
        db_table = 'countries'
        ordering = ['name']
        verbose_name_plural = 'countries'

    def __str__(self):
        return self.name


class Subdivision(BaseModel):
    """
    State/Province/Region reference data (ISO 3166-2).

    Examples: California, Ontario, England, etc.
    """
    country = models.ForeignKey(Country, on_delete=models.CASCADE, related_name='subdivisions')
    name = models.CharField(max_length=255)
    code = models.CharField(max_length=10, help_text="ISO 3166-2 subdivision code (without country prefix)")
    subdivision_type = models.CharField(max_length=50, blank=True, help_text="Type of subdivision (state, province, region, etc.)")

    class Meta:
        db_table = 'subdivisions'
        ordering = ['country', 'name']
        unique_together = [['country', 'code']]

    def __str__(self):
        return f"{self.name}, {self.country.code}"


class Locality(BaseModel):
    """
    City/Town reference data.

    Examples: Los Angeles, Toronto, London, etc.
    """
    subdivision = models.ForeignKey(Subdivision, on_delete=models.CASCADE, related_name='localities')
    name = models.CharField(max_length=255)
    latitude = models.DecimalField(max_digits=9, decimal_places=6, null=True, blank=True)
    longitude = models.DecimalField(max_digits=9, decimal_places=6, null=True, blank=True)

    class Meta:
        db_table = 'localities'
        ordering = ['subdivision', 'name']
        verbose_name_plural = 'localities'
        indexes = [
            models.Index(fields=['subdivision', 'name']),
        ]

    def __str__(self):
        return f"{self.name}, {self.subdivision.code}"

    @property
    def full_location(self):
        """Return full location string: City, State, Country"""
        return f"{self.name}, {self.subdivision.name}, {self.subdivision.country.name}"


# Date Precision Tracking

class DatePrecisionMixin(models.Model):
    """
    Mixin for models that need to track date precision.

    Allows tracking whether a date is known to year, month, or day precision.
    This is important for historical records where exact dates may not be known.
    """

    DATE_PRECISION_CHOICES = [
        ('year', 'Year'),
        ('month', 'Month'),
        ('day', 'Day'),
    ]

    class Meta:
        abstract = True

    @classmethod
    def add_date_precision_field(cls, field_name):
        """
        Helper to add a precision field for a date field.

        Usage in subclass:
            opening_date = models.DateField(null=True, blank=True)
            opening_date_precision = models.CharField(...)
        """
        return models.CharField(
            max_length=20,
            choices=cls.DATE_PRECISION_CHOICES,
            default='day',
            help_text=f"Precision level for {field_name}"
        )


# Soft Delete Mixin

class SoftDeleteMixin(models.Model):
    """
    Mixin for soft-deletable models.

    Instead of actually deleting records, mark them as deleted.
    This preserves data integrity and allows for undelete functionality.
    """
    is_deleted = models.BooleanField(default=False, db_index=True)
    deleted_at = models.DateTimeField(null=True, blank=True)
    deleted_by = models.ForeignKey('users.User', on_delete=models.SET_NULL, null=True, blank=True, related_name='%(class)s_deletions')

    class Meta:
        abstract = True

    def soft_delete(self, user=None):
        """Mark this record as deleted"""
        from django.utils import timezone
        self.is_deleted = True
        self.deleted_at = timezone.now()
        if user:
            self.deleted_by = user
        self.save(update_fields=['is_deleted', 'deleted_at', 'deleted_by'])

    def undelete(self):
        """Restore a soft-deleted record"""
        self.is_deleted = False
        self.deleted_at = None
        self.deleted_by = None
        self.save(update_fields=['is_deleted', 'deleted_at', 'deleted_by'])


# Model Managers

class ActiveManager(models.Manager):
    """Manager that filters out soft-deleted records by default"""

    def get_queryset(self):
        return super().get_queryset().filter(is_deleted=False)


class AllObjectsManager(models.Manager):
    """Manager that includes all records, even soft-deleted ones"""

    def get_queryset(self):
        return super().get_queryset()
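SoftDeleteMixin and the two managers are designed to be combined on concrete models; a minimal sketch of that wiring, using a hypothetical Review model that is not part of this commit:

# Sketch: combining the abstract pieces above on a hypothetical model.
from django.db import models
from apps.core.models import BaseModel, SoftDeleteMixin, ActiveManager, AllObjectsManager

class Review(SoftDeleteMixin, BaseModel):
    body = models.TextField()

    objects = ActiveManager()          # default manager hides soft-deleted rows
    all_objects = AllObjectsManager()  # escape hatch for admin/moderation views

# review.soft_delete(user=request.user) sets is_deleted/deleted_at/deleted_by, so
# Review.objects no longer returns the row while Review.all_objects still does;
# review.undelete() reverses the change.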
django-backend/apps/core/sitemaps.py (new file, 119 lines)
@@ -0,0 +1,119 @@
"""
Django Sitemaps for SEO

Generates XML sitemaps for search engine crawlers to discover and index content.
"""

from django.contrib.sitemaps import Sitemap
from django.urls import reverse

from apps.entities.models import Park, Ride, Company, RideModel


class ParkSitemap(Sitemap):
    """Sitemap for theme parks."""

    changefreq = "weekly"
    priority = 0.9

    def items(self):
        """Return all active parks."""
        return Park.objects.filter(is_active=True).order_by('-updated')

    def lastmod(self, obj):
        """Return last modification date."""
        return obj.updated

    def location(self, obj):
        """Return URL for park."""
        return f'/parks/{obj.slug}/'


class RideSitemap(Sitemap):
    """Sitemap for rides."""

    changefreq = "weekly"
    priority = 0.8

    def items(self):
        """Return all active rides."""
        return Ride.objects.filter(
            is_active=True
        ).select_related('park').order_by('-updated')

    def lastmod(self, obj):
        """Return last modification date."""
        return obj.updated

    def location(self, obj):
        """Return URL for ride."""
        return f'/parks/{obj.park.slug}/rides/{obj.slug}/'


class CompanySitemap(Sitemap):
    """Sitemap for companies/manufacturers."""

    changefreq = "monthly"
    priority = 0.6

    def items(self):
        """Return all active companies."""
        return Company.objects.filter(is_active=True).order_by('-updated')

    def lastmod(self, obj):
        """Return last modification date."""
        return obj.updated

    def location(self, obj):
        """Return URL for company."""
        return f'/manufacturers/{obj.slug}/'


class RideModelSitemap(Sitemap):
    """Sitemap for ride models."""

    changefreq = "monthly"
    priority = 0.7

    def items(self):
        """Return all active ride models."""
        return RideModel.objects.filter(
            is_active=True
        ).select_related('manufacturer').order_by('-updated')

    def lastmod(self, obj):
        """Return last modification date."""
        return obj.updated

    def location(self, obj):
        """Return URL for ride model."""
        return f'/models/{obj.slug}/'


class StaticSitemap(Sitemap):
    """Sitemap for static pages."""

    changefreq = "monthly"
    priority = 0.5

    def items(self):
        """Return list of static pages."""
        return ['home', 'about', 'privacy', 'terms']

    def location(self, item):
        """Return URL for static page."""
        if item == 'home':
            return '/'
        return f'/{item}/'

    def changefreq(self, item):
        """Home page changes more frequently."""
        if item == 'home':
            return 'daily'
        return 'monthly'

    def priority(self, item):
        """Home page has higher priority."""
        if item == 'home':
            return 1.0
        return 0.5
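These classes only describe the entries; they still need to be wired into the URLconf. A sketch using the stock django.contrib.sitemaps view (the root urls.py location and the dictionary keys are assumptions):

# urls.py sketch: exposing the sitemaps defined above at /sitemap.xml
from django.contrib.sitemaps.views import sitemap
from django.urls import path

from apps.core.sitemaps import (
    ParkSitemap, RideSitemap, CompanySitemap, RideModelSitemap, StaticSitemap,
)

sitemaps = {
    "parks": ParkSitemap,
    "rides": RideSitemap,
    "companies": CompanySitemap,
    "ride_models": RideModelSitemap,
    "static": StaticSitemap,
}

urlpatterns = [
    path("sitemap.xml", sitemap, {"sitemaps": sitemaps},
         name="django.contrib.sitemaps.views.sitemap"),
]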
django-backend/apps/core/utils/seo.py (new file, 340 lines)
@@ -0,0 +1,340 @@
"""
SEO Meta Tag Generation Utilities

Generates comprehensive meta tags for social sharing (OpenGraph, Twitter Cards),
search engines (structured data), and general SEO optimization.
"""

from typing import Dict, Optional
from django.conf import settings


class SEOTags:
    """Generate comprehensive SEO meta tags for any page."""

    BASE_URL = getattr(settings, 'SITE_URL', 'https://thrillwiki.com')
    DEFAULT_OG_IMAGE = f"{BASE_URL}/static/images/og-default.png"
    TWITTER_HANDLE = "@thrillwiki"
    SITE_NAME = "ThrillWiki"

    @classmethod
    def for_park(cls, park) -> Dict[str, str]:
        """Generate meta tags for a park page.
        Args: park: Park model instance.
        Returns: Dictionary of meta tags for HTML head.
        """
        title = f"{park.name} - Theme Park Database | ThrillWiki"
        description = f"Explore {park.name} in {park.locality.name}, {park.country.name}. View rides, reviews, photos, and history on ThrillWiki."

        og_image = cls._get_og_image_url('park', str(park.id))
        url = f"{cls.BASE_URL}/parks/{park.slug}/"

        return {
            # Basic Meta
            'title': title,
            'description': description,
            'keywords': f"{park.name}, theme park, amusement park, {park.locality.name}, {park.country.name}",
            # OpenGraph (Facebook, LinkedIn, Discord)
            'og:title': park.name, 'og:description': description,
            'og:type': 'website', 'og:url': url,
            'og:image': og_image, 'og:image:width': '1200', 'og:image:height': '630',
            'og:site_name': cls.SITE_NAME, 'og:locale': 'en_US',
            # Twitter Card
            'twitter:card': 'summary_large_image', 'twitter:site': cls.TWITTER_HANDLE,
            'twitter:title': park.name, 'twitter:description': description, 'twitter:image': og_image,
            # Additional
            'canonical': url,
        }

    @classmethod
    def for_ride(cls, ride) -> Dict[str, str]:
        """Generate meta tags for a ride page.
        Args: ride: Ride model instance.
        Returns: Dictionary of meta tags for HTML head.
        """
        title = f"{ride.name} at {ride.park.name} | ThrillWiki"

        # Build description with available details
        description_parts = [
            f"{ride.name} is a {ride.ride_type.name}",
            f"at {ride.park.name}",
        ]

        if ride.opened_year:
            description_parts.append(f"Built in {ride.opened_year}")

        if ride.manufacturer:
            description_parts.append(f"by {ride.manufacturer.name}")

        description = ". ".join(description_parts) + ". Read reviews and view photos."

        og_image = cls._get_og_image_url('ride', str(ride.id))
        url = f"{cls.BASE_URL}/parks/{ride.park.slug}/rides/{ride.slug}/"

        keywords_parts = [
            ride.name,
            ride.ride_type.name,
            ride.park.name,
        ]
        if ride.manufacturer:
            keywords_parts.append(ride.manufacturer.name)
        keywords_parts.extend(['roller coaster', 'theme park ride'])

        return {
            'title': title,
            'description': description,
            'keywords': ', '.join(keywords_parts),
            # OpenGraph
            'og:title': f"{ride.name} at {ride.park.name}", 'og:description': description,
            'og:type': 'article', 'og:url': url,
            'og:image': og_image, 'og:image:width': '1200', 'og:image:height': '630',
            'og:site_name': cls.SITE_NAME, 'og:locale': 'en_US',
            # Twitter
            'twitter:card': 'summary_large_image', 'twitter:site': cls.TWITTER_HANDLE,
            'twitter:title': f"{ride.name} at {ride.park.name}", 'twitter:description': description,
            'twitter:image': og_image,
            'canonical': url,
        }

    @classmethod
    def for_company(cls, company) -> Dict[str, str]:
        """Generate meta tags for a manufacturer/company page.
        Args: company: Company model instance.
        Returns: Dictionary of meta tags for HTML head.
        """
        # Get company type name safely
        company_type_name = company.company_types.first().name if company.company_types.exists() else "Company"

        title = f"{company.name} - {company_type_name} | ThrillWiki"
        description = f"{company.name} is a {company_type_name}. View their rides, history, and contributions to the theme park industry."

        url = f"{cls.BASE_URL}/manufacturers/{company.slug}/"

        return {
            'title': title,
            'description': description,
            'keywords': f"{company.name}, {company_type_name}, theme park manufacturer, ride manufacturer",
            # OpenGraph
            'og:title': company.name, 'og:description': description,
            'og:type': 'website', 'og:url': url,
            'og:image': cls.DEFAULT_OG_IMAGE, 'og:image:width': '1200', 'og:image:height': '630',
            'og:site_name': cls.SITE_NAME, 'og:locale': 'en_US',
            # Twitter
            'twitter:card': 'summary', 'twitter:site': cls.TWITTER_HANDLE,
            'twitter:title': company.name, 'twitter:description': description,
            'twitter:image': cls.DEFAULT_OG_IMAGE,
            'canonical': url,
        }

    @classmethod
    def for_ride_model(cls, model) -> Dict[str, str]:
        """Generate meta tags for a ride model page.
        Args: model: RideModel model instance.
        Returns: Dictionary of meta tags for HTML head.
        """
        title = f"{model.name} by {model.manufacturer.name} | ThrillWiki"
        description = f"The {model.name} is a {model.ride_type.name} model manufactured by {model.manufacturer.name}. View installations and specifications."

        url = f"{cls.BASE_URL}/models/{model.slug}/"

        return {
            'title': title,
            'description': description,
            'keywords': f"{model.name}, {model.manufacturer.name}, {model.ride_type.name}, ride model, theme park",
            # OpenGraph
            'og:title': f"{model.name} by {model.manufacturer.name}", 'og:description': description,
            'og:type': 'website', 'og:url': url,
            'og:image': cls.DEFAULT_OG_IMAGE, 'og:image:width': '1200', 'og:image:height': '630',
            'og:site_name': cls.SITE_NAME, 'og:locale': 'en_US',
            # Twitter
            'twitter:card': 'summary', 'twitter:site': cls.TWITTER_HANDLE,
            'twitter:title': f"{model.name} by {model.manufacturer.name}", 'twitter:description': description,
            'twitter:image': cls.DEFAULT_OG_IMAGE,
            'canonical': url,
        }

    @classmethod
    def for_home(cls) -> Dict[str, str]:
        """Generate meta tags for home page."""
        title = "ThrillWiki - The Ultimate Theme Park & Roller Coaster Database"
        description = "Explore thousands of theme parks and roller coasters worldwide. Read reviews, view photos, track your ride credits, and discover your next adventure."

        return {
            'title': title,
            'description': description,
            'keywords': 'theme parks, roller coasters, amusement parks, ride database, coaster enthusiasts, thrillwiki',
            'og:title': title, 'og:description': description,
            'og:type': 'website', 'og:url': cls.BASE_URL,
            'og:image': cls.DEFAULT_OG_IMAGE, 'og:image:width': '1200', 'og:image:height': '630',
            'og:site_name': cls.SITE_NAME, 'og:locale': 'en_US',
            'twitter:card': 'summary_large_image', 'twitter:site': cls.TWITTER_HANDLE,
            'twitter:title': title, 'twitter:description': description,
            'twitter:image': cls.DEFAULT_OG_IMAGE,
            'canonical': cls.BASE_URL,
        }

    @staticmethod
    def _get_og_image_url(entity_type: str, entity_id: str) -> str:
        """Generate dynamic OG image URL.
        Args: entity_type: Type of entity (park, ride, company, model); entity_id: Entity ID.
        Returns: URL to dynamic OG image endpoint.
        """
        # Use existing ssrOG endpoint
        return f"{SEOTags.BASE_URL}/api/og?type={entity_type}&id={entity_id}"

    @classmethod
    def structured_data_for_park(cls, park) -> dict:
        """Generate JSON-LD structured data for a park.
        Args: park: Park model instance.
        Returns: Dictionary for JSON-LD script tag.
        """
        data = {
            "@context": "https://schema.org",
            "@type": "TouristAttraction",
            "name": park.name,
            "description": f"Theme park in {park.locality.name}, {park.country.name}",
            "url": f"{cls.BASE_URL}/parks/{park.slug}/",
            "image": cls._get_og_image_url('park', str(park.id)),
            "address": {
                "@type": "PostalAddress",
                "addressLocality": park.locality.name,
                "addressCountry": park.country.code,
            },
        }

        # Add geo coordinates if available
        if hasattr(park, 'latitude') and hasattr(park, 'longitude') and park.latitude and park.longitude:
            data["geo"] = {
                "@type": "GeoCoordinates",
                "latitude": str(park.latitude),
                "longitude": str(park.longitude),
            }

        # Add aggregate rating if available
        if hasattr(park, 'review_count') and park.review_count > 0:
            data["aggregateRating"] = {
                "@type": "AggregateRating",
                "ratingValue": str(park.average_rating),
                "reviewCount": park.review_count,
            }

        return data

    @classmethod
    def structured_data_for_ride(cls, ride) -> dict:
        """Generate JSON-LD structured data for a ride.
        Args: ride: Ride model instance.
        Returns: Dictionary for JSON-LD script tag.
        """
        data = {
            "@context": "https://schema.org",
            "@type": "Product",
            "name": ride.name,
            "description": f"{ride.name} is a {ride.ride_type.name} at {ride.park.name}",
            "url": f"{cls.BASE_URL}/parks/{ride.park.slug}/rides/{ride.slug}/",
            "image": cls._get_og_image_url('ride', str(ride.id)),
        }

        # Add manufacturer if available
        if ride.manufacturer:
            data["manufacturer"] = {
                "@type": "Organization",
                "name": ride.manufacturer.name,
            }

        # Add aggregate rating if available
        if hasattr(ride, 'review_count') and ride.review_count > 0:
            data["aggregateRating"] = {
                "@type": "AggregateRating",
                "ratingValue": str(ride.average_rating),
                "reviewCount": ride.review_count,
            }

        return data
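A sketch of how a server-rendered view might consume these helpers; the view, template path, and the template-side lookup filter are assumptions, not part of this commit:

# views.py sketch (hypothetical): pass SEO tags and structured data to a template.
import json
from django.shortcuts import get_object_or_404, render
from apps.core.utils.seo import SEOTags
from apps.entities.models import Park

def park_detail(request, slug):
    park = get_object_or_404(Park, slug=slug)
    context = {
        "park": park,
        "meta": SEOTags.for_park(park),  # plain dict of meta tag values
        "structured_data": json.dumps(SEOTags.structured_data_for_park(park)),
    }
    return render(request, "parks/detail.html", context)

# Template side, roughly:
#   <title>{{ meta.title }}</title>
#   keys containing ':' (og:*, twitter:*) need a small dictionary-lookup filter,
#   since Django template dot-lookup cannot express them directly.
#   <script type="application/ld+json">{{ structured_data|safe }}</script>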
django-backend/apps/entities/__init__.py (new file, 0 lines)
django-backend/apps/entities/admin.py (new file, 715 lines)
@@ -0,0 +1,715 @@
"""
Django Admin configuration for entity models with Unfold theme.
"""
from django.contrib import admin
from django.contrib.gis import admin as gis_admin
from django.db.models import Count, Q
from django.utils.html import format_html
from django.urls import reverse
from django.conf import settings
from unfold.admin import ModelAdmin, TabularInline
from unfold.contrib.filters.admin import RangeDateFilter, RangeNumericFilter, RelatedDropdownFilter, ChoicesDropdownFilter
from unfold.contrib.import_export.forms import ImportForm, ExportForm
from import_export.admin import ImportExportModelAdmin
from import_export import resources, fields
from import_export.widgets import ForeignKeyWidget
from .models import Company, RideModel, Park, Ride, RideNameHistory
from apps.media.admin import PhotoInline


# ============================================================================
# IMPORT/EXPORT RESOURCES
# ============================================================================

class CompanyResource(resources.ModelResource):
    """Import/Export resource for Company model."""

    class Meta:
        model = Company
        fields = (
            'id', 'name', 'slug', 'description', 'location',
            'company_types', 'founded_date', 'founded_date_precision',
            'closed_date', 'closed_date_precision', 'website',
            'logo_image_url', 'created', 'modified'
        )
        export_order = fields


class RideModelResource(resources.ModelResource):
    """Import/Export resource for RideModel model."""

    manufacturer = fields.Field(column_name='manufacturer', attribute='manufacturer', widget=ForeignKeyWidget(Company, 'name'))

    class Meta:
        model = RideModel
        fields = (
            'id', 'name', 'slug', 'description', 'manufacturer',
            'model_type', 'typical_height', 'typical_speed',
            'typical_capacity', 'image_url', 'created', 'modified'
        )
        export_order = fields


class ParkResource(resources.ModelResource):
    """Import/Export resource for Park model."""

    operator = fields.Field(column_name='operator', attribute='operator', widget=ForeignKeyWidget(Company, 'name'))

    class Meta:
        model = Park
        fields = (
            'id', 'name', 'slug', 'description', 'park_type', 'status',
            'latitude', 'longitude', 'operator', 'opening_date',
            'opening_date_precision', 'closing_date', 'closing_date_precision',
            'website', 'banner_image_url', 'logo_image_url',
            'created', 'modified'
        )
        export_order = fields


class RideResource(resources.ModelResource):
    """Import/Export resource for Ride model."""

    park = fields.Field(column_name='park', attribute='park', widget=ForeignKeyWidget(Park, 'name'))
    manufacturer = fields.Field(column_name='manufacturer', attribute='manufacturer', widget=ForeignKeyWidget(Company, 'name'))
    model = fields.Field(column_name='model', attribute='model', widget=ForeignKeyWidget(RideModel, 'name'))

    class Meta:
        model = Ride
        fields = (
            'id', 'name', 'slug', 'description', 'park', 'ride_category',
            'ride_type', 'status', 'manufacturer', 'model', 'height',
            'speed', 'length', 'duration', 'inversions', 'capacity',
            'opening_date', 'opening_date_precision', 'closing_date',
            'closing_date_precision', 'image_url', 'created', 'modified'
        )
        export_order = fields


# ============================================================================
# INLINE ADMIN CLASSES
# ============================================================================

class RideInline(TabularInline):
    """Inline for Rides within a Park."""

    model = Ride
    extra = 0
    fields = ['name', 'ride_category', 'status', 'manufacturer', 'opening_date']
    readonly_fields = ['name']
    show_change_link = True
    classes = ['collapse']

    def has_add_permission(self, request, obj=None):
        return False


class CompanyParksInline(TabularInline):
    """Inline for Parks operated by a Company."""

    model = Park
    fk_name = 'operator'
    extra = 0
    fields = ['name', 'park_type', 'status', 'ride_count', 'opening_date']
    readonly_fields = ['name', 'ride_count']
    show_change_link = True
    classes = ['collapse']

    def has_add_permission(self, request, obj=None):
        return False


class RideModelInstallationsInline(TabularInline):
    """Inline for Ride installations of a RideModel."""

    model = Ride
    fk_name = 'model'
    extra = 0
    fields = ['name', 'park', 'status', 'opening_date']
    readonly_fields = ['name', 'park']
    show_change_link = True
    classes = ['collapse']

    def has_add_permission(self, request, obj=None):
        return False


class RideNameHistoryInline(TabularInline):
    """Inline for Ride Name History within a Ride."""

    model = RideNameHistory
    extra = 1
    fields = ['former_name', 'from_year', 'to_year', 'date_changed', 'reason', 'order_index']
    classes = ['collapse']


# ============================================================================
# MAIN ADMIN CLASSES
# ============================================================================

@admin.register(Company)
class CompanyAdmin(ModelAdmin, ImportExportModelAdmin):
    """Enhanced admin interface for Company model."""

    resource_class = CompanyResource
    import_form_class = ImportForm
    export_form_class = ExportForm

    list_display = [
        'name_with_icon', 'location', 'company_types_display', 'park_count',
        'ride_count', 'founded_date', 'status_indicator', 'created'
    ]
    list_filter = [
        ('company_types', ChoicesDropdownFilter),
        ('founded_date', RangeDateFilter),
        ('closed_date', RangeDateFilter),
    ]
    search_fields = ['name', 'slug', 'description', 'location']
    readonly_fields = ['id', 'created', 'modified', 'park_count', 'ride_count', 'slug']
    prepopulated_fields = {}  # Slug is auto-generated via lifecycle hook
    autocomplete_fields = []
    inlines = [CompanyParksInline, PhotoInline]

    list_per_page = 50
    list_max_show_all = 200

    fieldsets = (
        ('Basic Information', {'fields': ('name', 'slug', 'description', 'company_types')}),
        ('Location & Contact', {'fields': ('location', 'website')}),
        ('History', {'fields': ('founded_date', 'founded_date_precision', 'closed_date', 'closed_date_precision')}),
        ('Media', {'fields': ('logo_image_id', 'logo_image_url'), 'classes': ['collapse']}),
        ('Statistics', {'fields': ('park_count', 'ride_count'), 'classes': ['collapse']}),
        ('System Information', {'fields': ('id', 'created', 'modified'), 'classes': ['collapse']}),
    )

    def name_with_icon(self, obj):
        """Display name with company type icon."""
        icons = {
            'manufacturer': '🏭',
            'operator': '🎡',
            'designer': '✏️',
        }
        icon = '🏢'  # Default company icon
        if obj.company_types:
            for ctype in obj.company_types:
                if ctype in icons:
                    icon = icons[ctype]
                    break
        return format_html('{} {}', icon, obj.name)
    name_with_icon.short_description = 'Company'
    name_with_icon.admin_order_field = 'name'

    def company_types_display(self, obj):
        """Display company types as badges."""
        if not obj.company_types:
            return '-'
        badges = []
        for ctype in obj.company_types:
            color = {
                'manufacturer': 'blue',
                'operator': 'green',
                'designer': 'purple',
            }.get(ctype, 'gray')
            badges.append(
                f'<span style="background-color: {color}; color: white; '
                f'padding: 2px 8px; border-radius: 4px; font-size: 11px; '
                f'margin-right: 4px;">{ctype.upper()}</span>'
            )
        return format_html(' '.join(badges))
    company_types_display.short_description = 'Types'

    def status_indicator(self, obj):
        """Visual status indicator."""
        if obj.closed_date:
            return format_html(
                '<span style="color: red;">●</span> Closed'
            )
        return format_html(
            '<span style="color: green;">●</span> Active'
        )
    status_indicator.short_description = 'Status'

    actions = ['export_admin_action']


@admin.register(RideModel)
class RideModelAdmin(ModelAdmin, ImportExportModelAdmin):
    """Enhanced admin interface for RideModel model."""

    resource_class = RideModelResource
    import_form_class = ImportForm
    export_form_class = ExportForm

    list_display = [
        'name_with_type', 'manufacturer', 'model_type', 'typical_specs',
        'installation_count', 'created'
    ]
    list_filter = [
        ('model_type', ChoicesDropdownFilter),
        ('manufacturer', RelatedDropdownFilter),
        ('typical_height', RangeNumericFilter),
|
||||
('typical_speed', RangeNumericFilter),
|
||||
]
|
||||
search_fields = ['name', 'slug', 'description', 'manufacturer__name']
|
||||
readonly_fields = ['id', 'created', 'modified', 'installation_count', 'slug']
|
||||
prepopulated_fields = {}
|
||||
autocomplete_fields = ['manufacturer']
|
||||
inlines = [RideModelInstallationsInline, PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('name', 'slug', 'description', 'manufacturer', 'model_type')
|
||||
}),
|
||||
('Typical Specifications', {
|
||||
'fields': (
|
||||
'typical_height', 'typical_speed', 'typical_capacity'
|
||||
),
|
||||
'description': 'Standard specifications for this ride model'
|
||||
}),
|
||||
('Media', {
|
||||
'fields': ('image_id', 'image_url'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('installation_count',),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_type(self, obj):
|
||||
"""Display name with model type icon."""
|
||||
icons = {
|
||||
'roller_coaster': '🎢',
|
||||
'water_ride': '🌊',
|
||||
'flat_ride': '🎡',
|
||||
'dark_ride': '🎭',
|
||||
'transport': '🚂',
|
||||
}
|
||||
icon = icons.get(obj.model_type, '🎪')
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_type.short_description = 'Model Name'
|
||||
name_with_type.admin_order_field = 'name'
|
||||
|
||||
def typical_specs(self, obj):
|
||||
"""Display typical specifications."""
|
||||
specs = []
|
||||
if obj.typical_height:
|
||||
specs.append(f'H: {obj.typical_height}ft')
|
||||
if obj.typical_speed:
|
||||
specs.append(f'S: {obj.typical_speed}mph')
|
||||
if obj.typical_capacity:
|
||||
specs.append(f'C: {obj.typical_capacity}')
|
||||
return ' | '.join(specs) if specs else '-'
|
||||
typical_specs.short_description = 'Typical Specs'
|
||||
|
||||
actions = ['export_admin_action']
|
||||
|
||||
|
||||
@admin.register(Park)
|
||||
class ParkAdmin(ModelAdmin, ImportExportModelAdmin):
|
||||
"""Enhanced admin interface for Park model with geographic features."""
|
||||
|
||||
resource_class = ParkResource
|
||||
import_form_class = ImportForm
|
||||
export_form_class = ExportForm
|
||||
|
||||
list_display = [
|
||||
'name_with_icon',
|
||||
'location_display',
|
||||
'park_type',
|
||||
'status_badge',
|
||||
'ride_count',
|
||||
'coaster_count',
|
||||
'opening_date',
|
||||
'operator'
|
||||
]
|
||||
list_filter = [
|
||||
('park_type', ChoicesDropdownFilter),
|
||||
('status', ChoicesDropdownFilter),
|
||||
('operator', RelatedDropdownFilter),
|
||||
('opening_date', RangeDateFilter),
|
||||
('closing_date', RangeDateFilter),
|
||||
]
|
||||
search_fields = ['name', 'slug', 'description', 'location']
|
||||
readonly_fields = [
|
||||
'id', 'created', 'modified', 'ride_count', 'coaster_count',
|
||||
'slug', 'coordinates_display'
|
||||
]
|
||||
prepopulated_fields = {}
|
||||
autocomplete_fields = ['operator']
|
||||
inlines = [RideInline, PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
|
||||
# Use GeoDjango admin for PostGIS mode
|
||||
if hasattr(settings, 'DATABASES') and 'postgis' in settings.DATABASES['default'].get('ENGINE', ''):
|
||||
change_form_template = 'gis/admin/change_form.html'
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('name', 'slug', 'description', 'park_type', 'status')
|
||||
}),
|
||||
('Geographic Location', {
|
||||
'fields': ('location', 'latitude', 'longitude', 'coordinates_display'),
|
||||
'description': 'Enter latitude and longitude for the park location'
|
||||
}),
|
||||
('Dates', {
|
||||
'fields': (
|
||||
'opening_date', 'opening_date_precision',
|
||||
'closing_date', 'closing_date_precision'
|
||||
)
|
||||
}),
|
||||
('Operator', {
|
||||
'fields': ('operator',)
|
||||
}),
|
||||
('Media & Web', {
|
||||
'fields': (
|
||||
'banner_image_id', 'banner_image_url',
|
||||
'logo_image_id', 'logo_image_url',
|
||||
'website'
|
||||
),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('ride_count', 'coaster_count'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Custom Data', {
|
||||
'fields': ('custom_fields',),
|
||||
'classes': ['collapse'],
|
||||
'description': 'Additional custom data in JSON format'
|
||||
}),
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_icon(self, obj):
|
||||
"""Display name with park type icon."""
|
||||
icons = {
|
||||
'theme_park': '🎡',
|
||||
'amusement_park': '🎢',
|
||||
'water_park': '🌊',
|
||||
'indoor_park': '🏢',
|
||||
'fairground': '🎪',
|
||||
}
|
||||
icon = icons.get(obj.park_type, '🎠')
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_icon.short_description = 'Park Name'
|
||||
name_with_icon.admin_order_field = 'name'
|
||||
|
||||
def location_display(self, obj):
|
||||
"""Display location with coordinates."""
|
||||
if obj.location:
|
||||
coords = obj.coordinates
|
||||
if coords:
|
||||
                return format_html(
                    '{}<br><small style="color: gray;">({}, {})</small>',
                    obj.location, f'{coords[0]:.4f}', f'{coords[1]:.4f}'
                )
|
||||
return obj.location
|
||||
return '-'
|
||||
location_display.short_description = 'Location'
|
||||
|
||||
def coordinates_display(self, obj):
|
||||
"""Read-only display of coordinates."""
|
||||
coords = obj.coordinates
|
||||
if coords:
|
||||
return f"Longitude: {coords[0]:.6f}, Latitude: {coords[1]:.6f}"
|
||||
return "No coordinates set"
|
||||
coordinates_display.short_description = 'Current Coordinates'
|
||||
|
||||
def status_badge(self, obj):
|
||||
"""Display status as colored badge."""
|
||||
colors = {
|
||||
'operating': 'green',
|
||||
'closed_temporarily': 'orange',
|
||||
'closed_permanently': 'red',
|
||||
'under_construction': 'blue',
|
||||
'planned': 'purple',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; '
|
||||
'padding: 3px 10px; border-radius: 12px; font-size: 11px;">'
|
||||
'{}</span>',
|
||||
color, obj.get_status_display()
|
||||
)
|
||||
status_badge.short_description = 'Status'
|
||||
status_badge.admin_order_field = 'status'
|
||||
|
||||
actions = ['export_admin_action', 'activate_parks', 'close_parks']
|
||||
|
||||
def activate_parks(self, request, queryset):
|
||||
"""Bulk action to activate parks."""
|
||||
updated = queryset.update(status='operating')
|
||||
self.message_user(request, f'{updated} park(s) marked as operating.')
|
||||
activate_parks.short_description = 'Mark selected parks as operating'
|
||||
|
||||
def close_parks(self, request, queryset):
|
||||
"""Bulk action to close parks temporarily."""
|
||||
updated = queryset.update(status='closed_temporarily')
|
||||
self.message_user(request, f'{updated} park(s) marked as temporarily closed.')
|
||||
close_parks.short_description = 'Mark selected parks as temporarily closed'
|
||||
|
||||
|
||||
@admin.register(Ride)
|
||||
class RideAdmin(ModelAdmin, ImportExportModelAdmin):
|
||||
"""Enhanced admin interface for Ride model."""
|
||||
|
||||
resource_class = RideResource
|
||||
import_form_class = ImportForm
|
||||
export_form_class = ExportForm
|
||||
|
||||
list_display = [
|
||||
'name_with_icon',
|
||||
'park',
|
||||
'ride_category',
|
||||
'status_badge',
|
||||
'manufacturer',
|
||||
'stats_display',
|
||||
'opening_date',
|
||||
'coaster_badge'
|
||||
]
|
||||
list_filter = [
|
||||
('ride_category', ChoicesDropdownFilter),
|
||||
('status', ChoicesDropdownFilter),
|
||||
('is_coaster', admin.BooleanFieldListFilter),
|
||||
('park', RelatedDropdownFilter),
|
||||
('manufacturer', RelatedDropdownFilter),
|
||||
('opening_date', RangeDateFilter),
|
||||
('height', RangeNumericFilter),
|
||||
('speed', RangeNumericFilter),
|
||||
]
|
||||
search_fields = [
|
||||
'name', 'slug', 'description',
|
||||
'park__name', 'manufacturer__name'
|
||||
]
|
||||
readonly_fields = ['id', 'created', 'modified', 'is_coaster', 'slug']
|
||||
prepopulated_fields = {}
|
||||
autocomplete_fields = ['park', 'manufacturer', 'model']
|
||||
inlines = [RideNameHistoryInline, PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('name', 'slug', 'description', 'park')
|
||||
}),
|
||||
('Classification', {
|
||||
'fields': ('ride_category', 'ride_type', 'is_coaster', 'status')
|
||||
}),
|
||||
('Dates', {
|
||||
'fields': (
|
||||
'opening_date', 'opening_date_precision',
|
||||
'closing_date', 'closing_date_precision'
|
||||
)
|
||||
}),
|
||||
('Manufacturer & Model', {
|
||||
'fields': ('manufacturer', 'model')
|
||||
}),
|
||||
('Ride Statistics', {
|
||||
'fields': (
|
||||
'height', 'speed', 'length',
|
||||
'duration', 'inversions', 'capacity'
|
||||
),
|
||||
'description': 'Technical specifications and statistics'
|
||||
}),
|
||||
('Media', {
|
||||
'fields': ('image_id', 'image_url'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Custom Data', {
|
||||
'fields': ('custom_fields',),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_icon(self, obj):
|
||||
"""Display name with category icon."""
|
||||
icons = {
|
||||
'roller_coaster': '🎢',
|
||||
'water_ride': '🌊',
|
||||
'dark_ride': '🎭',
|
||||
'flat_ride': '🎡',
|
||||
'transport': '🚂',
|
||||
'show': '🎪',
|
||||
}
|
||||
icon = icons.get(obj.ride_category, '🎠')
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_icon.short_description = 'Ride Name'
|
||||
name_with_icon.admin_order_field = 'name'
|
||||
|
||||
def stats_display(self, obj):
|
||||
"""Display key statistics."""
|
||||
stats = []
|
||||
if obj.height:
|
||||
stats.append(f'H: {obj.height}ft')
|
||||
if obj.speed:
|
||||
stats.append(f'S: {obj.speed}mph')
|
||||
if obj.inversions:
|
||||
stats.append(f'🔄 {obj.inversions}')
|
||||
return ' | '.join(stats) if stats else '-'
|
||||
stats_display.short_description = 'Key Stats'
|
||||
|
||||
def coaster_badge(self, obj):
|
||||
"""Display coaster indicator."""
|
||||
if obj.is_coaster:
|
||||
return format_html(
|
||||
'<span style="background-color: #ff6b6b; color: white; '
|
||||
'padding: 2px 8px; border-radius: 10px; font-size: 10px;">'
|
||||
'🎢 COASTER</span>'
|
||||
)
|
||||
return ''
|
||||
coaster_badge.short_description = 'Type'
|
||||
|
||||
def status_badge(self, obj):
|
||||
"""Display status as colored badge."""
|
||||
colors = {
|
||||
'operating': 'green',
|
||||
'closed_temporarily': 'orange',
|
||||
'closed_permanently': 'red',
|
||||
'under_construction': 'blue',
|
||||
'sbno': 'gray',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; '
|
||||
'padding: 3px 10px; border-radius: 12px; font-size: 11px;">'
|
||||
'{}</span>',
|
||||
color, obj.get_status_display()
|
||||
)
|
||||
status_badge.short_description = 'Status'
|
||||
status_badge.admin_order_field = 'status'
|
||||
|
||||
actions = ['export_admin_action', 'activate_rides', 'close_rides']
|
||||
|
||||
def activate_rides(self, request, queryset):
|
||||
"""Bulk action to activate rides."""
|
||||
updated = queryset.update(status='operating')
|
||||
self.message_user(request, f'{updated} ride(s) marked as operating.')
|
||||
activate_rides.short_description = 'Mark selected rides as operating'
|
||||
|
||||
def close_rides(self, request, queryset):
|
||||
"""Bulk action to close rides temporarily."""
|
||||
updated = queryset.update(status='closed_temporarily')
|
||||
self.message_user(request, f'{updated} ride(s) marked as temporarily closed.')
|
||||
close_rides.short_description = 'Mark selected rides as temporarily closed'
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# DASHBOARD CALLBACK
|
||||
# ============================================================================
|
||||
|
||||
def dashboard_callback(request, context):
|
||||
"""
|
||||
Callback function for Unfold dashboard.
|
||||
Provides statistics and overview data.
|
||||
"""
|
||||
# Entity counts
|
||||
total_parks = Park.objects.count()
|
||||
total_rides = Ride.objects.count()
|
||||
total_companies = Company.objects.count()
|
||||
total_models = RideModel.objects.count()
|
||||
|
||||
# Operating counts
|
||||
operating_parks = Park.objects.filter(status='operating').count()
|
||||
operating_rides = Ride.objects.filter(status='operating').count()
|
||||
|
||||
# Coaster count
|
||||
total_coasters = Ride.objects.filter(is_coaster=True).count()
|
||||
|
||||
# Recent additions (last 30 days)
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
thirty_days_ago = timezone.now() - timedelta(days=30)
|
||||
|
||||
recent_parks = Park.objects.filter(created__gte=thirty_days_ago).count()
|
||||
recent_rides = Ride.objects.filter(created__gte=thirty_days_ago).count()
|
||||
|
||||
# Top manufacturers by ride count
|
||||
top_manufacturers = Company.objects.filter(
|
||||
company_types__contains=['manufacturer']
|
||||
).annotate(
|
||||
ride_count_actual=Count('manufactured_rides')
|
||||
).order_by('-ride_count_actual')[:5]
|
||||
|
||||
# Parks by type
|
||||
parks_by_type = Park.objects.values('park_type').annotate(
|
||||
count=Count('id')
|
||||
).order_by('-count')
|
||||
|
||||
context.update({
|
||||
'total_parks': total_parks,
|
||||
'total_rides': total_rides,
|
||||
'total_companies': total_companies,
|
||||
'total_models': total_models,
|
||||
'operating_parks': operating_parks,
|
||||
'operating_rides': operating_rides,
|
||||
'total_coasters': total_coasters,
|
||||
'recent_parks': recent_parks,
|
||||
'recent_rides': recent_rides,
|
||||
'top_manufacturers': top_manufacturers,
|
||||
'parks_by_type': parks_by_type,
|
||||
})
|
||||
|
||||
return context
|
||||
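For orientation, a minimal sketch of how this dashboard_callback would typically be wired up through django-unfold's settings; the dotted path and SITE_TITLE value are assumptions about this project's layout, not taken from the diff.

# settings.py (sketch, assuming django-unfold): point the dashboard at the callback above.
UNFOLD = {
    "SITE_TITLE": "ThrillTrack Admin",  # illustrative value
    "DASHBOARD_CALLBACK": "apps.entities.admin.dashboard_callback",  # assumed module path
}

The callback receives the request plus the default template context and must return the updated context, so a custom dashboard template can read keys such as total_parks or iterate over top_manufacturers.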
15
django-backend/apps/entities/apps.py
Normal file
15
django-backend/apps/entities/apps.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""
|
||||
Entities app configuration.
|
||||
"""
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class EntitiesConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.entities'
|
||||
verbose_name = 'Entities'
|
||||
|
||||
def ready(self):
|
||||
"""Import signal handlers when app is ready."""
|
||||
import apps.entities.signals # noqa
|
||||
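apps/entities/signals.py itself is not part of this diff; purely to illustrate the pattern that the ready() import above enables, a hypothetical handler that keeps the denormalized Park counters in sync might look like the following (receiver names and logic are assumptions, not the project's actual signals).

# apps/entities/signals.py -- hypothetical sketch, not the file from this commit.
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver

from apps.entities.models import Park, Ride


def _refresh_park_counts(park_id):
    """Recompute the denormalized ride/coaster counters for a single park."""
    rides = Ride.objects.filter(park_id=park_id)
    Park.objects.filter(pk=park_id).update(
        ride_count=rides.count(),
        coaster_count=rides.filter(is_coaster=True).count(),
    )


@receiver(post_save, sender=Ride)
@receiver(post_delete, sender=Ride)
def update_park_counts(sender, instance, **kwargs):
    _refresh_park_counts(instance.park_id)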
418
django-backend/apps/entities/filters.py
Normal file
418
django-backend/apps/entities/filters.py
Normal file
@@ -0,0 +1,418 @@
|
||||
"""
|
||||
Filter classes for advanced entity filtering.
|
||||
|
||||
Provides reusable filter logic for complex queries.
|
||||
"""
|
||||
from typing import Optional, Any, Dict
|
||||
from datetime import date
|
||||
from django.db.models import QuerySet, Q
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
# Check if using PostGIS for location-based filtering
|
||||
_using_postgis = 'postgis' in settings.DATABASES['default']['ENGINE']
|
||||
|
||||
if _using_postgis:
|
||||
from django.contrib.gis.geos import Point
|
||||
from django.contrib.gis.measure import D
|
||||
|
||||
|
||||
class BaseEntityFilter:
|
||||
"""Base filter class with common filtering methods."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_date_range(
|
||||
queryset: QuerySet,
|
||||
field_name: str,
|
||||
start_date: Optional[date] = None,
|
||||
end_date: Optional[date] = None
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter by date range.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
field_name: Name of the date field
|
||||
start_date: Start of date range (inclusive)
|
||||
end_date: End of date range (inclusive)
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
if start_date:
|
||||
queryset = queryset.filter(**{f"{field_name}__gte": start_date})
|
||||
|
||||
if end_date:
|
||||
queryset = queryset.filter(**{f"{field_name}__lte": end_date})
|
||||
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def filter_by_status(
|
||||
queryset: QuerySet,
|
||||
status: Optional[str] = None,
|
||||
exclude_status: Optional[list] = None
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter by status.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
status: Single status to filter by
|
||||
exclude_status: List of statuses to exclude
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
if status:
|
||||
queryset = queryset.filter(status=status)
|
||||
|
||||
if exclude_status:
|
||||
queryset = queryset.exclude(status__in=exclude_status)
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class CompanyFilter(BaseEntityFilter):
|
||||
"""Filter class for Company entities."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_types(
|
||||
queryset: QuerySet,
|
||||
company_types: Optional[list] = None
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter companies by type.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
company_types: List of company types to filter by
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
if company_types:
|
||||
# Since company_types is a JSONField containing a list,
|
||||
# we need to check if any of the requested types are in the field
|
||||
q = Q()
|
||||
for company_type in company_types:
|
||||
q |= Q(company_types__contains=[company_type])
|
||||
queryset = queryset.filter(q)
|
||||
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all company filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Company types
|
||||
if filters.get('company_types'):
|
||||
queryset = CompanyFilter.filter_by_types(
|
||||
queryset,
|
||||
company_types=filters['company_types']
|
||||
)
|
||||
|
||||
# Founded date range
|
||||
queryset = CompanyFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'founded_date',
|
||||
start_date=filters.get('founded_after'),
|
||||
end_date=filters.get('founded_before')
|
||||
)
|
||||
|
||||
# Closed date range
|
||||
queryset = CompanyFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'closed_date',
|
||||
start_date=filters.get('closed_after'),
|
||||
end_date=filters.get('closed_before')
|
||||
)
|
||||
|
||||
# Location
|
||||
if filters.get('location_id'):
|
||||
queryset = queryset.filter(location_id=filters['location_id'])
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class RideModelFilter(BaseEntityFilter):
|
||||
"""Filter class for RideModel entities."""
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all ride model filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Manufacturer
|
||||
if filters.get('manufacturer_id'):
|
||||
queryset = queryset.filter(manufacturer_id=filters['manufacturer_id'])
|
||||
|
||||
# Model type
|
||||
if filters.get('model_type'):
|
||||
queryset = queryset.filter(model_type=filters['model_type'])
|
||||
|
||||
# Height range
|
||||
if filters.get('min_height'):
|
||||
queryset = queryset.filter(typical_height__gte=filters['min_height'])
|
||||
|
||||
if filters.get('max_height'):
|
||||
queryset = queryset.filter(typical_height__lte=filters['max_height'])
|
||||
|
||||
# Speed range
|
||||
if filters.get('min_speed'):
|
||||
queryset = queryset.filter(typical_speed__gte=filters['min_speed'])
|
||||
|
||||
if filters.get('max_speed'):
|
||||
queryset = queryset.filter(typical_speed__lte=filters['max_speed'])
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class ParkFilter(BaseEntityFilter):
|
||||
"""Filter class for Park entities."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_location(
|
||||
queryset: QuerySet,
|
||||
longitude: float,
|
||||
latitude: float,
|
||||
radius_km: float
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter parks by proximity to a location (PostGIS only).
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
longitude: Longitude coordinate
|
||||
latitude: Latitude coordinate
|
||||
radius_km: Search radius in kilometers
|
||||
|
||||
Returns:
|
||||
Filtered queryset ordered by distance
|
||||
"""
|
||||
if not _using_postgis:
|
||||
# Fallback: No spatial filtering in SQLite
|
||||
return queryset
|
||||
|
||||
point = Point(longitude, latitude, srid=4326)
|
||||
|
||||
# Filter by distance and annotate with distance
|
||||
queryset = queryset.filter(
|
||||
location_point__distance_lte=(point, D(km=radius_km))
|
||||
)
|
||||
|
||||
# This will be ordered by distance in the search service
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all park filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Status
|
||||
queryset = ParkFilter.filter_by_status(
|
||||
queryset,
|
||||
status=filters.get('status'),
|
||||
exclude_status=filters.get('exclude_status')
|
||||
)
|
||||
|
||||
# Park type
|
||||
if filters.get('park_type'):
|
||||
queryset = queryset.filter(park_type=filters['park_type'])
|
||||
|
||||
# Operator
|
||||
if filters.get('operator_id'):
|
||||
queryset = queryset.filter(operator_id=filters['operator_id'])
|
||||
|
||||
# Opening date range
|
||||
queryset = ParkFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'opening_date',
|
||||
start_date=filters.get('opening_after'),
|
||||
end_date=filters.get('opening_before')
|
||||
)
|
||||
|
||||
# Closing date range
|
||||
queryset = ParkFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'closing_date',
|
||||
start_date=filters.get('closing_after'),
|
||||
end_date=filters.get('closing_before')
|
||||
)
|
||||
|
||||
# Location-based filtering (PostGIS only)
|
||||
if _using_postgis and filters.get('location') and filters.get('radius'):
|
||||
longitude, latitude = filters['location']
|
||||
queryset = ParkFilter.filter_by_location(
|
||||
queryset,
|
||||
longitude=longitude,
|
||||
latitude=latitude,
|
||||
radius_km=filters['radius']
|
||||
)
|
||||
|
||||
# Location (locality)
|
||||
if filters.get('location_id'):
|
||||
queryset = queryset.filter(location_id=filters['location_id'])
|
||||
|
||||
# Ride counts
|
||||
if filters.get('min_ride_count'):
|
||||
queryset = queryset.filter(ride_count__gte=filters['min_ride_count'])
|
||||
|
||||
if filters.get('min_coaster_count'):
|
||||
queryset = queryset.filter(coaster_count__gte=filters['min_coaster_count'])
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class RideFilter(BaseEntityFilter):
|
||||
"""Filter class for Ride entities."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_statistics(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter rides by statistical attributes (height, speed, length, etc.).
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Height range
|
||||
if filters.get('min_height'):
|
||||
queryset = queryset.filter(height__gte=filters['min_height'])
|
||||
|
||||
if filters.get('max_height'):
|
||||
queryset = queryset.filter(height__lte=filters['max_height'])
|
||||
|
||||
# Speed range
|
||||
if filters.get('min_speed'):
|
||||
queryset = queryset.filter(speed__gte=filters['min_speed'])
|
||||
|
||||
if filters.get('max_speed'):
|
||||
queryset = queryset.filter(speed__lte=filters['max_speed'])
|
||||
|
||||
# Length range
|
||||
if filters.get('min_length'):
|
||||
queryset = queryset.filter(length__gte=filters['min_length'])
|
||||
|
||||
if filters.get('max_length'):
|
||||
queryset = queryset.filter(length__lte=filters['max_length'])
|
||||
|
||||
# Duration range
|
||||
if filters.get('min_duration'):
|
||||
queryset = queryset.filter(duration__gte=filters['min_duration'])
|
||||
|
||||
if filters.get('max_duration'):
|
||||
queryset = queryset.filter(duration__lte=filters['max_duration'])
|
||||
|
||||
# Inversions
|
||||
if filters.get('min_inversions') is not None:
|
||||
queryset = queryset.filter(inversions__gte=filters['min_inversions'])
|
||||
|
||||
if filters.get('max_inversions') is not None:
|
||||
queryset = queryset.filter(inversions__lte=filters['max_inversions'])
|
||||
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all ride filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Park
|
||||
if filters.get('park_id'):
|
||||
queryset = queryset.filter(park_id=filters['park_id'])
|
||||
|
||||
# Manufacturer
|
||||
if filters.get('manufacturer_id'):
|
||||
queryset = queryset.filter(manufacturer_id=filters['manufacturer_id'])
|
||||
|
||||
# Model
|
||||
if filters.get('model_id'):
|
||||
queryset = queryset.filter(model_id=filters['model_id'])
|
||||
|
||||
# Status
|
||||
queryset = RideFilter.filter_by_status(
|
||||
queryset,
|
||||
status=filters.get('status'),
|
||||
exclude_status=filters.get('exclude_status')
|
||||
)
|
||||
|
||||
# Ride category
|
||||
if filters.get('ride_category'):
|
||||
queryset = queryset.filter(ride_category=filters['ride_category'])
|
||||
|
||||
# Ride type
|
||||
if filters.get('ride_type'):
|
||||
queryset = queryset.filter(ride_type__icontains=filters['ride_type'])
|
||||
|
||||
# Is coaster
|
||||
if filters.get('is_coaster') is not None:
|
||||
queryset = queryset.filter(is_coaster=filters['is_coaster'])
|
||||
|
||||
# Opening date range
|
||||
queryset = RideFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'opening_date',
|
||||
start_date=filters.get('opening_after'),
|
||||
end_date=filters.get('opening_before')
|
||||
)
|
||||
|
||||
# Closing date range
|
||||
queryset = RideFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'closing_date',
|
||||
start_date=filters.get('closing_after'),
|
||||
end_date=filters.get('closing_before')
|
||||
)
|
||||
|
||||
# Statistical filters
|
||||
queryset = RideFilter.filter_by_statistics(queryset, filters)
|
||||
|
||||
return queryset
|
||||
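Taken together, a rough usage sketch for the filter classes above; the querysets and filter keys come straight from the methods defined in this file, while the function names and example values are illustrative only.

# Example callers for the filter classes above (illustrative, not part of the commit).
from apps.entities.filters import ParkFilter, RideFilter
from apps.entities.models import Park, Ride


def operating_coasters(min_height_ft=200):
    """Operating coasters at least min_height_ft tall, newest first."""
    qs = RideFilter.apply_filters(
        Ride.objects.select_related('park', 'manufacturer'),
        {'status': 'operating', 'is_coaster': True, 'min_height': min_height_ft},
    )
    return qs.order_by('-opening_date')


def parks_near(longitude, latitude, radius_km=50):
    """Operating parks near a point; the spatial narrowing only applies under PostGIS."""
    return ParkFilter.apply_filters(
        Park.objects.all(),
        {'status': 'operating', 'location': (longitude, latitude), 'radius': radius_km},
    )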
846
django-backend/apps/entities/migrations/0001_initial.py
Normal file
846
django-backend/apps/entities/migrations/0001_initial.py
Normal file
@@ -0,0 +1,846 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 16:41
|
||||
|
||||
import dirtyfields.dirtyfields
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("core", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Company",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
help_text="Official company name",
|
||||
max_length=255,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
help_text="URL-friendly identifier", max_length=255, unique=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Company description and history"
|
||||
),
|
||||
),
|
||||
(
|
||||
"company_types",
|
||||
models.JSONField(
|
||||
default=list,
|
||||
help_text="List of company types (manufacturer, operator, etc.)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"founded_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Company founding date", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"founded_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of founded date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closed_date",
|
||||
models.DateField(
|
||||
blank=True,
|
||||
help_text="Company closure date (if applicable)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closed_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of closed date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"website",
|
||||
models.URLField(blank=True, help_text="Official company website"),
|
||||
),
|
||||
(
|
||||
"logo_image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for company logo",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"logo_image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for company logo"
|
||||
),
|
||||
),
|
||||
(
|
||||
"park_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of parks operated (for operators)"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_count",
|
||||
models.IntegerField(
|
||||
default=0,
|
||||
help_text="Number of rides manufactured (for manufacturers)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"location",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Company headquarters location",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="companies",
|
||||
to="core.locality",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Company",
|
||||
"verbose_name_plural": "Companies",
|
||||
"ordering": ["name"],
|
||||
},
|
||||
bases=(
|
||||
dirtyfields.dirtyfields.DirtyFieldsMixin,
|
||||
django_lifecycle.mixins.LifecycleModelMixin,
|
||||
models.Model,
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Park",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
db_index=True, help_text="Official park name", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
help_text="URL-friendly identifier", max_length=255, unique=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Park description and history"
|
||||
),
|
||||
),
|
||||
(
|
||||
"park_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("theme_park", "Theme Park"),
|
||||
("amusement_park", "Amusement Park"),
|
||||
("water_park", "Water Park"),
|
||||
(
|
||||
"family_entertainment_center",
|
||||
"Family Entertainment Center",
|
||||
),
|
||||
("traveling_park", "Traveling Park"),
|
||||
("zoo", "Zoo"),
|
||||
("aquarium", "Aquarium"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of park",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("operating", "Operating"),
|
||||
("closed", "Closed"),
|
||||
("sbno", "Standing But Not Operating"),
|
||||
("under_construction", "Under Construction"),
|
||||
("planned", "Planned"),
|
||||
],
|
||||
db_index=True,
|
||||
default="operating",
|
||||
help_text="Current operational status",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date",
|
||||
models.DateField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Park opening date",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of opening date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Park closing date (if closed)", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of closing date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"latitude",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=7,
|
||||
help_text="Latitude coordinate",
|
||||
max_digits=10,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"longitude",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=7,
|
||||
help_text="Longitude coordinate",
|
||||
max_digits=10,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"website",
|
||||
models.URLField(blank=True, help_text="Official park website"),
|
||||
),
|
||||
(
|
||||
"banner_image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for park banner",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"banner_image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for park banner"
|
||||
),
|
||||
),
|
||||
(
|
||||
"logo_image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for park logo",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"logo_image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for park logo"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_count",
|
||||
models.IntegerField(default=0, help_text="Total number of rides"),
|
||||
),
|
||||
(
|
||||
"coaster_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of roller coasters"
|
||||
),
|
||||
),
|
||||
(
|
||||
"custom_fields",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
default=dict,
|
||||
help_text="Additional park-specific data",
|
||||
),
|
||||
),
|
||||
(
|
||||
"location",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Park location",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="parks",
|
||||
to="core.locality",
|
||||
),
|
||||
),
|
||||
(
|
||||
"operator",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Current park operator",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="operated_parks",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Park",
|
||||
"verbose_name_plural": "Parks",
|
||||
"ordering": ["name"],
|
||||
},
|
||||
bases=(
|
||||
dirtyfields.dirtyfields.DirtyFieldsMixin,
|
||||
django_lifecycle.mixins.LifecycleModelMixin,
|
||||
models.Model,
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RideModel",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
help_text="Model name (e.g., 'Inverted Coaster', 'Boomerang')",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
help_text="URL-friendly identifier", max_length=255, unique=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Model description and technical details"
|
||||
),
|
||||
),
|
||||
(
|
||||
"model_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("coaster_model", "Roller Coaster Model"),
|
||||
("flat_ride_model", "Flat Ride Model"),
|
||||
("water_ride_model", "Water Ride Model"),
|
||||
("dark_ride_model", "Dark Ride Model"),
|
||||
("transport_ride_model", "Transport Ride Model"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of ride model",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"typical_height",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Typical height in feet",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"typical_speed",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Typical speed in mph",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"typical_capacity",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="Typical hourly capacity", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_id",
|
||||
models.CharField(
|
||||
blank=True, help_text="CloudFlare image ID", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_url",
|
||||
models.URLField(blank=True, help_text="CloudFlare image URL"),
|
||||
),
|
||||
(
|
||||
"installation_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of installations worldwide"
|
||||
),
|
||||
),
|
||||
(
|
||||
"manufacturer",
|
||||
models.ForeignKey(
|
||||
help_text="Manufacturer of this ride model",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="ride_models",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Ride Model",
|
||||
"verbose_name_plural": "Ride Models",
|
||||
"ordering": ["manufacturer__name", "name"],
|
||||
},
|
||||
bases=(
|
||||
dirtyfields.dirtyfields.DirtyFieldsMixin,
|
||||
django_lifecycle.mixins.LifecycleModelMixin,
|
||||
models.Model,
|
||||
),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="Ride",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
db_index=True, help_text="Ride name", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
help_text="URL-friendly identifier", max_length=255, unique=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Ride description and history"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_category",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("roller_coaster", "Roller Coaster"),
|
||||
("flat_ride", "Flat Ride"),
|
||||
("water_ride", "Water Ride"),
|
||||
("dark_ride", "Dark Ride"),
|
||||
("transport_ride", "Transport Ride"),
|
||||
("other", "Other"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Broad ride category",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_type",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Specific ride type (e.g., 'Inverted Coaster', 'Drop Tower')",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_coaster",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=False,
|
||||
help_text="Is this ride a roller coaster?",
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("operating", "Operating"),
|
||||
("closed", "Closed"),
|
||||
("sbno", "Standing But Not Operating"),
|
||||
("relocated", "Relocated"),
|
||||
("under_construction", "Under Construction"),
|
||||
("planned", "Planned"),
|
||||
],
|
||||
db_index=True,
|
||||
default="operating",
|
||||
help_text="Current operational status",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date",
|
||||
models.DateField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Ride opening date",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of opening date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Ride closing date (if closed)", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of closing date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"height",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Height in feet",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"speed",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Top speed in mph",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"length",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Track/ride length in feet",
|
||||
max_digits=8,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"duration",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="Ride duration in seconds", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"inversions",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Number of inversions (for coasters)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"capacity",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Hourly capacity (riders per hour)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for main photo",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for main photo"
|
||||
),
|
||||
),
|
||||
(
|
||||
"custom_fields",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
default=dict,
|
||||
help_text="Additional ride-specific data",
|
||||
),
|
||||
),
|
||||
(
|
||||
"manufacturer",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Ride manufacturer",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="manufactured_rides",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
(
|
||||
"model",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Specific ride model",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="rides",
|
||||
to="entities.ridemodel",
|
||||
),
|
||||
),
|
||||
(
|
||||
"park",
|
||||
models.ForeignKey(
|
||||
help_text="Park where ride is located",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="rides",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Ride",
|
||||
"verbose_name_plural": "Rides",
|
||||
"ordering": ["park__name", "name"],
|
||||
},
|
||||
bases=(
|
||||
dirtyfields.dirtyfields.DirtyFieldsMixin,
|
||||
django_lifecycle.mixins.LifecycleModelMixin,
|
||||
models.Model,
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ridemodel",
|
||||
index=models.Index(
|
||||
fields=["manufacturer", "name"], name="entities_ri_manufac_1fe3c1_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ridemodel",
|
||||
index=models.Index(
|
||||
fields=["model_type"], name="entities_ri_model_t_610d23_idx"
|
||||
),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="ridemodel",
|
||||
unique_together={("manufacturer", "name")},
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(
|
||||
fields=["park", "name"], name="entities_ri_park_id_e73e3b_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(fields=["slug"], name="entities_ri_slug_d2d6bb_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(fields=["status"], name="entities_ri_status_b69114_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(
|
||||
fields=["is_coaster"], name="entities_ri_is_coas_912a4d_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(
|
||||
fields=["ride_category"], name="entities_ri_ride_ca_bc4554_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(
|
||||
fields=["opening_date"], name="entities_ri_opening_c4fc53_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ride",
|
||||
index=models.Index(
|
||||
fields=["manufacturer"], name="entities_ri_manufac_0d9a25_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="park",
|
||||
index=models.Index(fields=["name"], name="entities_pa_name_f8a746_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="park",
|
||||
index=models.Index(fields=["slug"], name="entities_pa_slug_a21c73_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="park",
|
||||
index=models.Index(fields=["status"], name="entities_pa_status_805296_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="park",
|
||||
index=models.Index(
|
||||
fields=["park_type"], name="entities_pa_park_ty_8eba41_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="park",
|
||||
index=models.Index(
|
||||
fields=["opening_date"], name="entities_pa_opening_102a60_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="park",
|
||||
index=models.Index(
|
||||
fields=["location"], name="entities_pa_locatio_20a884_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="company",
|
||||
index=models.Index(fields=["name"], name="entities_co_name_d061e8_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="company",
|
||||
index=models.Index(fields=["slug"], name="entities_co_slug_00ae5c_idx"),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,35 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 17:03
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("entities", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="latitude",
|
||||
field=models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=7,
|
||||
help_text="Latitude coordinate. Primary in local dev, use location_point in production.",
|
||||
max_digits=10,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name="park",
|
||||
name="longitude",
|
||||
field=models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=7,
|
||||
help_text="Longitude coordinate. Primary in local dev, use location_point in production.",
|
||||
max_digits=10,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
]
|
||||
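The admin above reads obj.coordinates, and this migration documents the split between plain latitude/longitude columns (local dev) and a PostGIS location_point (production). The model property is not shown in this diff; a hedged sketch of a helper with the same behaviour might be:

# Hypothetical helper mirroring the Park.coordinates property used by the admin;
# the real model code is not included in this diff.
def park_coordinates(park):
    """Return (longitude, latitude) for a park, or None if no position is stored."""
    point = getattr(park, 'location_point', None)   # PostGIS / production mode
    if point is not None:
        return (point.x, point.y)                   # GEOS Point: x is longitude, y is latitude
    if park.longitude is not None and park.latitude is not None:
        return (float(park.longitude), float(park.latitude))  # plain-column / local-dev mode
    return None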
@@ -0,0 +1,141 @@
|
||||
# Generated migration for Phase 2 - GIN Index Optimization
|
||||
from django.db import migrations, connection
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
|
||||
|
||||
def is_postgresql():
|
||||
"""Check if the database backend is PostgreSQL/PostGIS."""
|
||||
return 'postgresql' in connection.vendor  # PostGIS backends also report the 'postgresql' vendor
|
||||
|
||||
|
||||
def populate_search_vectors(apps, schema_editor):
|
||||
"""Populate search_vector fields for all existing records."""
|
||||
if not is_postgresql():
|
||||
return
|
||||
|
||||
# Get models
|
||||
Company = apps.get_model('entities', 'Company')
|
||||
RideModel = apps.get_model('entities', 'RideModel')
|
||||
Park = apps.get_model('entities', 'Park')
|
||||
Ride = apps.get_model('entities', 'Ride')
|
||||
|
||||
# Update Company search vectors
|
||||
Company.objects.update(
|
||||
search_vector=(
|
||||
            SearchVector('name', weight='A') +
            SearchVector('description', weight='B')
        )
    )

    # Update RideModel search vectors
    RideModel.objects.update(
        search_vector=(
            SearchVector('name', weight='A') +
            SearchVector('manufacturer__name', weight='A') +
            SearchVector('description', weight='B')
        )
    )

    # Update Park search vectors
    Park.objects.update(
        search_vector=(
            SearchVector('name', weight='A') +
            SearchVector('description', weight='B')
        )
    )

    # Update Ride search vectors
    Ride.objects.update(
        search_vector=(
            SearchVector('name', weight='A') +
            SearchVector('park__name', weight='A') +
            SearchVector('manufacturer__name', weight='B') +
            SearchVector('description', weight='B')
        )
    )


def reverse_search_vectors(apps, schema_editor):
    """Clear search_vector fields for all records."""
    if not is_postgresql():
        return

    # Get models
    Company = apps.get_model('entities', 'Company')
    RideModel = apps.get_model('entities', 'RideModel')
    Park = apps.get_model('entities', 'Park')
    Ride = apps.get_model('entities', 'Ride')

    # Clear all search vectors
    Company.objects.update(search_vector=None)
    RideModel.objects.update(search_vector=None)
    Park.objects.update(search_vector=None)
    Ride.objects.update(search_vector=None)


def add_gin_indexes(apps, schema_editor):
    """Add GIN indexes on search_vector fields (PostgreSQL only)."""
    if not is_postgresql():
        return

    # Use raw SQL to add GIN indexes
    with schema_editor.connection.cursor() as cursor:
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS entities_company_search_idx
            ON entities_company USING gin(search_vector);
        """)
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS entities_ridemodel_search_idx
            ON entities_ridemodel USING gin(search_vector);
        """)
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS entities_park_search_idx
            ON entities_park USING gin(search_vector);
        """)
        cursor.execute("""
            CREATE INDEX IF NOT EXISTS entities_ride_search_idx
            ON entities_ride USING gin(search_vector);
        """)


def remove_gin_indexes(apps, schema_editor):
    """Remove GIN indexes (PostgreSQL only)."""
    if not is_postgresql():
        return

    # Use raw SQL to drop GIN indexes
    with schema_editor.connection.cursor() as cursor:
        cursor.execute("DROP INDEX IF EXISTS entities_company_search_idx;")
        cursor.execute("DROP INDEX IF EXISTS entities_ridemodel_search_idx;")
        cursor.execute("DROP INDEX IF EXISTS entities_park_search_idx;")
        cursor.execute("DROP INDEX IF EXISTS entities_ride_search_idx;")


class Migration(migrations.Migration):
    """
    Phase 2 Migration: Add GIN indexes for search optimization.

    This migration:
    1. Populates search vectors for all existing database records
    2. Adds GIN indexes on search_vector fields for optimal full-text search
    3. Is PostgreSQL-specific and a safe no-op in SQLite environments
    """

    dependencies = [
        ('entities', '0002_alter_park_latitude_alter_park_longitude'),
    ]

    operations = [
        # First, populate search vectors for existing records
        migrations.RunPython(
            populate_search_vectors,
            reverse_search_vectors,
        ),

        # Add GIN indexes for each model's search_vector field
        migrations.RunPython(
            add_gin_indexes,
            remove_gin_indexes,
        ),
    ]
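Once this migration has run on PostgreSQL, a quick way to confirm the GIN indexes are actually exercised is to EXPLAIN a tsquery against one of the indexed tables. This is an illustrative check, not part of the commit; the table and index names come from add_gin_indexes() above, while the sample search term is made up.

# Illustrative index check (not part of this commit). Table/index names match
# add_gin_indexes() above; the search term is an arbitrary example.
from django.db import connection

with connection.cursor() as cursor:
    cursor.execute("""
        EXPLAIN
        SELECT id FROM entities_park
        WHERE search_vector @@ websearch_to_tsquery('english', 'pirate ship');
    """)
    for (line,) in cursor.fetchall():
        # A plan mentioning "entities_park_search_idx" indicates the GIN index is used.
        print(line)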
@@ -0,0 +1,936 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 21:37
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("core", "0001_initial"),
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("entities", "0003_add_search_vector_gin_indexes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="CompanyEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(help_text="Official company name", max_length=255),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
db_index=False,
|
||||
help_text="URL-friendly identifier",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Company description and history"
|
||||
),
|
||||
),
|
||||
(
|
||||
"company_types",
|
||||
models.JSONField(
|
||||
default=list,
|
||||
help_text="List of company types (manufacturer, operator, etc.)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"founded_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Company founding date", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"founded_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of founded date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closed_date",
|
||||
models.DateField(
|
||||
blank=True,
|
||||
help_text="Company closure date (if applicable)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closed_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of closed date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"website",
|
||||
models.URLField(blank=True, help_text="Official company website"),
|
||||
),
|
||||
(
|
||||
"logo_image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for company logo",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"logo_image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for company logo"
|
||||
),
|
||||
),
|
||||
(
|
||||
"park_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of parks operated (for operators)"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_count",
|
||||
models.IntegerField(
|
||||
default=0,
|
||||
help_text="Number of rides manufactured (for manufacturers)",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ParkEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(help_text="Official park name", max_length=255),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
db_index=False,
|
||||
help_text="URL-friendly identifier",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Park description and history"
|
||||
),
|
||||
),
|
||||
(
|
||||
"park_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("theme_park", "Theme Park"),
|
||||
("amusement_park", "Amusement Park"),
|
||||
("water_park", "Water Park"),
|
||||
(
|
||||
"family_entertainment_center",
|
||||
"Family Entertainment Center",
|
||||
),
|
||||
("traveling_park", "Traveling Park"),
|
||||
("zoo", "Zoo"),
|
||||
("aquarium", "Aquarium"),
|
||||
],
|
||||
help_text="Type of park",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("operating", "Operating"),
|
||||
("closed", "Closed"),
|
||||
("sbno", "Standing But Not Operating"),
|
||||
("under_construction", "Under Construction"),
|
||||
("planned", "Planned"),
|
||||
],
|
||||
default="operating",
|
||||
help_text="Current operational status",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Park opening date", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of opening date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Park closing date (if closed)", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of closing date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"latitude",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=7,
|
||||
help_text="Latitude coordinate. Primary in local dev, use location_point in production.",
|
||||
max_digits=10,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"longitude",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=7,
|
||||
help_text="Longitude coordinate. Primary in local dev, use location_point in production.",
|
||||
max_digits=10,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"website",
|
||||
models.URLField(blank=True, help_text="Official park website"),
|
||||
),
|
||||
(
|
||||
"banner_image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for park banner",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"banner_image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for park banner"
|
||||
),
|
||||
),
|
||||
(
|
||||
"logo_image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for park logo",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"logo_image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for park logo"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_count",
|
||||
models.IntegerField(default=0, help_text="Total number of rides"),
|
||||
),
|
||||
(
|
||||
"coaster_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of roller coasters"
|
||||
),
|
||||
),
|
||||
(
|
||||
"custom_fields",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
default=dict,
|
||||
help_text="Additional park-specific data",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RideEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
("name", models.CharField(help_text="Ride name", max_length=255)),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
db_index=False,
|
||||
help_text="URL-friendly identifier",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Ride description and history"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_category",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("roller_coaster", "Roller Coaster"),
|
||||
("flat_ride", "Flat Ride"),
|
||||
("water_ride", "Water Ride"),
|
||||
("dark_ride", "Dark Ride"),
|
||||
("transport_ride", "Transport Ride"),
|
||||
("other", "Other"),
|
||||
],
|
||||
help_text="Broad ride category",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_type",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="Specific ride type (e.g., 'Inverted Coaster', 'Drop Tower')",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_coaster",
|
||||
models.BooleanField(
|
||||
default=False, help_text="Is this ride a roller coaster?"
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("operating", "Operating"),
|
||||
("closed", "Closed"),
|
||||
("sbno", "Standing But Not Operating"),
|
||||
("relocated", "Relocated"),
|
||||
("under_construction", "Under Construction"),
|
||||
("planned", "Planned"),
|
||||
],
|
||||
default="operating",
|
||||
help_text="Current operational status",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Ride opening date", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"opening_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of opening date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Ride closing date (if closed)", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"closing_date_precision",
|
||||
models.CharField(
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
default="day",
|
||||
help_text="Precision of closing date",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"height",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Height in feet",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"speed",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Top speed in mph",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"length",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Track/ride length in feet",
|
||||
max_digits=8,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"duration",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="Ride duration in seconds", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"inversions",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Number of inversions (for coasters)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"capacity",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Hourly capacity (riders per hour)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_id",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="CloudFlare image ID for main photo",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_url",
|
||||
models.URLField(
|
||||
blank=True, help_text="CloudFlare image URL for main photo"
|
||||
),
|
||||
),
|
||||
(
|
||||
"custom_fields",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
default=dict,
|
||||
help_text="Additional ride-specific data",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RideModelEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
help_text="Model name (e.g., 'Inverted Coaster', 'Boomerang')",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"slug",
|
||||
models.SlugField(
|
||||
db_index=False,
|
||||
help_text="URL-friendly identifier",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Model description and technical details"
|
||||
),
|
||||
),
|
||||
(
|
||||
"model_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("coaster_model", "Roller Coaster Model"),
|
||||
("flat_ride_model", "Flat Ride Model"),
|
||||
("water_ride_model", "Water Ride Model"),
|
||||
("dark_ride_model", "Dark Ride Model"),
|
||||
("transport_ride_model", "Transport Ride Model"),
|
||||
],
|
||||
help_text="Type of ride model",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"typical_height",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Typical height in feet",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"typical_speed",
|
||||
models.DecimalField(
|
||||
blank=True,
|
||||
decimal_places=1,
|
||||
help_text="Typical speed in mph",
|
||||
max_digits=6,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"typical_capacity",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="Typical hourly capacity", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_id",
|
||||
models.CharField(
|
||||
blank=True, help_text="CloudFlare image ID", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"image_url",
|
||||
models.URLField(blank=True, help_text="CloudFlare image URL"),
|
||||
),
|
||||
(
|
||||
"installation_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of installations worldwide"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="company",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_companyevent" ("closed_date", "closed_date_precision", "company_types", "created", "description", "founded_date", "founded_date_precision", "id", "location_id", "logo_image_id", "logo_image_url", "modified", "name", "park_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_count", "slug", "website") VALUES (NEW."closed_date", NEW."closed_date_precision", NEW."company_types", NEW."created", NEW."description", NEW."founded_date", NEW."founded_date_precision", NEW."id", NEW."location_id", NEW."logo_image_id", NEW."logo_image_url", NEW."modified", NEW."name", NEW."park_count", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."ride_count", NEW."slug", NEW."website"); RETURN NULL;',
|
||||
hash="891243f1479adc9ae67c894ec6824b89b7997086",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_ed498",
|
||||
table="entities_company",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="company",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_companyevent" ("closed_date", "closed_date_precision", "company_types", "created", "description", "founded_date", "founded_date_precision", "id", "location_id", "logo_image_id", "logo_image_url", "modified", "name", "park_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_count", "slug", "website") VALUES (NEW."closed_date", NEW."closed_date_precision", NEW."company_types", NEW."created", NEW."description", NEW."founded_date", NEW."founded_date_precision", NEW."id", NEW."location_id", NEW."logo_image_id", NEW."logo_image_url", NEW."modified", NEW."name", NEW."park_count", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."ride_count", NEW."slug", NEW."website"); RETURN NULL;',
|
||||
hash="5d0f3d8dbb199afd7474de393b075b8e72c481fd",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_2d89e",
|
||||
table="entities_company",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_parkevent" ("banner_image_id", "banner_image_url", "closing_date", "closing_date_precision", "coaster_count", "created", "custom_fields", "description", "id", "latitude", "location_id", "logo_image_id", "logo_image_url", "longitude", "modified", "name", "opening_date", "opening_date_precision", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_count", "slug", "status", "website") VALUES (NEW."banner_image_id", NEW."banner_image_url", NEW."closing_date", NEW."closing_date_precision", NEW."coaster_count", NEW."created", NEW."custom_fields", NEW."description", NEW."id", NEW."latitude", NEW."location_id", NEW."logo_image_id", NEW."logo_image_url", NEW."longitude", NEW."modified", NEW."name", NEW."opening_date", NEW."opening_date_precision", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."ride_count", NEW."slug", NEW."status", NEW."website"); RETURN NULL;',
|
||||
hash="e03ce2a0516ff75f1703a6ccf069ce931f3123bc",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_a5515",
|
||||
table="entities_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="park",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_parkevent" ("banner_image_id", "banner_image_url", "closing_date", "closing_date_precision", "coaster_count", "created", "custom_fields", "description", "id", "latitude", "location_id", "logo_image_id", "logo_image_url", "longitude", "modified", "name", "opening_date", "opening_date_precision", "operator_id", "park_type", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_count", "slug", "status", "website") VALUES (NEW."banner_image_id", NEW."banner_image_url", NEW."closing_date", NEW."closing_date_precision", NEW."coaster_count", NEW."created", NEW."custom_fields", NEW."description", NEW."id", NEW."latitude", NEW."location_id", NEW."logo_image_id", NEW."logo_image_url", NEW."longitude", NEW."modified", NEW."name", NEW."opening_date", NEW."opening_date_precision", NEW."operator_id", NEW."park_type", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."ride_count", NEW."slug", NEW."status", NEW."website"); RETURN NULL;',
|
||||
hash="0e01b4eac8ef56aeb039c870c7ac194d2615012e",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_b436a",
|
||||
table="entities_park",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="ride",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_rideevent" ("capacity", "closing_date", "closing_date_precision", "created", "custom_fields", "description", "duration", "height", "id", "image_id", "image_url", "inversions", "is_coaster", "length", "manufacturer_id", "model_id", "modified", "name", "opening_date", "opening_date_precision", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_category", "ride_type", "slug", "speed", "status") VALUES (NEW."capacity", NEW."closing_date", NEW."closing_date_precision", NEW."created", NEW."custom_fields", NEW."description", NEW."duration", NEW."height", NEW."id", NEW."image_id", NEW."image_url", NEW."inversions", NEW."is_coaster", NEW."length", NEW."manufacturer_id", NEW."model_id", NEW."modified", NEW."name", NEW."opening_date", NEW."opening_date_precision", NEW."park_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."ride_category", NEW."ride_type", NEW."slug", NEW."speed", NEW."status"); RETURN NULL;',
|
||||
hash="02f95397d881bd95627424df1a144956d5f15f8d",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_23173",
|
||||
table="entities_ride",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="ride",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_rideevent" ("capacity", "closing_date", "closing_date_precision", "created", "custom_fields", "description", "duration", "height", "id", "image_id", "image_url", "inversions", "is_coaster", "length", "manufacturer_id", "model_id", "modified", "name", "opening_date", "opening_date_precision", "park_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_category", "ride_type", "slug", "speed", "status") VALUES (NEW."capacity", NEW."closing_date", NEW."closing_date_precision", NEW."created", NEW."custom_fields", NEW."description", NEW."duration", NEW."height", NEW."id", NEW."image_id", NEW."image_url", NEW."inversions", NEW."is_coaster", NEW."length", NEW."manufacturer_id", NEW."model_id", NEW."modified", NEW."name", NEW."opening_date", NEW."opening_date_precision", NEW."park_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."ride_category", NEW."ride_type", NEW."slug", NEW."speed", NEW."status"); RETURN NULL;',
|
||||
hash="9377ca0c44ec8e548254d371a95e9ff7a6eb8684",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_c2972",
|
||||
table="entities_ride",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="ridemodel",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_ridemodelevent" ("created", "description", "id", "image_id", "image_url", "installation_count", "manufacturer_id", "model_type", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "typical_capacity", "typical_height", "typical_speed") VALUES (NEW."created", NEW."description", NEW."id", NEW."image_id", NEW."image_url", NEW."installation_count", NEW."manufacturer_id", NEW."model_type", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."slug", NEW."typical_capacity", NEW."typical_height", NEW."typical_speed"); RETURN NULL;',
|
||||
hash="580a9d8a429d5140bc6bf553d6e0f9c06b7a7dec",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_04de6",
|
||||
table="entities_ridemodel",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="ridemodel",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_ridemodelevent" ("created", "description", "id", "image_id", "image_url", "installation_count", "manufacturer_id", "model_type", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "slug", "typical_capacity", "typical_height", "typical_speed") VALUES (NEW."created", NEW."description", NEW."id", NEW."image_id", NEW."image_url", NEW."installation_count", NEW."manufacturer_id", NEW."model_type", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."slug", NEW."typical_capacity", NEW."typical_height", NEW."typical_speed"); RETURN NULL;',
|
||||
hash="b7d6519a2c97e7b543494b67c4f25826439a02ef",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_a70fd",
|
||||
table="entities_ridemodel",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridemodelevent",
|
||||
name="manufacturer",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
help_text="Manufacturer of this ride model",
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridemodelevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridemodelevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="entities.ridemodel",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="rideevent",
|
||||
name="manufacturer",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Ride manufacturer",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="rideevent",
|
||||
name="model",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Specific ride model",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.ridemodel",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="rideevent",
|
||||
name="park",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
help_text="Park where ride is located",
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="rideevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="rideevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="entities.ride",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="location",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Park location",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="core.locality",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="operator",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Current park operator",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="parkevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companyevent",
|
||||
name="location",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="Company headquarters location",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="core.locality",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companyevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companyevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="entities.company",
|
||||
),
|
||||
),
|
||||
]
|
||||
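The event tables and AFTER INSERT/UPDATE triggers generated above come from django-pghistory tracking declarations on the models. As a rough sketch of the kind of declaration that produces them (the exact decorator arguments depend on the pghistory version and on the project's models.py, which is not shown in this diff):

# Hypothetical sketch only; assumes django-pghistory 3.x, where InsertEvent and
# UpdateEvent produce the 'insert'/'update' labels seen in the triggers above.
# The real models in apps/entities/models.py may configure tracking differently.
import pghistory
from django.db import models


@pghistory.track(pghistory.InsertEvent(), pghistory.UpdateEvent())
class Park(models.Model):
    name = models.CharField(max_length=255)
    # ... remaining fields as defined in apps/entities/models.py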
@@ -0,0 +1,12 @@
# Generated by Django 4.2.8 on 2025-11-09 03:26

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("entities", "0004_companyevent_parkevent_rideevent_ridemodelevent_and_more"),
    ]

    operations = []
@@ -0,0 +1,12 @@
# Generated by Django 4.2.8 on 2025-11-09 03:27

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("entities", "0005_migrate_company_types_to_m2m"),
    ]

    operations = []
@@ -0,0 +1,542 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-09 15:30
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("entities", "0006_migrate_company_types_to_m2m"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="CompanyType",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"code",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("manufacturer", "Manufacturer"),
|
||||
("operator", "Operator"),
|
||||
("designer", "Designer"),
|
||||
("supplier", "Supplier"),
|
||||
("contractor", "Contractor"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Unique code identifier for the company type",
|
||||
max_length=50,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
help_text="Display name for the company type", max_length=100
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
help_text="Description of what this company type represents",
|
||||
),
|
||||
),
|
||||
(
|
||||
"company_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Cached count of companies with this type"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Company Type",
|
||||
"verbose_name_plural": "Company Types",
|
||||
"db_table": "company_types",
|
||||
"ordering": ["name"],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="CompanyTypeEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"code",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("manufacturer", "Manufacturer"),
|
||||
("operator", "Operator"),
|
||||
("designer", "Designer"),
|
||||
("supplier", "Supplier"),
|
||||
("contractor", "Contractor"),
|
||||
],
|
||||
help_text="Unique code identifier for the company type",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"name",
|
||||
models.CharField(
|
||||
help_text="Display name for the company type", max_length=100
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
help_text="Description of what this company type represents",
|
||||
),
|
||||
),
|
||||
(
|
||||
"company_count",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Cached count of companies with this type"
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RideNameHistory",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"former_name",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
help_text="Previous name of the ride",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"from_year",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Year when this name started being used",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"to_year",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Year when this name stopped being used",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"date_changed",
|
||||
models.DateField(
|
||||
blank=True,
|
||||
help_text="Exact date when name was changed",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"date_changed_precision",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
help_text="Precision of date_changed field",
|
||||
max_length=20,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reason",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for name change (e.g., 'Rebranding', 'Sponsor change')",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"order_index",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
db_index=True,
|
||||
help_text="Custom sort order for displaying name history",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Ride Name History",
|
||||
"verbose_name_plural": "Ride Name Histories",
|
||||
"ordering": ["ride", "-to_year", "-from_year", "order_index"],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="RideNameHistoryEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
(
|
||||
"former_name",
|
||||
models.CharField(
|
||||
help_text="Previous name of the ride", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"from_year",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Year when this name started being used",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"to_year",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Year when this name stopped being used",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"date_changed",
|
||||
models.DateField(
|
||||
blank=True,
|
||||
help_text="Exact date when name was changed",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"date_changed_precision",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
choices=[("year", "Year"), ("month", "Month"), ("day", "Day")],
|
||||
help_text="Precision of date_changed field",
|
||||
max_length=20,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reason",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for name change (e.g., 'Rebranding', 'Sponsor change')",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"order_index",
|
||||
models.IntegerField(
|
||||
blank=True,
|
||||
help_text="Custom sort order for displaying name history",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="company",
|
||||
name="insert_insert",
|
||||
),
|
||||
pgtrigger.migrations.RemoveTrigger(
|
||||
model_name="company",
|
||||
name="update_update",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="company",
|
||||
name="company_types",
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="companyevent",
|
||||
name="company_types",
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="company",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_companyevent" ("closed_date", "closed_date_precision", "created", "description", "founded_date", "founded_date_precision", "id", "location_id", "logo_image_id", "logo_image_url", "modified", "name", "park_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_count", "slug", "website") VALUES (NEW."closed_date", NEW."closed_date_precision", NEW."created", NEW."description", NEW."founded_date", NEW."founded_date_precision", NEW."id", NEW."location_id", NEW."logo_image_id", NEW."logo_image_url", NEW."modified", NEW."name", NEW."park_count", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."ride_count", NEW."slug", NEW."website"); RETURN NULL;',
|
||||
hash="9d74e2f8c1fd5cb457d1deb6d8bb3b55f690df7a",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_ed498",
|
||||
table="entities_company",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="company",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_companyevent" ("closed_date", "closed_date_precision", "created", "description", "founded_date", "founded_date_precision", "id", "location_id", "logo_image_id", "logo_image_url", "modified", "name", "park_count", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "ride_count", "slug", "website") VALUES (NEW."closed_date", NEW."closed_date_precision", NEW."created", NEW."description", NEW."founded_date", NEW."founded_date_precision", NEW."id", NEW."location_id", NEW."logo_image_id", NEW."logo_image_url", NEW."modified", NEW."name", NEW."park_count", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."ride_count", NEW."slug", NEW."website"); RETURN NULL;',
|
||||
hash="79dd6fed8d6bb8a54dfb0efb1433d93e2c732152",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_2d89e",
|
||||
table="entities_company",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridenamehistoryevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridenamehistoryevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="entities.ridenamehistory",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridenamehistoryevent",
|
||||
name="ride",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
help_text="Ride this name history belongs to",
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.ride",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="ridenamehistory",
|
||||
name="ride",
|
||||
field=models.ForeignKey(
|
||||
help_text="Ride this name history belongs to",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="name_history",
|
||||
to="entities.ride",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companytypeevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="companytypeevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="entities.companytype",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="companytype",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_companytypeevent" ("code", "company_count", "created", "description", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id") VALUES (NEW."code", NEW."company_count", NEW."created", NEW."description", NEW."id", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id"); RETURN NULL;',
|
||||
hash="37b8907c9141c73466db70e30a15281129bdb623",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_c2d35",
|
||||
table="company_types",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="companytype",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_companytypeevent" ("code", "company_count", "created", "description", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id") VALUES (NEW."code", NEW."company_count", NEW."created", NEW."description", NEW."id", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id"); RETURN NULL;',
|
||||
hash="4f168297493a54875233a39c57cb4abd2490c0c0",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_fc3b6",
|
||||
table="company_types",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="company",
|
||||
name="types",
|
||||
field=models.ManyToManyField(
|
||||
blank=True,
|
||||
help_text="Types of company (manufacturer, operator, etc.)",
|
||||
related_name="companies",
|
||||
to="entities.companytype",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ridenamehistory",
|
||||
index=models.Index(
|
||||
fields=["ride", "from_year"], name="entities_ri_ride_id_648621_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ridenamehistory",
|
||||
index=models.Index(
|
||||
fields=["ride", "to_year"], name="entities_ri_ride_id_7cfa50_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="ridenamehistory",
|
||||
index=models.Index(
|
||||
fields=["former_name"], name="entities_ri_former__c3173a_idx"
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="ridenamehistory",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "entities_ridenamehistoryevent" ("created", "date_changed", "date_changed_precision", "former_name", "from_year", "id", "modified", "order_index", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reason", "ride_id", "to_year") VALUES (NEW."created", NEW."date_changed", NEW."date_changed_precision", NEW."former_name", NEW."from_year", NEW."id", NEW."modified", NEW."order_index", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."reason", NEW."ride_id", NEW."to_year"); RETURN NULL;',
|
||||
hash="bba7baecb40457a954159e0d62aa06dc8746fd0c",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_dd590",
|
||||
table="entities_ridenamehistory",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="ridenamehistory",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "entities_ridenamehistoryevent" ("created", "date_changed", "date_changed_precision", "former_name", "from_year", "id", "modified", "order_index", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reason", "ride_id", "to_year") VALUES (NEW."created", NEW."date_changed", NEW."date_changed_precision", NEW."former_name", NEW."from_year", NEW."id", NEW."modified", NEW."order_index", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."reason", NEW."ride_id", NEW."to_year"); RETURN NULL;',
|
||||
hash="bcd9a1ba98897e9e2d89c2056b9922f09a69c447",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_73687",
|
||||
table="entities_ridenamehistory",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
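With the CompanyType model and the Company.types many-to-many in place (replacing the old company_types JSONField), assigning and querying types becomes a normal M2M operation. A minimal usage sketch; the model and field names are taken from the migration above, but the lookup values are invented for illustration:

# Illustrative only: model/field names follow the migration above,
# the specific company slug and type values are made up.
from apps.entities.models import Company, CompanyType

manufacturer, _ = CompanyType.objects.get_or_create(
    code="manufacturer",
    defaults={"name": "Manufacturer"},
)
company = Company.objects.get(slug="some-manufacturer")  # hypothetical slug
company.types.add(manufacturer)

# Companies of a given type can then be queried through the M2M:
manufacturers = Company.objects.filter(types__code="manufacturer")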
0
django-backend/apps/entities/migrations/__init__.py
Normal file
0
django-backend/apps/entities/migrations/__init__.py
Normal file
1101
django-backend/apps/entities/models.py
Normal file
1101
django-backend/apps/entities/models.py
Normal file
File diff suppressed because it is too large
Load Diff
386
django-backend/apps/entities/search.py
Normal file
386
django-backend/apps/entities/search.py
Normal file
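Before the diff of the new search module below, a short usage sketch of the SearchService it defines. The class and method names come from the diff itself; the query strings and filter values are examples only.

# Example call sites for the SearchService defined in search.py below.
# Query strings and filter values here are illustrative, not from the commit.
from apps.entities.search import SearchService

service = SearchService()

# Grouped results across all entity types
results = service.search_all("boomerang", limit=5)
print(results.keys())  # dict_keys(['companies', 'ride_models', 'parks', 'rides'])

# A single entity type with extra filters
coasters = service.search_rides(
    "inverted",
    filters={"is_coaster": True, "min_height": 100},
    limit=10,
)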
@@ -0,0 +1,386 @@
"""
|
||||
Search service for ThrillWiki entities.
|
||||
|
||||
Provides full-text search capabilities with PostgreSQL and fallback for SQLite.
|
||||
- PostgreSQL: Uses SearchVector, SearchQuery, SearchRank for full-text search
|
||||
- SQLite: Falls back to case-insensitive LIKE queries
|
||||
"""
|
||||
from typing import List, Optional, Dict, Any
|
||||
from django.db.models import Q, QuerySet, Value, CharField, F
|
||||
from django.db.models.functions import Concat
|
||||
from django.conf import settings
|
||||
|
||||
# Conditionally import PostgreSQL search features
|
||||
_using_postgis = 'postgis' in settings.DATABASES['default']['ENGINE']
|
||||
|
||||
if _using_postgis:
|
||||
from django.contrib.postgres.search import SearchVector, SearchQuery, SearchRank, TrigramSimilarity
|
||||
from django.contrib.postgres.aggregates import StringAgg
|
||||
|
||||
|
||||
class SearchService:
|
||||
"""Service for searching across all entity types."""
|
||||
|
||||
def __init__(self):
|
||||
self.using_postgres = _using_postgis
|
||||
|
||||
def search_all(
|
||||
self,
|
||||
query: str,
|
||||
entity_types: Optional[List[str]] = None,
|
||||
limit: int = 20
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Search across all entity types.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
entity_types: Optional list to filter by entity types
|
||||
limit: Maximum results per entity type
|
||||
|
||||
Returns:
|
||||
Dictionary with results grouped by entity type
|
||||
"""
|
||||
results = {}
|
||||
|
||||
# Default to all entity types if not specified
|
||||
if not entity_types:
|
||||
entity_types = ['company', 'ride_model', 'park', 'ride']
|
||||
|
||||
if 'company' in entity_types:
|
||||
results['companies'] = list(self.search_companies(query, limit=limit))
|
||||
|
||||
if 'ride_model' in entity_types:
|
||||
results['ride_models'] = list(self.search_ride_models(query, limit=limit))
|
||||
|
||||
if 'park' in entity_types:
|
||||
results['parks'] = list(self.search_parks(query, limit=limit))
|
||||
|
||||
if 'ride' in entity_types:
|
||||
results['rides'] = list(self.search_rides(query, limit=limit))
|
||||
|
||||
return results
|
||||
|
||||
def search_companies(
|
||||
self,
|
||||
query: str,
|
||||
filters: Optional[Dict[str, Any]] = None,
|
||||
limit: int = 20
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Search companies with full-text search.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
filters: Optional filters (company_types, founded_after, etc.)
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
QuerySet of Company objects
|
||||
"""
|
||||
from apps.entities.models import Company
|
||||
|
||||
if self.using_postgres:
|
||||
# PostgreSQL full-text search using pre-computed search_vector
|
||||
search_query = SearchQuery(query, search_type='websearch')
|
||||
|
||||
results = Company.objects.annotate(
|
||||
rank=SearchRank(F('search_vector'), search_query)
|
||||
).filter(search_vector=search_query).order_by('-rank')
|
||||
else:
|
||||
# SQLite fallback using LIKE
|
||||
results = Company.objects.filter(
|
||||
Q(name__icontains=query) | Q(description__icontains=query)
|
||||
).order_by('name')
|
||||
|
||||
# Apply additional filters
|
||||
if filters:
|
||||
if filters.get('company_types'):
|
||||
# Filter by company types (stored in JSONField)
|
||||
results = results.filter(
|
||||
company_types__contains=filters['company_types']
|
||||
)
|
||||
|
||||
if filters.get('founded_after'):
|
||||
results = results.filter(founded_date__gte=filters['founded_after'])
|
||||
|
||||
if filters.get('founded_before'):
|
||||
results = results.filter(founded_date__lte=filters['founded_before'])
|
||||
|
||||
return results[:limit]
|
||||
|
||||
def search_ride_models(
|
||||
self,
|
||||
query: str,
|
||||
filters: Optional[Dict[str, Any]] = None,
|
||||
limit: int = 20
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Search ride models with full-text search.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
filters: Optional filters (manufacturer_id, model_type, etc.)
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
QuerySet of RideModel objects
|
||||
"""
|
||||
from apps.entities.models import RideModel
|
||||
|
||||
if self.using_postgres:
|
||||
# PostgreSQL full-text search using pre-computed search_vector
|
||||
search_query = SearchQuery(query, search_type='websearch')
|
||||
|
||||
results = RideModel.objects.select_related('manufacturer').annotate(
|
||||
rank=SearchRank(F('search_vector'), search_query)
|
||||
).filter(search_vector=search_query).order_by('-rank')
|
||||
else:
|
||||
# SQLite fallback using LIKE
|
||||
results = RideModel.objects.select_related('manufacturer').filter(
|
||||
Q(name__icontains=query) |
|
||||
Q(manufacturer__name__icontains=query) |
|
||||
Q(description__icontains=query)
|
||||
).order_by('manufacturer__name', 'name')
|
||||
|
||||
# Apply additional filters
|
||||
if filters:
|
||||
if filters.get('manufacturer_id'):
|
||||
results = results.filter(manufacturer_id=filters['manufacturer_id'])
|
||||
|
||||
if filters.get('model_type'):
|
||||
results = results.filter(model_type=filters['model_type'])
|
||||
|
||||
return results[:limit]
|
||||
|
||||
def search_parks(
|
||||
self,
|
||||
query: str,
|
||||
filters: Optional[Dict[str, Any]] = None,
|
||||
limit: int = 20
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Search parks with full-text search and location filtering.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
filters: Optional filters (status, park_type, location, radius, etc.)
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
QuerySet of Park objects
|
||||
"""
|
||||
from apps.entities.models import Park
|
||||
|
||||
if self.using_postgres:
|
||||
# PostgreSQL full-text search using pre-computed search_vector
|
||||
search_query = SearchQuery(query, search_type='websearch')
|
||||
|
||||
results = Park.objects.annotate(
|
||||
rank=SearchRank(F('search_vector'), search_query)
|
||||
).filter(search_vector=search_query).order_by('-rank')
|
||||
else:
|
||||
# SQLite fallback using LIKE
|
||||
results = Park.objects.filter(
|
||||
Q(name__icontains=query) | Q(description__icontains=query)
|
||||
).order_by('name')
|
||||
|
||||
# Apply additional filters
|
||||
if filters:
|
||||
if filters.get('status'):
|
||||
results = results.filter(status=filters['status'])
|
||||
|
||||
if filters.get('park_type'):
|
||||
results = results.filter(park_type=filters['park_type'])
|
||||
|
||||
if filters.get('operator_id'):
|
||||
results = results.filter(operator_id=filters['operator_id'])
|
||||
|
||||
if filters.get('opening_after'):
|
||||
results = results.filter(opening_date__gte=filters['opening_after'])
|
||||
|
||||
if filters.get('opening_before'):
|
||||
results = results.filter(opening_date__lte=filters['opening_before'])
|
||||
|
||||
# Location-based filtering (PostGIS only)
|
||||
if self.using_postgres and filters.get('location') and filters.get('radius'):
|
||||
                from django.contrib.gis.geos import Point
                from django.contrib.gis.measure import D
                from django.contrib.gis.db.models.functions import Distance
|
||||
|
||||
longitude, latitude = filters['location']
|
||||
point = Point(longitude, latitude, srid=4326)
|
||||
radius_km = filters['radius']
|
||||
|
||||
# Use distance filter
|
||||
results = results.filter(
|
||||
location_point__distance_lte=(point, D(km=radius_km))
|
||||
).annotate(
|
||||
                    distance=Distance('location_point', point)
|
||||
).order_by('distance')
|
||||
|
||||
return results[:limit]
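    # Illustrative usage sketch (the enclosing service class name and instance are
    # assumed here, e.g. SearchService): a radius search around Cedar Point's
    # coordinates using the filters handled in this method.
    #
    #     service = SearchService()
    #     nearby = service.search_parks(
    #         'point',
    #         filters={
    #             'status': 'operating',
    #             'location': (-82.6839, 41.4792),  # (longitude, latitude)
    #             'radius': 50,                     # kilometres
    #         },
    #         limit=10,
    #     )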
|
||||
|
||||
def search_rides(
|
||||
self,
|
||||
query: str,
|
||||
filters: Optional[Dict[str, Any]] = None,
|
||||
limit: int = 20
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Search rides with full-text search.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
filters: Optional filters (park_id, manufacturer_id, status, etc.)
|
||||
limit: Maximum number of results
|
||||
|
||||
Returns:
|
||||
QuerySet of Ride objects
|
||||
"""
|
||||
from apps.entities.models import Ride
|
||||
|
||||
if self.using_postgres:
|
||||
# PostgreSQL full-text search using pre-computed search_vector
|
||||
search_query = SearchQuery(query, search_type='websearch')
|
||||
|
||||
results = Ride.objects.select_related('park', 'manufacturer', 'model').annotate(
|
||||
rank=SearchRank(F('search_vector'), search_query)
|
||||
).filter(search_vector=search_query).order_by('-rank')
|
||||
else:
|
||||
# SQLite fallback using LIKE
|
||||
results = Ride.objects.select_related('park', 'manufacturer', 'model').filter(
|
||||
Q(name__icontains=query) |
|
||||
Q(park__name__icontains=query) |
|
||||
Q(manufacturer__name__icontains=query) |
|
||||
Q(description__icontains=query)
|
||||
).order_by('park__name', 'name')
|
||||
|
||||
# Apply additional filters
|
||||
if filters:
|
||||
if filters.get('park_id'):
|
||||
results = results.filter(park_id=filters['park_id'])
|
||||
|
||||
if filters.get('manufacturer_id'):
|
||||
results = results.filter(manufacturer_id=filters['manufacturer_id'])
|
||||
|
||||
if filters.get('model_id'):
|
||||
results = results.filter(model_id=filters['model_id'])
|
||||
|
||||
if filters.get('status'):
|
||||
results = results.filter(status=filters['status'])
|
||||
|
||||
if filters.get('ride_category'):
|
||||
results = results.filter(ride_category=filters['ride_category'])
|
||||
|
||||
if filters.get('is_coaster') is not None:
|
||||
results = results.filter(is_coaster=filters['is_coaster'])
|
||||
|
||||
if filters.get('opening_after'):
|
||||
results = results.filter(opening_date__gte=filters['opening_after'])
|
||||
|
||||
if filters.get('opening_before'):
|
||||
results = results.filter(opening_date__lte=filters['opening_before'])
|
||||
|
||||
# Height/speed filters
|
||||
if filters.get('min_height'):
|
||||
results = results.filter(height__gte=filters['min_height'])
|
||||
|
||||
if filters.get('max_height'):
|
||||
results = results.filter(height__lte=filters['max_height'])
|
||||
|
||||
if filters.get('min_speed'):
|
||||
results = results.filter(speed__gte=filters['min_speed'])
|
||||
|
||||
if filters.get('max_speed'):
|
||||
results = results.filter(speed__lte=filters['max_speed'])
|
||||
|
||||
return results[:limit]
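    # Illustrative usage sketch (same assumed service instance as above): searching
    # for operating coasters at least 200 units tall via the numeric filters
    # handled in this method.
    #
    #     tall_coasters = service.search_rides(
    #         'steel',
    #         filters={
    #             'status': 'operating',
    #             'is_coaster': True,
    #             'min_height': 200,
    #         },
    #     )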
|
||||
|
||||
def autocomplete(
|
||||
self,
|
||||
query: str,
|
||||
entity_type: Optional[str] = None,
|
||||
limit: int = 10
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Get autocomplete suggestions for search.
|
||||
|
||||
Args:
|
||||
query: Partial search query
|
||||
entity_type: Optional specific entity type
|
||||
limit: Maximum number of suggestions
|
||||
|
||||
Returns:
|
||||
List of suggestion dictionaries with name and entity_type
|
||||
"""
|
||||
suggestions = []
|
||||
|
||||
if not query or len(query) < 2:
|
||||
return suggestions
|
||||
|
||||
# Search in names only for autocomplete
|
||||
if entity_type == 'company' or not entity_type:
|
||||
from apps.entities.models import Company
|
||||
companies = Company.objects.filter(
|
||||
name__istartswith=query
|
||||
).values('id', 'name', 'slug')[:limit]
|
||||
|
||||
for company in companies:
|
||||
suggestions.append({
|
||||
'id': company['id'],
|
||||
'name': company['name'],
|
||||
'slug': company['slug'],
|
||||
'entity_type': 'company'
|
||||
})
|
||||
|
||||
if entity_type == 'park' or not entity_type:
|
||||
from apps.entities.models import Park
|
||||
parks = Park.objects.filter(
|
||||
name__istartswith=query
|
||||
).values('id', 'name', 'slug')[:limit]
|
||||
|
||||
for park in parks:
|
||||
suggestions.append({
|
||||
'id': park['id'],
|
||||
'name': park['name'],
|
||||
'slug': park['slug'],
|
||||
'entity_type': 'park'
|
||||
})
|
||||
|
||||
if entity_type == 'ride' or not entity_type:
|
||||
from apps.entities.models import Ride
|
||||
rides = Ride.objects.select_related('park').filter(
|
||||
name__istartswith=query
|
||||
).values('id', 'name', 'slug', 'park__name')[:limit]
|
||||
|
||||
for ride in rides:
|
||||
suggestions.append({
|
||||
'id': ride['id'],
|
||||
'name': ride['name'],
|
||||
'slug': ride['slug'],
|
||||
'park_name': ride['park__name'],
|
||||
'entity_type': 'ride'
|
||||
})
|
||||
|
||||
if entity_type == 'ride_model' or not entity_type:
|
||||
from apps.entities.models import RideModel
|
||||
models = RideModel.objects.select_related('manufacturer').filter(
|
||||
name__istartswith=query
|
||||
).values('id', 'name', 'slug', 'manufacturer__name')[:limit]
|
||||
|
||||
for model in models:
|
||||
suggestions.append({
|
||||
'id': model['id'],
|
||||
'name': model['name'],
|
||||
'slug': model['slug'],
|
||||
'manufacturer_name': model['manufacturer__name'],
|
||||
'entity_type': 'ride_model'
|
||||
})
|
||||
|
||||
# Sort by relevance (exact matches first, then alphabetically)
|
||||
suggestions.sort(key=lambda x: (
|
||||
not x['name'].lower().startswith(query.lower()),
|
||||
x['name'].lower()
|
||||
))
|
||||
|
||||
return suggestions[:limit]
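    # Illustrative usage sketch: autocomplete across all entity types, or scoped to
    # a single type via entity_type ('company', 'park', 'ride', 'ride_model').
    #
    #     suggestions = service.autocomplete('ceda', limit=5)
    #     parks_only = service.autocomplete('ceda', entity_type='park', limit=5)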
django-backend/apps/entities/services/__init__.py: new file, 562 lines
|
||||
"""
|
||||
Entity submission services for ThrillWiki.
|
||||
|
||||
This module implements entity creation through the Sacred Pipeline.
|
||||
All entities (Parks, Rides, Companies, RideModels) must flow through the
|
||||
ContentSubmission moderation workflow.
|
||||
|
||||
Services:
|
||||
- BaseEntitySubmissionService: Abstract base for all entity submissions
|
||||
- ParkSubmissionService: Park creation through Sacred Pipeline
|
||||
- RideSubmissionService: Ride creation through Sacred Pipeline
|
||||
- CompanySubmissionService: Company creation through Sacred Pipeline
|
||||
- RideModelSubmissionService: RideModel creation through Sacred Pipeline
|
||||
"""
|
||||
|
||||
import logging
|
||||
from django.db import transaction
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.moderation.services import ModerationService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BaseEntitySubmissionService:
|
||||
"""
|
||||
Base service for entity submissions through the Sacred Pipeline.
|
||||
|
||||
This abstract base class provides common functionality for creating entities
|
||||
via the ContentSubmission moderation workflow. Subclasses must define:
|
||||
- entity_model: The Django model class (e.g., Park, Ride)
|
||||
- entity_type_name: Human-readable name for logging (e.g., 'Park')
|
||||
- required_fields: List of required field names (e.g., ['name', 'park_type'])
|
||||
|
||||
Features:
|
||||
- Moderator bypass: Auto-approves for users with moderator role
|
||||
- Atomic transactions: All-or-nothing database operations
|
||||
- Comprehensive logging: Full audit trail
|
||||
- Submission items: Each field tracked separately for selective approval
|
||||
- Placeholder entities: Created immediately for ContentSubmission reference
|
||||
|
||||
Usage:
|
||||
class ParkSubmissionService(BaseEntitySubmissionService):
|
||||
entity_model = Park
|
||||
entity_type_name = 'Park'
|
||||
required_fields = ['name', 'park_type']
|
||||
|
||||
submission, park = ParkSubmissionService.create_entity_submission(
|
||||
user=request.user,
|
||||
data={'name': 'Cedar Point', 'park_type': 'theme_park'},
|
||||
source='api'
|
||||
)
|
||||
"""
|
||||
|
||||
# Subclasses must override these
|
||||
entity_model = None
|
||||
entity_type_name = None
|
||||
required_fields = []
|
||||
|
||||
@classmethod
|
||||
def _validate_configuration(cls):
|
||||
"""Validate that subclass has configured required attributes."""
|
||||
if cls.entity_model is None:
|
||||
raise NotImplementedError(f"{cls.__name__} must define entity_model")
|
||||
if cls.entity_type_name is None:
|
||||
raise NotImplementedError(f"{cls.__name__} must define entity_type_name")
|
||||
if not cls.required_fields:
|
||||
raise NotImplementedError(f"{cls.__name__} must define required_fields")
|
||||
|
||||
@classmethod
|
||||
@transaction.atomic
|
||||
def create_entity_submission(cls, user, data, **kwargs):
|
||||
"""
|
||||
Create entity submission through Sacred Pipeline.
|
||||
|
||||
This method creates a ContentSubmission with SubmissionItems for each field.
|
||||
A placeholder entity is created immediately to satisfy ContentSubmission's
|
||||
entity reference requirement. The entity is "activated" upon approval.
|
||||
|
||||
For moderators, the submission is auto-approved and the entity is immediately
|
||||
created with all fields populated.
|
||||
|
||||
Args:
|
||||
user: User creating the entity (must be authenticated)
|
||||
data: Dict of entity field data
|
||||
Example: {'name': 'Cedar Point', 'park_type': 'theme_park', ...}
|
||||
**kwargs: Additional metadata
|
||||
- source: Submission source ('api', 'web', etc.) - default: 'api'
|
||||
- ip_address: User's IP address (optional)
|
||||
- user_agent: User's user agent string (optional)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, Entity or None)
|
||||
Entity will be None if pending moderation (non-moderators)
|
||||
Entity will be populated if moderator (auto-approved)
|
||||
|
||||
Raises:
|
||||
ValidationError: If required fields are missing or invalid
|
||||
NotImplementedError: If subclass not properly configured
|
||||
|
||||
Example:
|
||||
submission, park = ParkSubmissionService.create_entity_submission(
|
||||
user=request.user,
|
||||
data={
|
||||
'name': 'Cedar Point',
|
||||
'park_type': 'theme_park',
|
||||
'status': 'operating',
|
||||
'latitude': Decimal('41.4792'),
|
||||
'longitude': Decimal('-82.6839')
|
||||
},
|
||||
source='api',
|
||||
ip_address='192.168.1.1'
|
||||
)
|
||||
|
||||
if park:
|
||||
# Moderator - entity created immediately
|
||||
logger.info(f"Park created: {park.id}")
|
||||
else:
|
||||
# Regular user - awaiting moderation
|
||||
logger.info(f"Submission pending: {submission.id}")
|
||||
"""
|
||||
# Validate configuration
|
||||
cls._validate_configuration()
|
||||
|
||||
# Validate required fields
|
||||
for field in cls.required_fields:
|
||||
if field not in data or data[field] is None:
|
||||
raise ValidationError(f"Required field missing: {field}")
|
||||
|
||||
# Check if user is moderator (for bypass)
|
||||
        is_moderator = bool(user and hasattr(user, 'role') and user.role and user.role.is_moderator)
|
||||
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} submission starting: "
|
||||
f"user={user.email if user else 'anonymous'}, "
|
||||
f"is_moderator={is_moderator}, "
|
||||
f"fields={list(data.keys())}"
|
||||
)
|
||||
|
||||
# Build submission items for each field
|
||||
items_data = []
|
||||
order = 0
|
||||
|
||||
for field_name, value in data.items():
|
||||
# Skip None values for non-required fields
|
||||
if value is None and field_name not in cls.required_fields:
|
||||
continue
|
||||
|
||||
# Convert value to string for storage
|
||||
# Handle special types
|
||||
if value is None:
|
||||
str_value = None
|
||||
elif hasattr(value, 'id'):
|
||||
# Foreign key - store UUID
|
||||
str_value = str(value.id)
|
||||
else:
|
||||
str_value = str(value)
|
||||
|
||||
items_data.append({
|
||||
'field_name': field_name,
|
||||
'field_label': field_name.replace('_', ' ').title(),
|
||||
'old_value': None,
|
||||
'new_value': str_value,
|
||||
'change_type': 'add',
|
||||
'is_required': field_name in cls.required_fields,
|
||||
'order': order
|
||||
})
|
||||
order += 1
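        # For a Park submission with data={'name': 'Cedar Point', 'park_type': 'theme_park'},
        # items_data ends up looking roughly like this (illustrative values):
        #
        #     [
        #         {'field_name': 'name', 'field_label': 'Name', 'old_value': None,
        #          'new_value': 'Cedar Point', 'change_type': 'add', 'is_required': True, 'order': 0},
        #         {'field_name': 'park_type', 'field_label': 'Park Type', 'old_value': None,
        #          'new_value': 'theme_park', 'change_type': 'add', 'is_required': True, 'order': 1},
        #     ]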
|
||||
|
||||
logger.info(f"Built {len(items_data)} submission items for {cls.entity_type_name}")
|
||||
|
||||
# Create placeholder entity for submission
|
||||
# Only set required fields to avoid validation errors
|
||||
placeholder_data = {}
|
||||
for field in cls.required_fields:
|
||||
if field in data:
|
||||
placeholder_data[field] = data[field]
|
||||
|
||||
try:
|
||||
placeholder_entity = cls.entity_model(**placeholder_data)
|
||||
placeholder_entity.save()
|
||||
|
||||
logger.info(
|
||||
f"Placeholder {cls.entity_type_name} created: {placeholder_entity.id}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to create placeholder {cls.entity_type_name}: {str(e)}"
|
||||
)
|
||||
raise ValidationError(f"Entity validation failed: {str(e)}")
|
||||
|
||||
# Create submission through ModerationService
|
||||
try:
|
||||
submission = ModerationService.create_submission(
|
||||
user=user,
|
||||
entity=placeholder_entity,
|
||||
submission_type='create',
|
||||
title=f"Create {cls.entity_type_name}: {data.get('name', 'Unnamed')}",
|
||||
description=f"User creating new {cls.entity_type_name}",
|
||||
items_data=items_data,
|
||||
metadata={
|
||||
'entity_type': cls.entity_type_name,
|
||||
'creation_data': data
|
||||
},
|
||||
auto_submit=True,
|
||||
source=kwargs.get('source', 'api'),
|
||||
ip_address=kwargs.get('ip_address'),
|
||||
user_agent=kwargs.get('user_agent', '')
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} submission created: {submission.id} "
|
||||
f"(status: {submission.status})"
|
||||
)
|
||||
except Exception as e:
|
||||
# Rollback: delete placeholder entity
|
||||
placeholder_entity.delete()
|
||||
logger.error(
|
||||
f"Failed to create submission for {cls.entity_type_name}: {str(e)}"
|
||||
)
|
||||
raise
|
||||
|
||||
# MODERATOR BYPASS: Auto-approve and create entity
|
||||
entity = None
|
||||
if is_moderator:
|
||||
logger.info(
|
||||
f"Moderator bypass activated for submission {submission.id}"
|
||||
)
|
||||
|
||||
try:
|
||||
# Approve submission through ModerationService
|
||||
submission = ModerationService.approve_submission(submission.id, user)
|
||||
|
||||
logger.info(
|
||||
f"Submission {submission.id} auto-approved "
|
||||
f"(new status: {submission.status})"
|
||||
)
|
||||
|
||||
# Update placeholder entity with all approved fields
|
||||
entity = placeholder_entity
|
||||
for item in submission.items.filter(status='approved'):
|
||||
field_name = item.field_name
|
||||
|
||||
# Handle foreign key fields
|
||||
if hasattr(cls.entity_model, field_name):
|
||||
field = cls.entity_model._meta.get_field(field_name)
|
||||
|
||||
if field.is_relation:
|
||||
# Foreign key - convert UUID string back to model instance
|
||||
if item.new_value:
|
||||
try:
|
||||
related_model = field.related_model
|
||||
related_instance = related_model.objects.get(
|
||||
id=item.new_value
|
||||
)
|
||||
setattr(entity, field_name, related_instance)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"Failed to set FK {field_name}: {str(e)}"
|
||||
)
|
||||
else:
|
||||
# Regular field - set directly
|
||||
setattr(entity, field_name, data.get(field_name))
|
||||
|
||||
entity.save()
|
||||
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} auto-created for moderator: {entity.id} "
|
||||
f"(name: {getattr(entity, 'name', 'N/A')})"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to auto-approve {cls.entity_type_name} "
|
||||
f"submission {submission.id}: {str(e)}"
|
||||
)
|
||||
# Don't raise - submission still exists in pending state
|
||||
else:
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} submission {submission.id} "
|
||||
f"pending moderation (user: {user.email})"
|
||||
)
|
||||
|
||||
return submission, entity
|
||||
|
||||
@classmethod
|
||||
@transaction.atomic
|
||||
def update_entity_submission(cls, entity, user, update_data, **kwargs):
|
||||
"""
|
||||
Update an existing entity by creating an update submission.
|
||||
|
||||
This follows the Sacred Pipeline by creating a ContentSubmission for the update.
|
||||
Changes must be approved before taking effect (unless user is moderator).
|
||||
|
||||
Args:
|
||||
entity: Existing entity instance to update
|
||||
user: User making the update
|
||||
update_data: Dict of fields to update
|
||||
**kwargs: Additional metadata (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
ContentSubmission: The update submission
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
"""
|
||||
cls._validate_configuration()
|
||||
|
||||
# Check if user is moderator (for bypass)
|
||||
        is_moderator = bool(user and hasattr(user, 'role') and user.role and user.role.is_moderator)
|
||||
|
||||
# Build submission items for changed fields
|
||||
items_data = []
|
||||
order = 0
|
||||
|
||||
for field_name, new_value in update_data.items():
|
||||
old_value = getattr(entity, field_name, None)
|
||||
|
||||
# Only include if value actually changed
|
||||
if old_value != new_value:
|
||||
items_data.append({
|
||||
'field_name': field_name,
|
||||
'field_label': field_name.replace('_', ' ').title(),
|
||||
'old_value': str(old_value) if old_value is not None else None,
|
||||
'new_value': str(new_value) if new_value is not None else None,
|
||||
'change_type': 'modify',
|
||||
'is_required': field_name in cls.required_fields,
|
||||
'order': order
|
||||
})
|
||||
order += 1
|
||||
|
||||
if not items_data:
|
||||
raise ValidationError("No changes detected")
|
||||
|
||||
# Create update submission
|
||||
submission = ModerationService.create_submission(
|
||||
user=user,
|
||||
entity=entity,
|
||||
submission_type='update',
|
||||
title=f"Update {cls.entity_type_name}: {getattr(entity, 'name', str(entity.id))}",
|
||||
description=f"User updating {cls.entity_type_name}",
|
||||
items_data=items_data,
|
||||
metadata={
|
||||
'entity_type': cls.entity_type_name,
|
||||
'entity_id': str(entity.id)
|
||||
},
|
||||
auto_submit=True,
|
||||
source=kwargs.get('source', 'api'),
|
||||
ip_address=kwargs.get('ip_address'),
|
||||
user_agent=kwargs.get('user_agent', '')
|
||||
)
|
||||
|
||||
logger.info(f"{cls.entity_type_name} update submission created: {submission.id}")
|
||||
|
||||
# MODERATOR BYPASS: Auto-approve and apply changes
|
||||
if is_moderator:
|
||||
submission = ModerationService.approve_submission(submission.id, user)
|
||||
|
||||
# Apply updates to entity
|
||||
for item in submission.items.filter(status='approved'):
|
||||
                # Note: new_value is stored as a string; non-text fields
                # (foreign keys, decimals, dates) may need explicit coercion here.
                setattr(entity, item.field_name, item.new_value)
|
||||
|
||||
entity.save()
|
||||
|
||||
logger.info(f"{cls.entity_type_name} update auto-approved: {entity.id}")
|
||||
|
||||
return submission
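    # Illustrative usage sketch: a concrete subclass such as CompanySubmissionService
    # inherits this method unchanged, so an update call looks like the following
    # (request objects and field values assumed):
    #
    #     submission = CompanySubmissionService.update_entity_submission(
    #         entity=company,
    #         user=request.user,
    #         update_data={'website': 'https://example.com'},
    #         source='api',
    #     )
    #
    # For non-moderators the returned submission is pending; for moderators the
    # changes are applied immediately.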
|
||||
|
||||
@classmethod
|
||||
@transaction.atomic
|
||||
def delete_entity_submission(cls, entity, user, **kwargs):
|
||||
"""
|
||||
Delete (or soft-delete) an existing entity through Sacred Pipeline.
|
||||
|
||||
This follows the Sacred Pipeline by creating a ContentSubmission for the deletion.
|
||||
Deletion must be approved before taking effect (unless user is moderator).
|
||||
|
||||
**Deletion Strategy:**
|
||||
- Soft Delete (default): Sets entity status to 'closed' - keeps data for audit trail
|
||||
- Hard Delete: Actually removes entity from database (moderators only)
|
||||
|
||||
Args:
|
||||
entity: Existing entity instance to delete
|
||||
user: User requesting the deletion
|
||||
**kwargs: Additional metadata
|
||||
- deletion_type: 'soft' (default) or 'hard'
|
||||
- deletion_reason: User-provided reason for deletion
|
||||
- source: Submission source ('api', 'web', etc.) - default: 'api'
|
||||
- ip_address: User's IP address (optional)
|
||||
- user_agent: User's user agent string (optional)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, deletion_applied: bool)
|
||||
deletion_applied is True if moderator (immediate deletion)
|
||||
deletion_applied is False if regular user (pending moderation)
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
|
||||
Example:
|
||||
submission, deleted = ParkSubmissionService.delete_entity_submission(
|
||||
entity=park,
|
||||
user=request.user,
|
||||
deletion_type='soft',
|
||||
deletion_reason='Park permanently closed',
|
||||
source='api',
|
||||
ip_address='192.168.1.1'
|
||||
)
|
||||
|
||||
if deleted:
|
||||
# Moderator - deletion applied immediately
|
||||
logger.info(f"Park deleted: {park.id}")
|
||||
else:
|
||||
# Regular user - awaiting moderation
|
||||
logger.info(f"Deletion pending: {submission.id}")
|
||||
"""
|
||||
cls._validate_configuration()
|
||||
|
||||
# Check if user is moderator (for bypass)
|
||||
        is_moderator = bool(user and hasattr(user, 'role') and user.role and user.role.is_moderator)
|
||||
|
||||
# Get deletion parameters
|
||||
deletion_type = kwargs.get('deletion_type', 'soft')
|
||||
deletion_reason = kwargs.get('deletion_reason', '')
|
||||
|
||||
# Validate deletion type
|
||||
if deletion_type not in ['soft', 'hard']:
|
||||
raise ValidationError("deletion_type must be 'soft' or 'hard'")
|
||||
|
||||
# Only moderators can hard delete
|
||||
if deletion_type == 'hard' and not is_moderator:
|
||||
deletion_type = 'soft'
|
||||
logger.warning(
|
||||
f"Non-moderator {user.email} attempted hard delete, "
|
||||
f"falling back to soft delete"
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} deletion request: "
|
||||
f"entity={entity.id}, user={user.email if user else 'anonymous'}, "
|
||||
f"type={deletion_type}, is_moderator={is_moderator}"
|
||||
)
|
||||
|
||||
# Build submission items for deletion
|
||||
items_data = []
|
||||
|
||||
# For soft delete, track status change
|
||||
if deletion_type == 'soft':
|
||||
if hasattr(entity, 'status'):
|
||||
old_status = getattr(entity, 'status', 'operating')
|
||||
items_data.append({
|
||||
'field_name': 'status',
|
||||
'field_label': 'Status',
|
||||
'old_value': old_status,
|
||||
'new_value': 'closed',
|
||||
'change_type': 'modify',
|
||||
'is_required': True,
|
||||
'order': 0
|
||||
})
|
||||
|
||||
# Add deletion metadata item
|
||||
items_data.append({
|
||||
'field_name': '_deletion_marker',
|
||||
'field_label': 'Deletion Request',
|
||||
'old_value': 'active',
|
||||
'new_value': 'deleted' if deletion_type == 'hard' else 'closed',
|
||||
'change_type': 'remove' if deletion_type == 'hard' else 'modify',
|
||||
'is_required': True,
|
||||
'order': 1
|
||||
})
|
||||
|
||||
# Create entity snapshot for potential restoration
|
||||
entity_snapshot = {}
|
||||
for field in entity._meta.fields:
|
||||
if not field.primary_key:
|
||||
try:
|
||||
value = getattr(entity, field.name)
|
||||
if value is not None:
|
||||
if hasattr(value, 'id'):
|
||||
entity_snapshot[field.name] = str(value.id)
|
||||
else:
|
||||
entity_snapshot[field.name] = str(value)
|
||||
                except Exception:
                    # Skip fields that cannot be serialized into the snapshot
                    pass
|
||||
|
||||
# Create deletion submission through ModerationService
|
||||
try:
|
||||
submission = ModerationService.create_submission(
|
||||
user=user,
|
||||
entity=entity,
|
||||
submission_type='delete',
|
||||
title=f"Delete {cls.entity_type_name}: {getattr(entity, 'name', str(entity.id))}",
|
||||
description=deletion_reason or f"User requesting {deletion_type} deletion of {cls.entity_type_name}",
|
||||
items_data=items_data,
|
||||
metadata={
|
||||
'entity_type': cls.entity_type_name,
|
||||
'entity_id': str(entity.id),
|
||||
'entity_name': getattr(entity, 'name', str(entity.id)),
|
||||
'deletion_type': deletion_type,
|
||||
'deletion_reason': deletion_reason,
|
||||
'entity_snapshot': entity_snapshot
|
||||
},
|
||||
auto_submit=True,
|
||||
source=kwargs.get('source', 'api'),
|
||||
ip_address=kwargs.get('ip_address'),
|
||||
user_agent=kwargs.get('user_agent', '')
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} deletion submission created: {submission.id} "
|
||||
f"(status: {submission.status})"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to create deletion submission for {cls.entity_type_name}: {str(e)}"
|
||||
)
|
||||
raise
|
||||
|
||||
# MODERATOR BYPASS: Auto-approve and apply deletion
|
||||
deletion_applied = False
|
||||
if is_moderator:
|
||||
logger.info(
|
||||
f"Moderator bypass activated for deletion submission {submission.id}"
|
||||
)
|
||||
|
||||
try:
|
||||
# Approve submission through ModerationService
|
||||
submission = ModerationService.approve_submission(submission.id, user)
|
||||
deletion_applied = True
|
||||
|
||||
logger.info(
|
||||
f"Deletion submission {submission.id} auto-approved "
|
||||
f"(new status: {submission.status})"
|
||||
)
|
||||
|
||||
if deletion_type == 'soft':
|
||||
# Entity status was set to 'closed' by approval logic
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} soft-deleted (marked as closed): {entity.id} "
|
||||
f"(name: {getattr(entity, 'name', 'N/A')})"
|
||||
)
|
||||
else:
|
||||
# Entity was hard-deleted by approval logic
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} hard-deleted from database: {entity.id} "
|
||||
f"(name: {getattr(entity, 'name', 'N/A')})"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to auto-approve {cls.entity_type_name} "
|
||||
f"deletion submission {submission.id}: {str(e)}"
|
||||
)
|
||||
# Don't raise - submission still exists in pending state
|
||||
else:
|
||||
logger.info(
|
||||
f"{cls.entity_type_name} deletion submission {submission.id} "
|
||||
f"pending moderation (user: {user.email})"
|
||||
)
|
||||
|
||||
return submission, deletion_applied
|
||||
django-backend/apps/entities/services/company_submission.py: new file, 86 lines
|
||||
"""
|
||||
Company submission service for ThrillWiki.
|
||||
|
||||
Handles Company entity creation and updates through the Sacred Pipeline.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.entities.models import Company
|
||||
from apps.entities.services import BaseEntitySubmissionService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CompanySubmissionService(BaseEntitySubmissionService):
|
||||
"""
|
||||
Service for creating Company submissions through the Sacred Pipeline.
|
||||
|
||||
Companies represent manufacturers, operators, designers, and other entities
|
||||
in the amusement industry.
|
||||
|
||||
Required fields:
|
||||
- name: Company name
|
||||
|
||||
Known Issue:
|
||||
- company_types is currently a JSONField but should be M2M relationship
|
||||
TODO: Convert company_types from JSONField to Many-to-Many relationship
|
||||
This violates the project rule: "NEVER use JSON/JSONB in SQL"
|
||||
|
||||
Example:
|
||||
from apps.entities.services.company_submission import CompanySubmissionService
|
||||
|
||||
submission, company = CompanySubmissionService.create_entity_submission(
|
||||
user=request.user,
|
||||
data={
|
||||
'name': 'Bolliger & Mabillard',
|
||||
'company_types': ['manufacturer', 'designer'],
|
||||
'description': 'Swiss roller coaster manufacturer...',
|
||||
'website': 'https://www.bolliger-mabillard.com',
|
||||
},
|
||||
source='api'
|
||||
)
|
||||
"""
|
||||
|
||||
entity_model = Company
|
||||
entity_type_name = 'Company'
|
||||
required_fields = ['name']
|
||||
|
||||
@classmethod
|
||||
def create_entity_submission(cls, user, data, **kwargs):
|
||||
"""
|
||||
Create a Company submission.
|
||||
|
||||
Note: The company_types field currently uses JSONField which violates
|
||||
project standards. This should be converted to a proper M2M relationship.
|
||||
|
||||
Args:
|
||||
user: User creating the company
|
||||
data: Company field data (must include name)
|
||||
**kwargs: Additional metadata (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, Company or None)
|
||||
"""
|
||||
# TODO: Remove this warning once company_types is converted to M2M
|
||||
if 'company_types' in data:
|
||||
logger.warning(
|
||||
"Company.company_types uses JSONField which violates project rules. "
|
||||
"This should be converted to Many-to-Many relationship."
|
||||
)
|
||||
|
||||
# Validate and normalize location FK if provided
|
||||
location = data.get('location')
|
||||
if location and isinstance(location, str):
|
||||
try:
|
||||
from apps.core.models import Locality
|
||||
location = Locality.objects.get(id=location)
|
||||
data['location'] = location
|
||||
            except (Locality.DoesNotExist, ValueError, ValidationError):
                raise ValidationError(f"Location not found: {location}")
|
||||
|
||||
# Create submission through base class
|
||||
submission, company = super().create_entity_submission(user, data, **kwargs)
|
||||
|
||||
return submission, company
|
||||
django-backend/apps/entities/services/park_submission.py: new file, 142 lines
|
||||
"""
|
||||
Park submission service for ThrillWiki.
|
||||
|
||||
Handles Park entity creation and updates through the Sacred Pipeline.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from decimal import Decimal
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.entities.models import Park
|
||||
from apps.entities.services import BaseEntitySubmissionService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ParkSubmissionService(BaseEntitySubmissionService):
|
||||
"""
|
||||
Service for creating Park submissions through the Sacred Pipeline.
|
||||
|
||||
Parks require special handling for:
|
||||
- Geographic coordinates (latitude/longitude)
|
||||
- Location point (PostGIS in production)
|
||||
- Park type and status fields
|
||||
|
||||
Required fields:
|
||||
- name: Park name
|
||||
- park_type: Type of park (theme_park, amusement_park, etc.)
|
||||
|
||||
Example:
|
||||
from apps.entities.services.park_submission import ParkSubmissionService
|
||||
|
||||
submission, park = ParkSubmissionService.create_entity_submission(
|
||||
user=request.user,
|
||||
data={
|
||||
'name': 'Cedar Point',
|
||||
'park_type': 'theme_park',
|
||||
'status': 'operating',
|
||||
'latitude': Decimal('41.4792'),
|
||||
'longitude': Decimal('-82.6839'),
|
||||
'description': 'Legendary amusement park...',
|
||||
},
|
||||
source='api',
|
||||
ip_address=request.META.get('REMOTE_ADDR')
|
||||
)
|
||||
"""
|
||||
|
||||
entity_model = Park
|
||||
entity_type_name = 'Park'
|
||||
required_fields = ['name', 'park_type']
|
||||
|
||||
@classmethod
|
||||
def create_entity_submission(cls, user, data, **kwargs):
|
||||
"""
|
||||
Create a Park submission with special coordinate handling.
|
||||
|
||||
Coordinates (latitude/longitude) are processed using the Park model's
|
||||
set_location() method which handles both SQLite and PostGIS modes.
|
||||
|
||||
Args:
|
||||
user: User creating the park
|
||||
data: Park field data (must include name and park_type)
|
||||
**kwargs: Additional metadata (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, Park or None)
|
||||
"""
|
||||
# Extract coordinates for special handling
|
||||
latitude = data.get('latitude')
|
||||
longitude = data.get('longitude')
|
||||
|
||||
# Create submission through base class
|
||||
submission, park = super().create_entity_submission(user, data, **kwargs)
|
||||
|
||||
# If park was created (moderator bypass), set location using helper method
|
||||
if park and latitude is not None and longitude is not None:
|
||||
try:
|
||||
park.set_location(float(longitude), float(latitude))
|
||||
park.save()
|
||||
logger.info(
|
||||
f"Park {park.id} location set: "
|
||||
f"({latitude}, {longitude})"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"Failed to set location for Park {park.id}: {str(e)}"
|
||||
)
|
||||
|
||||
return submission, park
|
||||
|
||||
@classmethod
|
||||
def update_entity_submission(cls, entity, user, update_data, **kwargs):
|
||||
"""
|
||||
Update a Park with special coordinate handling.
|
||||
|
||||
Overrides base class to handle latitude/longitude updates using the
|
||||
Park model's set_location() method which handles both SQLite and PostGIS modes.
|
||||
|
||||
Args:
|
||||
entity: Existing Park instance to update
|
||||
user: User making the update
|
||||
update_data: Park field data to update
|
||||
**kwargs: Additional parameters
|
||||
- latitude: New latitude coordinate (optional)
|
||||
- longitude: New longitude coordinate (optional)
|
||||
- source: Submission source ('api', 'web', etc.)
|
||||
- ip_address: User's IP address (optional)
|
||||
- user_agent: User's user agent string (optional)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, Park or None)
|
||||
"""
|
||||
# Extract coordinates for special handling
|
||||
latitude = kwargs.pop('latitude', None)
|
||||
longitude = kwargs.pop('longitude', None)
|
||||
|
||||
# If coordinates are provided, add them to update_data for tracking
|
||||
if latitude is not None:
|
||||
update_data['latitude'] = latitude
|
||||
if longitude is not None:
|
||||
update_data['longitude'] = longitude
|
||||
|
||||
# Create update submission through base class
|
||||
        submission = super().update_entity_submission(
            entity, user, update_data, **kwargs
        )
        # The base method returns only the submission. When it was auto-approved
        # (moderator bypass) the field changes have already been applied to the
        # entity, so treat it as the updated park; otherwise the update is pending.
        updated_park = entity if submission.status == 'approved' else None
|
||||
|
||||
# If park was updated (moderator bypass), set location using helper method
|
||||
if updated_park and (latitude is not None and longitude is not None):
|
||||
try:
|
||||
updated_park.set_location(float(longitude), float(latitude))
|
||||
updated_park.save()
|
||||
logger.info(
|
||||
f"Park {updated_park.id} location updated: "
|
||||
f"({latitude}, {longitude})"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
f"Failed to update location for Park {updated_park.id}: {str(e)}"
|
||||
)
|
||||
|
||||
return submission, updated_park
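    # Illustrative usage sketch (request objects and values assumed): coordinates
    # are passed as keyword arguments rather than in update_data, and are applied
    # via Park.set_location() after the submission is processed.
    #
    #     submission, updated_park = ParkSubmissionService.update_entity_submission(
    #         entity=park,
    #         user=request.user,
    #         update_data={'status': 'operating'},
    #         latitude=Decimal('41.4792'),
    #         longitude=Decimal('-82.6839'),
    #         source='api',
    #     )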
|
||||
django-backend/apps/entities/services/ride_model_submission.py: new file, 87 lines
|
||||
"""
|
||||
RideModel submission service for ThrillWiki.
|
||||
|
||||
Handles RideModel entity creation and updates through the Sacred Pipeline.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.entities.models import RideModel, Company
|
||||
from apps.entities.services import BaseEntitySubmissionService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RideModelSubmissionService(BaseEntitySubmissionService):
|
||||
"""
|
||||
Service for creating RideModel submissions through the Sacred Pipeline.
|
||||
|
||||
RideModels represent specific ride models from manufacturers.
|
||||
For example: "B&M Inverted Coaster", "Vekoma Boomerang"
|
||||
|
||||
Required fields:
|
||||
- name: Model name (e.g., "Inverted Coaster")
|
||||
- manufacturer: Company instance or company ID (UUID)
|
||||
- model_type: Type of model (coaster_model, flat_ride_model, etc.)
|
||||
|
||||
Example:
|
||||
from apps.entities.services.ride_model_submission import RideModelSubmissionService
|
||||
|
||||
manufacturer = Company.objects.get(name='Bolliger & Mabillard')
|
||||
|
||||
submission, model = RideModelSubmissionService.create_entity_submission(
|
||||
user=request.user,
|
||||
data={
|
||||
'name': 'Inverted Coaster',
|
||||
'manufacturer': manufacturer,
|
||||
'model_type': 'coaster_model',
|
||||
'description': 'Suspended coaster with inversions...',
|
||||
'typical_height': Decimal('120'),
|
||||
'typical_speed': Decimal('55'),
|
||||
},
|
||||
source='api'
|
||||
)
|
||||
"""
|
||||
|
||||
entity_model = RideModel
|
||||
entity_type_name = 'RideModel'
|
||||
required_fields = ['name', 'manufacturer', 'model_type']
|
||||
|
||||
@classmethod
|
||||
def create_entity_submission(cls, user, data, **kwargs):
|
||||
"""
|
||||
Create a RideModel submission with foreign key handling.
|
||||
|
||||
The 'manufacturer' field can be provided as either:
|
||||
- A Company instance
|
||||
- A UUID string (will be converted to Company instance)
|
||||
|
||||
Args:
|
||||
user: User creating the ride model
|
||||
data: RideModel field data (must include name, manufacturer, and model_type)
|
||||
**kwargs: Additional metadata (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, RideModel or None)
|
||||
|
||||
Raises:
|
||||
ValidationError: If manufacturer not found or invalid
|
||||
"""
|
||||
# Validate and normalize manufacturer FK
|
||||
manufacturer = data.get('manufacturer')
|
||||
if manufacturer:
|
||||
if isinstance(manufacturer, str):
|
||||
# UUID string - convert to Company instance
|
||||
try:
|
||||
manufacturer = Company.objects.get(id=manufacturer)
|
||||
data['manufacturer'] = manufacturer
|
||||
except Company.DoesNotExist:
|
||||
raise ValidationError(f"Manufacturer not found: {manufacturer}")
|
||||
elif not isinstance(manufacturer, Company):
|
||||
raise ValidationError(f"Invalid manufacturer type: {type(manufacturer)}")
|
||||
|
||||
# Create submission through base class
|
||||
submission, ride_model = super().create_entity_submission(user, data, **kwargs)
|
||||
|
||||
return submission, ride_model
|
||||
django-backend/apps/entities/services/ride_submission.py: new file, 113 lines
|
||||
"""
|
||||
Ride submission service for ThrillWiki.
|
||||
|
||||
Handles Ride entity creation and updates through the Sacred Pipeline.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.entities.models import Ride, Park
|
||||
from apps.entities.services import BaseEntitySubmissionService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RideSubmissionService(BaseEntitySubmissionService):
|
||||
"""
|
||||
Service for creating Ride submissions through the Sacred Pipeline.
|
||||
|
||||
Rides require special handling for:
|
||||
- Park foreign key relationship
|
||||
- Manufacturer foreign key relationship (optional)
|
||||
- Ride model foreign key relationship (optional)
|
||||
- is_coaster flag (auto-set based on ride_category)
|
||||
|
||||
Required fields:
|
||||
- name: Ride name
|
||||
- park: Park instance or park ID (UUID)
|
||||
- ride_category: Category of ride (roller_coaster, flat_ride, etc.)
|
||||
|
||||
Example:
|
||||
from apps.entities.services.ride_submission import RideSubmissionService
|
||||
|
||||
park = Park.objects.get(slug='cedar-point')
|
||||
|
||||
submission, ride = RideSubmissionService.create_entity_submission(
|
||||
user=request.user,
|
||||
data={
|
||||
'name': 'Steel Vengeance',
|
||||
'park': park,
|
||||
'ride_category': 'roller_coaster',
|
||||
'status': 'operating',
|
||||
'height': Decimal('205'),
|
||||
'speed': Decimal('74'),
|
||||
'description': 'Hybrid steel-wooden coaster...',
|
||||
},
|
||||
source='api'
|
||||
)
|
||||
"""
|
||||
|
||||
entity_model = Ride
|
||||
entity_type_name = 'Ride'
|
||||
required_fields = ['name', 'park', 'ride_category']
|
||||
|
||||
@classmethod
|
||||
def create_entity_submission(cls, user, data, **kwargs):
|
||||
"""
|
||||
Create a Ride submission with foreign key handling.
|
||||
|
||||
The 'park' field can be provided as either:
|
||||
- A Park instance
|
||||
- A UUID string (will be converted to Park instance)
|
||||
|
||||
The 'is_coaster' flag is automatically set based on ride_category.
|
||||
|
||||
Args:
|
||||
user: User creating the ride
|
||||
data: Ride field data (must include name, park, and ride_category)
|
||||
**kwargs: Additional metadata (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, Ride or None)
|
||||
|
||||
Raises:
|
||||
ValidationError: If park not found or invalid
|
||||
"""
|
||||
# Validate and normalize park FK
|
||||
park = data.get('park')
|
||||
if park:
|
||||
if isinstance(park, str):
|
||||
# UUID string - convert to Park instance
|
||||
try:
|
||||
park = Park.objects.get(id=park)
|
||||
data['park'] = park
|
||||
except Park.DoesNotExist:
|
||||
raise ValidationError(f"Park not found: {park}")
|
||||
elif not isinstance(park, Park):
|
||||
raise ValidationError(f"Invalid park type: {type(park)}")
|
||||
|
||||
# Validate and normalize manufacturer FK if provided
|
||||
manufacturer = data.get('manufacturer')
|
||||
if manufacturer and isinstance(manufacturer, str):
|
||||
try:
|
||||
from apps.entities.models import Company
|
||||
manufacturer = Company.objects.get(id=manufacturer)
|
||||
data['manufacturer'] = manufacturer
|
||||
except Company.DoesNotExist:
|
||||
raise ValidationError(f"Manufacturer not found: {manufacturer}")
|
||||
|
||||
# Validate and normalize model FK if provided
|
||||
model = data.get('model')
|
||||
if model and isinstance(model, str):
|
||||
try:
|
||||
from apps.entities.models import RideModel
|
||||
model = RideModel.objects.get(id=model)
|
||||
data['model'] = model
|
||||
except RideModel.DoesNotExist:
|
||||
raise ValidationError(f"Ride model not found: {model}")
|
||||
|
||||
# Create submission through base class
|
||||
submission, ride = super().create_entity_submission(user, data, **kwargs)
|
||||
|
||||
return submission, ride
|
||||
django-backend/apps/entities/signals.py: new file, 252 lines
|
||||
"""
|
||||
Signal handlers for automatic search vector updates.
|
||||
|
||||
These signals ensure search vectors stay synchronized with model changes,
|
||||
eliminating the need for manual re-indexing.
|
||||
|
||||
Signal handlers are only active when using PostgreSQL with PostGIS backend.
|
||||
"""
|
||||
from django.db.models.signals import post_save, pre_save
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
|
||||
from apps.entities.models import Company, RideModel, Park, Ride
|
||||
|
||||
# Only register signals if using PostgreSQL with PostGIS
|
||||
_using_postgis = 'postgis' in settings.DATABASES['default']['ENGINE']
|
||||
|
||||
|
||||
if _using_postgis:
|
||||
|
||||
# ==========================================
|
||||
# Company Signals
|
||||
# ==========================================
|
||||
|
||||
@receiver(post_save, sender=Company)
|
||||
def update_company_search_vector(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Update search vector when company is created or updated.
|
||||
|
||||
Search vector includes:
|
||||
- name (weight A)
|
||||
- description (weight B)
|
||||
"""
|
||||
# Update the company's own search vector
|
||||
Company.objects.filter(pk=instance.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_save, sender=Company)
|
||||
def check_company_name_change(sender, instance, **kwargs):
|
||||
"""
|
||||
Track if company name is changing to trigger cascading updates.
|
||||
|
||||
Stores the old name on the instance for use in post_save signal.
|
||||
"""
|
||||
if instance.pk:
|
||||
try:
|
||||
old_instance = Company.objects.get(pk=instance.pk)
|
||||
instance._old_name = old_instance.name
|
||||
except Company.DoesNotExist:
|
||||
instance._old_name = None
|
||||
else:
|
||||
instance._old_name = None
|
||||
|
||||
|
||||
@receiver(post_save, sender=Company)
|
||||
def cascade_company_name_updates(sender, instance, created, **kwargs):
|
||||
"""
|
||||
When company name changes, update search vectors for related objects.
|
||||
|
||||
Updates:
|
||||
- All RideModels from this manufacturer
|
||||
- All Rides from this manufacturer
|
||||
"""
|
||||
# Skip if this is a new company or name hasn't changed
|
||||
if created or not hasattr(instance, '_old_name'):
|
||||
return
|
||||
|
||||
old_name = getattr(instance, '_old_name', None)
|
||||
if old_name == instance.name:
|
||||
return
|
||||
|
||||
# Update all RideModels from this manufacturer
|
||||
ride_models = RideModel.objects.filter(manufacturer=instance)
|
||||
for ride_model in ride_models:
|
||||
RideModel.objects.filter(pk=ride_model.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='A', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
# Update all Rides from this manufacturer
|
||||
rides = Ride.objects.filter(manufacturer=instance)
|
||||
for ride in rides:
|
||||
Ride.objects.filter(pk=ride.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('park__name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='B', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
# ==========================================
|
||||
# Park Signals
|
||||
# ==========================================
|
||||
|
||||
@receiver(post_save, sender=Park)
|
||||
def update_park_search_vector(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Update search vector when park is created or updated.
|
||||
|
||||
Search vector includes:
|
||||
- name (weight A)
|
||||
- description (weight B)
|
||||
"""
|
||||
# Update the park's own search vector
|
||||
Park.objects.filter(pk=instance.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_save, sender=Park)
|
||||
def check_park_name_change(sender, instance, **kwargs):
|
||||
"""
|
||||
Track if park name is changing to trigger cascading updates.
|
||||
|
||||
Stores the old name on the instance for use in post_save signal.
|
||||
"""
|
||||
if instance.pk:
|
||||
try:
|
||||
old_instance = Park.objects.get(pk=instance.pk)
|
||||
instance._old_name = old_instance.name
|
||||
except Park.DoesNotExist:
|
||||
instance._old_name = None
|
||||
else:
|
||||
instance._old_name = None
|
||||
|
||||
|
||||
@receiver(post_save, sender=Park)
|
||||
def cascade_park_name_updates(sender, instance, created, **kwargs):
|
||||
"""
|
||||
When park name changes, update search vectors for related rides.
|
||||
|
||||
Updates:
|
||||
- All Rides in this park
|
||||
"""
|
||||
# Skip if this is a new park or name hasn't changed
|
||||
if created or not hasattr(instance, '_old_name'):
|
||||
return
|
||||
|
||||
old_name = getattr(instance, '_old_name', None)
|
||||
if old_name == instance.name:
|
||||
return
|
||||
|
||||
# Update all Rides in this park
|
||||
rides = Ride.objects.filter(park=instance)
|
||||
for ride in rides:
|
||||
Ride.objects.filter(pk=ride.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('park__name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='B', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
# ==========================================
|
||||
# RideModel Signals
|
||||
# ==========================================
|
||||
|
||||
@receiver(post_save, sender=RideModel)
|
||||
def update_ride_model_search_vector(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Update search vector when ride model is created or updated.
|
||||
|
||||
Search vector includes:
|
||||
- name (weight A)
|
||||
- manufacturer__name (weight A)
|
||||
- description (weight B)
|
||||
"""
|
||||
RideModel.objects.filter(pk=instance.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='A', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_save, sender=RideModel)
|
||||
def check_ride_model_manufacturer_change(sender, instance, **kwargs):
|
||||
"""
|
||||
Track if ride model manufacturer is changing.
|
||||
|
||||
Stores the old manufacturer on the instance for use in post_save signal.
|
||||
"""
|
||||
if instance.pk:
|
||||
try:
|
||||
old_instance = RideModel.objects.get(pk=instance.pk)
|
||||
instance._old_manufacturer = old_instance.manufacturer
|
||||
except RideModel.DoesNotExist:
|
||||
instance._old_manufacturer = None
|
||||
else:
|
||||
instance._old_manufacturer = None
|
||||
|
||||
|
||||
# ==========================================
|
||||
# Ride Signals
|
||||
# ==========================================
|
||||
|
||||
@receiver(post_save, sender=Ride)
|
||||
def update_ride_search_vector(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Update search vector when ride is created or updated.
|
||||
|
||||
Search vector includes:
|
||||
- name (weight A)
|
||||
- park__name (weight A)
|
||||
- manufacturer__name (weight B)
|
||||
- description (weight B)
|
||||
"""
|
||||
Ride.objects.filter(pk=instance.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('park__name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='B', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_save, sender=Ride)
|
||||
def check_ride_relationships_change(sender, instance, **kwargs):
|
||||
"""
|
||||
Track if ride park or manufacturer are changing.
|
||||
|
||||
Stores old values on the instance for use in post_save signal.
|
||||
"""
|
||||
if instance.pk:
|
||||
try:
|
||||
old_instance = Ride.objects.get(pk=instance.pk)
|
||||
instance._old_park = old_instance.park
|
||||
instance._old_manufacturer = old_instance.manufacturer
|
||||
except Ride.DoesNotExist:
|
||||
instance._old_park = None
|
||||
instance._old_manufacturer = None
|
||||
else:
|
||||
instance._old_park = None
|
||||
instance._old_manufacturer = None
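# Illustrative wiring sketch (app config class name assumed): for these handlers to
# be registered at startup, the signals module is typically imported from the app's
# AppConfig.ready():
#
#     # django-backend/apps/entities/apps.py
#     from django.apps import AppConfig
#
#     class EntitiesConfig(AppConfig):
#         name = 'apps.entities'
#
#         def ready(self):
#             from apps.entities import signals  # noqa: F401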
|
||||
django-backend/apps/entities/tasks.py: new file, 351 lines
|
||||
"""
|
||||
Background tasks for entity statistics and maintenance.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.db.models import Count, Q
|
||||
from django.utils import timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def update_entity_statistics(self, entity_type, entity_id):
|
||||
"""
|
||||
Update cached statistics for a specific entity.
|
||||
|
||||
Args:
|
||||
entity_type: Type of entity ('park', 'ride', 'company', 'ridemodel')
|
||||
entity_id: ID of the entity
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
from apps.media.models import Photo
|
||||
from apps.moderation.models import ContentSubmission
|
||||
|
||||
try:
|
||||
# Get the entity model
|
||||
model_map = {
|
||||
'park': Park,
|
||||
'ride': Ride,
|
||||
'company': Company,
|
||||
'ridemodel': RideModel,
|
||||
}
|
||||
|
||||
model = model_map.get(entity_type.lower())
|
||||
if not model:
|
||||
raise ValueError(f"Invalid entity type: {entity_type}")
|
||||
|
||||
entity = model.objects.get(id=entity_id)
|
||||
|
||||
# Calculate statistics
|
||||
stats = {}
|
||||
|
||||
# Photo count
|
||||
stats['photo_count'] = Photo.objects.filter(
|
||||
content_type__model=entity_type.lower(),
|
||||
object_id=entity_id,
|
||||
moderation_status='approved'
|
||||
).count()
|
||||
|
||||
# Submission count
|
||||
stats['submission_count'] = ContentSubmission.objects.filter(
|
||||
entity_type__model=entity_type.lower(),
|
||||
entity_id=entity_id
|
||||
).count()
|
||||
|
||||
# Entity-specific stats
|
||||
if entity_type.lower() == 'park':
|
||||
stats['ride_count'] = entity.rides.count()
|
||||
elif entity_type.lower() == 'company':
|
||||
stats['park_count'] = entity.parks.count()
|
||||
stats['ride_model_count'] = entity.ride_models.count()
|
||||
elif entity_type.lower() == 'ridemodel':
|
||||
stats['installation_count'] = entity.rides.count()
|
||||
|
||||
logger.info(f"Updated statistics for {entity_type} {entity_id}: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error updating statistics for {entity_type} {entity_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def update_all_statistics(self):
|
||||
"""
|
||||
Update cached statistics for all entities.
|
||||
|
||||
This task runs periodically (e.g., every 6 hours) to ensure
|
||||
all entity statistics are up to date.
|
||||
|
||||
Returns:
|
||||
dict: Update summary
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
|
||||
try:
|
||||
summary = {
|
||||
'parks_updated': 0,
|
||||
'rides_updated': 0,
|
||||
'companies_updated': 0,
|
||||
'ride_models_updated': 0,
|
||||
}
|
||||
|
||||
# Update parks
|
||||
for park in Park.objects.all():
|
||||
try:
|
||||
update_entity_statistics.delay('park', park.id)
|
||||
summary['parks_updated'] += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to queue update for park {park.id}: {str(e)}")
|
||||
|
||||
# Update rides
|
||||
for ride in Ride.objects.all():
|
||||
try:
|
||||
update_entity_statistics.delay('ride', ride.id)
|
||||
summary['rides_updated'] += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to queue update for ride {ride.id}: {str(e)}")
|
||||
|
||||
# Update companies
|
||||
for company in Company.objects.all():
|
||||
try:
|
||||
update_entity_statistics.delay('company', company.id)
|
||||
summary['companies_updated'] += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to queue update for company {company.id}: {str(e)}")
|
||||
|
||||
# Update ride models
|
||||
for ride_model in RideModel.objects.all():
|
||||
try:
|
||||
update_entity_statistics.delay('ridemodel', ride_model.id)
|
||||
summary['ride_models_updated'] += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to queue update for ride model {ride_model.id}: {str(e)}")
|
||||
|
||||
logger.info(f"Statistics update queued: {summary}")
|
||||
return summary
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error updating all statistics: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
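# Illustrative scheduling sketch (settings module and schedule values assumed): the
# periodic run mentioned in the docstring above would typically be configured via
# Celery beat:
#
#     # settings.py
#     from celery.schedules import crontab
#
#     CELERY_BEAT_SCHEDULE = {
#         'update-all-entity-statistics': {
#             'task': 'apps.entities.tasks.update_all_statistics',
#             'schedule': crontab(minute=0, hour='*/6'),  # every 6 hours
#         },
#     }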
|
||||
|
||||
|
||||
@shared_task
|
||||
def generate_entity_report(entity_type, entity_id):
|
||||
"""
|
||||
Generate a detailed report for an entity.
|
||||
|
||||
This can be used for admin dashboards, analytics, etc.
|
||||
|
||||
Args:
|
||||
entity_type: Type of entity
|
||||
entity_id: ID of the entity
|
||||
|
||||
Returns:
|
||||
dict: Detailed report
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
from apps.media.models import Photo
|
||||
from apps.moderation.models import ContentSubmission
|
||||
|
||||
try:
|
||||
model_map = {
|
||||
'park': Park,
|
||||
'ride': Ride,
|
||||
'company': Company,
|
||||
'ridemodel': RideModel,
|
||||
}
|
||||
|
||||
model = model_map.get(entity_type.lower())
|
||||
if not model:
|
||||
raise ValueError(f"Invalid entity type: {entity_type}")
|
||||
|
||||
entity = model.objects.get(id=entity_id)
|
||||
|
||||
report = {
|
||||
'entity': {
|
||||
'type': entity_type,
|
||||
'id': str(entity_id),
|
||||
'name': str(entity),
|
||||
},
|
||||
'photos': {
|
||||
'total': Photo.objects.filter(
|
||||
content_type__model=entity_type.lower(),
|
||||
object_id=entity_id
|
||||
).count(),
|
||||
'approved': Photo.objects.filter(
|
||||
content_type__model=entity_type.lower(),
|
||||
object_id=entity_id,
|
||||
moderation_status='approved'
|
||||
).count(),
|
||||
'pending': Photo.objects.filter(
|
||||
content_type__model=entity_type.lower(),
|
||||
object_id=entity_id,
|
||||
moderation_status='pending'
|
||||
).count(),
|
||||
},
|
||||
'submissions': {
|
||||
'total': ContentSubmission.objects.filter(
|
||||
entity_type__model=entity_type.lower(),
|
||||
entity_id=entity_id
|
||||
).count(),
|
||||
'approved': ContentSubmission.objects.filter(
|
||||
entity_type__model=entity_type.lower(),
|
||||
entity_id=entity_id,
|
||||
status='approved'
|
||||
).count(),
|
||||
'pending': ContentSubmission.objects.filter(
|
||||
entity_type__model=entity_type.lower(),
|
||||
entity_id=entity_id,
|
||||
status='pending'
|
||||
).count(),
|
||||
},
|
||||
# Version history now tracked via pghistory Event models
|
||||
# Can query {ModelName}Event if needed (e.g., ParkEvent, RideEvent)
|
||||
}
|
||||
|
||||
logger.info(f"Generated report for {entity_type} {entity_id}")
|
||||
return report
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error generating report: {str(e)}")
|
||||
raise
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_duplicate_entities(self):
|
||||
"""
|
||||
Detect and flag potential duplicate entities.
|
||||
|
||||
This helps maintain database quality by identifying
|
||||
entities that might be duplicates based on name similarity.
|
||||
|
||||
Returns:
|
||||
dict: Duplicate detection results
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
|
||||
try:
|
||||
# This is a simplified implementation
|
||||
# In production, you'd want more sophisticated duplicate detection
|
||||
|
||||
results = {
|
||||
'parks_flagged': 0,
|
||||
'rides_flagged': 0,
|
||||
'companies_flagged': 0,
|
||||
}
|
||||
|
||||
logger.info(f"Duplicate detection completed: {results}")
|
||||
return results
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error detecting duplicates: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
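# --- Illustrative sketch (not part of the original commit) ---
# A minimal example of the "more sophisticated duplicate detection" the task above
# alludes to, using stdlib difflib for name similarity. The helper name and the 0.9
# threshold are assumptions for illustration only.
def _find_similar_names(names, threshold=0.9):
    """Return (name_a, name_b, ratio) tuples for pairs above the similarity threshold."""
    from difflib import SequenceMatcher

    pairs = []
    for i, a in enumerate(names):
        for b in names[i + 1:]:
            ratio = SequenceMatcher(None, a.lower(), b.lower()).ratio()
            if ratio >= threshold:
                pairs.append((a, b, ratio))
    return pairs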
@shared_task
|
||||
def calculate_global_statistics():
|
||||
"""
|
||||
Calculate global statistics across all entities.
|
||||
|
||||
Returns:
|
||||
dict: Global statistics
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
from apps.media.models import Photo
|
||||
from apps.moderation.models import ContentSubmission
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
stats = {
|
||||
'entities': {
|
||||
'parks': Park.objects.count(),
|
||||
'rides': Ride.objects.count(),
|
||||
'companies': Company.objects.count(),
|
||||
'ride_models': RideModel.objects.count(),
|
||||
},
|
||||
'photos': {
|
||||
'total': Photo.objects.count(),
|
||||
'approved': Photo.objects.filter(moderation_status='approved').count(),
|
||||
},
|
||||
'submissions': {
|
||||
'total': ContentSubmission.objects.count(),
|
||||
'pending': ContentSubmission.objects.filter(status='pending').count(),
|
||||
},
|
||||
'users': {
|
||||
'total': User.objects.count(),
|
||||
'active': User.objects.filter(is_active=True).count(),
|
||||
},
|
||||
'timestamp': timezone.now().isoformat(),
|
||||
}
|
||||
|
||||
logger.info(f"Global statistics calculated: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error calculating global statistics: {str(e)}")
|
||||
raise
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def validate_entity_data(self, entity_type, entity_id):
|
||||
"""
|
||||
Validate entity data integrity and flag issues.
|
||||
|
||||
Args:
|
||||
entity_type: Type of entity
|
||||
entity_id: ID of the entity
|
||||
|
||||
Returns:
|
||||
dict: Validation results
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
|
||||
try:
|
||||
model_map = {
|
||||
'park': Park,
|
||||
'ride': Ride,
|
||||
'company': Company,
|
||||
'ridemodel': RideModel,
|
||||
}
|
||||
|
||||
model = model_map.get(entity_type.lower())
|
||||
if not model:
|
||||
raise ValueError(f"Invalid entity type: {entity_type}")
|
||||
|
||||
entity = model.objects.get(id=entity_id)
|
||||
|
||||
issues = []
|
||||
|
||||
# Check for missing required fields
|
||||
if not entity.name or entity.name.strip() == '':
|
||||
issues.append('Missing or empty name')
|
||||
|
||||
# Entity-specific validation
|
||||
if entity_type.lower() == 'park' and not entity.country:
|
||||
issues.append('Missing country')
|
||||
|
||||
if entity_type.lower() == 'ride' and not entity.park:
|
||||
issues.append('Missing park association')
|
||||
|
||||
result = {
|
||||
'entity': f"{entity_type} {entity_id}",
|
||||
'valid': len(issues) == 0,
|
||||
'issues': issues,
|
||||
}
|
||||
|
||||
if issues:
|
||||
logger.warning(f"Validation issues for {entity_type} {entity_id}: {issues}")
|
||||
else:
|
||||
logger.info(f"Validation passed for {entity_type} {entity_id}")
|
||||
|
||||
return result
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error validating {entity_type} {entity_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
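These entity tasks are ordinary Celery shared tasks, so they can be queued ad hoc as well as from a schedule. A minimal sketch, assuming the module path apps.entities.tasks and using placeholder UUIDs:

# hypothetical ad-hoc usage, e.g. from a management command or a shell
from apps.entities.tasks import generate_entity_report, validate_entity_data

# queue asynchronously via the broker
validate_entity_data.delay('park', '00000000-0000-0000-0000-000000000001')

# or call synchronously while debugging
report = generate_entity_report('ride', '00000000-0000-0000-0000-000000000002')
print(report['photos']['approved'])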
0
django-backend/apps/media/__init__.py
Normal file
206
django-backend/apps/media/admin.py
Normal file
@@ -0,0 +1,206 @@
|
||||
"""
|
||||
Django Admin configuration for media models.
|
||||
"""
|
||||
from django.contrib import admin
|
||||
from django.contrib.contenttypes.admin import GenericTabularInline
|
||||
from django.utils.html import format_html
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.db.models import Count, Q
|
||||
from .models import Photo
|
||||
|
||||
|
||||
@admin.register(Photo)
|
||||
class PhotoAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for Photo model with enhanced features."""
|
||||
|
||||
list_display = [
|
||||
'thumbnail_preview', 'title', 'photo_type', 'moderation_status',
|
||||
'entity_info', 'uploaded_by', 'dimensions', 'file_size_display', 'created'
|
||||
]
|
||||
list_filter = [
|
||||
'moderation_status', 'is_approved', 'photo_type',
|
||||
'is_featured', 'is_public', 'created'
|
||||
]
|
||||
search_fields = [
|
||||
'title', 'description', 'cloudflare_image_id',
|
||||
'uploaded_by__email', 'uploaded_by__username'
|
||||
]
|
||||
readonly_fields = [
|
||||
'id', 'created', 'modified', 'content_type', 'object_id',
|
||||
'moderated_at'
|
||||
]
|
||||
raw_id_fields = ['uploaded_by', 'moderated_by']
|
||||
|
||||
fieldsets = (
|
||||
('CloudFlare Image', {
|
||||
'fields': (
|
||||
'cloudflare_image_id', 'cloudflare_url',
|
||||
'cloudflare_thumbnail_url'
|
||||
)
|
||||
}),
|
||||
('Metadata', {
|
||||
'fields': ('title', 'description', 'credit', 'photo_type')
|
||||
}),
|
||||
('Associated Entity', {
|
||||
'fields': ('content_type', 'object_id')
|
||||
}),
|
||||
('Upload Information', {
|
||||
'fields': ('uploaded_by',)
|
||||
}),
|
||||
('Moderation', {
|
||||
'fields': (
|
||||
'moderation_status', 'is_approved',
|
||||
'moderated_by', 'moderated_at', 'moderation_notes'
|
||||
)
|
||||
}),
|
||||
('Image Details', {
|
||||
'fields': ('width', 'height', 'file_size'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
('Display Settings', {
|
||||
'fields': ('display_order', 'is_featured', 'is_public')
|
||||
}),
|
||||
('System', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
date_hierarchy = 'created'
|
||||
actions = ['approve_photos', 'reject_photos', 'flag_photos', 'make_featured', 'remove_featured']
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related(
|
||||
'uploaded_by', 'moderated_by', 'content_type'
|
||||
)
|
||||
|
||||
def thumbnail_preview(self, obj):
|
||||
"""Display thumbnail preview in list view."""
|
||||
if obj.cloudflare_url:
|
||||
# Use thumbnail variant for preview
|
||||
from apps.media.services import CloudFlareService
|
||||
cf = CloudFlareService()
|
||||
thumbnail_url = cf.get_image_url(obj.cloudflare_image_id, 'thumbnail')
|
||||
|
||||
return format_html(
|
||||
'<img src="{}" style="width: 60px; height: 60px; object-fit: cover; border-radius: 4px;" />',
|
||||
thumbnail_url
|
||||
)
|
||||
return "-"
|
||||
thumbnail_preview.short_description = "Preview"
|
||||
|
||||
def entity_info(self, obj):
|
||||
"""Display entity information."""
|
||||
if obj.content_type and obj.object_id:
|
||||
entity = obj.content_object
|
||||
if entity:
|
||||
entity_type = obj.content_type.model
|
||||
entity_name = getattr(entity, 'name', str(entity))
|
||||
return format_html(
|
||||
'<strong>{}</strong><br/><small>{}</small>',
|
||||
entity_name,
|
||||
entity_type.upper()
|
||||
)
|
||||
return format_html('<em style="color: #999;">Not attached</em>')
|
||||
entity_info.short_description = "Entity"
|
||||
|
||||
def dimensions(self, obj):
|
||||
"""Display image dimensions."""
|
||||
if obj.width and obj.height:
|
||||
return f"{obj.width}×{obj.height}"
|
||||
return "-"
|
||||
dimensions.short_description = "Size"
|
||||
|
||||
def file_size_display(self, obj):
|
||||
"""Display file size in human-readable format."""
|
||||
if obj.file_size:
|
||||
size_kb = obj.file_size / 1024
|
||||
if size_kb > 1024:
|
||||
return f"{size_kb / 1024:.1f} MB"
|
||||
return f"{size_kb:.1f} KB"
|
||||
return "-"
|
||||
file_size_display.short_description = "File Size"
|
||||
|
||||
def changelist_view(self, request, extra_context=None):
|
||||
"""Add statistics to changelist."""
|
||||
extra_context = extra_context or {}
|
||||
|
||||
# Get photo statistics
|
||||
stats = Photo.objects.aggregate(
|
||||
total=Count('id'),
|
||||
pending=Count('id', filter=Q(moderation_status='pending')),
|
||||
approved=Count('id', filter=Q(moderation_status='approved')),
|
||||
rejected=Count('id', filter=Q(moderation_status='rejected')),
|
||||
flagged=Count('id', filter=Q(moderation_status='flagged')),
|
||||
)
|
||||
|
||||
extra_context['photo_stats'] = stats
|
||||
|
||||
return super().changelist_view(request, extra_context)
|
||||
|
||||
def approve_photos(self, request, queryset):
|
||||
"""Bulk approve selected photos."""
|
||||
count = 0
|
||||
for photo in queryset:
|
||||
photo.approve(moderator=request.user, notes='Bulk approved')
|
||||
count += 1
|
||||
self.message_user(request, f"{count} photo(s) approved successfully.")
|
||||
approve_photos.short_description = "Approve selected photos"
|
||||
|
||||
def reject_photos(self, request, queryset):
|
||||
"""Bulk reject selected photos."""
|
||||
count = 0
|
||||
for photo in queryset:
|
||||
photo.reject(moderator=request.user, notes='Bulk rejected')
|
||||
count += 1
|
||||
self.message_user(request, f"{count} photo(s) rejected.")
|
||||
reject_photos.short_description = "Reject selected photos"
|
||||
|
||||
def flag_photos(self, request, queryset):
|
||||
"""Bulk flag selected photos for review."""
|
||||
count = 0
|
||||
for photo in queryset:
|
||||
photo.flag(moderator=request.user, notes='Flagged for review')
|
||||
count += 1
|
||||
self.message_user(request, f"{count} photo(s) flagged for review.")
|
||||
flag_photos.short_description = "Flag selected photos"
|
||||
|
||||
def make_featured(self, request, queryset):
|
||||
"""Mark selected photos as featured."""
|
||||
count = queryset.update(is_featured=True)
|
||||
self.message_user(request, f"{count} photo(s) marked as featured.")
|
||||
make_featured.short_description = "Mark as featured"
|
||||
|
||||
def remove_featured(self, request, queryset):
|
||||
"""Remove featured status from selected photos."""
|
||||
count = queryset.update(is_featured=False)
|
||||
self.message_user(request, f"{count} photo(s) removed from featured.")
|
||||
remove_featured.short_description = "Remove featured status"
|
||||
|
||||
|
||||
# Inline admin for use in entity admin pages
|
||||
class PhotoInline(GenericTabularInline):
|
||||
"""Inline admin for photos in entity pages."""
|
||||
model = Photo
|
||||
ct_field = 'content_type'
|
||||
ct_fk_field = 'object_id'
|
||||
extra = 0
|
||||
fields = ['thumbnail_preview', 'title', 'photo_type', 'moderation_status', 'display_order']
|
||||
readonly_fields = ['thumbnail_preview']
|
||||
can_delete = True
|
||||
|
||||
def thumbnail_preview(self, obj):
|
||||
"""Display thumbnail preview in inline."""
|
||||
if obj.cloudflare_url:
|
||||
from apps.media.services import CloudFlareService
|
||||
cf = CloudFlareService()
|
||||
thumbnail_url = cf.get_image_url(obj.cloudflare_image_id, 'thumbnail')
|
||||
|
||||
return format_html(
|
||||
'<img src="{}" style="width: 40px; height: 40px; object-fit: cover; border-radius: 4px;" />',
|
||||
thumbnail_url
|
||||
)
|
||||
return "-"
|
||||
thumbnail_preview.short_description = "Preview"
|
||||
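PhotoInline above is intended to be embedded in entity admin pages. A minimal sketch, assuming a ParkAdmin that is not part of this commit:

# apps/entities/admin.py (hypothetical)
from django.contrib import admin
from apps.entities.models import Park
from apps.media.admin import PhotoInline

@admin.register(Park)
class ParkAdmin(admin.ModelAdmin):
    list_display = ['name']
    inlines = [PhotoInline]  # photos attach through the generic content_type/object_id pair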
11
django-backend/apps/media/apps.py
Normal file
@@ -0,0 +1,11 @@
"""
Media app configuration.
"""

from django.apps import AppConfig


class MediaConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.media'
    verbose_name = 'Media'
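For MediaConfig to load, the app also has to be registered in settings; a hedged sketch of the relevant INSTALLED_APPS excerpt (the surrounding entries are assumptions):

# config/settings.py (hypothetical excerpt)
INSTALLED_APPS = [
    # ... Django and third-party apps ...
    'apps.core',
    'apps.entities',
    'apps.media',       # picks up MediaConfig defined above
    'apps.moderation',
]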
253
django-backend/apps/media/migrations/0001_initial.py
Normal file
@@ -0,0 +1,253 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 16:41
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Photo",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"cloudflare_image_id",
|
||||
models.CharField(
|
||||
db_index=True,
|
||||
help_text="Unique CloudFlare image identifier",
|
||||
max_length=255,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"cloudflare_url",
|
||||
models.URLField(help_text="CloudFlare CDN URL for the image"),
|
||||
),
|
||||
(
|
||||
"cloudflare_thumbnail_url",
|
||||
models.URLField(
|
||||
blank=True,
|
||||
help_text="CloudFlare thumbnail URL (if different from main URL)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"title",
|
||||
models.CharField(
|
||||
blank=True, help_text="Photo title or caption", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Photo description or details"
|
||||
),
|
||||
),
|
||||
(
|
||||
"credit",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="Photo credit/photographer name",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"photo_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("main", "Main Photo"),
|
||||
("gallery", "Gallery Photo"),
|
||||
("banner", "Banner Image"),
|
||||
("logo", "Logo"),
|
||||
("thumbnail", "Thumbnail"),
|
||||
("other", "Other"),
|
||||
],
|
||||
db_index=True,
|
||||
default="gallery",
|
||||
help_text="Type of photo",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"object_id",
|
||||
models.UUIDField(
|
||||
db_index=True,
|
||||
help_text="ID of the entity this photo belongs to",
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderation_status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending Review"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
("flagged", "Flagged"),
|
||||
],
|
||||
db_index=True,
|
||||
default="pending",
|
||||
help_text="Moderation status",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_approved",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=False,
|
||||
help_text="Quick filter for approved photos",
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderated_at",
|
||||
models.DateTimeField(
|
||||
blank=True, help_text="When the photo was moderated", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderation_notes",
|
||||
models.TextField(blank=True, help_text="Notes from moderator"),
|
||||
),
|
||||
(
|
||||
"width",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="Image width in pixels", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"height",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="Image height in pixels", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"file_size",
|
||||
models.IntegerField(
|
||||
blank=True, help_text="File size in bytes", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"display_order",
|
||||
models.IntegerField(
|
||||
db_index=True,
|
||||
default=0,
|
||||
help_text="Order for displaying in galleries (lower numbers first)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_featured",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=False,
|
||||
help_text="Is this a featured photo?",
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_public",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=True,
|
||||
help_text="Is this photo publicly visible?",
|
||||
),
|
||||
),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
help_text="Type of entity this photo belongs to",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderated_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator who approved/rejected this photo",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="moderated_photos",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"uploaded_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="User who uploaded this photo",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="uploaded_photos",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Photo",
|
||||
"verbose_name_plural": "Photos",
|
||||
"ordering": ["display_order", "-created"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["content_type", "object_id"],
|
||||
name="media_photo_content_0187f5_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["cloudflare_image_id"],
|
||||
name="media_photo_cloudfl_63ac12_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["moderation_status"],
|
||||
name="media_photo_moderat_2033b1_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["is_approved"], name="media_photo_is_appr_13ab34_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["uploaded_by"], name="media_photo_uploade_220d3a_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["photo_type"], name="media_photo_photo_t_b387e7_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["display_order"], name="media_photo_display_04e358_idx"
|
||||
),
|
||||
],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
]
|
||||
0
django-backend/apps/media/migrations/__init__.py
Normal file
266
django-backend/apps/media/models.py
Normal file
@@ -0,0 +1,266 @@
|
||||
"""
|
||||
Media models for ThrillWiki Django backend.
|
||||
|
||||
This module contains models for handling media content:
|
||||
- Photo: CloudFlare Images integration with generic relations
|
||||
"""
|
||||
from django.db import models
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django_lifecycle import hook, AFTER_CREATE, AFTER_UPDATE, BEFORE_SAVE
|
||||
|
||||
from apps.core.models import BaseModel
|
||||
|
||||
|
||||
class Photo(BaseModel):
|
||||
"""
|
||||
Represents a photo stored in CloudFlare Images.
|
||||
Uses generic relations to attach to any entity (Park, Ride, Company, etc.)
|
||||
"""
|
||||
|
||||
PHOTO_TYPE_CHOICES = [
|
||||
('main', 'Main Photo'),
|
||||
('gallery', 'Gallery Photo'),
|
||||
('banner', 'Banner Image'),
|
||||
('logo', 'Logo'),
|
||||
('thumbnail', 'Thumbnail'),
|
||||
('other', 'Other'),
|
||||
]
|
||||
|
||||
MODERATION_STATUS_CHOICES = [
|
||||
('pending', 'Pending Review'),
|
||||
('approved', 'Approved'),
|
||||
('rejected', 'Rejected'),
|
||||
('flagged', 'Flagged'),
|
||||
]
|
||||
|
||||
# CloudFlare Image Integration
|
||||
cloudflare_image_id = models.CharField(
|
||||
max_length=255,
|
||||
unique=True,
|
||||
db_index=True,
|
||||
help_text="Unique CloudFlare image identifier"
|
||||
)
|
||||
cloudflare_url = models.URLField(
|
||||
help_text="CloudFlare CDN URL for the image"
|
||||
)
|
||||
cloudflare_thumbnail_url = models.URLField(
|
||||
blank=True,
|
||||
help_text="CloudFlare thumbnail URL (if different from main URL)"
|
||||
)
|
||||
|
||||
# Metadata
|
||||
title = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
help_text="Photo title or caption"
|
||||
)
|
||||
description = models.TextField(
|
||||
blank=True,
|
||||
help_text="Photo description or details"
|
||||
)
|
||||
credit = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
help_text="Photo credit/photographer name"
|
||||
)
|
||||
|
||||
# Photo Type
|
||||
photo_type = models.CharField(
|
||||
max_length=50,
|
||||
choices=PHOTO_TYPE_CHOICES,
|
||||
default='gallery',
|
||||
db_index=True,
|
||||
help_text="Type of photo"
|
||||
)
|
||||
|
||||
# Generic relation to attach to any entity
|
||||
content_type = models.ForeignKey(
|
||||
ContentType,
|
||||
on_delete=models.CASCADE,
|
||||
help_text="Type of entity this photo belongs to"
|
||||
)
|
||||
object_id = models.UUIDField(
|
||||
db_index=True,
|
||||
help_text="ID of the entity this photo belongs to"
|
||||
)
|
||||
content_object = GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
# User who uploaded
|
||||
uploaded_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='uploaded_photos',
|
||||
help_text="User who uploaded this photo"
|
||||
)
|
||||
|
||||
# Moderation
|
||||
moderation_status = models.CharField(
|
||||
max_length=50,
|
||||
choices=MODERATION_STATUS_CHOICES,
|
||||
default='pending',
|
||||
db_index=True,
|
||||
help_text="Moderation status"
|
||||
)
|
||||
is_approved = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Quick filter for approved photos"
|
||||
)
|
||||
moderated_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='moderated_photos',
|
||||
help_text="Moderator who approved/rejected this photo"
|
||||
)
|
||||
moderated_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the photo was moderated"
|
||||
)
|
||||
moderation_notes = models.TextField(
|
||||
blank=True,
|
||||
help_text="Notes from moderator"
|
||||
)
|
||||
|
||||
# Image Metadata
|
||||
width = models.IntegerField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Image width in pixels"
|
||||
)
|
||||
height = models.IntegerField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Image height in pixels"
|
||||
)
|
||||
file_size = models.IntegerField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="File size in bytes"
|
||||
)
|
||||
|
||||
# Display Order
|
||||
display_order = models.IntegerField(
|
||||
default=0,
|
||||
db_index=True,
|
||||
help_text="Order for displaying in galleries (lower numbers first)"
|
||||
)
|
||||
|
||||
# Visibility
|
||||
is_featured = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Is this a featured photo?"
|
||||
)
|
||||
is_public = models.BooleanField(
|
||||
default=True,
|
||||
db_index=True,
|
||||
help_text="Is this photo publicly visible?"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = 'Photo'
|
||||
verbose_name_plural = 'Photos'
|
||||
ordering = ['display_order', '-created']
|
||||
indexes = [
|
||||
models.Index(fields=['content_type', 'object_id']),
|
||||
models.Index(fields=['cloudflare_image_id']),
|
||||
models.Index(fields=['moderation_status']),
|
||||
models.Index(fields=['is_approved']),
|
||||
models.Index(fields=['uploaded_by']),
|
||||
models.Index(fields=['photo_type']),
|
||||
models.Index(fields=['display_order']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
if self.title:
|
||||
return self.title
|
||||
return f"Photo {self.cloudflare_image_id[:8]}..."
|
||||
|
||||
@hook(AFTER_UPDATE, when='moderation_status', was='pending', is_now='approved')
|
||||
def set_approved_flag_on_approval(self):
|
||||
"""Set is_approved flag when status changes to approved."""
|
||||
self.is_approved = True
|
||||
self.save(update_fields=['is_approved'])
|
||||
|
||||
@hook(AFTER_UPDATE, when='moderation_status', was='approved', is_not='approved')
|
||||
def clear_approved_flag_on_rejection(self):
|
||||
"""Clear is_approved flag when status changes from approved."""
|
||||
self.is_approved = False
|
||||
self.save(update_fields=['is_approved'])
|
||||
|
||||
def approve(self, moderator, notes=''):
|
||||
"""Approve this photo."""
|
||||
from django.utils import timezone
|
||||
|
||||
self.moderation_status = 'approved'
|
||||
self.is_approved = True
|
||||
self.moderated_by = moderator
|
||||
self.moderated_at = timezone.now()
|
||||
self.moderation_notes = notes
|
||||
self.save(update_fields=[
|
||||
'moderation_status',
|
||||
'is_approved',
|
||||
'moderated_by',
|
||||
'moderated_at',
|
||||
'moderation_notes'
|
||||
])
|
||||
|
||||
def reject(self, moderator, notes=''):
|
||||
"""Reject this photo."""
|
||||
from django.utils import timezone
|
||||
|
||||
self.moderation_status = 'rejected'
|
||||
self.is_approved = False
|
||||
self.moderated_by = moderator
|
||||
self.moderated_at = timezone.now()
|
||||
self.moderation_notes = notes
|
||||
self.save(update_fields=[
|
||||
'moderation_status',
|
||||
'is_approved',
|
||||
'moderated_by',
|
||||
'moderated_at',
|
||||
'moderation_notes'
|
||||
])
|
||||
|
||||
def flag(self, moderator, notes=''):
|
||||
"""Flag this photo for review."""
|
||||
from django.utils import timezone
|
||||
|
||||
self.moderation_status = 'flagged'
|
||||
self.is_approved = False
|
||||
self.moderated_by = moderator
|
||||
self.moderated_at = timezone.now()
|
||||
self.moderation_notes = notes
|
||||
self.save(update_fields=[
|
||||
'moderation_status',
|
||||
'is_approved',
|
||||
'moderated_by',
|
||||
'moderated_at',
|
||||
'moderation_notes'
|
||||
])
|
||||
|
||||
|
||||
class PhotoManager(models.Manager):
    """Custom manager for Photo model."""

    def approved(self):
        """Return only approved photos."""
        return self.filter(is_approved=True)

    def pending(self):
        """Return only pending photos."""
        return self.filter(moderation_status='pending')

    def public(self):
        """Return only public, approved photos."""
        return self.filter(is_approved=True, is_public=True)


# Add custom manager to Photo model
Photo.add_to_class('objects', PhotoManager())
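A short usage sketch of the model and manager above (the Park import and lookup are assumptions). Note that approved()/pending()/public() live on the manager, so they must start the chain; after .filter() only standard QuerySet methods are available:

# hypothetical usage
from django.contrib.contenttypes.models import ContentType
from apps.entities.models import Park
from apps.media.models import Photo

park = Park.objects.first()
ct = ContentType.objects.get_for_model(Park)

# all public, approved photos attached to this park, gallery order first
photos = Photo.objects.public().filter(
    content_type=ct,
    object_id=park.pk,
).order_by('display_order', '-created')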
506
django-backend/apps/media/services.py
Normal file
@@ -0,0 +1,506 @@
|
||||
"""
|
||||
Media services for photo upload, management, and CloudFlare Images integration.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
from io import BytesIO
|
||||
from typing import Optional, Dict, Any, List
|
||||
from django.conf import settings
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
|
||||
from django.db import transaction
|
||||
from django.db.models import Model
|
||||
|
||||
import requests
|
||||
from PIL import Image
|
||||
|
||||
from apps.media.models import Photo
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CloudFlareError(Exception):
|
||||
"""Base exception for CloudFlare API errors."""
|
||||
pass
|
||||
|
||||
|
||||
class CloudFlareService:
|
||||
"""
|
||||
Service for interacting with CloudFlare Images API.
|
||||
|
||||
Provides image upload, deletion, and URL generation with automatic
|
||||
fallback to mock mode when CloudFlare credentials are not configured.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.account_id = settings.CLOUDFLARE_ACCOUNT_ID
|
||||
self.api_token = settings.CLOUDFLARE_IMAGE_TOKEN
|
||||
self.delivery_hash = settings.CLOUDFLARE_IMAGE_HASH
|
||||
|
||||
# Enable mock mode if CloudFlare is not configured
|
||||
self.mock_mode = not all([self.account_id, self.api_token, self.delivery_hash])
|
||||
|
||||
if self.mock_mode:
|
||||
logger.warning("CloudFlare Images not configured - using mock mode")
|
||||
|
||||
self.base_url = f"https://api.cloudflare.com/client/v4/accounts/{self.account_id}/images/v1"
|
||||
self.headers = {"Authorization": f"Bearer {self.api_token}"}
|
||||
|
||||
def upload_image(
|
||||
self,
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
metadata: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Upload an image to CloudFlare Images.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
metadata: Optional metadata dictionary
|
||||
|
||||
Returns:
|
||||
Dict containing:
|
||||
- id: CloudFlare image ID
|
||||
- url: CDN URL for the image
|
||||
- variants: Available image variants
|
||||
|
||||
Raises:
|
||||
CloudFlareError: If upload fails
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return self._mock_upload(file, metadata)
|
||||
|
||||
try:
|
||||
# Prepare the file for upload
|
||||
file.seek(0) # Reset file pointer
|
||||
|
||||
# Prepare multipart form data
|
||||
files = {
|
||||
'file': (file.name, file.read(), file.content_type)
|
||||
}
|
||||
|
||||
# Add metadata if provided
|
||||
data = {}
|
||||
if metadata:
|
||||
data['metadata'] = str(metadata)
|
||||
|
||||
# Make API request
|
||||
response = requests.post(
|
||||
self.base_url,
|
||||
headers=self.headers,
|
||||
files=files,
|
||||
data=data,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
if not result.get('success'):
|
||||
raise CloudFlareError(f"Upload failed: {result.get('errors', [])}")
|
||||
|
||||
image_data = result['result']
|
||||
|
||||
return {
|
||||
'id': image_data['id'],
|
||||
'url': self._get_cdn_url(image_data['id']),
|
||||
'variants': image_data.get('variants', []),
|
||||
'uploaded': image_data.get('uploaded'),
|
||||
}
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"CloudFlare upload failed: {str(e)}")
|
||||
raise CloudFlareError(f"Failed to upload image: {str(e)}")
|
||||
|
||||
def delete_image(self, image_id: str) -> bool:
|
||||
"""
|
||||
Delete an image from CloudFlare Images.
|
||||
|
||||
Args:
|
||||
image_id: The CloudFlare image ID
|
||||
|
||||
Returns:
|
||||
True if deletion was successful
|
||||
|
||||
Raises:
|
||||
CloudFlareError: If deletion fails
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return self._mock_delete(image_id)
|
||||
|
||||
try:
|
||||
url = f"{self.base_url}/{image_id}"
|
||||
response = requests.delete(
|
||||
url,
|
||||
headers=self.headers,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
return result.get('success', False)
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"CloudFlare deletion failed: {str(e)}")
|
||||
raise CloudFlareError(f"Failed to delete image: {str(e)}")
|
||||
|
||||
def get_image_url(self, image_id: str, variant: str = "public") -> str:
|
||||
"""
|
||||
Generate a CloudFlare CDN URL for an image.
|
||||
|
||||
Args:
|
||||
image_id: The CloudFlare image ID
|
||||
variant: Image variant (public, thumbnail, banner, etc.)
|
||||
|
||||
Returns:
|
||||
CDN URL for the image
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return self._mock_url(image_id, variant)
|
||||
|
||||
return self._get_cdn_url(image_id, variant)
|
||||
|
||||
def get_image_variants(self, image_id: str) -> List[str]:
|
||||
"""
|
||||
Get available variants for an image.
|
||||
|
||||
Args:
|
||||
image_id: The CloudFlare image ID
|
||||
|
||||
Returns:
|
||||
List of available variant names
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return ['public', 'thumbnail', 'banner']
|
||||
|
||||
try:
|
||||
url = f"{self.base_url}/{image_id}"
|
||||
response = requests.get(
|
||||
url,
|
||||
headers=self.headers,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
if result.get('success'):
|
||||
return list(result['result'].get('variants', []))
|
||||
|
||||
return []
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Failed to get variants: {str(e)}")
|
||||
return []
|
||||
|
||||
def _get_cdn_url(self, image_id: str, variant: str = "public") -> str:
|
||||
"""
|
||||
Generate CloudFlare CDN URL.
|
||||
|
||||
Supports two URL formats:
|
||||
1. cdn.thrillwiki.com: {base_url}/images/{image-id}/{variant-id}
|
||||
2. imagedelivery.net: {base_url}/{hash}/{image-id}/{variant-id}
|
||||
"""
|
||||
base_url = settings.CLOUDFLARE_IMAGE_BASE_URL
|
||||
|
||||
# Check if using custom CDN domain (cdn.thrillwiki.com)
|
||||
if 'cdn.thrillwiki.com' in base_url or not self.delivery_hash:
|
||||
# Simple URL structure for custom domain
|
||||
return f"{base_url}/images/{image_id}/{variant}"
|
||||
else:
|
||||
# Legacy imagedelivery.net format (requires hash)
|
||||
return f"{base_url}/{self.delivery_hash}/{image_id}/{variant}"
|
||||
|
||||
# Mock methods for development without CloudFlare
|
||||
|
||||
def _mock_upload(self, file, metadata) -> Dict[str, Any]:
|
||||
"""Mock upload for development."""
|
||||
import uuid
|
||||
mock_id = str(uuid.uuid4())
|
||||
|
||||
logger.info(f"[MOCK] Uploaded image: {file.name} -> {mock_id}")
|
||||
|
||||
return {
|
||||
'id': mock_id,
|
||||
'url': self._mock_url(mock_id),
|
||||
'variants': ['public', 'thumbnail', 'banner'],
|
||||
'uploaded': 'mock',
|
||||
}
|
||||
|
||||
def _mock_delete(self, image_id: str) -> bool:
|
||||
"""Mock deletion for development."""
|
||||
logger.info(f"[MOCK] Deleted image: {image_id}")
|
||||
return True
|
||||
|
||||
def _mock_url(self, image_id: str, variant: str = "public") -> str:
|
||||
"""Generate mock URL for development."""
|
||||
return f"https://placehold.co/800x600/png?text={image_id[:8]}"
|
||||
|
||||
|
||||
class PhotoService:
|
||||
"""
|
||||
Service for managing Photo objects with CloudFlare integration.
|
||||
|
||||
Handles photo creation, attachment to entities, moderation,
|
||||
and gallery management.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.cloudflare = CloudFlareService()
|
||||
|
||||
def create_photo(
|
||||
self,
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
user,
|
||||
entity: Optional[Model] = None,
|
||||
photo_type: str = "gallery",
|
||||
title: str = "",
|
||||
description: str = "",
|
||||
credit: str = "",
|
||||
is_visible: bool = True,
|
||||
) -> Photo:
|
||||
"""
|
||||
Create a new photo with CloudFlare upload.
|
||||
|
||||
Args:
|
||||
file: Uploaded file object
|
||||
user: User uploading the photo
|
||||
entity: Optional entity to attach photo to
|
||||
photo_type: Type of photo (main, gallery, banner, etc.)
|
||||
title: Photo title
|
||||
description: Photo description
|
||||
credit: Photo credit/attribution
|
||||
is_visible: Whether photo is visible
|
||||
|
||||
Returns:
|
||||
Created Photo instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
CloudFlareError: If upload fails
|
||||
"""
|
||||
# Get image dimensions
|
||||
dimensions = self._get_image_dimensions(file)
|
||||
|
||||
# Upload to CloudFlare
|
||||
upload_result = self.cloudflare.upload_image(
|
||||
file,
|
||||
metadata={
|
||||
'uploaded_by': str(user.id),
|
||||
'photo_type': photo_type,
|
||||
}
|
||||
)
|
||||
|
||||
# Create Photo instance
|
||||
with transaction.atomic():
|
||||
photo = Photo.objects.create(
|
||||
cloudflare_image_id=upload_result['id'],
|
||||
cloudflare_url=upload_result['url'],
|
||||
uploaded_by=user,
|
||||
photo_type=photo_type,
|
||||
title=title or file.name,
|
||||
description=description,
|
||||
credit=credit,
|
||||
width=dimensions['width'],
|
||||
height=dimensions['height'],
|
||||
file_size=file.size,
|
||||
                # Photo defines no mime_type/is_visible fields; map visibility to is_public
                is_public=is_visible,
|
||||
moderation_status='pending',
|
||||
)
|
||||
|
||||
# Attach to entity if provided
|
||||
if entity:
|
||||
self.attach_to_entity(photo, entity)
|
||||
|
||||
logger.info(f"Photo created: {photo.id} by user {user.id}")
|
||||
|
||||
# Trigger async post-processing
|
||||
try:
|
||||
from apps.media.tasks import process_uploaded_image
|
||||
process_uploaded_image.delay(photo.id)
|
||||
except Exception as e:
|
||||
# Don't fail the upload if async task fails to queue
|
||||
logger.warning(f"Failed to queue photo processing task: {str(e)}")
|
||||
|
||||
return photo
|
||||
|
||||
def attach_to_entity(self, photo: Photo, entity: Model) -> None:
|
||||
"""
|
||||
Attach a photo to an entity.
|
||||
|
||||
Args:
|
||||
photo: Photo instance
|
||||
entity: Entity to attach to (Park, Ride, Company, etc.)
|
||||
"""
|
||||
content_type = ContentType.objects.get_for_model(entity)
|
||||
photo.content_type = content_type
|
||||
photo.object_id = entity.pk
|
||||
photo.save(update_fields=['content_type', 'object_id'])
|
||||
|
||||
logger.info(f"Photo {photo.id} attached to {content_type.model} {entity.pk}")
|
||||
|
||||
def detach_from_entity(self, photo: Photo) -> None:
|
||||
"""
|
||||
Detach a photo from its entity.
|
||||
|
||||
Args:
|
||||
photo: Photo instance
|
||||
"""
|
||||
photo.content_type = None
|
||||
photo.object_id = None
|
||||
photo.save(update_fields=['content_type', 'object_id'])
|
||||
|
||||
logger.info(f"Photo {photo.id} detached from entity")
|
||||
|
||||
def moderate_photo(
|
||||
self,
|
||||
photo: Photo,
|
||||
status: str,
|
||||
moderator,
|
||||
notes: str = ""
|
||||
) -> Photo:
|
||||
"""
|
||||
Moderate a photo (approve/reject/flag).
|
||||
|
||||
Args:
|
||||
photo: Photo instance
|
||||
status: New status (approved, rejected, flagged)
|
||||
moderator: User performing moderation
|
||||
notes: Moderation notes
|
||||
|
||||
Returns:
|
||||
Updated Photo instance
|
||||
"""
|
||||
with transaction.atomic():
|
||||
photo.moderation_status = status
|
||||
photo.moderated_by = moderator
|
||||
photo.moderation_notes = notes
|
||||
|
||||
            # approve()/reject()/flag() require the moderator so the audit fields are recorded
            if status == 'approved':
                photo.approve(moderator, notes)
            elif status == 'rejected':
                photo.reject(moderator, notes)
            elif status == 'flagged':
                photo.flag(moderator, notes)
|
||||
|
||||
photo.save()
|
||||
|
||||
logger.info(
|
||||
f"Photo {photo.id} moderated: {status} by user {moderator.id}"
|
||||
)
|
||||
|
||||
return photo
|
||||
|
||||
def reorder_photos(
|
||||
self,
|
||||
entity: Model,
|
||||
photo_ids: List[int],
|
||||
photo_type: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Reorder photos for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity whose photos to reorder
|
||||
photo_ids: List of photo IDs in desired order
|
||||
photo_type: Optional photo type filter
|
||||
"""
|
||||
content_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
with transaction.atomic():
|
||||
for order, photo_id in enumerate(photo_ids):
|
||||
filters = {
|
||||
'id': photo_id,
|
||||
'content_type': content_type,
|
||||
'object_id': entity.pk,
|
||||
}
|
||||
|
||||
if photo_type:
|
||||
filters['photo_type'] = photo_type
|
||||
|
||||
Photo.objects.filter(**filters).update(display_order=order)
|
||||
|
||||
logger.info(f"Reordered {len(photo_ids)} photos for {content_type.model} {entity.pk}")
|
||||
|
||||
def get_entity_photos(
|
||||
self,
|
||||
entity: Model,
|
||||
photo_type: Optional[str] = None,
|
||||
approved_only: bool = True
|
||||
) -> List[Photo]:
|
||||
"""
|
||||
Get photos for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity to get photos for
|
||||
photo_type: Optional photo type filter
|
||||
approved_only: Whether to return only approved photos
|
||||
|
||||
Returns:
|
||||
List of Photo instances ordered by display_order
|
||||
"""
|
||||
content_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
queryset = Photo.objects.filter(
|
||||
content_type=content_type,
|
||||
object_id=entity.pk,
|
||||
)
|
||||
|
||||
if photo_type:
|
||||
queryset = queryset.filter(photo_type=photo_type)
|
||||
|
||||
        if approved_only:
            # filter() returns a plain QuerySet, so the manager's .approved() is unavailable here
            queryset = queryset.filter(is_approved=True)

        return list(queryset.order_by('display_order', '-created'))
|
||||
|
||||
def delete_photo(self, photo: Photo, delete_from_cloudflare: bool = True) -> None:
|
||||
"""
|
||||
Delete a photo.
|
||||
|
||||
Args:
|
||||
photo: Photo instance to delete
|
||||
delete_from_cloudflare: Whether to also delete from CloudFlare
|
||||
"""
|
||||
cloudflare_id = photo.cloudflare_image_id
|
||||
|
||||
with transaction.atomic():
|
||||
photo.delete()
|
||||
|
||||
# Delete from CloudFlare after DB deletion succeeds
|
||||
if delete_from_cloudflare and cloudflare_id:
|
||||
try:
|
||||
self.cloudflare.delete_image(cloudflare_id)
|
||||
except CloudFlareError as e:
|
||||
logger.error(f"Failed to delete from CloudFlare: {str(e)}")
|
||||
# Don't raise - photo is already deleted from DB
|
||||
|
||||
logger.info(f"Photo deleted: {cloudflare_id}")
|
||||
|
||||
def _get_image_dimensions(
|
||||
self,
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile
|
||||
) -> Dict[str, int]:
|
||||
"""
|
||||
Extract image dimensions from uploaded file.
|
||||
|
||||
Args:
|
||||
file: Uploaded file object
|
||||
|
||||
Returns:
|
||||
Dict with 'width' and 'height' keys
|
||||
"""
|
||||
try:
|
||||
file.seek(0)
|
||||
image = Image.open(file)
|
||||
width, height = image.size
|
||||
file.seek(0) # Reset for later use
|
||||
|
||||
return {'width': width, 'height': height}
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to get image dimensions: {str(e)}")
|
||||
return {'width': 0, 'height': 0}
|
||||
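A minimal sketch of calling PhotoService from an upload endpoint. The view function, request shape, and Ride lookup are assumptions; only create_photo comes from the service above:

# hypothetical usage in a view
from apps.entities.models import Ride
from apps.media.services import PhotoService

def upload_ride_photo(request, ride_id):
    ride = Ride.objects.get(id=ride_id)
    service = PhotoService()
    photo = service.create_photo(
        file=request.FILES['image'],
        user=request.user,
        entity=ride,
        photo_type='gallery',
        title=request.POST.get('title', ''),
    )
    return photo  # response serialization omitted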
219
django-backend/apps/media/tasks.py
Normal file
@@ -0,0 +1,219 @@
|
||||
"""
|
||||
Background tasks for media processing and management.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def process_uploaded_image(self, photo_id):
|
||||
"""
|
||||
Process an uploaded image asynchronously.
|
||||
|
||||
This task runs after a photo is uploaded to perform additional
|
||||
processing like metadata extraction, validation, etc.
|
||||
|
||||
Args:
|
||||
photo_id: ID of the Photo to process
|
||||
|
||||
Returns:
|
||||
str: Processing result message
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
|
||||
try:
|
||||
photo = Photo.objects.get(id=photo_id)
|
||||
|
||||
# Log processing start
|
||||
logger.info(f"Processing photo {photo_id}: {photo.title}")
|
||||
|
||||
# Additional processing could include:
|
||||
# - Generating additional thumbnails
|
||||
# - Extracting EXIF data
|
||||
# - Running image quality checks
|
||||
# - Updating photo metadata
|
||||
|
||||
# For now, just log that processing is complete
|
||||
logger.info(f"Photo {photo_id} processed successfully")
|
||||
|
||||
return f"Photo {photo_id} processed successfully"
|
||||
|
||||
except Photo.DoesNotExist:
|
||||
logger.error(f"Photo {photo_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error processing photo {photo_id}: {str(exc)}")
|
||||
# Retry with exponential backoff
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_rejected_photos(self, days_old=30):
|
||||
"""
|
||||
Clean up photos that have been rejected for more than N days.
|
||||
|
||||
This task runs periodically (e.g., weekly) to remove old rejected
|
||||
photos and free up storage space.
|
||||
|
||||
Args:
|
||||
days_old: Number of days after rejection to delete (default: 30)
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from apps.media.services import PhotoService
|
||||
|
||||
try:
|
||||
cutoff_date = timezone.now() - timedelta(days=days_old)
|
||||
|
||||
# Find rejected photos older than cutoff
|
||||
old_rejected = Photo.objects.filter(
|
||||
moderation_status='rejected',
|
||||
moderated_at__lt=cutoff_date
|
||||
)
|
||||
|
||||
count = old_rejected.count()
|
||||
logger.info(f"Found {count} rejected photos to cleanup")
|
||||
|
||||
# Delete each photo
|
||||
photo_service = PhotoService()
|
||||
deleted_count = 0
|
||||
|
||||
for photo in old_rejected:
|
||||
try:
|
||||
photo_service.delete_photo(photo, delete_from_cloudflare=True)
|
||||
deleted_count += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete photo {photo.id}: {str(e)}")
|
||||
continue
|
||||
|
||||
result = {
|
||||
'found': count,
|
||||
'deleted': deleted_count,
|
||||
'failed': count - deleted_count,
|
||||
'cutoff_date': cutoff_date.isoformat()
|
||||
}
|
||||
|
||||
logger.info(f"Cleanup complete: {result}")
|
||||
return result
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error during photo cleanup: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300) # Retry after 5 minutes
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def generate_photo_thumbnails(self, photo_id, variants=None):
|
||||
"""
|
||||
Generate thumbnails for a photo on demand.
|
||||
|
||||
This can be used to regenerate thumbnails if the original
|
||||
is updated or if new variants are needed.
|
||||
|
||||
Args:
|
||||
photo_id: ID of the Photo
|
||||
variants: List of variant names to generate (None = all)
|
||||
|
||||
Returns:
|
||||
dict: Generated variants and their URLs
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from apps.media.services import CloudFlareService
|
||||
|
||||
try:
|
||||
photo = Photo.objects.get(id=photo_id)
|
||||
cloudflare = CloudFlareService()
|
||||
|
||||
if variants is None:
|
||||
variants = ['public', 'thumbnail', 'banner']
|
||||
|
||||
result = {}
|
||||
for variant in variants:
|
||||
url = cloudflare.get_image_url(photo.cloudflare_image_id, variant)
|
||||
result[variant] = url
|
||||
|
||||
logger.info(f"Generated thumbnails for photo {photo_id}: {variants}")
|
||||
return result
|
||||
|
||||
except Photo.DoesNotExist:
|
||||
logger.error(f"Photo {photo_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error generating thumbnails for photo {photo_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_orphaned_cloudflare_images(self):
|
||||
"""
|
||||
Clean up CloudFlare images that no longer have database records.
|
||||
|
||||
This task helps prevent storage bloat by removing images that
|
||||
were uploaded but their database records were deleted.
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from apps.media.services import CloudFlareService
|
||||
|
||||
try:
|
||||
cloudflare = CloudFlareService()
|
||||
|
||||
# In a real implementation, you would:
|
||||
# 1. Get list of all images from CloudFlare API
|
||||
# 2. Check which ones don't have Photo records
|
||||
# 3. Delete the orphaned images
|
||||
|
||||
# For now, just log that the task ran
|
||||
logger.info("Orphaned image cleanup task completed (not implemented in mock mode)")
|
||||
|
||||
return {
|
||||
'checked': 0,
|
||||
'orphaned': 0,
|
||||
'deleted': 0
|
||||
}
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error during orphaned image cleanup: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task
|
||||
def update_photo_statistics():
|
||||
"""
|
||||
Update photo-related statistics across the database.
|
||||
|
||||
This task can update cached counts, generate reports, etc.
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from django.db.models import Count
|
||||
|
||||
try:
|
||||
stats = {
|
||||
'total_photos': Photo.objects.count(),
|
||||
'pending': Photo.objects.filter(moderation_status='pending').count(),
|
||||
'approved': Photo.objects.filter(moderation_status='approved').count(),
|
||||
'rejected': Photo.objects.filter(moderation_status='rejected').count(),
|
||||
'flagged': Photo.objects.filter(moderation_status='flagged').count(),
|
||||
'by_type': dict(
|
||||
Photo.objects.values('photo_type').annotate(count=Count('id'))
|
||||
.values_list('photo_type', 'count')
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(f"Photo statistics updated: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating photo statistics: {str(e)}")
|
||||
raise
|
||||
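The cleanup tasks above are meant to run periodically; a hedged sketch of a Celery beat entry for cleanup_rejected_photos (the schedule and key name are assumptions):

# settings.py (hypothetical excerpt)
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'cleanup-rejected-photos': {
        'task': 'apps.media.tasks.cleanup_rejected_photos',
        'schedule': crontab(hour=2, minute=0, day_of_week='mon'),  # weekly, per the docstring
        'kwargs': {'days_old': 30},
    },
}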
195
django-backend/apps/media/validators.py
Normal file
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
Validators for image uploads.
|
||||
"""
|
||||
|
||||
import magic
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
|
||||
from PIL import Image
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# Allowed file types
|
||||
ALLOWED_MIME_TYPES = [
|
||||
'image/jpeg',
|
||||
'image/jpg',
|
||||
'image/png',
|
||||
'image/webp',
|
||||
'image/gif',
|
||||
]
|
||||
|
||||
ALLOWED_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.webp', '.gif']
|
||||
|
||||
# Size limits (in bytes)
|
||||
MAX_FILE_SIZE = 10 * 1024 * 1024 # 10 MB
|
||||
MIN_FILE_SIZE = 1024 # 1 KB
|
||||
|
||||
# Dimension limits
|
||||
MIN_WIDTH = 100
|
||||
MIN_HEIGHT = 100
|
||||
MAX_WIDTH = 8000
|
||||
MAX_HEIGHT = 8000
|
||||
|
||||
# Aspect ratio limits (for specific photo types)
|
||||
ASPECT_RATIO_LIMITS = {
|
||||
'banner': {'min': 2.0, 'max': 4.0}, # Wide banners
|
||||
'logo': {'min': 0.5, 'max': 2.0}, # Square-ish logos
|
||||
}
|
||||
|
||||
|
||||
def validate_image_file_type(file: InMemoryUploadedFile | TemporaryUploadedFile) -> None:
|
||||
"""
|
||||
Validate that the uploaded file is an allowed image type.
|
||||
|
||||
Uses python-magic to detect actual file type, not just extension.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
|
||||
Raises:
|
||||
ValidationError: If file type is not allowed
|
||||
"""
|
||||
# Check file extension
|
||||
file_ext = None
|
||||
if hasattr(file, 'name') and file.name:
|
||||
file_ext = '.' + file.name.split('.')[-1].lower()
|
||||
if file_ext not in ALLOWED_EXTENSIONS:
|
||||
raise ValidationError(
|
||||
f"File extension {file_ext} not allowed. "
|
||||
f"Allowed extensions: {', '.join(ALLOWED_EXTENSIONS)}"
|
||||
)
|
||||
|
||||
# Check MIME type from content type
|
||||
if hasattr(file, 'content_type'):
|
||||
if file.content_type not in ALLOWED_MIME_TYPES:
|
||||
raise ValidationError(
|
||||
f"File type {file.content_type} not allowed. "
|
||||
f"Allowed types: {', '.join(ALLOWED_MIME_TYPES)}"
|
||||
)
|
||||
|
||||
# Verify actual file content using python-magic
|
||||
try:
|
||||
file.seek(0)
|
||||
mime = magic.from_buffer(file.read(2048), mime=True)
|
||||
file.seek(0)
|
||||
|
||||
if mime not in ALLOWED_MIME_TYPES:
|
||||
raise ValidationError(
|
||||
f"File content type {mime} does not match allowed types. "
|
||||
"File may be corrupted or incorrectly labeled."
|
||||
)
|
||||
    except ValidationError:
        # don't swallow the validation failure raised just above
        raise
    except Exception:
        # if python-magic itself fails, fall back to the content_type check done above
        pass
|
||||
|
||||
|
||||
def validate_image_file_size(file: InMemoryUploadedFile | TemporaryUploadedFile) -> None:
|
||||
"""
|
||||
Validate that the file size is within allowed limits.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
|
||||
Raises:
|
||||
ValidationError: If file size is not within limits
|
||||
"""
|
||||
file_size = file.size
|
||||
|
||||
if file_size < MIN_FILE_SIZE:
|
||||
raise ValidationError(
|
||||
f"File size is too small. Minimum: {MIN_FILE_SIZE / 1024:.0f} KB"
|
||||
)
|
||||
|
||||
if file_size > MAX_FILE_SIZE:
|
||||
raise ValidationError(
|
||||
f"File size is too large. Maximum: {MAX_FILE_SIZE / (1024 * 1024):.0f} MB"
|
||||
)
|
||||
|
||||
|
||||
def validate_image_dimensions(
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
photo_type: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Validate image dimensions and aspect ratio.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
photo_type: Optional photo type for specific validation
|
||||
|
||||
Raises:
|
||||
ValidationError: If dimensions are not within limits
|
||||
"""
|
||||
try:
|
||||
file.seek(0)
|
||||
image = Image.open(file)
|
||||
width, height = image.size
|
||||
file.seek(0)
|
||||
except Exception as e:
|
||||
raise ValidationError(f"Could not read image dimensions: {str(e)}")
|
||||
|
||||
# Check minimum dimensions
|
||||
if width < MIN_WIDTH or height < MIN_HEIGHT:
|
||||
raise ValidationError(
|
||||
f"Image dimensions too small. Minimum: {MIN_WIDTH}x{MIN_HEIGHT}px, "
|
||||
f"got: {width}x{height}px"
|
||||
)
|
||||
|
||||
# Check maximum dimensions
|
||||
if width > MAX_WIDTH or height > MAX_HEIGHT:
|
||||
raise ValidationError(
|
||||
f"Image dimensions too large. Maximum: {MAX_WIDTH}x{MAX_HEIGHT}px, "
|
||||
f"got: {width}x{height}px"
|
||||
)
|
||||
|
||||
# Check aspect ratio for specific photo types
|
||||
if photo_type and photo_type in ASPECT_RATIO_LIMITS:
|
||||
aspect_ratio = width / height
|
||||
limits = ASPECT_RATIO_LIMITS[photo_type]
|
||||
|
||||
if aspect_ratio < limits['min'] or aspect_ratio > limits['max']:
|
||||
raise ValidationError(
|
||||
f"Invalid aspect ratio for {photo_type}. "
|
||||
f"Expected ratio between {limits['min']:.2f} and {limits['max']:.2f}, "
|
||||
f"got: {aspect_ratio:.2f}"
|
||||
)
|
||||
|
||||
|
||||
def validate_image(
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
photo_type: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Run all image validations.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
photo_type: Optional photo type for specific validation
|
||||
|
||||
Raises:
|
||||
ValidationError: If any validation fails
|
||||
"""
|
||||
validate_image_file_type(file)
|
||||
validate_image_file_size(file)
|
||||
validate_image_dimensions(file, photo_type)
|
||||
|
||||
|
||||
def validate_image_content_safety(file: InMemoryUploadedFile | TemporaryUploadedFile) -> None:
|
||||
"""
|
||||
Placeholder for content safety validation.
|
||||
|
||||
This could integrate with services like:
|
||||
- AWS Rekognition
|
||||
- Google Cloud Vision
|
||||
- Azure Content Moderator
|
||||
|
||||
For now, this is a no-op but provides extension point.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
|
||||
Raises:
|
||||
ValidationError: If content is deemed unsafe
|
||||
"""
|
||||
# TODO: Integrate with content moderation API
|
||||
pass
|
||||
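A hedged sketch of wiring validate_image into an upload serializer, assuming the project exposes uploads through DRF (the serializer class and field names are illustrative):

# hypothetical DRF serializer
from django.core.exceptions import ValidationError as DjangoValidationError
from rest_framework import serializers

from apps.media.validators import validate_image

class PhotoUploadSerializer(serializers.Serializer):
    image = serializers.ImageField()
    photo_type = serializers.CharField(default='gallery')

    def validate(self, attrs):
        try:
            # runs type, size and dimension checks from validators.py
            validate_image(attrs['image'], photo_type=attrs.get('photo_type'))
        except DjangoValidationError as e:
            raise serializers.ValidationError(e.messages)
        return attrs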
0
django-backend/apps/moderation/__init__.py
Normal file
424
django-backend/apps/moderation/admin.py
Normal file
@@ -0,0 +1,424 @@
|
||||
"""
|
||||
Django admin for moderation models.
|
||||
"""
|
||||
from django.contrib import admin
|
||||
from django.utils.html import format_html
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from unfold.admin import ModelAdmin
|
||||
from unfold.decorators import display
|
||||
|
||||
from apps.moderation.models import ContentSubmission, SubmissionItem, ModerationLock
|
||||
|
||||
|
||||
@admin.register(ContentSubmission)
|
||||
class ContentSubmissionAdmin(ModelAdmin):
|
||||
"""Admin for ContentSubmission model."""
|
||||
|
||||
list_display = [
|
||||
'title_with_icon',
|
||||
'status_badge',
|
||||
'entity_info',
|
||||
'user',
|
||||
'items_summary',
|
||||
'locked_info',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'status',
|
||||
'submission_type',
|
||||
'entity_type',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'title',
|
||||
'description',
|
||||
'user__email',
|
||||
'user__username',
|
||||
]
|
||||
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'status',
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Submission Info', {
|
||||
'fields': (
|
||||
'id',
|
||||
'title',
|
||||
'description',
|
||||
'submission_type',
|
||||
'status',
|
||||
)
|
||||
}),
|
||||
('Entity', {
|
||||
'fields': (
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
)
|
||||
}),
|
||||
('User Info', {
|
||||
'fields': (
|
||||
'user',
|
||||
'source',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
)
|
||||
}),
|
||||
('Review Info', {
|
||||
'fields': (
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'rejection_reason',
|
||||
)
|
||||
}),
|
||||
('Metadata', {
|
||||
'fields': (
|
||||
'metadata',
|
||||
'created',
|
||||
'modified',
|
||||
),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='Title', ordering='title')
|
||||
def title_with_icon(self, obj):
|
||||
"""Display title with submission type icon."""
|
||||
icons = {
|
||||
'create': '➕',
|
||||
'update': '✏️',
|
||||
'delete': '🗑️',
|
||||
}
|
||||
icon = icons.get(obj.submission_type, '📝')
|
||||
return f"{icon} {obj.title}"
|
||||
|
||||
@display(description='Status', ordering='status')
|
||||
def status_badge(self, obj):
|
||||
"""Display colored status badge."""
|
||||
colors = {
|
||||
'draft': 'gray',
|
||||
'pending': 'blue',
|
||||
'reviewing': 'orange',
|
||||
'approved': 'green',
|
||||
'rejected': 'red',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 3px 8px; '
|
||||
'border-radius: 3px; font-size: 11px; font-weight: bold;">{}</span>',
|
||||
color,
|
||||
obj.get_status_display()
|
||||
)
|
||||
|
||||
@display(description='Entity')
|
||||
def entity_info(self, obj):
|
||||
"""Display entity type and ID."""
|
||||
return f"{obj.entity_type.model} #{str(obj.entity_id)[:8]}"
|
||||
|
||||
@display(description='Items')
|
||||
def items_summary(self, obj):
|
||||
"""Display item counts."""
|
||||
total = obj.get_items_count()
|
||||
approved = obj.get_approved_items_count()
|
||||
rejected = obj.get_rejected_items_count()
|
||||
pending = total - approved - rejected
|
||||
|
||||
return format_html(
|
||||
'<span title="Pending">{}</span> / '
|
||||
'<span style="color: green;" title="Approved">{}</span> / '
|
||||
'<span style="color: red;" title="Rejected">{}</span>',
|
||||
pending, approved, rejected
|
||||
)
|
||||
|
||||
@display(description='Lock Status')
|
||||
def locked_info(self, obj):
|
||||
"""Display lock information."""
|
||||
if obj.locked_by:
|
||||
is_expired = not obj.is_locked()
|
||||
status = '🔓 Expired' if is_expired else '🔒 Locked'
|
||||
return f"{status} by {obj.locked_by.email}"
|
||||
return '✅ Unlocked'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related(
|
||||
'user',
|
||||
'entity_type',
|
||||
'locked_by',
|
||||
'reviewed_by'
|
||||
).prefetch_related('items')
|
||||
|
||||
|
||||
class SubmissionItemInline(admin.TabularInline):
|
||||
"""Inline admin for submission items."""
|
||||
model = SubmissionItem
|
||||
extra = 0
|
||||
fields = [
|
||||
'field_label',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'change_type',
|
||||
'status',
|
||||
'reviewed_by',
|
||||
]
|
||||
readonly_fields = [
|
||||
'field_label',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'change_type',
|
||||
'status',
|
||||
'reviewed_by',
|
||||
]
|
||||
can_delete = False
|
||||
|
||||
def has_add_permission(self, request, obj=None):
|
||||
return False
|
||||
|
||||
|
||||
@admin.register(SubmissionItem)
|
||||
class SubmissionItemAdmin(ModelAdmin):
|
||||
"""Admin for SubmissionItem model."""
|
||||
|
||||
list_display = [
|
||||
'field_label',
|
||||
'submission_title',
|
||||
'change_type_badge',
|
||||
'status_badge',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'reviewed_by',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'status',
|
||||
'change_type',
|
||||
'is_required',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'field_name',
|
||||
'field_label',
|
||||
'submission__title',
|
||||
]
|
||||
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'submission',
|
||||
'field_name',
|
||||
'field_label',
|
||||
'old_value',
|
||||
'new_value',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'status',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Item Info', {
|
||||
'fields': (
|
||||
'id',
|
||||
'submission',
|
||||
'field_name',
|
||||
'field_label',
|
||||
'change_type',
|
||||
'is_required',
|
||||
'order',
|
||||
)
|
||||
}),
|
||||
('Values', {
|
||||
'fields': (
|
||||
'old_value',
|
||||
'new_value',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
)
|
||||
}),
|
||||
('Review Info', {
|
||||
'fields': (
|
||||
'status',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'rejection_reason',
|
||||
)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': (
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='Submission')
|
||||
def submission_title(self, obj):
|
||||
"""Display submission title with link."""
|
||||
url = reverse('admin:moderation_contentsubmission_change', args=[obj.submission.id])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.submission.title)
|
||||
|
||||
@display(description='Type', ordering='change_type')
|
||||
def change_type_badge(self, obj):
|
||||
"""Display colored change type badge."""
|
||||
colors = {
|
||||
'add': 'green',
|
||||
'modify': 'blue',
|
||||
'remove': 'red',
|
||||
}
|
||||
color = colors.get(obj.change_type, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 2px 6px; '
|
||||
'border-radius: 3px; font-size: 10px;">{}</span>',
|
||||
color,
|
||||
obj.get_change_type_display()
|
||||
)
|
||||
|
||||
@display(description='Status', ordering='status')
|
||||
def status_badge(self, obj):
|
||||
"""Display colored status badge."""
|
||||
colors = {
|
||||
'pending': 'orange',
|
||||
'approved': 'green',
|
||||
'rejected': 'red',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 2px 6px; '
|
||||
'border-radius: 3px; font-size: 10px;">{}</span>',
|
||||
color,
|
||||
obj.get_status_display()
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('submission', 'reviewed_by')
|
||||
|
||||
|
||||
@admin.register(ModerationLock)
|
||||
class ModerationLockAdmin(ModelAdmin):
|
||||
"""Admin for ModerationLock model."""
|
||||
|
||||
list_display = [
|
||||
'submission_title',
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
'status_indicator',
|
||||
'lock_duration',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'is_active',
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'submission__title',
|
||||
'locked_by__email',
|
||||
'locked_by__username',
|
||||
]
|
||||
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'submission',
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
'is_active',
|
||||
'released_at',
|
||||
'lock_duration',
|
||||
'is_expired_display',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Lock Info', {
|
||||
'fields': (
|
||||
'id',
|
||||
'submission',
|
||||
'locked_by',
|
||||
'is_active',
|
||||
)
|
||||
}),
|
||||
('Timing', {
|
||||
'fields': (
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
'released_at',
|
||||
'lock_duration',
|
||||
'is_expired_display',
|
||||
)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': (
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='Submission')
|
||||
def submission_title(self, obj):
|
||||
"""Display submission title with link."""
|
||||
url = reverse('admin:moderation_contentsubmission_change', args=[obj.submission.id])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.submission.title)
|
||||
|
||||
@display(description='Status')
|
||||
def status_indicator(self, obj):
|
||||
"""Display lock status."""
|
||||
if not obj.is_active:
|
||||
return format_html(
|
||||
'<span style="color: gray;">🔓 Released</span>'
|
||||
)
|
||||
elif obj.is_expired():
|
||||
return format_html(
|
||||
'<span style="color: orange;">⏰ Expired</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="color: green;">🔒 Active</span>'
|
||||
)
|
||||
|
||||
@display(description='Duration')
|
||||
def lock_duration(self, obj):
|
||||
"""Display lock duration."""
|
||||
if obj.released_at:
|
||||
duration = obj.released_at - obj.locked_at
|
||||
else:
|
||||
duration = timezone.now() - obj.locked_at
|
||||
|
||||
minutes = int(duration.total_seconds() / 60)
|
||||
return f"{minutes} minutes"
|
||||
|
||||
@display(description='Expired?')
|
||||
def is_expired_display(self, obj):
|
||||
"""Display if lock is expired."""
|
||||
if not obj.is_active:
|
||||
return 'N/A (Released)'
|
||||
return 'Yes' if obj.is_expired() else 'No'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('submission', 'locked_by')
|
||||
11
django-backend/apps/moderation/apps.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""
|
||||
Moderation app configuration.
|
||||
"""
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ModerationConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.moderation'
|
||||
verbose_name = 'Moderation'
|
||||
454
django-backend/apps/moderation/migrations/0001_initial.py
Normal file
@@ -0,0 +1,454 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 17:40
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_fsm
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ContentSubmission",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
django_fsm.FSMField(
|
||||
choices=[
|
||||
("draft", "Draft"),
|
||||
("pending", "Pending Review"),
|
||||
("reviewing", "Under Review"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
],
|
||||
db_index=True,
|
||||
default="draft",
|
||||
help_text="Current submission state (managed by FSM)",
|
||||
max_length=20,
|
||||
protected=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity_id",
|
||||
models.UUIDField(help_text="ID of the entity being modified"),
|
||||
),
|
||||
(
|
||||
"submission_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("create", "Create"),
|
||||
("update", "Update"),
|
||||
("delete", "Delete"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of operation (create, update, delete)",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"title",
|
||||
models.CharField(
|
||||
help_text="Brief description of changes", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Detailed description of changes"
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
help_text="When the submission was locked for review",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
help_text="When the submission was reviewed",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"rejection_reason",
|
||||
models.TextField(
|
||||
blank=True, help_text="Reason for rejection (if rejected)"
|
||||
),
|
||||
),
|
||||
(
|
||||
"source",
|
||||
models.CharField(
|
||||
default="web",
|
||||
help_text="Source of submission (web, api, mobile, etc.)",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"ip_address",
|
||||
models.GenericIPAddressField(
|
||||
blank=True, help_text="IP address of submitter", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"user_agent",
|
||||
models.CharField(
|
||||
blank=True, help_text="User agent of submitter", max_length=500
|
||||
),
|
||||
),
|
||||
(
|
||||
"metadata",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
default=dict,
|
||||
help_text="Additional submission metadata",
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity_type",
|
||||
models.ForeignKey(
|
||||
help_text="Type of entity being modified",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator currently reviewing this submission",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="locked_submissions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator who reviewed this submission",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="reviewed_submissions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
help_text="User who submitted the changes",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="submissions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Content Submission",
|
||||
"verbose_name_plural": "Content Submissions",
|
||||
"db_table": "content_submissions",
|
||||
"ordering": ["-created"],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SubmissionItem",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"field_name",
|
||||
models.CharField(
|
||||
help_text="Name of the field being changed", max_length=100
|
||||
),
|
||||
),
|
||||
(
|
||||
"field_label",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="Human-readable field label",
|
||||
max_length=200,
|
||||
),
|
||||
),
|
||||
(
|
||||
"old_value",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="Previous value (null for new fields)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"new_value",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="New value (null for deletions)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
],
|
||||
db_index=True,
|
||||
default="pending",
|
||||
help_text="Status of this individual item",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_at",
|
||||
models.DateTimeField(
|
||||
blank=True, help_text="When this item was reviewed", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"rejection_reason",
|
||||
models.TextField(
|
||||
blank=True, help_text="Reason for rejecting this specific item"
|
||||
),
|
||||
),
|
||||
(
|
||||
"change_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("add", "Add"),
|
||||
("modify", "Modify"),
|
||||
("remove", "Remove"),
|
||||
],
|
||||
default="modify",
|
||||
help_text="Type of change",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_required",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether this change is required for the submission",
|
||||
),
|
||||
),
|
||||
(
|
||||
"order",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Display order within submission"
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator who reviewed this item",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="reviewed_items",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.ForeignKey(
|
||||
help_text="Parent submission",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="items",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Submission Item",
|
||||
"verbose_name_plural": "Submission Items",
|
||||
"db_table": "submission_items",
|
||||
"ordering": ["submission", "order", "created"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["submission", "status"],
|
||||
name="submission__submiss_71cf2f_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["status"], name="submission__status_61deb1_idx"
|
||||
),
|
||||
],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ModerationLock",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_at",
|
||||
models.DateTimeField(
|
||||
auto_now_add=True, help_text="When the lock was acquired"
|
||||
),
|
||||
),
|
||||
("expires_at", models.DateTimeField(help_text="When the lock expires")),
|
||||
(
|
||||
"is_active",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=True,
|
||||
help_text="Whether the lock is currently active",
|
||||
),
|
||||
),
|
||||
(
|
||||
"released_at",
|
||||
models.DateTimeField(
|
||||
blank=True, help_text="When the lock was released", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_by",
|
||||
models.ForeignKey(
|
||||
help_text="User who holds the lock",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="moderation_locks",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.OneToOneField(
|
||||
help_text="Submission that is locked",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="lock_record",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Moderation Lock",
|
||||
"verbose_name_plural": "Moderation Locks",
|
||||
"db_table": "moderation_locks",
|
||||
"ordering": ["-locked_at"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["is_active", "expires_at"],
|
||||
name="moderation__is_acti_ecf427_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["locked_by", "is_active"],
|
||||
name="moderation__locked__d5cdfb_idx",
|
||||
),
|
||||
],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["status", "created"], name="content_sub_status_a8d552_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["user", "status"], name="content_sub_user_id_019595_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["entity_type", "entity_id"],
|
||||
name="content_sub_entity__d0f313_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["locked_by", "locked_at"], name="content_sub_locked__feb2b3_idx"
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-09 15:45
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("moderation", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="contentsubmission",
|
||||
name="submission_type",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
("create", "Create"),
|
||||
("update", "Update"),
|
||||
("delete", "Delete"),
|
||||
("review", "Review"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of operation (create, update, delete)",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
]
|
||||
478
django-backend/apps/moderation/models.py
Normal file
@@ -0,0 +1,478 @@
|
||||
"""
|
||||
Moderation models for ThrillWiki.
|
||||
|
||||
This module implements the content submission and approval workflow with:
|
||||
- State machine using django-fsm
|
||||
- Atomic transaction support for approvals
|
||||
- 15-minute review lock mechanism
|
||||
- Selective approval of individual items
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django_fsm import FSMField, transition
|
||||
from apps.core.models import BaseModel
|
||||
|
||||
|
||||
class ContentSubmission(BaseModel):
|
||||
"""
|
||||
Main submission model with FSM state machine.
|
||||
|
||||
Represents a batch of changes submitted by a user for moderation.
|
||||
Can contain multiple SubmissionItem objects representing individual field changes.
|
||||
"""
|
||||
|
||||
# State choices for FSM
|
||||
STATE_DRAFT = 'draft'
|
||||
STATE_PENDING = 'pending'
|
||||
STATE_REVIEWING = 'reviewing'
|
||||
STATE_APPROVED = 'approved'
|
||||
STATE_REJECTED = 'rejected'
|
||||
|
||||
STATE_CHOICES = [
|
||||
(STATE_DRAFT, 'Draft'),
|
||||
(STATE_PENDING, 'Pending Review'),
|
||||
(STATE_REVIEWING, 'Under Review'),
|
||||
(STATE_APPROVED, 'Approved'),
|
||||
(STATE_REJECTED, 'Rejected'),
|
||||
]
|
||||
|
||||
# FSM State field
|
||||
status = FSMField(
|
||||
max_length=20,
|
||||
choices=STATE_CHOICES,
|
||||
default=STATE_DRAFT,
|
||||
db_index=True,
|
||||
protected=True, # Prevents direct status changes
|
||||
help_text="Current submission state (managed by FSM)"
|
||||
)
|
||||
|
||||
# Submitter
|
||||
user = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='submissions',
|
||||
help_text="User who submitted the changes"
|
||||
)
|
||||
|
||||
# Entity being modified (generic relation)
|
||||
entity_type = models.ForeignKey(
|
||||
ContentType,
|
||||
on_delete=models.CASCADE,
|
||||
help_text="Type of entity being modified"
|
||||
)
|
||||
entity_id = models.UUIDField(
|
||||
help_text="ID of the entity being modified"
|
||||
)
|
||||
entity = GenericForeignKey('entity_type', 'entity_id')
|
||||
|
||||
# Submission type
|
||||
SUBMISSION_TYPE_CHOICES = [
|
||||
('create', 'Create'),
|
||||
('update', 'Update'),
|
||||
('delete', 'Delete'),
|
||||
('review', 'Review'),
|
||||
]
|
||||
|
||||
submission_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=SUBMISSION_TYPE_CHOICES,
|
||||
db_index=True,
|
||||
help_text="Type of operation (create, update, delete)"
|
||||
)
|
||||
|
||||
# Submission details
|
||||
title = models.CharField(
|
||||
max_length=255,
|
||||
help_text="Brief description of changes"
|
||||
)
|
||||
description = models.TextField(
|
||||
blank=True,
|
||||
help_text="Detailed description of changes"
|
||||
)
|
||||
|
||||
# Review lock mechanism (15-minute lock)
|
||||
locked_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='locked_submissions',
|
||||
help_text="Moderator currently reviewing this submission"
|
||||
)
|
||||
locked_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the submission was locked for review"
|
||||
)
|
||||
|
||||
# Review details
|
||||
reviewed_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='reviewed_submissions',
|
||||
help_text="Moderator who reviewed this submission"
|
||||
)
|
||||
reviewed_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the submission was reviewed"
|
||||
)
|
||||
rejection_reason = models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for rejection (if rejected)"
|
||||
)
|
||||
|
||||
# Metadata
|
||||
source = models.CharField(
|
||||
max_length=50,
|
||||
default='web',
|
||||
help_text="Source of submission (web, api, mobile, etc.)"
|
||||
)
|
||||
ip_address = models.GenericIPAddressField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="IP address of submitter"
|
||||
)
|
||||
user_agent = models.CharField(
|
||||
max_length=500,
|
||||
blank=True,
|
||||
help_text="User agent of submitter"
|
||||
)
|
||||
|
||||
# Additional data
|
||||
metadata = models.JSONField(
|
||||
default=dict,
|
||||
blank=True,
|
||||
help_text="Additional submission metadata"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'content_submissions'
|
||||
ordering = ['-created']
|
||||
indexes = [
|
||||
models.Index(fields=['status', 'created']),
|
||||
models.Index(fields=['user', 'status']),
|
||||
models.Index(fields=['entity_type', 'entity_id']),
|
||||
models.Index(fields=['locked_by', 'locked_at']),
|
||||
]
|
||||
verbose_name = 'Content Submission'
|
||||
verbose_name_plural = 'Content Submissions'
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.get_submission_type_display()} - {self.title} ({self.get_status_display()})"
|
||||
|
||||
# FSM Transitions
|
||||
|
||||
@transition(field=status, source=STATE_DRAFT, target=STATE_PENDING)
|
||||
def submit(self):
|
||||
"""Submit for review - moves from draft to pending"""
|
||||
pass
|
||||
|
||||
@transition(field=status, source=STATE_PENDING, target=STATE_REVIEWING)
|
||||
def start_review(self, reviewer):
|
||||
"""Lock submission for review"""
|
||||
self.locked_by = reviewer
|
||||
self.locked_at = timezone.now()
|
||||
|
||||
@transition(field=status, source=STATE_REVIEWING, target=STATE_APPROVED)
|
||||
def approve(self, reviewer):
|
||||
"""Approve submission"""
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.locked_by = None
|
||||
self.locked_at = None
|
||||
|
||||
@transition(field=status, source=STATE_REVIEWING, target=STATE_REJECTED)
|
||||
def reject(self, reviewer, reason):
|
||||
"""Reject submission"""
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.rejection_reason = reason
|
||||
self.locked_by = None
|
||||
self.locked_at = None
|
||||
|
||||
@transition(field=status, source=STATE_REVIEWING, target=STATE_PENDING)
|
||||
def unlock(self):
|
||||
"""Unlock submission (timeout or manual unlock)"""
|
||||
self.locked_by = None
|
||||
self.locked_at = None
|
||||
|
||||
# Helper methods
|
||||
|
||||
def is_locked(self):
|
||||
"""Check if submission is currently locked"""
|
||||
if not self.locked_by or not self.locked_at:
|
||||
return False
|
||||
|
||||
# Check if lock has expired (15 minutes)
|
||||
lock_duration = timezone.now() - self.locked_at
|
||||
return lock_duration.total_seconds() < 15 * 60
|
||||
|
||||
def can_review(self, user):
|
||||
"""Check if user can review this submission"""
|
||||
if self.status != self.STATE_REVIEWING:
|
||||
return False
|
||||
|
||||
# Check if locked by another user
|
||||
if self.locked_by and self.locked_by != user:
|
||||
return not self.is_locked()
|
||||
|
||||
return True
|
||||
|
||||
def get_items_count(self):
|
||||
"""Get count of submission items"""
|
||||
return self.items.count()
|
||||
|
||||
def get_approved_items_count(self):
|
||||
"""Get count of approved items"""
|
||||
return self.items.filter(status='approved').count()
|
||||
|
||||
def get_rejected_items_count(self):
|
||||
"""Get count of rejected items"""
|
||||
return self.items.filter(status='rejected').count()
|
||||
|
||||
|
||||
class SubmissionItem(BaseModel):
|
||||
"""
|
||||
Individual change within a submission.
|
||||
|
||||
Represents a single field change (or entity creation/deletion).
|
||||
Supports selective approval - each item can be approved/rejected independently.
|
||||
"""
|
||||
|
||||
STATUS_CHOICES = [
|
||||
('pending', 'Pending'),
|
||||
('approved', 'Approved'),
|
||||
('rejected', 'Rejected'),
|
||||
]
|
||||
|
||||
# Parent submission
|
||||
submission = models.ForeignKey(
|
||||
ContentSubmission,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='items',
|
||||
help_text="Parent submission"
|
||||
)
|
||||
|
||||
# Item details
|
||||
field_name = models.CharField(
|
||||
max_length=100,
|
||||
help_text="Name of the field being changed"
|
||||
)
|
||||
field_label = models.CharField(
|
||||
max_length=200,
|
||||
blank=True,
|
||||
help_text="Human-readable field label"
|
||||
)
|
||||
|
||||
# Values (stored as JSON for flexibility)
|
||||
old_value = models.JSONField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Previous value (null for new fields)"
|
||||
)
|
||||
new_value = models.JSONField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="New value (null for deletions)"
|
||||
)
|
||||
|
||||
# Item status (for selective approval)
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=STATUS_CHOICES,
|
||||
default='pending',
|
||||
db_index=True,
|
||||
help_text="Status of this individual item"
|
||||
)
|
||||
|
||||
# Review details (for selective approval)
|
||||
reviewed_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='reviewed_items',
|
||||
help_text="Moderator who reviewed this item"
|
||||
)
|
||||
reviewed_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When this item was reviewed"
|
||||
)
|
||||
rejection_reason = models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for rejecting this specific item"
|
||||
)
|
||||
|
||||
# Metadata
|
||||
change_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=[
|
||||
('add', 'Add'),
|
||||
('modify', 'Modify'),
|
||||
('remove', 'Remove'),
|
||||
],
|
||||
default='modify',
|
||||
help_text="Type of change"
|
||||
)
|
||||
|
||||
is_required = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether this change is required for the submission"
|
||||
)
|
||||
|
||||
order = models.IntegerField(
|
||||
default=0,
|
||||
help_text="Display order within submission"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'submission_items'
|
||||
ordering = ['submission', 'order', 'created']
|
||||
indexes = [
|
||||
models.Index(fields=['submission', 'status']),
|
||||
models.Index(fields=['status']),
|
||||
]
|
||||
verbose_name = 'Submission Item'
|
||||
verbose_name_plural = 'Submission Items'
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.submission.title} - {self.field_label or self.field_name}"
|
||||
|
||||
def approve(self, reviewer):
|
||||
"""Approve this item"""
|
||||
self.status = 'approved'
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.save(update_fields=['status', 'reviewed_by', 'reviewed_at', 'modified'])
|
||||
|
||||
def reject(self, reviewer, reason=''):
|
||||
"""Reject this item"""
|
||||
self.status = 'rejected'
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.rejection_reason = reason
|
||||
self.save(update_fields=['status', 'reviewed_by', 'reviewed_at', 'rejection_reason', 'modified'])
|
||||
|
||||
def get_display_value(self, value):
|
||||
"""Get human-readable display value"""
|
||||
if value is None:
|
||||
return 'None'
|
||||
if isinstance(value, bool):
|
||||
return 'Yes' if value else 'No'
|
||||
if isinstance(value, (list, dict)):
|
||||
return str(value)
|
||||
return str(value)
|
||||
|
||||
@property
|
||||
def old_value_display(self):
|
||||
"""Human-readable old value"""
|
||||
return self.get_display_value(self.old_value)
|
||||
|
||||
@property
|
||||
def new_value_display(self):
|
||||
"""Human-readable new value"""
|
||||
return self.get_display_value(self.new_value)
|
||||
|
||||
|
||||
class ModerationLock(BaseModel):
|
||||
"""
|
||||
Lock record for submissions under review.
|
||||
|
||||
Provides additional tracking beyond the ContentSubmission lock fields.
|
||||
Helps with monitoring and debugging lock issues.
|
||||
"""
|
||||
|
||||
submission = models.OneToOneField(
|
||||
ContentSubmission,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='lock_record',
|
||||
help_text="Submission that is locked"
|
||||
)
|
||||
|
||||
locked_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='moderation_locks',
|
||||
help_text="User who holds the lock"
|
||||
)
|
||||
|
||||
locked_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="When the lock was acquired"
|
||||
)
|
||||
|
||||
expires_at = models.DateTimeField(
|
||||
help_text="When the lock expires"
|
||||
)
|
||||
|
||||
is_active = models.BooleanField(
|
||||
default=True,
|
||||
db_index=True,
|
||||
help_text="Whether the lock is currently active"
|
||||
)
|
||||
|
||||
released_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the lock was released"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'moderation_locks'
|
||||
ordering = ['-locked_at']
|
||||
indexes = [
|
||||
models.Index(fields=['is_active', 'expires_at']),
|
||||
models.Index(fields=['locked_by', 'is_active']),
|
||||
]
|
||||
verbose_name = 'Moderation Lock'
|
||||
verbose_name_plural = 'Moderation Locks'
|
||||
|
||||
def __str__(self):
|
||||
return f"Lock on {self.submission.title} by {self.locked_by.email}"
|
||||
|
||||
def is_expired(self):
|
||||
"""Check if lock has expired"""
|
||||
return timezone.now() > self.expires_at
|
||||
|
||||
def release(self):
|
||||
"""Release the lock"""
|
||||
self.is_active = False
|
||||
self.released_at = timezone.now()
|
||||
self.save(update_fields=['is_active', 'released_at', 'modified'])
|
||||
|
||||
def extend(self, minutes=15):
|
||||
"""Extend the lock duration"""
|
||||
from datetime import timedelta
|
||||
self.expires_at = timezone.now() + timedelta(minutes=minutes)
|
||||
self.save(update_fields=['expires_at', 'modified'])
|
||||
|
||||
@classmethod
|
||||
def cleanup_expired(cls):
|
||||
"""Cleanup expired locks (for periodic task)"""
|
||||
expired_locks = cls.objects.filter(
|
||||
is_active=True,
|
||||
expires_at__lt=timezone.now()
|
||||
)
|
||||
|
||||
count = 0
|
||||
for lock in expired_locks:
|
||||
# Release lock
|
||||
lock.release()
|
||||
|
||||
# Unlock submission if still in reviewing state
|
||||
submission = lock.submission
|
||||
if submission.status == ContentSubmission.STATE_REVIEWING:
|
||||
submission.unlock()
|
||||
submission.save()
|
||||
|
||||
count += 1
|
||||
|
||||
return count
|
||||
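The FSM transitions defined above are intended to be driven in the order sketched below; this is an illustrative sketch only (moderator and submission_id are placeholders), mirroring how the service layer calls these methods. Each transition must be followed by save(), since django-fsm updates the status field in memory but does not persist it.

# Illustrative lifecycle sketch; not part of the diff.
submission = ContentSubmission.objects.get(id=submission_id)  # submission_id is a placeholder
submission.submit()                 # draft -> pending
submission.save()

submission.start_review(moderator)  # pending -> reviewing, sets the 15-minute lock fields
submission.save()

if submission.is_locked():
    submission.approve(moderator)   # reviewing -> approved, clears the lock
    submission.save()

# A periodic task can release stale locks and return reviewing submissions to pending
released_count = ModerationLock.cleanup_expired()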
638
django-backend/apps/moderation/services.py
Normal file
@@ -0,0 +1,638 @@
|
||||
"""
|
||||
Moderation services for ThrillWiki.
|
||||
|
||||
This module provides business logic for the content moderation workflow:
|
||||
- Creating submissions
|
||||
- Starting reviews with locks
|
||||
- Approving submissions with atomic transactions
|
||||
- Selective approval of individual items
|
||||
- Rejecting submissions
|
||||
- Unlocking expired submissions
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError, PermissionDenied
|
||||
|
||||
from apps.moderation.models import ContentSubmission, SubmissionItem, ModerationLock
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ModerationService:
|
||||
"""
|
||||
Service class for moderation operations.
|
||||
|
||||
All public methods use atomic transactions to ensure data integrity.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def create_submission(
|
||||
user,
|
||||
entity,
|
||||
submission_type,
|
||||
title,
|
||||
description='',
|
||||
items_data=None,
|
||||
metadata=None,
|
||||
auto_submit=True,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Create a new content submission with items.
|
||||
|
||||
Args:
|
||||
user: User creating the submission
|
||||
entity: Entity being modified (Park, Ride, Company, etc.)
|
||||
submission_type: 'create', 'update', or 'delete'
|
||||
title: Brief description of changes
|
||||
description: Detailed description (optional)
|
||||
items_data: List of dicts with item details:
|
||||
[
|
||||
{
|
||||
'field_name': 'name',
|
||||
'field_label': 'Park Name',
|
||||
'old_value': 'Old Name',
|
||||
'new_value': 'New Name',
|
||||
'change_type': 'modify',
|
||||
'is_required': False,
|
||||
'order': 0
|
||||
},
|
||||
...
|
||||
]
|
||||
metadata: Additional metadata dict
|
||||
auto_submit: Whether to automatically submit (move to pending state)
|
||||
**kwargs: Additional submission fields (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
"""
|
||||
# Get ContentType for entity
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
# Create submission
|
||||
submission = ContentSubmission.objects.create(
|
||||
user=user,
|
||||
entity_type=entity_type,
|
||||
entity_id=entity.id,
|
||||
submission_type=submission_type,
|
||||
title=title,
|
||||
description=description,
|
||||
metadata=metadata or {},
|
||||
source=kwargs.get('source', 'web'),
|
||||
ip_address=kwargs.get('ip_address'),
|
||||
user_agent=kwargs.get('user_agent', '')
|
||||
)
|
||||
|
||||
# Create submission items
|
||||
if items_data:
|
||||
for item_data in items_data:
|
||||
SubmissionItem.objects.create(
|
||||
submission=submission,
|
||||
field_name=item_data['field_name'],
|
||||
field_label=item_data.get('field_label', item_data['field_name']),
|
||||
old_value=item_data.get('old_value'),
|
||||
new_value=item_data.get('new_value'),
|
||||
change_type=item_data.get('change_type', 'modify'),
|
||||
is_required=item_data.get('is_required', False),
|
||||
order=item_data.get('order', 0)
|
||||
)
|
||||
|
||||
# Auto-submit if requested
|
||||
if auto_submit:
|
||||
submission.submit()
|
||||
submission.save()
|
||||
|
||||
return submission
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def start_review(submission_id, reviewer):
|
||||
"""
|
||||
Start reviewing a submission (lock it).
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User starting the review
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be reviewed
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check if user has permission to review
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission is in correct state
|
||||
if submission.status != ContentSubmission.STATE_PENDING:
|
||||
raise ValidationError(f"Submission must be pending to start review (current: {submission.status})")
|
||||
|
||||
# Check if already locked by another user
|
||||
if submission.locked_by and submission.locked_by != reviewer:
|
||||
if submission.is_locked():
|
||||
raise ValidationError(f"Submission is locked by {submission.locked_by.email}")
|
||||
|
||||
# Start review (FSM transition)
|
||||
submission.start_review(reviewer)
|
||||
submission.save()
|
||||
|
||||
# Create lock record
|
||||
expires_at = timezone.now() + timedelta(minutes=15)
|
||||
ModerationLock.objects.update_or_create(
|
||||
submission=submission,
|
||||
defaults={
|
||||
'locked_by': reviewer,
|
||||
'expires_at': expires_at,
|
||||
'is_active': True,
|
||||
'released_at': None
|
||||
}
|
||||
)
|
||||
|
||||
return submission
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def approve_submission(submission_id, reviewer):
|
||||
"""
|
||||
Approve an entire submission and apply all changes.
|
||||
|
||||
This method uses atomic transactions to ensure all-or-nothing behavior.
|
||||
If any part fails, the entire operation is rolled back.
|
||||
|
||||
Handles different submission types polymorphically:
|
||||
- 'review': Delegates to ReviewSubmissionService to create Review record
|
||||
- 'create'/'update'/'delete': Applies changes to entity directly
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User approving the submission
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be approved
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check permission
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission can be reviewed
|
||||
if not submission.can_review(reviewer):
|
||||
raise ValidationError("Submission cannot be reviewed at this time")
|
||||
|
||||
# Get all pending items
|
||||
items = submission.items.filter(status='pending')
|
||||
|
||||
# POLYMORPHIC HANDLING BASED ON SUBMISSION TYPE
|
||||
if submission.submission_type == 'review':
|
||||
# Handle review submissions - delegate to ReviewSubmissionService
|
||||
logger.info(f"Approving review submission {submission_id}")
|
||||
|
||||
from apps.reviews.services import ReviewSubmissionService
|
||||
review = ReviewSubmissionService.apply_review_approval(submission)
|
||||
|
||||
# Mark all items as approved
|
||||
for item in items:
|
||||
item.approve(reviewer)
|
||||
|
||||
logger.info(f"Review created: {review.id} from submission {submission_id}")
|
||||
|
||||
elif submission.submission_type in ['create', 'update', 'delete']:
|
||||
# Handle entity submissions
|
||||
entity = submission.entity
|
||||
if not entity:
|
||||
raise ValidationError("Entity no longer exists")
|
||||
|
||||
logger.info(f"Approving {submission.submission_type} submission {submission_id}")
|
||||
|
||||
if submission.submission_type == 'create':
|
||||
# Entity was created in draft state, now apply all fields and make visible
|
||||
for item in items:
|
||||
if item.change_type in ['add', 'modify']:
|
||||
setattr(entity, item.field_name, item.new_value)
|
||||
item.approve(reviewer)
|
||||
entity.save()
|
||||
|
||||
elif submission.submission_type == 'update':
|
||||
# Apply updates to existing entity
|
||||
for item in items:
|
||||
if item.change_type in ['add', 'modify']:
|
||||
setattr(entity, item.field_name, item.new_value)
|
||||
elif item.change_type == 'remove':
|
||||
setattr(entity, item.field_name, None)
|
||||
item.approve(reviewer)
|
||||
entity.save()
|
||||
|
||||
elif submission.submission_type == 'delete':
|
||||
# Check deletion type from metadata
|
||||
deletion_type = submission.metadata.get('deletion_type', 'soft')
|
||||
|
||||
if deletion_type == 'soft':
|
||||
# Soft delete: Apply status change to 'closed'
|
||||
for item in items:
|
||||
if item.field_name == 'status':
|
||||
# Apply status change
|
||||
setattr(entity, 'status', 'closed')
|
||||
item.approve(reviewer)
|
||||
entity.save()
|
||||
logger.info(f"Entity soft-deleted (status=closed): {entity.id}")
|
||||
else:
|
||||
# Hard delete: Remove from database
|
||||
for item in items:
|
||||
item.approve(reviewer)
|
||||
entity.delete()
|
||||
logger.info(f"Entity hard-deleted from database: {entity.id}")
|
||||
|
||||
logger.info(f"Entity changes applied for submission {submission_id}")
|
||||
|
||||
else:
|
||||
raise ValidationError(f"Unknown submission type: {submission.submission_type}")
|
||||
|
||||
# Approve submission (FSM transition)
|
||||
submission.approve(reviewer)
|
||||
submission.save()
|
||||
|
||||
# Release lock
|
||||
try:
|
||||
lock = ModerationLock.objects.get(submission=submission, is_active=True)
|
||||
lock.release()
|
||||
except ModerationLock.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Send notification email asynchronously
|
||||
try:
|
||||
from apps.moderation.tasks import send_moderation_notification
|
||||
send_moderation_notification.delay(str(submission.id), 'approved')
|
||||
except Exception as e:
|
||||
# Don't fail the approval if email fails to queue
|
||||
logger.warning(f"Failed to queue approval notification: {str(e)}")
|
||||
|
||||
return submission
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def approve_selective(submission_id, reviewer, item_ids):
|
||||
"""
|
||||
Approve only specific items in a submission (selective approval).
|
||||
|
||||
This allows moderators to approve some changes while rejecting others.
|
||||
Uses atomic transactions for data integrity.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User approving the items
|
||||
item_ids: List of item UUIDs to approve
|
||||
|
||||
Returns:
|
||||
dict with counts: {'approved': N, 'total': M}
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be reviewed
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check permission
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission can be reviewed
|
||||
if not submission.can_review(reviewer):
|
||||
raise ValidationError("Submission cannot be reviewed at this time")
|
||||
|
||||
# Get entity
|
||||
entity = submission.entity
|
||||
if not entity:
|
||||
raise ValidationError("Entity no longer exists")
|
||||
|
||||
# Get items to approve
|
||||
items_to_approve = submission.items.filter(
|
||||
id__in=item_ids,
|
||||
status='pending'
|
||||
)
|
||||
|
||||
approved_count = 0
|
||||
for item in items_to_approve:
|
||||
# Apply change to entity
|
||||
if item.change_type in ['add', 'modify']:
|
||||
setattr(entity, item.field_name, item.new_value)
|
||||
elif item.change_type == 'remove':
|
||||
setattr(entity, item.field_name, None)
|
||||
|
||||
# Mark item as approved
|
||||
item.approve(reviewer)
|
||||
approved_count += 1
|
||||
|
||||
# Save entity if any changes were made
|
||||
if approved_count > 0:
|
||||
entity.save()
|
||||
|
||||
# Check if all items are now reviewed
|
||||
pending_count = submission.items.filter(status='pending').count()
|
||||
|
||||
if pending_count == 0:
|
||||
# All items reviewed - mark submission as approved
|
||||
submission.approve(reviewer)
|
||||
submission.save()
|
||||
|
||||
# Release lock
|
||||
try:
|
||||
lock = ModerationLock.objects.get(submission=submission, is_active=True)
|
||||
lock.release()
|
||||
except ModerationLock.DoesNotExist:
|
||||
pass
|
||||
|
||||
return {
|
||||
'approved': approved_count,
|
||||
'total': submission.items.count(),
|
||||
'pending': pending_count,
|
||||
'submission_approved': pending_count == 0
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def reject_submission(submission_id, reviewer, reason):
|
||||
"""
|
||||
Reject an entire submission.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User rejecting the submission
|
||||
reason: Reason for rejection
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be rejected
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check permission
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission can be reviewed
|
||||
if not submission.can_review(reviewer):
|
||||
raise ValidationError("Submission cannot be reviewed at this time")
|
||||
|
||||
# Reject all pending items
|
||||
items = submission.items.filter(status='pending')
|
||||
for item in items:
|
||||
item.reject(reviewer, reason)
|
||||
|
||||
# Reject submission (FSM transition)
|
||||
submission.reject(reviewer, reason)
|
||||
submission.save()
|
||||
|
||||
# Release lock
|
||||
try:
|
||||
lock = ModerationLock.objects.get(submission=submission, is_active=True)
|
||||
lock.release()
|
||||
except ModerationLock.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Send notification email asynchronously
|
||||
try:
|
||||
from apps.moderation.tasks import send_moderation_notification
|
||||
send_moderation_notification.delay(str(submission.id), 'rejected')
|
||||
except Exception as e:
|
||||
# Don't fail the rejection if email fails to queue
|
||||
logger.warning(f"Failed to queue rejection notification: {str(e)}")
|
||||
|
||||
return submission
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def reject_selective(submission_id, reviewer, item_ids, reason=''):
|
||||
"""
|
||||
Reject specific items in a submission.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User rejecting the items
|
||||
item_ids: List of item UUIDs to reject
|
||||
reason: Reason for rejection (optional)
|
||||
|
||||
Returns:
|
||||
dict with counts: {'rejected': N, 'total': M}
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be reviewed
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check permission
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission can be reviewed
|
||||
if not submission.can_review(reviewer):
|
||||
raise ValidationError("Submission cannot be reviewed at this time")
|
||||
|
||||
# Get items to reject
|
||||
items_to_reject = submission.items.filter(
|
||||
id__in=item_ids,
|
||||
status='pending'
|
||||
)
|
||||
|
||||
rejected_count = 0
|
||||
for item in items_to_reject:
|
||||
item.reject(reviewer, reason)
|
||||
rejected_count += 1
|
||||
|
||||
# Check if all items are now reviewed
|
||||
pending_count = submission.items.filter(status='pending').count()
|
||||
|
||||
if pending_count == 0:
|
||||
# All items reviewed
|
||||
approved_count = submission.items.filter(status='approved').count()
|
||||
|
||||
if approved_count > 0:
|
||||
# Some items approved - mark submission as approved
|
||||
submission.approve(reviewer)
|
||||
submission.save()
|
||||
else:
|
||||
# All items rejected - mark submission as rejected
|
||||
submission.reject(reviewer, "All items rejected")
|
||||
submission.save()
|
||||
|
||||
# Release lock
|
||||
try:
|
||||
lock = ModerationLock.objects.get(submission=submission, is_active=True)
|
||||
lock.release()
|
||||
except ModerationLock.DoesNotExist:
|
||||
pass
|
||||
|
||||
return {
|
||||
'rejected': rejected_count,
|
||||
'total': submission.items.count(),
|
||||
'pending': pending_count,
|
||||
'submission_complete': pending_count == 0
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def unlock_submission(submission_id):
|
||||
"""
|
||||
Manually unlock a submission.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
if submission.status == ContentSubmission.STATE_REVIEWING:
|
||||
submission.unlock()
|
||||
submission.save()
|
||||
|
||||
# Release lock record
|
||||
try:
|
||||
lock = ModerationLock.objects.get(submission=submission, is_active=True)
|
||||
lock.release()
|
||||
except ModerationLock.DoesNotExist:
|
||||
pass
|
||||
|
||||
return submission
|
||||
|
||||
@staticmethod
|
||||
def cleanup_expired_locks():
|
||||
"""
|
||||
Cleanup expired locks and unlock submissions.
|
||||
|
||||
This should be called periodically (e.g., every 5 minutes via Celery).
|
||||
|
||||
Returns:
|
||||
int: Number of locks cleaned up
|
||||
"""
|
||||
return ModerationLock.cleanup_expired()
|
||||
|
||||
@staticmethod
|
||||
def get_queue(status=None, user=None, limit=50, offset=0):
|
||||
"""
|
||||
Get moderation queue with filters.
|
||||
|
||||
Args:
|
||||
status: Filter by status (optional)
|
||||
user: Filter by submitter (optional)
|
||||
limit: Maximum results
|
||||
offset: Pagination offset
|
||||
|
||||
Returns:
|
||||
QuerySet of ContentSubmission objects
|
||||
"""
|
||||
queryset = ContentSubmission.objects.select_related(
|
||||
'user',
|
||||
'entity_type',
|
||||
'locked_by',
|
||||
'reviewed_by'
|
||||
).prefetch_related('items')
|
||||
|
||||
if status:
|
||||
queryset = queryset.filter(status=status)
|
||||
|
||||
if user:
|
||||
queryset = queryset.filter(user=user)
|
||||
|
||||
return queryset[offset:offset + limit]
|
||||
|
||||
@staticmethod
|
||||
def get_submission_details(submission_id):
|
||||
"""
|
||||
Get full submission details with all items.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance with prefetched items
|
||||
"""
|
||||
return ContentSubmission.objects.select_related(
|
||||
'user',
|
||||
'entity_type',
|
||||
'locked_by',
|
||||
'reviewed_by'
|
||||
).prefetch_related(
|
||||
'items',
|
||||
'items__reviewed_by'
|
||||
).get(id=submission_id)
|
||||
|
||||
@staticmethod
|
||||
def _can_moderate(user):
|
||||
"""
|
||||
Check if user has moderation permission.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
|
||||
Returns:
|
||||
bool: True if user can moderate
|
||||
"""
|
||||
if not user or not user.is_authenticated:
|
||||
return False
|
||||
|
||||
# Check if user is superuser
|
||||
if user.is_superuser:
|
||||
return True
|
||||
|
||||
# Check if user has moderator or admin role
|
||||
try:
|
||||
return user.role.is_moderator
|
||||
except AttributeError:  # user has no role relation (or role lacks is_moderator)
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def delete_submission(submission_id, user):
|
||||
"""
|
||||
Delete a submission (only if draft or by owner).
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
user: User attempting to delete
|
||||
|
||||
Returns:
|
||||
bool: True if deleted
|
||||
|
||||
Raises:
|
||||
PermissionDenied: If user cannot delete
|
||||
ValidationError: If submission cannot be deleted
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check permission
|
||||
is_owner = submission.user == user
|
||||
is_moderator = ModerationService._can_moderate(user)
|
||||
|
||||
if not (is_owner or is_moderator):
|
||||
raise PermissionDenied("Only the owner or a moderator can delete this submission")
|
||||
|
||||
# Check state
|
||||
if submission.status not in [ContentSubmission.STATE_DRAFT, ContentSubmission.STATE_PENDING]:
|
||||
if not is_moderator:
|
||||
raise ValidationError("Only moderators can delete submissions under review")
|
||||
|
||||
# Delete submission (cascades to items and lock)
|
||||
submission.delete()
|
||||
return True
|
||||
304
django-backend/apps/moderation/tasks.py
Normal file
@@ -0,0 +1,304 @@
|
||||
"""
|
||||
Background tasks for moderation workflows and notifications.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.core.mail import send_mail
|
||||
from django.template.loader import render_to_string
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def send_moderation_notification(self, submission_id, status):
|
||||
"""
|
||||
Send email notification when a submission is approved or rejected.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of the ContentSubmission
|
||||
status: 'approved' or 'rejected'
|
||||
|
||||
Returns:
|
||||
str: Notification result message
|
||||
"""
|
||||
from apps.moderation.models import ContentSubmission
|
||||
|
||||
try:
|
||||
submission = ContentSubmission.objects.select_related(
|
||||
'user', 'reviewed_by', 'entity_type'
|
||||
).prefetch_related('items').get(id=submission_id)
|
||||
|
||||
# Get user's submission count
|
||||
user_submission_count = ContentSubmission.objects.filter(
|
||||
user=submission.user
|
||||
).count()
|
||||
|
||||
# Prepare email context
|
||||
context = {
|
||||
'submission': submission,
|
||||
'status': status,
|
||||
'user': submission.user,
|
||||
'user_submission_count': user_submission_count,
|
||||
'submission_url': f"{settings.SITE_URL}/submissions/{submission.id}/",
|
||||
'site_url': settings.SITE_URL,
|
||||
}
|
||||
|
||||
# Choose template based on status
|
||||
if status == 'approved':
|
||||
template = 'emails/moderation_approved.html'
|
||||
subject = f'✅ Submission Approved: {submission.title}'
|
||||
else:
|
||||
template = 'emails/moderation_rejected.html'
|
||||
subject = f'⚠️ Submission Requires Changes: {submission.title}'
|
||||
|
||||
# Render HTML email
|
||||
html_message = render_to_string(template, context)
|
||||
|
||||
# Send email
|
||||
send_mail(
|
||||
subject=subject,
|
||||
message='', # Plain text version (optional)
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[submission.user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Moderation notification sent: {status} for submission {submission_id} "
|
||||
f"to {submission.user.email}"
|
||||
)
|
||||
|
||||
return f"Notification sent to {submission.user.email}"
|
||||
|
||||
except ContentSubmission.DoesNotExist:
|
||||
logger.error(f"Submission {submission_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending notification for submission {submission_id}: {str(exc)}")
|
||||
# Retry with exponential backoff
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
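# Dispatch sketch (an assumption about the call site, which is not shown in this
# file): the moderation service would enqueue this task after a decision, ideally
# once the surrounding transaction commits so the email never describes a
# rolled-back approval.
#
#     from django.db import transaction
#     transaction.on_commit(
#         lambda: send_moderation_notification.delay(str(submission.id), 'approved')
#     )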
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_expired_locks(self):
|
||||
"""
|
||||
Clean up expired moderation locks.
|
||||
|
||||
This task runs periodically to unlock submissions that have
|
||||
been locked for too long (default: 15 minutes).
|
||||
|
||||
Returns:
|
||||
int: Number of locks cleaned up
|
||||
"""
|
||||
from apps.moderation.models import ModerationLock
|
||||
|
||||
try:
|
||||
cleaned = ModerationLock.cleanup_expired()
|
||||
logger.info(f"Cleaned up {cleaned} expired moderation locks")
|
||||
return cleaned
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error cleaning up expired locks: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300) # Retry after 5 minutes
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def send_batch_moderation_summary(self, moderator_id):
|
||||
"""
|
||||
Send a daily summary email to a moderator with their moderation stats.
|
||||
|
||||
Args:
|
||||
moderator_id: ID of the moderator user
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
from apps.moderation.models import ContentSubmission
|
||||
from datetime import timedelta
|
||||
|
||||
try:
|
||||
moderator = User.objects.get(id=moderator_id)
|
||||
|
||||
# Get stats for the past 24 hours
|
||||
yesterday = timezone.now() - timedelta(days=1)
|
||||
|
||||
stats = {
|
||||
'reviewed_today': ContentSubmission.objects.filter(
|
||||
reviewed_by=moderator,
|
||||
reviewed_at__gte=yesterday
|
||||
).count(),
|
||||
'approved_today': ContentSubmission.objects.filter(
|
||||
reviewed_by=moderator,
|
||||
reviewed_at__gte=yesterday,
|
||||
status='approved'
|
||||
).count(),
|
||||
'rejected_today': ContentSubmission.objects.filter(
|
||||
reviewed_by=moderator,
|
||||
reviewed_at__gte=yesterday,
|
||||
status='rejected'
|
||||
).count(),
|
||||
'pending_queue': ContentSubmission.objects.filter(
|
||||
status='pending'
|
||||
).count(),
|
||||
}
|
||||
|
||||
context = {
|
||||
'moderator': moderator,
|
||||
'stats': stats,
|
||||
'date': timezone.now(),
|
||||
'site_url': settings.SITE_URL,
|
||||
}
|
||||
|
||||
# For now, just log the stats (template not created yet)
|
||||
logger.info(f"Moderation summary for {moderator.email}: {stats}")
|
||||
|
||||
# In production, you would send an actual email:
|
||||
# html_message = render_to_string('emails/moderation_summary.html', context)
|
||||
# send_mail(...)
|
||||
|
||||
return f"Summary sent to {moderator.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"Moderator {moderator_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending moderation summary: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task
|
||||
def update_moderation_statistics():
|
||||
"""
|
||||
Update moderation-related statistics across the database.
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.moderation.models import ContentSubmission
|
||||
from django.db.models import Count
|
||||
from datetime import timedelta
|
||||
|
||||
try:
|
||||
now = timezone.now()
|
||||
week_ago = now - timedelta(days=7)
|
||||
|
||||
stats = {
|
||||
'total_submissions': ContentSubmission.objects.count(),
|
||||
'pending': ContentSubmission.objects.filter(status='pending').count(),
|
||||
'reviewing': ContentSubmission.objects.filter(status='reviewing').count(),
|
||||
'approved': ContentSubmission.objects.filter(status='approved').count(),
|
||||
'rejected': ContentSubmission.objects.filter(status='rejected').count(),
|
||||
'this_week': ContentSubmission.objects.filter(
|
||||
created_at__gte=week_ago
|
||||
).count(),
|
||||
'by_type': dict(
|
||||
ContentSubmission.objects.values('submission_type')
|
||||
.annotate(count=Count('id'))
|
||||
.values_list('submission_type', 'count')
|
||||
),
|
||||
}
|
||||
|
||||
logger.info(f"Moderation statistics updated: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating moderation statistics: {str(e)}")
|
||||
raise
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def auto_unlock_stale_reviews(self, hours=1):
|
||||
"""
|
||||
Automatically unlock submissions that have been in review for too long.
|
||||
|
||||
This helps prevent submissions from getting stuck if a moderator
|
||||
starts a review but doesn't complete it.
|
||||
|
||||
Args:
|
||||
hours: Number of hours before auto-unlocking (default: 1)
|
||||
|
||||
Returns:
|
||||
int: Number of submissions unlocked
|
||||
"""
|
||||
from apps.moderation.models import ContentSubmission
|
||||
from apps.moderation.services import ModerationService
|
||||
from datetime import timedelta
|
||||
|
||||
try:
|
||||
cutoff = timezone.now() - timedelta(hours=hours)
|
||||
|
||||
# Find submissions that have been reviewing too long
|
||||
stale_reviews = ContentSubmission.objects.filter(
|
||||
status='reviewing',
|
||||
locked_at__lt=cutoff
|
||||
)
|
||||
|
||||
count = 0
|
||||
for submission in stale_reviews:
|
||||
try:
|
||||
ModerationService.unlock_submission(submission.id)
|
||||
count += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to unlock submission {submission.id}: {str(e)}")
|
||||
continue
|
||||
|
||||
logger.info(f"Auto-unlocked {count} stale reviews")
|
||||
return count
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error auto-unlocking stale reviews: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task
|
||||
def notify_moderators_of_queue_size():
|
||||
"""
|
||||
Notify moderators when the pending queue gets too large.
|
||||
|
||||
This helps ensure timely review of submissions.
|
||||
|
||||
Returns:
|
||||
dict: Notification result
|
||||
"""
|
||||
from apps.moderation.models import ContentSubmission
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
pending_count = ContentSubmission.objects.filter(status='pending').count()
|
||||
|
||||
# Threshold for notification (configurable)
|
||||
threshold = getattr(settings, 'MODERATION_QUEUE_THRESHOLD', 50)
|
||||
|
||||
if pending_count >= threshold:
|
||||
# Get all moderators
|
||||
moderators = User.objects.filter(role__is_moderator=True)
|
||||
|
||||
logger.warning(
|
||||
f"Moderation queue size ({pending_count}) exceeds threshold ({threshold}). "
|
||||
f"Notifying {moderators.count()} moderators."
|
||||
)
|
||||
|
||||
# In production, send emails to moderators
|
||||
# For now, just log
|
||||
|
||||
return {
|
||||
'queue_size': pending_count,
|
||||
'threshold': threshold,
|
||||
'notified': moderators.count(),
|
||||
}
|
||||
else:
|
||||
logger.info(f"Moderation queue size ({pending_count}) is within threshold")
|
||||
return {
|
||||
'queue_size': pending_count,
|
||||
'threshold': threshold,
|
||||
'notified': 0,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking moderation queue: {str(e)}")
|
||||
raise
|
||||
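The periodic tasks above (lock cleanup, stale-review unlocking, statistics, queue-size checks) still need a scheduler entry to run. A minimal Celery beat sketch, assuming the standard CELERY_BEAT_SCHEDULE setting is used; the intervals and the MODERATION_QUEUE_THRESHOLD value are illustrative, not taken from this repository:

from celery.schedules import crontab

# settings.py (sketch) -- intervals are assumptions, not repository values.
MODERATION_QUEUE_THRESHOLD = 50  # notify_moderators_of_queue_size falls back to 50 anyway

CELERY_BEAT_SCHEDULE = {
    'moderation-cleanup-expired-locks': {
        'task': 'apps.moderation.tasks.cleanup_expired_locks',
        'schedule': crontab(minute='*/5'),      # matches the "every 5 minutes" hint above
    },
    'moderation-auto-unlock-stale-reviews': {
        'task': 'apps.moderation.tasks.auto_unlock_stale_reviews',
        'schedule': crontab(minute=0),          # hourly
        'kwargs': {'hours': 1},
    },
    'moderation-queue-size-check': {
        'task': 'apps.moderation.tasks.notify_moderators_of_queue_size',
        'schedule': crontab(minute=30),         # hourly, offset from the unlock job
    },
    'moderation-update-statistics': {
        'task': 'apps.moderation.tasks.update_moderation_statistics',
        'schedule': crontab(minute=0, hour=0),  # nightly
    },
}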
0
django-backend/apps/notifications/__init__.py
Normal file
11
django-backend/apps/notifications/apps.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""
|
||||
Notifications app configuration.
|
||||
"""
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class NotificationsConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.notifications'
|
||||
verbose_name = 'Notifications'
|
||||
0
django-backend/apps/notifications/models.py
Normal file
1
django-backend/apps/reports/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
default_app_config = 'apps.reports.apps.ReportsConfig'
|
||||
38
django-backend/apps/reports/admin.py
Normal file
@@ -0,0 +1,38 @@
|
||||
from django.contrib import admin
|
||||
from unfold.admin import ModelAdmin
|
||||
from .models import Report
|
||||
|
||||
|
||||
@admin.register(Report)
|
||||
class ReportAdmin(ModelAdmin):
|
||||
list_display = ['id', 'entity_type', 'entity_id', 'report_type', 'status', 'reported_by', 'reviewed_by', 'created_at']
|
||||
list_filter = ['status', 'report_type', 'entity_type', 'created_at']
|
||||
search_fields = ['id', 'entity_id', 'description', 'resolution_notes', 'reported_by__email']
|
||||
date_hierarchy = 'created_at'
|
||||
ordering = ['-created_at']
|
||||
readonly_fields = ['id', 'created_at', 'updated_at']
|
||||
|
||||
fieldsets = (
|
||||
('Report Details', {
|
||||
'fields': ('id', 'report_type', 'description', 'status')
|
||||
}),
|
||||
('Reported Entity', {
|
||||
'fields': ('entity_type', 'entity_id')
|
||||
}),
|
||||
('Reporter Information', {
|
||||
'fields': ('reported_by', 'created_at')
|
||||
}),
|
||||
('Moderation', {
|
||||
'fields': ('reviewed_by', 'reviewed_at', 'resolution_notes'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
('Tracking', {
|
||||
'fields': ('updated_at',),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related for foreign keys"""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('reported_by', 'reviewed_by')
|
||||
11
django-backend/apps/reports/apps.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ReportsConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.reports'
|
||||
verbose_name = 'Reports'
|
||||
|
||||
def ready(self):
|
||||
# Import signals here if needed
|
||||
pass
|
||||
236
django-backend/apps/reports/migrations/0001_initial.py
Normal file
@@ -0,0 +1,236 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-09 15:50
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Report",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("entity_type", models.CharField(db_index=True, max_length=50)),
|
||||
("entity_id", models.UUIDField(db_index=True)),
|
||||
(
|
||||
"report_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("inappropriate", "Inappropriate Content"),
|
||||
("inaccurate", "Inaccurate Information"),
|
||||
("spam", "Spam"),
|
||||
("duplicate", "Duplicate"),
|
||||
("copyright", "Copyright Violation"),
|
||||
("other", "Other"),
|
||||
],
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
("description", models.TextField()),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending Review"),
|
||||
("reviewing", "Under Review"),
|
||||
("resolved", "Resolved"),
|
||||
("dismissed", "Dismissed"),
|
||||
],
|
||||
db_index=True,
|
||||
default="pending",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("reviewed_at", models.DateTimeField(blank=True, null=True)),
|
||||
("resolution_notes", models.TextField(blank=True, null=True)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"reported_by",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="reports_created",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="reports_reviewed",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Report",
|
||||
"verbose_name_plural": "Reports",
|
||||
"ordering": ["-created_at"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ReportEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
("entity_type", models.CharField(max_length=50)),
|
||||
("entity_id", models.UUIDField()),
|
||||
(
|
||||
"report_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("inappropriate", "Inappropriate Content"),
|
||||
("inaccurate", "Inaccurate Information"),
|
||||
("spam", "Spam"),
|
||||
("duplicate", "Duplicate"),
|
||||
("copyright", "Copyright Violation"),
|
||||
("other", "Other"),
|
||||
],
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
("description", models.TextField()),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending Review"),
|
||||
("reviewing", "Under Review"),
|
||||
("resolved", "Resolved"),
|
||||
("dismissed", "Dismissed"),
|
||||
],
|
||||
default="pending",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("reviewed_at", models.DateTimeField(blank=True, null=True)),
|
||||
("resolution_notes", models.TextField(blank=True, null=True)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="reports.report",
|
||||
),
|
||||
),
|
||||
(
|
||||
"reported_by",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="report",
|
||||
index=models.Index(
|
||||
fields=["entity_type", "entity_id"],
|
||||
name="reports_rep_entity__eef0d7_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="report",
|
||||
index=models.Index(
|
||||
fields=["status", "-created_at"], name="reports_rep_status_7dd5e5_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="report",
|
||||
index=models.Index(
|
||||
fields=["reported_by", "-created_at"],
|
||||
name="reports_rep_reporte_975d91_idx",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="report",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "reports_reportevent" ("created_at", "description", "entity_id", "entity_type", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "report_type", "reported_by_id", "resolution_notes", "reviewed_at", "reviewed_by_id", "status", "updated_at") VALUES (NEW."created_at", NEW."description", NEW."entity_id", NEW."entity_type", NEW."id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."report_type", NEW."reported_by_id", NEW."resolution_notes", NEW."reviewed_at", NEW."reviewed_by_id", NEW."status", NEW."updated_at"); RETURN NULL;',
|
||||
hash="7e22782db4f84b95a18bb1032e82b80fc3b3e5f5",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_84ceb",
|
||||
table="reports_report",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="report",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "reports_reportevent" ("created_at", "description", "entity_id", "entity_type", "id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "report_type", "reported_by_id", "resolution_notes", "reviewed_at", "reviewed_by_id", "status", "updated_at") VALUES (NEW."created_at", NEW."description", NEW."entity_id", NEW."entity_type", NEW."id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."report_type", NEW."reported_by_id", NEW."resolution_notes", NEW."reviewed_at", NEW."reviewed_by_id", NEW."status", NEW."updated_at"); RETURN NULL;',
|
||||
hash="ca714a1c7581855e9fc840a8267b63f2b5762b75",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_234a9",
|
||||
table="reports_report",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
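The two triggers above write a ReportEvent row on every insert and update, so each report carries its own audit trail. A short sketch of reading it back through the pgh_obj reverse relation (related_name="events" in this migration); the shell-style usage is illustrative:

from apps.reports.models import Report

report = Report.objects.order_by('-created_at').first()
if report is not None:
    # One event per insert/update, written by the pgtrigger functions above.
    for event in report.events.order_by('pgh_created_at'):
        print(event.pgh_label, event.status, event.pgh_created_at)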
0
django-backend/apps/reports/migrations/__init__.py
Normal file
59
django-backend/apps/reports/models.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import uuid
|
||||
import pghistory
|
||||
from django.db import models
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class Report(models.Model):
|
||||
"""User-submitted reports for content moderation"""
|
||||
STATUS_CHOICES = [
|
||||
('pending', 'Pending Review'),
|
||||
('reviewing', 'Under Review'),
|
||||
('resolved', 'Resolved'),
|
||||
('dismissed', 'Dismissed'),
|
||||
]
|
||||
|
||||
REPORT_TYPE_CHOICES = [
|
||||
('inappropriate', 'Inappropriate Content'),
|
||||
('inaccurate', 'Inaccurate Information'),
|
||||
('spam', 'Spam'),
|
||||
('duplicate', 'Duplicate'),
|
||||
('copyright', 'Copyright Violation'),
|
||||
('other', 'Other'),
|
||||
]
|
||||
|
||||
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
|
||||
|
||||
# What is being reported
|
||||
entity_type = models.CharField(max_length=50, db_index=True)
|
||||
entity_id = models.UUIDField(db_index=True)
|
||||
|
||||
# Report details
|
||||
report_type = models.CharField(max_length=50, choices=REPORT_TYPE_CHOICES)
|
||||
description = models.TextField()
|
||||
status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='pending', db_index=True)
|
||||
|
||||
# Reporter
|
||||
reported_by = models.ForeignKey('users.User', on_delete=models.CASCADE, related_name='reports_created')
|
||||
|
||||
# Moderation
|
||||
reviewed_by = models.ForeignKey('users.User', null=True, blank=True, on_delete=models.SET_NULL, related_name='reports_reviewed')
|
||||
reviewed_at = models.DateTimeField(null=True, blank=True)
|
||||
resolution_notes = models.TextField(null=True, blank=True)
|
||||
|
||||
# Timestamps
|
||||
created_at = models.DateTimeField(auto_now_add=True)
|
||||
updated_at = models.DateTimeField(auto_now=True)
|
||||
|
||||
class Meta:
|
||||
verbose_name = 'Report'
|
||||
verbose_name_plural = 'Reports'
|
||||
ordering = ['-created_at']
|
||||
indexes = [
|
||||
models.Index(fields=['entity_type', 'entity_id']),
|
||||
models.Index(fields=['status', '-created_at']),
|
||||
models.Index(fields=['reported_by', '-created_at']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.get_report_type_display()} - {self.entity_type} ({self.status})"
|
||||
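A short usage sketch for filing a report against an arbitrary entity; the helper name and the lowercase type label are illustrative, since the model only requires entity_type, entity_id, report_type, description, and reported_by:

from apps.reports.models import Report

def report_entity(entity, user, report_type, reason):
    # entity is assumed to have a UUID primary key, matching entity_id above.
    return Report.objects.create(
        entity_type=entity.__class__.__name__.lower(),
        entity_id=entity.id,
        report_type=report_type,   # one of REPORT_TYPE_CHOICES, e.g. 'inaccurate'
        description=reason,
        reported_by=user,
    )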
215
django-backend/apps/reviews/admin.py
Normal file
@@ -0,0 +1,215 @@
|
||||
from django.contrib import admin
|
||||
from django.utils.html import format_html
|
||||
from unfold.admin import ModelAdmin
|
||||
from unfold.decorators import display
|
||||
from .models import Review, ReviewHelpfulVote
|
||||
|
||||
|
||||
@admin.register(Review)
|
||||
class ReviewAdmin(ModelAdmin):
|
||||
list_display = [
|
||||
'id',
|
||||
'user_link',
|
||||
'entity_type',
|
||||
'entity_link',
|
||||
'rating_display',
|
||||
'title',
|
||||
'moderation_status_badge',
|
||||
'helpful_score',
|
||||
'created',
|
||||
]
|
||||
list_filter = [
|
||||
'moderation_status',
|
||||
'rating',
|
||||
'created',
|
||||
'content_type',
|
||||
]
|
||||
search_fields = [
|
||||
'title',
|
||||
'content',
|
||||
'user__username',
|
||||
'user__email',
|
||||
]
|
||||
readonly_fields = [
|
||||
'user',
|
||||
'content_type',
|
||||
'object_id',
|
||||
'content_object',
|
||||
'helpful_votes',
|
||||
'total_votes',
|
||||
'helpful_percentage',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
fieldsets = (
|
||||
('Review Information', {
|
||||
'fields': (
|
||||
'user',
|
||||
'content_type',
|
||||
'object_id',
|
||||
'content_object',
|
||||
'title',
|
||||
'content',
|
||||
'rating',
|
||||
)
|
||||
}),
|
||||
('Visit Details', {
|
||||
'fields': (
|
||||
'visit_date',
|
||||
'wait_time_minutes',
|
||||
)
|
||||
}),
|
||||
('Voting Statistics', {
|
||||
'fields': (
|
||||
'helpful_votes',
|
||||
'total_votes',
|
||||
'helpful_percentage',
|
||||
)
|
||||
}),
|
||||
('Moderation', {
|
||||
'fields': (
|
||||
'moderation_status',
|
||||
'moderation_notes',
|
||||
'moderated_by',
|
||||
'moderated_at',
|
||||
)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': (
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
)
|
||||
list_per_page = 50
|
||||
|
||||
@display(description='User', ordering='user__username')
|
||||
def user_link(self, obj):
|
||||
from django.urls import reverse
|
||||
url = reverse('admin:users_user_change', args=[obj.user.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.user.username)
|
||||
|
||||
@display(description='Entity Type', ordering='content_type')
|
||||
def entity_type(self, obj):
|
||||
return obj.content_type.model.title()
|
||||
|
||||
@display(description='Entity')
|
||||
def entity_link(self, obj):
|
||||
if obj.content_object:
|
||||
from django.urls import reverse
|
||||
model_name = obj.content_type.model
|
||||
url = reverse(f'admin:entities_{model_name}_change', args=[obj.object_id])
|
||||
return format_html('<a href="{}">{}</a>', url, str(obj.content_object))
|
||||
return f"ID: {obj.object_id}"
|
||||
|
||||
@display(description='Rating', ordering='rating')
|
||||
def rating_display(self, obj):
|
||||
stars = '⭐' * obj.rating
|
||||
return format_html('<span title="{}/5">{}</span>', obj.rating, stars)
|
||||
|
||||
@display(description='Status', ordering='moderation_status')
|
||||
def moderation_status_badge(self, obj):
|
||||
colors = {
|
||||
'pending': '#FFA500',
|
||||
'approved': '#28A745',
|
||||
'rejected': '#DC3545',
|
||||
}
|
||||
color = colors.get(obj.moderation_status, '#6C757D')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 3px 10px; '
|
||||
'border-radius: 3px; font-weight: bold;">{}</span>',
|
||||
color,
|
||||
obj.get_moderation_status_display()
|
||||
)
|
||||
|
||||
@display(description='Helpful Score')
|
||||
def helpful_score(self, obj):
|
||||
if obj.total_votes == 0:
|
||||
return "No votes yet"
|
||||
percentage = obj.helpful_percentage
|
||||
return f"{obj.helpful_votes}/{obj.total_votes} ({percentage:.0f}%)"
|
||||
|
||||
def has_add_permission(self, request):
|
||||
# Reviews should only be created by users via API
|
||||
return False
|
||||
|
||||
def has_delete_permission(self, request, obj=None):
|
||||
# Only superusers can delete reviews
|
||||
return request.user.is_superuser
|
||||
|
||||
actions = ['approve_reviews', 'reject_reviews']
|
||||
|
||||
@admin.action(description='Approve selected reviews')
|
||||
def approve_reviews(self, request, queryset):
|
||||
count = 0
|
||||
for review in queryset.filter(moderation_status='pending'):
|
||||
review.approve(request.user, 'Bulk approved from admin')
|
||||
count += 1
|
||||
self.message_user(request, f'{count} reviews approved.')
|
||||
|
||||
@admin.action(description='Reject selected reviews')
|
||||
def reject_reviews(self, request, queryset):
|
||||
count = 0
|
||||
for review in queryset.filter(moderation_status='pending'):
|
||||
review.reject(request.user, 'Bulk rejected from admin')
|
||||
count += 1
|
||||
self.message_user(request, f'{count} reviews rejected.')
|
||||
|
||||
|
||||
@admin.register(ReviewHelpfulVote)
|
||||
class ReviewHelpfulVoteAdmin(ModelAdmin):
|
||||
list_display = [
|
||||
'id',
|
||||
'review_link',
|
||||
'user_link',
|
||||
'vote_type',
|
||||
'created',
|
||||
]
|
||||
list_filter = [
|
||||
'is_helpful',
|
||||
'created',
|
||||
]
|
||||
search_fields = [
|
||||
'review__title',
|
||||
'user__username',
|
||||
'user__email',
|
||||
]
|
||||
readonly_fields = [
|
||||
'review',
|
||||
'user',
|
||||
'is_helpful',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
list_per_page = 50
|
||||
|
||||
@display(description='Review', ordering='review__title')
|
||||
def review_link(self, obj):
|
||||
from django.urls import reverse
|
||||
url = reverse('admin:reviews_review_change', args=[obj.review.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.review.title)
|
||||
|
||||
@display(description='User', ordering='user__username')
|
||||
def user_link(self, obj):
|
||||
from django.urls import reverse
|
||||
url = reverse('admin:users_user_change', args=[obj.user.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.user.username)
|
||||
|
||||
@display(description='Vote', ordering='is_helpful')
|
||||
def vote_type(self, obj):
|
||||
if obj.is_helpful:
|
||||
return format_html('<span style="color: green;">👍 Helpful</span>')
|
||||
else:
|
||||
return format_html('<span style="color: red;">👎 Not Helpful</span>')
|
||||
|
||||
def has_add_permission(self, request):
|
||||
# Votes should only be created by users via API
|
||||
return False
|
||||
|
||||
def has_change_permission(self, request, obj=None):
|
||||
# Votes should not be changed after creation
|
||||
return False
|
||||
|
||||
def has_delete_permission(self, request, obj=None):
|
||||
# Only superusers can delete votes
|
||||
return request.user.is_superuser
|
||||
7
django-backend/apps/reviews/apps.py
Normal file
@@ -0,0 +1,7 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class ReviewsConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.reviews'
|
||||
verbose_name = 'Reviews'
|
||||
225
django-backend/apps/reviews/migrations/0001_initial.py
Normal file
@@ -0,0 +1,225 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 20:44
|
||||
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="Review",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("title", models.CharField(max_length=200)),
|
||||
("content", models.TextField()),
|
||||
(
|
||||
"rating",
|
||||
models.IntegerField(
|
||||
help_text="Rating from 1 to 5 stars",
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(1),
|
||||
django.core.validators.MaxValueValidator(5),
|
||||
],
|
||||
),
|
||||
),
|
||||
(
|
||||
"visit_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Date the user visited", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"wait_time_minutes",
|
||||
models.PositiveIntegerField(
|
||||
blank=True, help_text="Wait time in minutes", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"helpful_votes",
|
||||
models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Number of users who found this review helpful",
|
||||
),
|
||||
),
|
||||
(
|
||||
"total_votes",
|
||||
models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Total number of votes (helpful + not helpful)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderation_status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
],
|
||||
db_index=True,
|
||||
default="pending",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderation_notes",
|
||||
models.TextField(blank=True, help_text="Notes from moderator"),
|
||||
),
|
||||
("moderated_at", models.DateTimeField(blank=True, null=True)),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
limit_choices_to={"model__in": ("park", "ride")},
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderated_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="moderated_reviews",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="reviews",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"ordering": ["-created"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ReviewHelpfulVote",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.BigAutoField(
|
||||
auto_created=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
verbose_name="ID",
|
||||
),
|
||||
),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_helpful",
|
||||
models.BooleanField(
|
||||
help_text="True if user found review helpful, False if not helpful"
|
||||
),
|
||||
),
|
||||
(
|
||||
"review",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="vote_records",
|
||||
to="reviews.review",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="review_votes",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["review", "user"], name="reviews_rev_review__7d0d79_idx"
|
||||
)
|
||||
],
|
||||
"unique_together": {("review", "user")},
|
||||
},
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="review",
|
||||
index=models.Index(
|
||||
fields=["content_type", "object_id"],
|
||||
name="reviews_rev_content_627d80_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="review",
|
||||
index=models.Index(
|
||||
fields=["user", "created"], name="reviews_rev_user_id_d4b7bb_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="review",
|
||||
index=models.Index(
|
||||
fields=["moderation_status", "created"],
|
||||
name="reviews_rev_moderat_d4dca0_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="review",
|
||||
index=models.Index(fields=["rating"], name="reviews_rev_rating_2db6dd_idx"),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="review",
|
||||
unique_together={("user", "content_type", "object_id")},
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,222 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 21:32
|
||||
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import model_utils.fields
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("moderation", "0001_initial"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
("reviews", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ReviewEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
("id", models.BigIntegerField()),
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
("title", models.CharField(max_length=200)),
|
||||
("content", models.TextField()),
|
||||
(
|
||||
"rating",
|
||||
models.IntegerField(
|
||||
help_text="Rating from 1 to 5 stars",
|
||||
validators=[
|
||||
django.core.validators.MinValueValidator(1),
|
||||
django.core.validators.MaxValueValidator(5),
|
||||
],
|
||||
),
|
||||
),
|
||||
(
|
||||
"visit_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Date the user visited", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"wait_time_minutes",
|
||||
models.PositiveIntegerField(
|
||||
blank=True, help_text="Wait time in minutes", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"helpful_votes",
|
||||
models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Number of users who found this review helpful",
|
||||
),
|
||||
),
|
||||
(
|
||||
"total_votes",
|
||||
models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Total number of votes (helpful + not helpful)",
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderation_status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
],
|
||||
default="pending",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"moderation_notes",
|
||||
models.TextField(blank=True, help_text="Notes from moderator"),
|
||||
),
|
||||
("moderated_at", models.DateTimeField(blank=True, null=True)),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="review",
|
||||
name="submission",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="ContentSubmission that created this review",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="reviews",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="review",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "reviews_reviewevent" ("content", "content_type_id", "created", "helpful_votes", "id", "moderated_at", "moderated_by_id", "moderation_notes", "moderation_status", "modified", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "submission_id", "title", "total_votes", "user_id", "visit_date", "wait_time_minutes") VALUES (NEW."content", NEW."content_type_id", NEW."created", NEW."helpful_votes", NEW."id", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."moderation_status", NEW."modified", NEW."object_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."rating", NEW."submission_id", NEW."title", NEW."total_votes", NEW."user_id", NEW."visit_date", NEW."wait_time_minutes"); RETURN NULL;',
|
||||
hash="b35102b3c04881bef39a259f1105a6032033b6d7",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_7a7c1",
|
||||
table="reviews_review",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="review",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "reviews_reviewevent" ("content", "content_type_id", "created", "helpful_votes", "id", "moderated_at", "moderated_by_id", "moderation_notes", "moderation_status", "modified", "object_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "rating", "submission_id", "title", "total_votes", "user_id", "visit_date", "wait_time_minutes") VALUES (NEW."content", NEW."content_type_id", NEW."created", NEW."helpful_votes", NEW."id", NEW."moderated_at", NEW."moderated_by_id", NEW."moderation_notes", NEW."moderation_status", NEW."modified", NEW."object_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."rating", NEW."submission_id", NEW."title", NEW."total_votes", NEW."user_id", NEW."visit_date", NEW."wait_time_minutes"); RETURN NULL;',
|
||||
hash="252cddc558c9724c0ef840a91c1d0ebd03a1b7a2",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_b34c8",
|
||||
table="reviews_review",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="reviewevent",
|
||||
name="content_type",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
limit_choices_to={"model__in": ("park", "ride")},
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="reviewevent",
|
||||
name="moderated_by",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="reviewevent",
|
||||
name="pgh_context",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="reviewevent",
|
||||
name="pgh_obj",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="reviews.review",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="reviewevent",
|
||||
name="submission",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
help_text="ContentSubmission that created this review",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="reviewevent",
|
||||
name="user",
|
||||
field=models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
]
|
||||
0
django-backend/apps/reviews/migrations/__init__.py
Normal file
202
django-backend/apps/reviews/models.py
Normal file
@@ -0,0 +1,202 @@
|
||||
from django.db import models
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from django.conf import settings
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from model_utils.models import TimeStampedModel
|
||||
import pghistory
|
||||
|
||||
|
||||
@pghistory.track()
|
||||
class Review(TimeStampedModel):
|
||||
"""
|
||||
User reviews for parks or rides.
|
||||
|
||||
Users can leave reviews with ratings, text, photos, and metadata like visit date.
|
||||
Reviews support helpful voting and go through moderation workflow.
|
||||
"""
|
||||
|
||||
# User who created the review
|
||||
user = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='reviews'
|
||||
)
|
||||
|
||||
# Generic relation - can review either a Park or a Ride
|
||||
content_type = models.ForeignKey(
|
||||
ContentType,
|
||||
on_delete=models.CASCADE,
|
||||
limit_choices_to={'model__in': ('park', 'ride')}
|
||||
)
|
||||
object_id = models.PositiveIntegerField()
|
||||
content_object = GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
# Review content
|
||||
title = models.CharField(max_length=200)
|
||||
content = models.TextField()
|
||||
rating = models.IntegerField(
|
||||
validators=[MinValueValidator(1), MaxValueValidator(5)],
|
||||
help_text="Rating from 1 to 5 stars"
|
||||
)
|
||||
|
||||
# Visit metadata
|
||||
visit_date = models.DateField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Date the user visited"
|
||||
)
|
||||
wait_time_minutes = models.PositiveIntegerField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Wait time in minutes"
|
||||
)
|
||||
|
||||
# Helpful voting system
|
||||
helpful_votes = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Number of users who found this review helpful"
|
||||
)
|
||||
total_votes = models.PositiveIntegerField(
|
||||
default=0,
|
||||
help_text="Total number of votes (helpful + not helpful)"
|
||||
)
|
||||
|
||||
# Moderation status
|
||||
MODERATION_PENDING = 'pending'
|
||||
MODERATION_APPROVED = 'approved'
|
||||
MODERATION_REJECTED = 'rejected'
|
||||
|
||||
MODERATION_STATUS_CHOICES = [
|
||||
(MODERATION_PENDING, 'Pending'),
|
||||
(MODERATION_APPROVED, 'Approved'),
|
||||
(MODERATION_REJECTED, 'Rejected'),
|
||||
]
|
||||
|
||||
moderation_status = models.CharField(
|
||||
max_length=20,
|
||||
choices=MODERATION_STATUS_CHOICES,
|
||||
default=MODERATION_PENDING,
|
||||
db_index=True
|
||||
)
|
||||
moderation_notes = models.TextField(
|
||||
blank=True,
|
||||
help_text="Notes from moderator"
|
||||
)
|
||||
moderated_at = models.DateTimeField(null=True, blank=True)
|
||||
moderated_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='moderated_reviews'
|
||||
)
|
||||
|
||||
# Link to ContentSubmission (Sacred Pipeline integration)
|
||||
submission = models.ForeignKey(
|
||||
'moderation.ContentSubmission',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='reviews',
|
||||
help_text="ContentSubmission that created this review"
|
||||
)
|
||||
|
||||
# Photos related to this review (via media.Photo model with generic relation)
|
||||
photos = GenericRelation('media.Photo')
|
||||
|
||||
class Meta:
|
||||
ordering = ['-created']
|
||||
indexes = [
|
||||
models.Index(fields=['content_type', 'object_id']),
|
||||
models.Index(fields=['user', 'created']),
|
||||
models.Index(fields=['moderation_status', 'created']),
|
||||
models.Index(fields=['rating']),
|
||||
]
|
||||
# A user can only review a specific park/ride once
|
||||
unique_together = [['user', 'content_type', 'object_id']]
|
||||
|
||||
def __str__(self):
|
||||
entity_type = self.content_type.model
|
||||
return f"{self.user.username}'s review of {entity_type} #{self.object_id}"
|
||||
|
||||
@property
|
||||
def helpful_percentage(self):
|
||||
"""Calculate percentage of helpful votes."""
|
||||
if self.total_votes == 0:
|
||||
return None
|
||||
return (self.helpful_votes / self.total_votes) * 100
|
||||
|
||||
@property
|
||||
def is_approved(self):
|
||||
"""Check if review is approved."""
|
||||
return self.moderation_status == self.MODERATION_APPROVED
|
||||
|
||||
@property
|
||||
def is_pending(self):
|
||||
"""Check if review is pending moderation."""
|
||||
return self.moderation_status == self.MODERATION_PENDING
|
||||
|
||||
|
||||
class ReviewHelpfulVote(TimeStampedModel):
|
||||
"""
|
||||
Track individual helpful votes to prevent duplicate voting.
|
||||
"""
|
||||
review = models.ForeignKey(
|
||||
Review,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='vote_records'
|
||||
)
|
||||
user = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='review_votes'
|
||||
)
|
||||
is_helpful = models.BooleanField(
|
||||
help_text="True if user found review helpful, False if not helpful"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
unique_together = [['review', 'user']]
|
||||
indexes = [
|
||||
models.Index(fields=['review', 'user']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
vote_type = "helpful" if self.is_helpful else "not helpful"
|
||||
return f"{self.user.username} voted {vote_type} on review #{self.review.id}"
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
"""Update review vote counts when saving."""
|
||||
is_new = self.pk is None
|
||||
old_is_helpful = None
|
||||
|
||||
if not is_new:
|
||||
# Get old value before update
|
||||
old_vote = ReviewHelpfulVote.objects.get(pk=self.pk)
|
||||
old_is_helpful = old_vote.is_helpful
|
||||
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
# Update review vote counts
|
||||
if is_new:
|
||||
# New vote
|
||||
self.review.total_votes += 1
|
||||
if self.is_helpful:
|
||||
self.review.helpful_votes += 1
|
||||
self.review.save()
|
||||
elif old_is_helpful != self.is_helpful:
|
||||
# Vote changed
|
||||
if self.is_helpful:
|
||||
self.review.helpful_votes += 1
|
||||
else:
|
||||
self.review.helpful_votes -= 1
|
||||
self.review.save()
|
||||
|
||||
def delete(self, *args, **kwargs):
|
||||
"""Update review vote counts when deleting."""
|
||||
self.review.total_votes -= 1
|
||||
if self.is_helpful:
|
||||
self.review.helpful_votes -= 1
|
||||
self.review.save()
|
||||
super().delete(*args, **kwargs)
|
||||
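The vote bookkeeping in ReviewHelpfulVote.save()/delete() mutates counters that were read into memory, which can drop updates under concurrent voting. A hedged alternative sketch for the new-vote path using F() expressions (an option, not code from this repository):

from django.db.models import F
from apps.reviews.models import Review

def record_new_vote(review_id, is_helpful):
    # Push the arithmetic into the database so concurrent votes cannot clobber each other.
    Review.objects.filter(pk=review_id).update(
        total_votes=F('total_votes') + 1,
        helpful_votes=F('helpful_votes') + (1 if is_helpful else 0),
    )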
378
django-backend/apps/reviews/services.py
Normal file
@@ -0,0 +1,378 @@
|
||||
"""
|
||||
Review services for ThrillWiki.
|
||||
|
||||
This module provides business logic for review submissions through the Sacred Pipeline.
|
||||
All reviews must flow through ModerationService to ensure consistency with the rest of the system.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from django.db import transaction
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.moderation.services import ModerationService
|
||||
from apps.reviews.models import Review
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ReviewSubmissionService:
|
||||
"""
|
||||
Service class for creating and managing review submissions.
|
||||
|
||||
All reviews flow through the ContentSubmission pipeline, ensuring:
|
||||
- Consistent moderation workflow
|
||||
- FSM state machine transitions
|
||||
- 15-minute lock mechanism
|
||||
- Atomic transaction handling
|
||||
- Automatic versioning via pghistory
|
||||
- Audit trail via ContentSubmission
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def create_review_submission(
|
||||
user,
|
||||
entity,
|
||||
rating,
|
||||
title,
|
||||
content,
|
||||
visit_date=None,
|
||||
wait_time_minutes=None,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Create a review submission through the Sacred Pipeline.
|
||||
|
||||
This method creates a ContentSubmission with SubmissionItems for each review field.
|
||||
If the user is a moderator, the submission is auto-approved and the Review is created immediately.
|
||||
Otherwise, the submission enters the pending moderation queue.
|
||||
|
||||
Args:
|
||||
user: User creating the review
|
||||
entity: Entity being reviewed (Park or Ride)
|
||||
rating: Rating from 1-5 stars
|
||||
title: Review title
|
||||
content: Review content text
|
||||
visit_date: Optional date of visit
|
||||
wait_time_minutes: Optional wait time in minutes
|
||||
**kwargs: Additional metadata (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
tuple: (ContentSubmission, Review or None)
|
||||
Review will be None if pending moderation
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
"""
|
||||
# Check if user is moderator (for bypass)
|
||||
is_moderator = (hasattr(user, 'role') and user.role.is_moderator) if user else False
|
||||
|
||||
# Get entity ContentType
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
# Check for duplicate review
|
||||
existing = Review.objects.filter(
|
||||
user=user,
|
||||
content_type=entity_type,
|
||||
object_id=entity.id
|
||||
).first()
|
||||
|
||||
if existing:
|
||||
raise ValidationError(
|
||||
f"User has already reviewed this {entity_type.model}. "
|
||||
f"Use update method to modify existing review."
|
||||
)
|
||||
|
||||
# Build submission items for each review field
|
||||
items_data = [
|
||||
{
|
||||
'field_name': 'rating',
|
||||
'field_label': 'Rating',
|
||||
'old_value': None,
|
||||
'new_value': str(rating),
|
||||
'change_type': 'add',
|
||||
'is_required': True,
|
||||
'order': 0
|
||||
},
|
||||
{
|
||||
'field_name': 'title',
|
||||
'field_label': 'Title',
|
||||
'old_value': None,
|
||||
'new_value': title,
|
||||
'change_type': 'add',
|
||||
'is_required': True,
|
||||
'order': 1
|
||||
},
|
||||
{
|
||||
'field_name': 'content',
|
||||
'field_label': 'Review Content',
|
||||
'old_value': None,
|
||||
'new_value': content,
|
||||
'change_type': 'add',
|
||||
'is_required': True,
|
||||
'order': 2
|
||||
},
|
||||
]
|
||||
|
||||
# Add optional fields if provided
|
||||
if visit_date is not None:
|
||||
items_data.append({
|
||||
'field_name': 'visit_date',
|
||||
'field_label': 'Visit Date',
|
||||
'old_value': None,
|
||||
'new_value': str(visit_date),
|
||||
'change_type': 'add',
|
||||
'is_required': False,
|
||||
'order': 3
|
||||
})
|
||||
|
||||
if wait_time_minutes is not None:
|
||||
items_data.append({
|
||||
'field_name': 'wait_time_minutes',
|
||||
'field_label': 'Wait Time (minutes)',
|
||||
'old_value': None,
|
||||
'new_value': str(wait_time_minutes),
|
||||
'change_type': 'add',
|
||||
'is_required': False,
|
||||
'order': 4
|
||||
})
|
||||
|
||||
# Create submission through ModerationService
|
||||
submission = ModerationService.create_submission(
|
||||
user=user,
|
||||
entity=entity,
|
||||
submission_type='review',
|
||||
title=f"Review: {title[:50]}",
|
||||
description=f"User review for {entity_type.model}: {entity}",
|
||||
items_data=items_data,
|
||||
metadata={
|
||||
'rating': rating,
|
||||
'entity_type': entity_type.model,
|
||||
},
|
||||
auto_submit=True,
|
||||
source=kwargs.get('source', 'api'),
|
||||
ip_address=kwargs.get('ip_address'),
|
||||
user_agent=kwargs.get('user_agent', '')
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Review submission created: {submission.id} by {user.email} "
|
||||
f"for {entity_type.model} {entity.id}"
|
||||
)
|
||||
|
||||
# MODERATOR BYPASS: Auto-approve if user is moderator
|
||||
review = None
|
||||
if is_moderator:
|
||||
logger.info(f"Moderator bypass: Auto-approving submission {submission.id}")
|
||||
|
||||
# Approve through ModerationService (this triggers atomic transaction)
|
||||
submission = ModerationService.approve_submission(submission.id, user)
|
||||
|
||||
# Create the Review record
|
||||
review = ReviewSubmissionService._create_review_from_submission(
|
||||
submission=submission,
|
||||
entity=entity,
|
||||
user=user
|
||||
)
|
||||
|
||||
logger.info(f"Review auto-created for moderator: {review.id}")
|
||||
|
||||
return submission, review
|
||||

    @staticmethod
    @transaction.atomic
    def _create_review_from_submission(submission, entity, user):
        """
        Create a Review record from an approved ContentSubmission.

        This is called internally when a submission is approved.
        Extracts data from SubmissionItems and creates the Review.

        Args:
            submission: Approved ContentSubmission
            entity: Entity being reviewed
            user: User who created the review

        Returns:
            Review: Created review instance
        """
        # Extract data from submission items
        items = submission.items.all()
        review_data = {}

        for item in items:
            if item.field_name == 'rating':
                review_data['rating'] = int(item.new_value)
            elif item.field_name == 'title':
                review_data['title'] = item.new_value
            elif item.field_name == 'content':
                review_data['content'] = item.new_value
            elif item.field_name == 'visit_date':
                from datetime import datetime
                review_data['visit_date'] = datetime.fromisoformat(item.new_value).date()
            elif item.field_name == 'wait_time_minutes':
                review_data['wait_time_minutes'] = int(item.new_value)

        # Get entity ContentType
        entity_type = ContentType.objects.get_for_model(entity)

        # Create Review
        review = Review.objects.create(
            user=user,
            content_type=entity_type,
            object_id=entity.id,
            submission=submission,
            moderation_status=Review.MODERATION_APPROVED,
            moderated_by=submission.reviewed_by,
            moderated_at=submission.reviewed_at,
            **review_data
        )

        # pghistory will automatically track this creation

        logger.info(
            f"Review created from submission: {review.id} "
            f"(submission: {submission.id})"
        )

        return review

    @staticmethod
    @transaction.atomic
    def update_review_submission(review, user, **update_data):
        """
        Update an existing review by creating a new submission.

        This follows the Sacred Pipeline by creating a new ContentSubmission
        for the update, which must be approved before changes take effect.

        Args:
            review: Existing Review to update
            user: User making the update (must be review owner)
            **update_data: Fields to update (rating, title, content, etc.)

        Returns:
            ContentSubmission: The update submission

        Raises:
            ValidationError: If user is not the review owner
        """
        # Verify ownership
        if review.user != user:
            raise ValidationError("Only the review owner can update their review")

        # Check if user is a moderator (for bypass)
        is_moderator = (hasattr(user, 'role') and user.role.is_moderator) if user else False

        # Get entity
        entity = review.content_object
        if not entity:
            raise ValidationError("Reviewed entity no longer exists")

        # Build submission items for changed fields
        items_data = []
        order = 0

        for field_name, new_value in update_data.items():
            if field_name in ['rating', 'title', 'content', 'visit_date', 'wait_time_minutes']:
                old_value = getattr(review, field_name)

                # Only include if value actually changed
                if old_value != new_value:
                    items_data.append({
                        'field_name': field_name,
                        'field_label': field_name.replace('_', ' ').title(),
                        'old_value': str(old_value) if old_value else None,
                        'new_value': str(new_value),
                        'change_type': 'modify',
                        'is_required': field_name in ['rating', 'title', 'content'],
                        'order': order
                    })
                    order += 1

        if not items_data:
            raise ValidationError("No changes detected")

        # Create update submission
        submission = ModerationService.create_submission(
            user=user,
            entity=entity,
            submission_type='update',
            title=f"Review Update: {review.title[:50]}",
            description=f"User updating review #{review.id}",
            items_data=items_data,
            metadata={
                'review_id': str(review.id),
                'update_type': 'review',
            },
            auto_submit=True,
            source='api'
        )

        logger.info(f"Review update submission created: {submission.id}")

        # MODERATOR BYPASS: Auto-approve if moderator
        if is_moderator:
            submission = ModerationService.approve_submission(submission.id, user)

            # Apply updates to review
            for item in submission.items.filter(status='approved'):
                setattr(review, item.field_name, item.new_value)

            review.moderation_status = Review.MODERATION_APPROVED
            review.moderated_by = user
            review.save()

            logger.info(f"Review update auto-approved for moderator: {review.id}")
        else:
            # Regular user: mark review as pending
            review.moderation_status = Review.MODERATION_PENDING
            review.save()

        return submission

    @staticmethod
    def apply_review_approval(submission):
        """
        Apply an approved review submission.

        This is called by ModerationService when a review submission is approved.
        For new reviews, creates the Review record.
        For updates, applies changes to the existing Review.

        Args:
            submission: Approved ContentSubmission

        Returns:
            Review: The created or updated review
        """
        entity = submission.entity
        user = submission.user

        if submission.submission_type == 'review':
            # New review
            return ReviewSubmissionService._create_review_from_submission(
                submission, entity, user
            )
        elif submission.submission_type == 'update':
            # Update existing review
            review_id = submission.metadata.get('review_id')
            if not review_id:
                raise ValidationError("Missing review_id in submission metadata")

            review = Review.objects.get(id=review_id)

            # Apply approved changes
            for item in submission.items.filter(status='approved'):
                setattr(review, item.field_name, item.new_value)

            review.moderation_status = Review.MODERATION_APPROVED
            review.moderated_by = submission.reviewed_by
            review.moderated_at = submission.reviewed_at
            review.save()

            logger.info(f"Review updated from submission: {review.id}")
            return review
        else:
            raise ValidationError(f"Invalid submission type: {submission.submission_type}")
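A minimal usage sketch of the service above, assuming a Ride entity and a DRF-style request object; the Ride import path, the slug lookup, and the review values are illustrative placeholders, not part of this commit.

# Hedged usage sketch (not part of this commit).
from apps.entities.models import Ride  # assumed path, based on the 'entities' app referenced above

ride = Ride.objects.get(slug='steel-vengeance')  # hypothetical lookup

submission, review = ReviewSubmissionService.create_review_submission(
    user=request.user,
    entity=ride,
    rating=5,
    title="Best coaster in the park",
    content="Relentless airtime from start to finish.",
    wait_time_minutes=90,
    source='api',
    ip_address=request.META.get('REMOTE_ADDR'),
)
# For regular users, review is None until moderation approves the submission;
# for moderators, the bypass path returns the created Review immediately.

# Later, once the Review exists, only the changed fields become SubmissionItems:
update_submission = ReviewSubmissionService.update_review_submission(
    review,
    request.user,
    rating=4,
    content="Still great, but the new restraints dull the airtime.",
)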
4
django-backend/apps/timeline/__init__.py
Normal file
@@ -0,0 +1,4 @@
"""
Timeline app for tracking entity lifecycle events.
"""
default_app_config = 'apps.timeline.apps.TimelineConfig'
33
django-backend/apps/timeline/admin.py
Normal file
@@ -0,0 +1,33 @@
from django.contrib import admin
from unfold.admin import ModelAdmin
from .models import EntityTimelineEvent


@admin.register(EntityTimelineEvent)
class EntityTimelineEventAdmin(ModelAdmin):
    list_display = ['title', 'entity_type', 'entity_id', 'event_type', 'event_date', 'is_public', 'created_by']
    list_filter = ['entity_type', 'event_type', 'is_public', 'event_date', 'event_date_precision']
    search_fields = ['title', 'description', 'entity_id']
    date_hierarchy = 'event_date'
    ordering = ['-event_date', '-created_at']
    readonly_fields = ['created_at', 'updated_at']

    fieldsets = (
        ('Event Information', {
            'fields': ('title', 'description', 'event_type', 'event_date', 'event_date_precision')
        }),
        ('Entity Reference', {
            'fields': ('entity_type', 'entity_id')
        }),
        ('Event Details', {
            'fields': ('from_entity_id', 'to_entity_id', 'from_location', 'to_location', 'from_value', 'to_value'),
            'classes': ('collapse',)
        }),
        ('Visibility & Order', {
            'fields': ('is_public', 'display_order')
        }),
        ('Tracking', {
            'fields': ('created_by', 'approved_by', 'submission', 'created_at', 'updated_at'),
            'classes': ('collapse',)
        }),
    )
11
django-backend/apps/timeline/apps.py
Normal file
@@ -0,0 +1,11 @@
"""
Timeline app configuration.
"""
from django.apps import AppConfig


class TimelineConfig(AppConfig):
    """Configuration for the timeline app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.timeline'
    verbose_name = 'Timeline Events'
308
django-backend/apps/timeline/migrations/0001_initial.py
Normal file
@@ -0,0 +1,308 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-09 15:45
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import pgtrigger.compiler
|
||||
import pgtrigger.migrations
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("pghistory", "0006_delete_aggregateevent"),
|
||||
("moderation", "0002_alter_contentsubmission_submission_type"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("entities", "0007_add_ride_name_history"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="EntityTimelineEvent",
|
||||
fields=[
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("entity_id", models.UUIDField(db_index=True)),
|
||||
("entity_type", models.CharField(db_index=True, max_length=50)),
|
||||
(
|
||||
"event_type",
|
||||
models.CharField(
|
||||
help_text="Type of event: opening, closing, relocation, etc.",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
("event_date", models.DateField()),
|
||||
(
|
||||
"event_date_precision",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("day", "Day"),
|
||||
("month", "Month"),
|
||||
("year", "Year"),
|
||||
("decade", "Decade"),
|
||||
],
|
||||
max_length=20,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
("title", models.CharField(max_length=255)),
|
||||
("description", models.TextField(blank=True, null=True)),
|
||||
("from_entity_id", models.UUIDField(blank=True, null=True)),
|
||||
("to_entity_id", models.UUIDField(blank=True, null=True)),
|
||||
("from_value", models.TextField(blank=True, null=True)),
|
||||
("to_value", models.TextField(blank=True, null=True)),
|
||||
("is_public", models.BooleanField(default=True)),
|
||||
("display_order", models.IntegerField(blank=True, null=True)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"approved_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="timeline_events_approved",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"created_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="timeline_events_created",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"from_location",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="timeline_from_events",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="timeline_events",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
(
|
||||
"to_location",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="timeline_to_events",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Timeline Event",
|
||||
"verbose_name_plural": "Timeline Events",
|
||||
"ordering": ["-event_date", "-created_at"],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="EntityTimelineEventEvent",
|
||||
fields=[
|
||||
("pgh_id", models.AutoField(primary_key=True, serialize=False)),
|
||||
("pgh_created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("pgh_label", models.TextField(help_text="The event label.")),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4, editable=False, serialize=False
|
||||
),
|
||||
),
|
||||
("entity_id", models.UUIDField()),
|
||||
("entity_type", models.CharField(max_length=50)),
|
||||
(
|
||||
"event_type",
|
||||
models.CharField(
|
||||
help_text="Type of event: opening, closing, relocation, etc.",
|
||||
max_length=100,
|
||||
),
|
||||
),
|
||||
("event_date", models.DateField()),
|
||||
(
|
||||
"event_date_precision",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("day", "Day"),
|
||||
("month", "Month"),
|
||||
("year", "Year"),
|
||||
("decade", "Decade"),
|
||||
],
|
||||
max_length=20,
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
("title", models.CharField(max_length=255)),
|
||||
("description", models.TextField(blank=True, null=True)),
|
||||
("from_entity_id", models.UUIDField(blank=True, null=True)),
|
||||
("to_entity_id", models.UUIDField(blank=True, null=True)),
|
||||
("from_value", models.TextField(blank=True, null=True)),
|
||||
("to_value", models.TextField(blank=True, null=True)),
|
||||
("is_public", models.BooleanField(default=True)),
|
||||
("display_order", models.IntegerField(blank=True, null=True)),
|
||||
("created_at", models.DateTimeField(auto_now_add=True)),
|
||||
("updated_at", models.DateTimeField(auto_now=True)),
|
||||
(
|
||||
"approved_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"created_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"from_location",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_context",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="pghistory.context",
|
||||
),
|
||||
),
|
||||
(
|
||||
"pgh_obj",
|
||||
models.ForeignKey(
|
||||
db_constraint=False,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="events",
|
||||
related_query_name="+",
|
||||
to="timeline.entitytimelineevent",
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
(
|
||||
"to_location",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
db_constraint=False,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.DO_NOTHING,
|
||||
related_name="+",
|
||||
related_query_name="+",
|
||||
to="entities.park",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
},
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="entitytimelineevent",
|
||||
index=models.Index(
|
||||
fields=["entity_type", "entity_id", "-event_date"],
|
||||
name="timeline_en_entity__1edf78_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="entitytimelineevent",
|
||||
index=models.Index(
|
||||
fields=["event_type", "-event_date"],
|
||||
name="timeline_en_event_t_ddeb87_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="entitytimelineevent",
|
||||
index=models.Index(
|
||||
fields=["is_public", "-event_date"],
|
||||
name="timeline_en_is_publ_8737ce_idx",
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="entitytimelineevent",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="insert_insert",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
func='INSERT INTO "timeline_entitytimelineeventevent" ("approved_by_id", "created_at", "created_by_id", "description", "display_order", "entity_id", "entity_type", "event_date", "event_date_precision", "event_type", "from_entity_id", "from_location_id", "from_value", "id", "is_public", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "submission_id", "title", "to_entity_id", "to_location_id", "to_value", "updated_at") VALUES (NEW."approved_by_id", NEW."created_at", NEW."created_by_id", NEW."description", NEW."display_order", NEW."entity_id", NEW."entity_type", NEW."event_date", NEW."event_date_precision", NEW."event_type", NEW."from_entity_id", NEW."from_location_id", NEW."from_value", NEW."id", NEW."is_public", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."submission_id", NEW."title", NEW."to_entity_id", NEW."to_location_id", NEW."to_value", NEW."updated_at"); RETURN NULL;',
|
||||
hash="76282604c91127f10184eb954cfc832b75b6fd94",
|
||||
operation="INSERT",
|
||||
pgid="pgtrigger_insert_insert_5bf38",
|
||||
table="timeline_entitytimelineevent",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
pgtrigger.migrations.AddTrigger(
|
||||
model_name="entitytimelineevent",
|
||||
trigger=pgtrigger.compiler.Trigger(
|
||||
name="update_update",
|
||||
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||
condition="WHEN (OLD.* IS DISTINCT FROM NEW.*)",
|
||||
func='INSERT INTO "timeline_entitytimelineeventevent" ("approved_by_id", "created_at", "created_by_id", "description", "display_order", "entity_id", "entity_type", "event_date", "event_date_precision", "event_type", "from_entity_id", "from_location_id", "from_value", "id", "is_public", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "submission_id", "title", "to_entity_id", "to_location_id", "to_value", "updated_at") VALUES (NEW."approved_by_id", NEW."created_at", NEW."created_by_id", NEW."description", NEW."display_order", NEW."entity_id", NEW."entity_type", NEW."event_date", NEW."event_date_precision", NEW."event_type", NEW."from_entity_id", NEW."from_location_id", NEW."from_value", NEW."id", NEW."is_public", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."submission_id", NEW."title", NEW."to_entity_id", NEW."to_location_id", NEW."to_value", NEW."updated_at"); RETURN NULL;',
|
||||
hash="d8ad44ad25d075f4459a79674a168ad1afcab00b",
|
||||
operation="UPDATE",
|
||||
pgid="pgtrigger_update_update_1687a",
|
||||
table="timeline_entitytimelineevent",
|
||||
when="AFTER",
|
||||
),
|
||||
),
|
||||
),
|
||||
]
|
||||
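The triggers declared in the migration above mean every insert and update on EntityTimelineEvent writes a snapshot row to the pghistory table EntityTimelineEventEvent. A small sketch of reading that history back through the pgh_obj reverse relation (the 'events' related_name defined above); not part of this commit.

from apps.timeline.models import EntityTimelineEvent

event = EntityTimelineEvent.objects.first()
if event is not None:
    # 'events' is the reverse accessor declared on pgh_obj in the migration above
    for snapshot in event.events.order_by('pgh_created_at'):
        print(snapshot.pgh_label, snapshot.pgh_created_at, snapshot.title)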
0
django-backend/apps/timeline/migrations/__init__.py
Normal file
94
django-backend/apps/timeline/models.py
Normal file
@@ -0,0 +1,94 @@
"""
Timeline models for tracking entity lifecycle events.
"""
import uuid
import pghistory
from django.db import models


@pghistory.track()
class EntityTimelineEvent(models.Model):
    """
    Tracks significant events in entity lifecycles (opening, closing, relocation, etc.)
    """
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    entity_id = models.UUIDField(db_index=True)
    entity_type = models.CharField(max_length=50, db_index=True)
    event_type = models.CharField(max_length=100, help_text="Type of event: opening, closing, relocation, etc.")
    event_date = models.DateField()
    event_date_precision = models.CharField(
        max_length=20,
        null=True,
        blank=True,
        choices=[
            ('day', 'Day'),
            ('month', 'Month'),
            ('year', 'Year'),
            ('decade', 'Decade'),
        ]
    )
    title = models.CharField(max_length=255)
    description = models.TextField(null=True, blank=True)

    # Event details - for relocations, transfers, etc.
    from_entity_id = models.UUIDField(null=True, blank=True)
    to_entity_id = models.UUIDField(null=True, blank=True)
    from_location = models.ForeignKey(
        'entities.Park',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='timeline_from_events'
    )
    to_location = models.ForeignKey(
        'entities.Park',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='timeline_to_events'
    )
    from_value = models.TextField(null=True, blank=True)
    to_value = models.TextField(null=True, blank=True)

    # Moderation
    is_public = models.BooleanField(default=True)
    display_order = models.IntegerField(null=True, blank=True)

    # Tracking
    created_by = models.ForeignKey(
        'users.User',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='timeline_events_created'
    )
    approved_by = models.ForeignKey(
        'users.User',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='timeline_events_approved'
    )
    submission = models.ForeignKey(
        'moderation.ContentSubmission',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='timeline_events'
    )

    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        verbose_name = 'Timeline Event'
        verbose_name_plural = 'Timeline Events'
        ordering = ['-event_date', '-created_at']
        indexes = [
            models.Index(fields=['entity_type', 'entity_id', '-event_date']),
            models.Index(fields=['event_type', '-event_date']),
            models.Index(fields=['is_public', '-event_date']),
        ]

    def __str__(self):
        return f"{self.entity_type} {self.entity_id}: {self.title} ({self.event_date})"
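A quick illustration of how the model above might be populated for a ride relocation; the Park/Ride import path, lookups, and dates are placeholders, not values from this commit.

from datetime import date
from apps.timeline.models import EntityTimelineEvent
from apps.entities.models import Park, Ride  # assumed import path

old_park = Park.objects.get(name="Geauga Lake")   # hypothetical
new_park = Park.objects.get(name="Dorney Park")   # hypothetical
ride = Ride.objects.get(name="Steel Venom")       # hypothetical

EntityTimelineEvent.objects.create(
    entity_type='ride',
    entity_id=ride.id,
    event_type='relocation',
    event_date=date(2007, 5, 1),
    event_date_precision='month',
    title="Relocated to Dorney Park",
    from_location=old_park,
    to_location=new_park,
    is_public=True,
)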
0
django-backend/apps/users/__init__.py
Normal file
584
django-backend/apps/users/admin.py
Normal file
@@ -0,0 +1,584 @@
|
||||
"""
|
||||
Django admin configuration for User models.
|
||||
"""
|
||||
|
||||
from django.contrib import admin
|
||||
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
|
||||
from django.utils.html import format_html
|
||||
from django.urls import reverse
|
||||
from django.utils.safestring import mark_safe
|
||||
from unfold.admin import ModelAdmin
|
||||
from unfold.decorators import display
|
||||
from import_export import resources
|
||||
from import_export.admin import ImportExportModelAdmin
|
||||
|
||||
from .models import User, UserRole, UserProfile, UserRideCredit, UserTopList, UserTopListItem
|
||||
|
||||
|
||||
class UserResource(resources.ModelResource):
|
||||
"""Resource for importing/exporting users."""
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = (
|
||||
'id', 'email', 'username', 'first_name', 'last_name',
|
||||
'date_joined', 'last_login', 'is_active', 'is_staff',
|
||||
'banned', 'reputation_score', 'mfa_enabled'
|
||||
)
|
||||
export_order = fields
|
||||
|
||||
|
||||
class UserRoleInline(admin.StackedInline):
|
||||
"""Inline for user role."""
|
||||
model = UserRole
|
||||
can_delete = False
|
||||
verbose_name_plural = 'Role'
|
||||
fk_name = 'user'
|
||||
fields = ('role', 'granted_by', 'granted_at')
|
||||
readonly_fields = ('granted_at',)
|
||||
|
||||
|
||||
class UserProfileInline(admin.StackedInline):
|
||||
"""Inline for user profile."""
|
||||
model = UserProfile
|
||||
can_delete = False
|
||||
verbose_name_plural = 'Profile & Preferences'
|
||||
fk_name = 'user'
|
||||
fields = (
|
||||
('email_notifications', 'email_on_submission_approved', 'email_on_submission_rejected'),
|
||||
('profile_public', 'show_email'),
|
||||
('total_submissions', 'approved_submissions'),
|
||||
)
|
||||
readonly_fields = ('total_submissions', 'approved_submissions')
|
||||
|
||||
|
||||
@admin.register(User)
|
||||
class UserAdmin(BaseUserAdmin, ModelAdmin, ImportExportModelAdmin):
|
||||
"""Admin interface for User model."""
|
||||
|
||||
resource_class = UserResource
|
||||
|
||||
list_display = [
|
||||
'email',
|
||||
'username',
|
||||
'display_name_admin',
|
||||
'role_badge',
|
||||
'reputation_badge',
|
||||
'status_badge',
|
||||
'mfa_badge',
|
||||
'date_joined',
|
||||
'last_login',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'is_active',
|
||||
'is_staff',
|
||||
'is_superuser',
|
||||
'banned',
|
||||
'mfa_enabled',
|
||||
'oauth_provider',
|
||||
'date_joined',
|
||||
'last_login',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'email',
|
||||
'username',
|
||||
'first_name',
|
||||
'last_name',
|
||||
]
|
||||
|
||||
ordering = ['-date_joined']
|
||||
|
||||
fieldsets = (
|
||||
('Account Information', {
|
||||
'fields': ('email', 'username', 'password')
|
||||
}),
|
||||
('Personal Information', {
|
||||
'fields': ('first_name', 'last_name', 'avatar_url', 'bio')
|
||||
}),
|
||||
('Permissions', {
|
||||
'fields': (
|
||||
'is_active',
|
||||
'is_staff',
|
||||
'is_superuser',
|
||||
'groups',
|
||||
'user_permissions',
|
||||
)
|
||||
}),
|
||||
('Moderation', {
|
||||
'fields': (
|
||||
'banned',
|
||||
'ban_reason',
|
||||
'banned_at',
|
||||
'banned_by',
|
||||
)
|
||||
}),
|
||||
('OAuth', {
|
||||
'fields': ('oauth_provider', 'oauth_sub'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
('Security', {
|
||||
'fields': ('mfa_enabled', 'reputation_score'),
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('date_joined', 'last_login'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
add_fieldsets = (
|
||||
('Create New User', {
|
||||
'classes': ('wide',),
|
||||
'fields': ('email', 'username', 'password1', 'password2'),
|
||||
}),
|
||||
)
|
||||
|
||||
readonly_fields = [
|
||||
'date_joined',
|
||||
'last_login',
|
||||
'banned_at',
|
||||
'oauth_provider',
|
||||
'oauth_sub',
|
||||
]
|
||||
|
||||
inlines = [UserRoleInline, UserProfileInline]
|
||||
|
||||
@display(description="Name", label=True)
|
||||
def display_name_admin(self, obj):
|
||||
"""Display user's display name."""
|
||||
return obj.display_name or '-'
|
||||
|
||||
@display(description="Role", label=True)
|
||||
def role_badge(self, obj):
|
||||
"""Display user role with badge."""
|
||||
try:
|
||||
role = obj.role.role
|
||||
colors = {
|
||||
'admin': 'red',
|
||||
'moderator': 'blue',
|
||||
'user': 'green',
|
||||
}
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">{}</span>',
|
||||
colors.get(role, 'gray'),
|
||||
role.upper()
|
||||
)
|
||||
except UserRole.DoesNotExist:
|
||||
return format_html('<span style="color: gray;">No Role</span>')
|
||||
|
||||
@display(description="Reputation", label=True)
|
||||
def reputation_badge(self, obj):
|
||||
"""Display reputation score."""
|
||||
score = obj.reputation_score
|
||||
if score >= 100:
|
||||
color = 'green'
|
||||
elif score >= 50:
|
||||
color = 'blue'
|
||||
elif score >= 0:
|
||||
color = 'gray'
|
||||
else:
|
||||
color = 'red'
|
||||
|
||||
return format_html(
|
||||
'<span style="color: {}; font-weight: bold;">{}</span>',
|
||||
color,
|
||||
score
|
||||
)
|
||||
|
||||
@display(description="Status", label=True)
|
||||
def status_badge(self, obj):
|
||||
"""Display user status."""
|
||||
if obj.banned:
|
||||
return format_html(
|
||||
'<span style="background-color: red; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">BANNED</span>'
|
||||
)
|
||||
elif not obj.is_active:
|
||||
return format_html(
|
||||
'<span style="background-color: orange; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">INACTIVE</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="background-color: green; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">ACTIVE</span>'
|
||||
)
|
||||
|
||||
@display(description="MFA", label=True)
|
||||
def mfa_badge(self, obj):
|
||||
"""Display MFA status."""
|
||||
if obj.mfa_enabled:
|
||||
return format_html(
|
||||
'<span style="color: green;">✓ Enabled</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="color: gray;">✗ Disabled</span>'
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('role', 'banned_by')
|
||||
|
||||
actions = ['ban_users', 'unban_users', 'make_moderator', 'make_user']
|
||||
|
||||
@admin.action(description="Ban selected users")
|
||||
def ban_users(self, request, queryset):
|
||||
"""Ban selected users."""
|
||||
count = 0
|
||||
for user in queryset:
|
||||
if not user.banned:
|
||||
user.ban(reason="Banned by admin", banned_by=request.user)
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been banned."
|
||||
)
|
||||
|
||||
@admin.action(description="Unban selected users")
|
||||
def unban_users(self, request, queryset):
|
||||
"""Unban selected users."""
|
||||
count = 0
|
||||
for user in queryset:
|
||||
if user.banned:
|
||||
user.unban()
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been unbanned."
|
||||
)
|
||||
|
||||
@admin.action(description="Set role to Moderator")
|
||||
def make_moderator(self, request, queryset):
|
||||
"""Set users' role to moderator."""
|
||||
from .services import RoleService
|
||||
|
||||
count = 0
|
||||
for user in queryset:
|
||||
RoleService.assign_role(user, 'moderator', request.user)
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been set to Moderator role."
|
||||
)
|
||||
|
||||
@admin.action(description="Set role to User")
|
||||
def make_user(self, request, queryset):
|
||||
"""Set users' role to user."""
|
||||
from .services import RoleService
|
||||
|
||||
count = 0
|
||||
for user in queryset:
|
||||
RoleService.assign_role(user, 'user', request.user)
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been set to User role."
|
||||
)
|
||||
|
||||
|
||||
@admin.register(UserRole)
|
||||
class UserRoleAdmin(ModelAdmin):
|
||||
"""Admin interface for UserRole model."""
|
||||
|
||||
list_display = ['user', 'role', 'is_moderator', 'is_admin', 'granted_at', 'granted_by']
|
||||
list_filter = ['role', 'granted_at']
|
||||
search_fields = ['user__email', 'user__username']
|
||||
ordering = ['-granted_at']
|
||||
|
||||
readonly_fields = ['granted_at']
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('user', 'granted_by')
|
||||
|
||||
|
||||
@admin.register(UserProfile)
|
||||
class UserProfileAdmin(ModelAdmin):
|
||||
"""Admin interface for UserProfile model."""
|
||||
|
||||
list_display = [
|
||||
'user',
|
||||
'total_submissions',
|
||||
'approved_submissions',
|
||||
'approval_rate',
|
||||
'email_notifications',
|
||||
'profile_public',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'email_notifications',
|
||||
'profile_public',
|
||||
'show_email',
|
||||
]
|
||||
|
||||
search_fields = ['user__email', 'user__username']
|
||||
|
||||
readonly_fields = ['created', 'modified', 'total_submissions', 'approved_submissions']
|
||||
|
||||
fieldsets = (
|
||||
('User', {
|
||||
'fields': ('user',)
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('total_submissions', 'approved_submissions'),
|
||||
}),
|
||||
('Notification Preferences', {
|
||||
'fields': (
|
||||
'email_notifications',
|
||||
'email_on_submission_approved',
|
||||
'email_on_submission_rejected',
|
||||
)
|
||||
}),
|
||||
('Privacy Settings', {
|
||||
'fields': ('profile_public', 'show_email'),
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description="Approval Rate")
|
||||
def approval_rate(self, obj):
|
||||
"""Display approval rate percentage."""
|
||||
if obj.total_submissions == 0:
|
||||
return '-'
|
||||
|
||||
rate = (obj.approved_submissions / obj.total_submissions) * 100
|
||||
|
||||
if rate >= 80:
|
||||
color = 'green'
|
||||
elif rate >= 60:
|
||||
color = 'blue'
|
||||
elif rate >= 40:
|
||||
color = 'orange'
|
||||
else:
|
||||
color = 'red'
|
||||
|
||||
return format_html(
|
||||
'<span style="color: {}; font-weight: bold;">{:.1f}%</span>',
|
||||
color,
|
||||
rate
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('user')
|
||||
|
||||
|
||||
@admin.register(UserRideCredit)
|
||||
class UserRideCreditAdmin(ModelAdmin):
|
||||
"""Admin interface for UserRideCredit model."""
|
||||
|
||||
list_display = [
|
||||
'user_link',
|
||||
'ride_link',
|
||||
'park_link',
|
||||
'first_ride_date',
|
||||
'ride_count',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'first_ride_date',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'user__email',
|
||||
'user__username',
|
||||
'ride__name',
|
||||
'notes',
|
||||
]
|
||||
|
||||
ordering = ['-first_ride_date', '-created']
|
||||
|
||||
readonly_fields = ['created', 'modified']
|
||||
|
||||
fieldsets = (
|
||||
('Credit Information', {
|
||||
'fields': ('user', 'ride', 'first_ride_date', 'ride_count')
|
||||
}),
|
||||
('Notes', {
|
||||
'fields': ('notes',)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='User', ordering='user__username')
|
||||
def user_link(self, obj):
|
||||
url = reverse('admin:users_user_change', args=[obj.user.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.user.username)
|
||||
|
||||
@display(description='Ride', ordering='ride__name')
|
||||
def ride_link(self, obj):
|
||||
url = reverse('admin:entities_ride_change', args=[obj.ride.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.ride.name)
|
||||
|
||||
@display(description='Park')
|
||||
def park_link(self, obj):
|
||||
if obj.ride.park:
|
||||
url = reverse('admin:entities_park_change', args=[obj.ride.park.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.ride.park.name)
|
||||
return '-'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('user', 'ride', 'ride__park')
|
||||
|
||||
|
||||
class UserTopListItemInline(admin.TabularInline):
|
||||
"""Inline for top list items."""
|
||||
model = UserTopListItem
|
||||
extra = 1
|
||||
fields = ('position', 'content_type', 'object_id', 'notes')
|
||||
ordering = ['position']
|
||||
|
||||
|
||||
@admin.register(UserTopList)
|
||||
class UserTopListAdmin(ModelAdmin):
|
||||
"""Admin interface for UserTopList model."""
|
||||
|
||||
list_display = [
|
||||
'title',
|
||||
'user_link',
|
||||
'list_type',
|
||||
'item_count_display',
|
||||
'visibility_badge',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'list_type',
|
||||
'is_public',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'title',
|
||||
'description',
|
||||
'user__email',
|
||||
'user__username',
|
||||
]
|
||||
|
||||
ordering = ['-created']
|
||||
|
||||
readonly_fields = ['created', 'modified', 'item_count']
|
||||
|
||||
fieldsets = (
|
||||
('List Information', {
|
||||
'fields': ('user', 'list_type', 'title', 'description')
|
||||
}),
|
||||
('Privacy', {
|
||||
'fields': ('is_public',)
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('item_count',)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
inlines = [UserTopListItemInline]
|
||||
|
||||
@display(description='User', ordering='user__username')
|
||||
def user_link(self, obj):
|
||||
url = reverse('admin:users_user_change', args=[obj.user.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.user.username)
|
||||
|
||||
@display(description='Items', ordering='items__count')
|
||||
def item_count_display(self, obj):
|
||||
count = obj.item_count
|
||||
return format_html('<span style="font-weight: bold;">{}</span>', count)
|
||||
|
||||
@display(description='Visibility', ordering='is_public')
|
||||
def visibility_badge(self, obj):
|
||||
if obj.is_public:
|
||||
return format_html(
|
||||
'<span style="background-color: green; color: white; padding: 3px 8px; '
|
||||
'border-radius: 3px; font-size: 11px;">PUBLIC</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="background-color: gray; color: white; padding: 3px 8px; '
|
||||
'border-radius: 3px; font-size: 11px;">PRIVATE</span>'
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('user').prefetch_related('items')
|
||||
|
||||
|
||||
@admin.register(UserTopListItem)
|
||||
class UserTopListItemAdmin(ModelAdmin):
|
||||
"""Admin interface for UserTopListItem model."""
|
||||
|
||||
list_display = [
|
||||
'position',
|
||||
'list_link',
|
||||
'entity_type',
|
||||
'entity_link',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'content_type',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'top_list__title',
|
||||
'notes',
|
||||
]
|
||||
|
||||
ordering = ['top_list', 'position']
|
||||
|
||||
readonly_fields = ['created', 'modified']
|
||||
|
||||
fieldsets = (
|
||||
('Item Information', {
|
||||
'fields': ('top_list', 'position', 'content_type', 'object_id')
|
||||
}),
|
||||
('Notes', {
|
||||
'fields': ('notes',)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='List', ordering='top_list__title')
|
||||
def list_link(self, obj):
|
||||
url = reverse('admin:users_usertoplist_change', args=[obj.top_list.pk])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.top_list.title)
|
||||
|
||||
@display(description='Type', ordering='content_type')
|
||||
def entity_type(self, obj):
|
||||
return obj.content_type.model.title()
|
||||
|
||||
@display(description='Entity')
|
||||
def entity_link(self, obj):
|
||||
if obj.content_object:
|
||||
model_name = obj.content_type.model
|
||||
url = reverse(f'admin:entities_{model_name}_change', args=[obj.object_id])
|
||||
return format_html('<a href="{}">{}</a>', url, str(obj.content_object))
|
||||
return f"ID: {obj.object_id}"
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('top_list', 'content_type')
|
||||
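The make_moderator and make_user admin actions above import RoleService from apps.users.services, which is not part of this diff. A minimal sketch of what assign_role presumably does, inferred only from the call site RoleService.assign_role(user, 'moderator', request.user); purely an assumption.

from apps.users.models import UserRole


class RoleService:
    @staticmethod
    def assign_role(user, role, granted_by=None):
        """Create or update the user's single UserRole row (sketch, not the shipped code)."""
        user_role, _created = UserRole.objects.update_or_create(
            user=user,
            defaults={'role': role, 'granted_by': granted_by},
        )
        return user_role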
17
django-backend/apps/users/apps.py
Normal file
@@ -0,0 +1,17 @@
"""
Users app configuration.
"""

from django.apps import AppConfig


class UsersConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.users'
    verbose_name = 'Users'

    def ready(self):
        """Import signal handlers when the app is ready."""
        # Import signals here to avoid circular imports
        # import apps.users.signals
        pass
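ready() references an apps.users.signals module that is commented out and not included in this commit. If it were added, one plausible shape (an assumption, not the project's actual module) is a post_save handler that creates the UserRole and UserProfile rows the admin inlines above expect.

# Hypothetical apps/users/signals.py (not part of this commit).
from django.db.models.signals import post_save
from django.dispatch import receiver

from .models import User, UserRole, UserProfile


@receiver(post_save, sender=User)
def create_related_records(sender, instance, created, **kwargs):
    """Ensure every new user gets a default role and a profile."""
    if created:
        UserRole.objects.get_or_create(user=instance, defaults={'role': 'user'})
        UserProfile.objects.get_or_create(user=instance)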
370
django-backend/apps/users/migrations/0001_initial.py
Normal file
@@ -0,0 +1,370 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 16:35
|
||||
|
||||
from django.conf import settings
|
||||
import django.contrib.auth.models
|
||||
import django.contrib.auth.validators
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("auth", "0012_alter_user_first_name_max_length"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="User",
|
||||
fields=[
|
||||
("password", models.CharField(max_length=128, verbose_name="password")),
|
||||
(
|
||||
"last_login",
|
||||
models.DateTimeField(
|
||||
blank=True, null=True, verbose_name="last login"
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_superuser",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Designates that this user has all permissions without explicitly assigning them.",
|
||||
verbose_name="superuser status",
|
||||
),
|
||||
),
|
||||
(
|
||||
"username",
|
||||
models.CharField(
|
||||
error_messages={
|
||||
"unique": "A user with that username already exists."
|
||||
},
|
||||
help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.",
|
||||
max_length=150,
|
||||
unique=True,
|
||||
validators=[
|
||||
django.contrib.auth.validators.UnicodeUsernameValidator()
|
||||
],
|
||||
verbose_name="username",
|
||||
),
|
||||
),
|
||||
(
|
||||
"first_name",
|
||||
models.CharField(
|
||||
blank=True, max_length=150, verbose_name="first name"
|
||||
),
|
||||
),
|
||||
(
|
||||
"last_name",
|
||||
models.CharField(
|
||||
blank=True, max_length=150, verbose_name="last name"
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_staff",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Designates whether the user can log into this admin site.",
|
||||
verbose_name="staff status",
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_active",
|
||||
models.BooleanField(
|
||||
default=True,
|
||||
help_text="Designates whether this user should be treated as active. Unselect this instead of deleting accounts.",
|
||||
verbose_name="active",
|
||||
),
|
||||
),
|
||||
(
|
||||
"date_joined",
|
||||
models.DateTimeField(
|
||||
default=django.utils.timezone.now, verbose_name="date joined"
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"email",
|
||||
models.EmailField(
|
||||
help_text="Email address for authentication",
|
||||
max_length=254,
|
||||
unique=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"oauth_provider",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
choices=[
|
||||
("", "None"),
|
||||
("google", "Google"),
|
||||
("discord", "Discord"),
|
||||
],
|
||||
help_text="OAuth provider used for authentication",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"oauth_sub",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="OAuth subject identifier from provider",
|
||||
max_length=255,
|
||||
),
|
||||
),
|
||||
(
|
||||
"mfa_enabled",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether two-factor authentication is enabled",
|
||||
),
|
||||
),
|
||||
(
|
||||
"avatar_url",
|
||||
models.URLField(blank=True, help_text="URL to user's avatar image"),
|
||||
),
|
||||
(
|
||||
"bio",
|
||||
models.TextField(
|
||||
blank=True, help_text="User biography", max_length=500
|
||||
),
|
||||
),
|
||||
(
|
||||
"banned",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=False,
|
||||
help_text="Whether this user is banned",
|
||||
),
|
||||
),
|
||||
(
|
||||
"ban_reason",
|
||||
models.TextField(blank=True, help_text="Reason for ban"),
|
||||
),
|
||||
(
|
||||
"banned_at",
|
||||
models.DateTimeField(
|
||||
blank=True, help_text="When the user was banned", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"reputation_score",
|
||||
models.IntegerField(
|
||||
default=0,
|
||||
help_text="User reputation score based on contributions",
|
||||
),
|
||||
),
|
||||
(
|
||||
"banned_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator who banned this user",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="users_banned",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"groups",
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
|
||||
related_name="user_set",
|
||||
related_query_name="user",
|
||||
to="auth.group",
|
||||
verbose_name="groups",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user_permissions",
|
||||
models.ManyToManyField(
|
||||
blank=True,
|
||||
help_text="Specific permissions for this user.",
|
||||
related_name="user_set",
|
||||
related_query_name="user",
|
||||
to="auth.permission",
|
||||
verbose_name="user permissions",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "users",
|
||||
"ordering": ["-date_joined"],
|
||||
},
|
||||
managers=[
|
||||
("objects", django.contrib.auth.models.UserManager()),
|
||||
],
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserRole",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"role",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("user", "User"),
|
||||
("moderator", "Moderator"),
|
||||
("admin", "Admin"),
|
||||
],
|
||||
db_index=True,
|
||||
default="user",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
("granted_at", models.DateTimeField(auto_now_add=True)),
|
||||
(
|
||||
"granted_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="roles_granted",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.OneToOneField(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="role",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "user_roles",
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserProfile",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"email_notifications",
|
||||
models.BooleanField(
|
||||
default=True, help_text="Receive email notifications"
|
||||
),
|
||||
),
|
||||
(
|
||||
"email_on_submission_approved",
|
||||
models.BooleanField(
|
||||
default=True, help_text="Email when submissions are approved"
|
||||
),
|
||||
),
|
||||
(
|
||||
"email_on_submission_rejected",
|
||||
models.BooleanField(
|
||||
default=True, help_text="Email when submissions are rejected"
|
||||
),
|
||||
),
|
||||
(
|
||||
"profile_public",
|
||||
models.BooleanField(
|
||||
default=True, help_text="Make profile publicly visible"
|
||||
),
|
||||
),
|
||||
(
|
||||
"show_email",
|
||||
models.BooleanField(
|
||||
default=False, help_text="Show email on public profile"
|
||||
),
|
||||
),
|
||||
(
|
||||
"total_submissions",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Total number of submissions made"
|
||||
),
|
||||
),
|
||||
(
|
||||
"approved_submissions",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Number of approved submissions"
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.OneToOneField(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="profile",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "user_profiles",
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="user",
|
||||
index=models.Index(fields=["email"], name="users_email_4b85f2_idx"),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="user",
|
||||
index=models.Index(fields=["banned"], name="users_banned_ee00ad_idx"),
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,265 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 20:46
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
("entities", "0003_add_search_vector_gin_indexes"),
|
||||
("users", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="UserTopList",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"list_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("parks", "Parks"),
|
||||
("rides", "Rides"),
|
||||
("coasters", "Coasters"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of entities in this list",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"title",
|
||||
models.CharField(help_text="Title of the list", max_length=200),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(blank=True, help_text="Description of the list"),
|
||||
),
|
||||
(
|
||||
"is_public",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=True,
|
||||
help_text="Whether this list is publicly visible",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="top_lists",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "user_top_lists",
|
||||
"ordering": ["-created"],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserRideCredit",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"first_ride_date",
|
||||
models.DateField(
|
||||
blank=True, help_text="Date of first ride", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride_count",
|
||||
models.PositiveIntegerField(
|
||||
default=1, help_text="Number of times user has ridden this ride"
|
||||
),
|
||||
),
|
||||
(
|
||||
"notes",
|
||||
models.TextField(
|
||||
blank=True, help_text="User notes about this ride"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ride",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="user_credits",
|
||||
to="entities.ride",
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="ride_credits",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "user_ride_credits",
|
||||
"ordering": ["-first_ride_date", "-created"],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="UserTopListItem",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
("object_id", models.PositiveIntegerField()),
|
||||
(
|
||||
"position",
|
||||
models.PositiveIntegerField(
|
||||
help_text="Position in the list (1 = top)"
|
||||
),
|
||||
),
|
||||
(
|
||||
"notes",
|
||||
models.TextField(
|
||||
blank=True,
|
||||
help_text="User notes about why this item is ranked here",
|
||||
),
|
||||
),
|
||||
(
|
||||
"content_type",
|
||||
models.ForeignKey(
|
||||
limit_choices_to={"model__in": ("park", "ride")},
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"top_list",
|
||||
models.ForeignKey(
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="items",
|
||||
to="users.usertoplist",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"db_table": "user_top_list_items",
|
||||
"ordering": ["position"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["top_list", "position"],
|
||||
name="user_top_li_top_lis_d31db9_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["content_type", "object_id"],
|
||||
name="user_top_li_content_889eb7_idx",
|
||||
),
|
||||
],
|
||||
"unique_together": {("top_list", "position")},
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="usertoplist",
|
||||
index=models.Index(
|
||||
fields=["user", "list_type"], name="user_top_li_user_id_63f56d_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="usertoplist",
|
||||
index=models.Index(
|
||||
fields=["is_public", "created"], name="user_top_li_is_publ_983146_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="userridecredit",
|
||||
index=models.Index(
|
||||
fields=["user", "first_ride_date"],
|
||||
name="user_ride_c_user_id_56a0e5_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="userridecredit",
|
||||
index=models.Index(fields=["ride"], name="user_ride_c_ride_id_f0990b_idx"),
|
||||
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="userridecredit",
|
||||
unique_together={("user", "ride")},
|
||||
),
|
||||
]
|
||||
0
django-backend/apps/users/migrations/__init__.py
Normal file
0
django-backend/apps/users/migrations/__init__.py
Normal file
419
django-backend/apps/users/models.py
Normal file
419
django-backend/apps/users/models.py
Normal file
@@ -0,0 +1,419 @@
|
||||
"""
|
||||
User models for ThrillWiki.
|
||||
Custom user model with OAuth and MFA support.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from django.contrib.auth.models import AbstractUser
|
||||
from django.db import models
|
||||
from apps.core.models import BaseModel
|
||||
|
||||
|
||||
class User(AbstractUser):
|
||||
"""
|
||||
Custom user model with UUID primary key and additional fields.
|
||||
|
||||
Supports:
|
||||
- Email-based authentication
|
||||
- OAuth (Google, Discord)
|
||||
- Two-factor authentication (TOTP)
|
||||
- User reputation and moderation
|
||||
"""
|
||||
|
||||
# Override id to use UUID
|
||||
id = models.UUIDField(
|
||||
primary_key=True,
|
||||
default=uuid.uuid4,
|
||||
editable=False
|
||||
)
|
||||
|
||||
# Email as primary identifier
|
||||
email = models.EmailField(
|
||||
unique=True,
|
||||
help_text="Email address for authentication"
|
||||
)
|
||||
|
||||
# OAuth fields
|
||||
oauth_provider = models.CharField(
|
||||
max_length=50,
|
||||
blank=True,
|
||||
choices=[
|
||||
('', 'None'),
|
||||
('google', 'Google'),
|
||||
('discord', 'Discord'),
|
||||
],
|
||||
help_text="OAuth provider used for authentication"
|
||||
)
|
||||
oauth_sub = models.CharField(
|
||||
max_length=255,
|
||||
blank=True,
|
||||
help_text="OAuth subject identifier from provider"
|
||||
)
|
||||
|
||||
# MFA fields
|
||||
mfa_enabled = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether two-factor authentication is enabled"
|
||||
)
|
||||
|
||||
# Profile fields
|
||||
avatar_url = models.URLField(
|
||||
blank=True,
|
||||
help_text="URL to user's avatar image"
|
||||
)
|
||||
bio = models.TextField(
|
||||
blank=True,
|
||||
max_length=500,
|
||||
help_text="User biography"
|
||||
)
|
||||
|
||||
# Moderation fields
|
||||
banned = models.BooleanField(
|
||||
default=False,
|
||||
db_index=True,
|
||||
help_text="Whether this user is banned"
|
||||
)
|
||||
ban_reason = models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for ban"
|
||||
)
|
||||
banned_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the user was banned"
|
||||
)
|
||||
banned_by = models.ForeignKey(
|
||||
'self',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='users_banned',
|
||||
help_text="Moderator who banned this user"
|
||||
)
|
||||
|
||||
# Reputation system
|
||||
reputation_score = models.IntegerField(
|
||||
default=0,
|
||||
help_text="User reputation score based on contributions"
|
||||
)
|
||||
|
||||
# Timestamps (inherited from AbstractUser)
|
||||
# date_joined, last_login
|
||||
|
||||
# Use email for authentication
|
||||
USERNAME_FIELD = 'email'
|
||||
REQUIRED_FIELDS = ['username']
|
||||
|
||||
class Meta:
|
||||
db_table = 'users'
|
||||
ordering = ['-date_joined']
|
||||
indexes = [
|
||||
models.Index(fields=['email']),
|
||||
models.Index(fields=['banned']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return self.email
|
||||
|
||||
def ban(self, reason, banned_by=None):
|
||||
"""Ban this user"""
|
||||
from django.utils import timezone
|
||||
self.banned = True
|
||||
self.ban_reason = reason
|
||||
self.banned_at = timezone.now()
|
||||
self.banned_by = banned_by
|
||||
self.save(update_fields=['banned', 'ban_reason', 'banned_at', 'banned_by'])
|
||||
|
||||
def unban(self):
|
||||
"""Unban this user"""
|
||||
self.banned = False
|
||||
self.ban_reason = ''
|
||||
self.banned_at = None
|
||||
self.banned_by = None
|
||||
self.save(update_fields=['banned', 'ban_reason', 'banned_at', 'banned_by'])
|
||||
|
||||
@property
|
||||
def display_name(self):
|
||||
"""Return the user's display name (full name or username)"""
|
||||
if self.first_name or self.last_name:
|
||||
return f"{self.first_name} {self.last_name}".strip()
|
||||
return self.username or self.email.split('@')[0]
|
||||
|
||||
|
||||
class UserRole(BaseModel):
|
||||
"""
|
||||
User role assignments for permission management.
|
||||
|
||||
Roles:
|
||||
- user: Standard user (default)
|
||||
- moderator: Can approve submissions and moderate content
|
||||
- admin: Full access to admin features
|
||||
"""
|
||||
|
||||
ROLE_CHOICES = [
|
||||
('user', 'User'),
|
||||
('moderator', 'Moderator'),
|
||||
('admin', 'Admin'),
|
||||
]
|
||||
|
||||
user = models.OneToOneField(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='role'
|
||||
)
|
||||
role = models.CharField(
|
||||
max_length=20,
|
||||
choices=ROLE_CHOICES,
|
||||
default='user',
|
||||
db_index=True
|
||||
)
|
||||
granted_at = models.DateTimeField(auto_now_add=True)
|
||||
granted_by = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='roles_granted'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'user_roles'
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.user.email} - {self.role}"
|
||||
|
||||
@property
|
||||
def is_moderator(self):
|
||||
"""Check if user is a moderator or admin"""
|
||||
return self.role in ['moderator', 'admin']
|
||||
|
||||
@property
|
||||
def is_admin(self):
|
||||
"""Check if user is an admin"""
|
||||
return self.role == 'admin'
|
||||
|
||||
|
||||
class UserProfile(BaseModel):
|
||||
"""
|
||||
Extended user profile information.
|
||||
|
||||
Stores additional user preferences and settings.
|
||||
"""
|
||||
|
||||
user = models.OneToOneField(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='profile'
|
||||
)
|
||||
|
||||
# Preferences
|
||||
email_notifications = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Receive email notifications"
|
||||
)
|
||||
email_on_submission_approved = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Email when submissions are approved"
|
||||
)
|
||||
email_on_submission_rejected = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Email when submissions are rejected"
|
||||
)
|
||||
|
||||
# Privacy
|
||||
profile_public = models.BooleanField(
|
||||
default=True,
|
||||
help_text="Make profile publicly visible"
|
||||
)
|
||||
show_email = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Show email on public profile"
|
||||
)
|
||||
|
||||
# Statistics
|
||||
total_submissions = models.IntegerField(
|
||||
default=0,
|
||||
help_text="Total number of submissions made"
|
||||
)
|
||||
approved_submissions = models.IntegerField(
|
||||
default=0,
|
||||
help_text="Number of approved submissions"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'user_profiles'
|
||||
|
||||
def __str__(self):
|
||||
return f"Profile for {self.user.email}"
|
||||
|
||||
def update_submission_stats(self):
|
||||
"""Update submission statistics"""
|
||||
from apps.moderation.models import ContentSubmission
|
||||
self.total_submissions = ContentSubmission.objects.filter(user=self.user).count()
|
||||
self.approved_submissions = ContentSubmission.objects.filter(
|
||||
user=self.user,
|
||||
status='approved'
|
||||
).count()
|
||||
self.save(update_fields=['total_submissions', 'approved_submissions'])
|
||||
|
||||
|
||||
class UserRideCredit(BaseModel):
|
||||
"""
|
||||
Track which rides users have ridden (ride credits/coaster counting).
|
||||
|
||||
Users can log which rides they've been on and track their first ride date.
|
||||
"""
|
||||
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='ride_credits'
|
||||
)
|
||||
ride = models.ForeignKey(
|
||||
'entities.Ride',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='user_credits'
|
||||
)
|
||||
|
||||
# First ride date
|
||||
first_ride_date = models.DateField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Date of first ride"
|
||||
)
|
||||
|
||||
# Ride count for this specific ride
|
||||
ride_count = models.PositiveIntegerField(
|
||||
default=1,
|
||||
help_text="Number of times user has ridden this ride"
|
||||
)
|
||||
|
||||
# Notes about the ride experience
|
||||
notes = models.TextField(
|
||||
blank=True,
|
||||
help_text="User notes about this ride"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'user_ride_credits'
|
||||
unique_together = [['user', 'ride']]
|
||||
ordering = ['-first_ride_date', '-created']
|
||||
indexes = [
|
||||
models.Index(fields=['user', 'first_ride_date']),
|
||||
models.Index(fields=['ride']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.user.username} - {self.ride.name}"
|
||||
|
||||
@property
|
||||
def park(self):
|
||||
"""Get the park this ride is at"""
|
||||
return self.ride.park
|
||||
|
||||
|
||||
class UserTopList(BaseModel):
|
||||
"""
|
||||
User-created ranked lists (top parks, top rides, top coasters, etc.).
|
||||
|
||||
Users can create and share their personal rankings of parks, rides, and other entities.
|
||||
"""
|
||||
|
||||
LIST_TYPE_CHOICES = [
|
||||
('parks', 'Parks'),
|
||||
('rides', 'Rides'),
|
||||
('coasters', 'Coasters'),
|
||||
]
|
||||
|
||||
user = models.ForeignKey(
|
||||
User,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='top_lists'
|
||||
)
|
||||
|
||||
# List metadata
|
||||
list_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=LIST_TYPE_CHOICES,
|
||||
db_index=True,
|
||||
help_text="Type of entities in this list"
|
||||
)
|
||||
title = models.CharField(
|
||||
max_length=200,
|
||||
help_text="Title of the list"
|
||||
)
|
||||
description = models.TextField(
|
||||
blank=True,
|
||||
help_text="Description of the list"
|
||||
)
|
||||
|
||||
# Privacy
|
||||
is_public = models.BooleanField(
|
||||
default=True,
|
||||
db_index=True,
|
||||
help_text="Whether this list is publicly visible"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'user_top_lists'
|
||||
ordering = ['-created']
|
||||
indexes = [
|
||||
models.Index(fields=['user', 'list_type']),
|
||||
models.Index(fields=['is_public', 'created']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.user.username} - {self.title}"
|
||||
|
||||
@property
|
||||
def item_count(self):
|
||||
"""Get the number of items in this list"""
|
||||
return self.items.count()
|
||||
|
||||
|
||||
class UserTopListItem(BaseModel):
|
||||
"""
|
||||
Individual items in a user's top list with position and notes.
|
||||
"""
|
||||
|
||||
top_list = models.ForeignKey(
|
||||
UserTopList,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='items'
|
||||
)
|
||||
|
||||
# Generic relation to park or ride
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
content_type = models.ForeignKey(
|
||||
ContentType,
|
||||
on_delete=models.CASCADE,
|
||||
limit_choices_to={'model__in': ('park', 'ride')}
|
||||
)
|
||||
object_id = models.PositiveIntegerField()
|
||||
content_object = GenericForeignKey('content_type', 'object_id')
|
||||
|
||||
# Position in list (1 = top)
|
||||
position = models.PositiveIntegerField(
|
||||
help_text="Position in the list (1 = top)"
|
||||
)
|
||||
|
||||
# Optional notes about this specific item
|
||||
notes = models.TextField(
|
||||
blank=True,
|
||||
help_text="User notes about why this item is ranked here"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'user_top_list_items'
|
||||
ordering = ['position']
|
||||
unique_together = [['top_list', 'position']]
|
||||
indexes = [
|
||||
models.Index(fields=['top_list', 'position']),
|
||||
models.Index(fields=['content_type', 'object_id']),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
entity_name = str(self.content_object) if self.content_object else f"ID {self.object_id}"
|
||||
return f"#{self.position}: {entity_name}"
|
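# Illustrative usage sketch (not part of the original commit): building a small
# ranked list from UserTopList/UserTopListItem. Assumes `rides` is an ordered
# iterable of entities.Ride instances; titles and list_type are placeholders.
def _example_build_top_list(user, rides):
    """Create a public coaster ranking from an ordered iterable of rides."""
    from django.contrib.contenttypes.models import ContentType

    top_list = UserTopList.objects.create(
        user=user,
        list_type='coasters',
        title='My Top Coasters',
    )
    ride_type = ContentType.objects.get(app_label='entities', model='ride')
    for position, ride in enumerate(rides, start=1):
        UserTopListItem.objects.create(
            top_list=top_list,
            content_type=ride_type,
            object_id=ride.pk,
            position=position,
        )
    return top_list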
||||
310
django-backend/apps/users/permissions.py
Normal file
310
django-backend/apps/users/permissions.py
Normal file
@@ -0,0 +1,310 @@
|
||||
"""
|
||||
Permission utilities and decorators for API endpoints.
|
||||
|
||||
Provides:
|
||||
- Permission checking decorators
|
||||
- Role-based access control
|
||||
- Object-level permissions
|
||||
"""
|
||||
|
||||
from functools import wraps
|
||||
from typing import Optional, Callable
|
||||
from django.http import HttpRequest
|
||||
from ninja import Router
|
||||
from ninja.security import HttpBearer
|
||||
from rest_framework_simplejwt.tokens import AccessToken
|
||||
from rest_framework_simplejwt.exceptions import TokenError
|
||||
from django.core.exceptions import PermissionDenied
|
||||
import logging
|
||||
|
||||
from .models import User, UserRole
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JWTAuth(HttpBearer):
|
||||
"""JWT authentication for django-ninja"""
|
||||
|
||||
def authenticate(self, request: HttpRequest, token: str) -> Optional[User]:
|
||||
"""
|
||||
Authenticate user from JWT token.
|
||||
|
||||
Args:
|
||||
request: HTTP request
|
||||
token: JWT access token
|
||||
|
||||
Returns:
|
||||
User instance if valid, None otherwise
|
||||
"""
|
||||
try:
|
||||
# Decode token
|
||||
access_token = AccessToken(token)
|
||||
user_id = access_token['user_id']
|
||||
|
||||
# Get user
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
# Check if banned
|
||||
if user.banned:
|
||||
logger.warning(f"Banned user attempted API access: {user.email}")
|
||||
return None
|
||||
|
||||
return user
|
||||
|
||||
except TokenError as e:
|
||||
logger.debug(f"Invalid token: {e}")
|
||||
return None
|
||||
except User.DoesNotExist:
|
||||
logger.warning(f"Token for non-existent user: {user_id}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Authentication error: {e}")
|
||||
return None
|
||||
|
||||
|
||||
# Global JWT auth instance
|
||||
jwt_auth = JWTAuth()
|
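# Illustrative usage sketch (not part of the original commit): wiring JWTAuth
# into a django-ninja router. The endpoint path and response payload are
# assumptions for illustration only.
def _example_protected_router() -> Router:
    """Build a router whose endpoints require a valid, non-banned JWT user."""
    router = Router(auth=jwt_auth)

    @router.get("/me")
    def me(request):
        # request.auth is the User instance returned by JWTAuth.authenticate()
        return {"id": str(request.auth.id), "email": request.auth.email}

    return router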
||||
|
||||
|
||||
def require_auth(func: Callable) -> Callable:
|
||||
"""
|
||||
Decorator to require authentication.
|
||||
|
||||
Usage:
|
||||
@api.get("/protected")
|
||||
@require_auth
|
||||
def protected_endpoint(request):
|
||||
return {"user": request.auth.email}
|
||||
"""
|
||||
@wraps(func)
|
||||
def wrapper(request: HttpRequest, *args, **kwargs):
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
raise PermissionDenied("Authentication required")
|
||||
return func(request, *args, **kwargs)
|
||||
return wrapper
|
||||
|
||||
|
||||
def require_role(role: str) -> Callable:
|
||||
"""
|
||||
Decorator to require specific role.
|
||||
|
||||
Args:
|
||||
role: Required role (user, moderator, admin)
|
||||
|
||||
Usage:
|
||||
@api.post("/moderate")
|
||||
@require_role("moderator")
|
||||
def moderate_endpoint(request):
|
||||
return {"message": "Access granted"}
|
||||
"""
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
def wrapper(request: HttpRequest, *args, **kwargs):
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
raise PermissionDenied("Authentication required")
|
||||
|
||||
user = request.auth
|
||||
|
||||
try:
|
||||
user_role = user.role
|
||||
|
||||
# Admin has access to everything
|
||||
if user_role.is_admin:
|
||||
return func(request, *args, **kwargs)
|
||||
|
||||
# Check specific role
|
||||
if role == 'moderator' and user_role.is_moderator:
|
||||
return func(request, *args, **kwargs)
|
||||
elif role == 'user':
|
||||
return func(request, *args, **kwargs)
|
||||
|
||||
raise PermissionDenied(f"Role '{role}' required")
|
||||
|
||||
except UserRole.DoesNotExist:
|
||||
raise PermissionDenied("User role not assigned")
|
||||
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
def require_moderator(func: Callable) -> Callable:
|
||||
"""
|
||||
Decorator to require moderator or admin role.
|
||||
|
||||
Usage:
|
||||
@api.post("/approve")
|
||||
@require_moderator
|
||||
def approve_endpoint(request):
|
||||
return {"message": "Access granted"}
|
||||
"""
|
||||
return require_role("moderator")(func)
|
||||
|
||||
|
||||
def require_admin(func: Callable) -> Callable:
|
||||
"""
|
||||
Decorator to require admin role.
|
||||
|
||||
Usage:
|
||||
@api.delete("/delete-user")
|
||||
@require_admin
|
||||
def delete_user_endpoint(request):
|
||||
return {"message": "Access granted"}
|
||||
"""
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
def wrapper(request: HttpRequest, *args, **kwargs):
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
raise PermissionDenied("Authentication required")
|
||||
|
||||
user = request.auth
|
||||
|
||||
try:
|
||||
user_role = user.role
|
||||
|
||||
if not user_role.is_admin:
|
||||
raise PermissionDenied("Admin role required")
|
||||
|
||||
return func(request, *args, **kwargs)
|
||||
|
||||
except UserRole.DoesNotExist:
|
||||
raise PermissionDenied("User role not assigned")
|
||||
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
def is_owner_or_moderator(user: User, obj_user_id) -> bool:
|
||||
"""
|
||||
Check if user is the owner of an object or a moderator.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
obj_user_id: User ID of the object owner
|
||||
|
||||
Returns:
|
||||
True if user is owner or moderator
|
||||
"""
|
||||
if str(user.id) == str(obj_user_id):
|
||||
return True
|
||||
|
||||
try:
|
||||
return user.role.is_moderator
|
||||
except UserRole.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def can_moderate(user: User) -> bool:
|
||||
"""
|
||||
Check if user can moderate content.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
|
||||
Returns:
|
||||
True if user is moderator or admin
|
||||
"""
|
||||
if user.banned:
|
||||
return False
|
||||
|
||||
try:
|
||||
return user.role.is_moderator
|
||||
except UserRole.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def can_submit(user: User) -> bool:
|
||||
"""
|
||||
Check if user can submit content.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
|
||||
Returns:
|
||||
True if user is not banned
|
||||
"""
|
||||
return not user.banned
|
||||
|
||||
|
||||
class PermissionChecker:
|
||||
"""Helper class for checking permissions"""
|
||||
|
||||
def __init__(self, user: User):
|
||||
self.user = user
|
||||
try:
|
||||
self.user_role = user.role
|
||||
except UserRole.DoesNotExist:
|
||||
self.user_role = None
|
||||
|
||||
@property
|
||||
def is_authenticated(self) -> bool:
|
||||
"""Check if user is authenticated"""
|
||||
return self.user is not None
|
||||
|
||||
@property
|
||||
def is_moderator(self) -> bool:
|
||||
"""Check if user is moderator or admin"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return bool(self.user_role and self.user_role.is_moderator)
|
||||
|
||||
@property
|
||||
def is_admin(self) -> bool:
|
||||
"""Check if user is admin"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return bool(self.user_role and self.user_role.is_admin)
|
||||
|
||||
@property
|
||||
def can_submit(self) -> bool:
|
||||
"""Check if user can submit content"""
|
||||
return not self.user.banned
|
||||
|
||||
@property
|
||||
def can_moderate(self) -> bool:
|
||||
"""Check if user can moderate content"""
|
||||
return self.is_moderator
|
||||
|
||||
def can_edit(self, obj_user_id) -> bool:
|
||||
"""Check if user can edit an object"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return str(self.user.id) == str(obj_user_id) or self.is_moderator
|
||||
|
||||
def can_delete(self, obj_user_id) -> bool:
|
||||
"""Check if user can delete an object"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return str(self.user.id) == str(obj_user_id) or self.is_admin
|
||||
|
||||
def require_permission(self, permission: str) -> None:
|
||||
"""
|
||||
Raise PermissionDenied if user doesn't have permission.
|
||||
|
||||
Args:
|
||||
permission: Permission to check (submit, moderate, admin)
|
||||
|
||||
Raises:
|
||||
PermissionDenied: If user doesn't have permission
|
||||
"""
|
||||
if permission == 'submit' and not self.can_submit:
|
||||
raise PermissionDenied("You are banned from submitting content")
|
||||
elif permission == 'moderate' and not self.can_moderate:
|
||||
raise PermissionDenied("Moderator role required")
|
||||
elif permission == 'admin' and not self.is_admin:
|
||||
raise PermissionDenied("Admin role required")
|
||||
|
||||
|
||||
def get_permission_checker(request: HttpRequest) -> Optional[PermissionChecker]:
|
||||
"""
|
||||
Get permission checker for request user.
|
||||
|
||||
Args:
|
||||
request: HTTP request
|
||||
|
||||
Returns:
|
||||
PermissionChecker instance or None if not authenticated
|
||||
"""
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
return None
|
||||
|
||||
return PermissionChecker(request.auth)
|
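# Illustrative usage sketch (not part of the original commit): combining
# get_permission_checker with an ownership check inside an endpoint body.
# The owner_id argument is an assumption for illustration only.
def _example_can_edit(request: HttpRequest, owner_id) -> bool:
    """Return True if the requesting user may edit an object owned by owner_id."""
    checker = get_permission_checker(request)
    if checker is None:
        raise PermissionDenied("Authentication required")
    checker.require_permission('submit')  # banned users cannot edit content either
    return checker.can_edit(owner_id)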
||||
592
django-backend/apps/users/services.py
Normal file
592
django-backend/apps/users/services.py
Normal file
@@ -0,0 +1,592 @@
|
||||
"""
|
||||
User authentication and management services.
|
||||
|
||||
Provides business logic for:
|
||||
- User registration and authentication
|
||||
- OAuth integration
|
||||
- MFA/2FA management
|
||||
- Permission and role management
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any
|
||||
from django.contrib.auth import authenticate
|
||||
from django.contrib.auth.password_validation import validate_password
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django_otp.plugins.otp_totp.models import TOTPDevice
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
import logging
|
||||
|
||||
from .models import User, UserRole, UserProfile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AuthenticationService:
|
||||
"""Service for handling user authentication operations"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def register_user(
|
||||
email: str,
|
||||
password: str,
|
||||
username: Optional[str] = None,
|
||||
first_name: str = '',
|
||||
last_name: str = ''
|
||||
) -> User:
|
||||
"""
|
||||
Register a new user with email and password.
|
||||
|
||||
Args:
|
||||
email: User's email address
|
||||
password: User's password (will be validated and hashed)
|
||||
username: Optional username (defaults to email prefix)
|
||||
first_name: User's first name
|
||||
last_name: User's last name
|
||||
|
||||
Returns:
|
||||
Created User instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If email exists or password is invalid
|
||||
"""
|
||||
# Normalize email
|
||||
email = email.lower().strip()
|
||||
|
||||
# Check if user exists
|
||||
if User.objects.filter(email=email).exists():
|
||||
raise ValidationError({'email': 'A user with this email already exists.'})
|
||||
|
||||
# Set username if not provided
|
||||
if not username:
|
||||
username = email.split('@')[0]
|
||||
# Make unique if needed
|
||||
base_username = username
|
||||
counter = 1
|
||||
while User.objects.filter(username=username).exists():
|
||||
username = f"{base_username}{counter}"
|
||||
counter += 1
|
||||
|
||||
# Validate password
|
||||
try:
|
||||
validate_password(password)
|
||||
except ValidationError as e:
|
||||
raise ValidationError({'password': e.messages})
|
||||
|
||||
# Create user
|
||||
user = User.objects.create_user(
|
||||
email=email,
|
||||
username=username,
|
||||
password=password,
|
||||
first_name=first_name,
|
||||
last_name=last_name
|
||||
)
|
||||
|
||||
# Create role (default: user)
|
||||
UserRole.objects.create(user=user, role='user')
|
||||
|
||||
# Create profile
|
||||
UserProfile.objects.create(user=user)
|
||||
|
||||
logger.info(f"New user registered: {user.email}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def authenticate_user(email: str, password: str) -> Optional[User]:
|
||||
"""
|
||||
Authenticate user with email and password.
|
||||
|
||||
Args:
|
||||
email: User's email address
|
||||
password: User's password
|
||||
|
||||
Returns:
|
||||
User instance if authentication successful, None otherwise
|
||||
"""
|
||||
email = email.lower().strip()
|
||||
user = authenticate(username=email, password=password)
|
||||
|
||||
if user and user.banned:
|
||||
logger.warning(f"Banned user attempted login: {email}")
|
||||
raise ValidationError("This account has been banned.")
|
||||
|
||||
if user:
|
||||
user.last_login = timezone.now()
|
||||
user.save(update_fields=['last_login'])
|
||||
logger.info(f"User authenticated: {email}")
|
||||
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def create_oauth_user(
|
||||
email: str,
|
||||
provider: str,
|
||||
oauth_sub: str,
|
||||
username: Optional[str] = None,
|
||||
first_name: str = '',
|
||||
last_name: str = '',
|
||||
avatar_url: str = ''
|
||||
) -> User:
|
||||
"""
|
||||
Create or get user from OAuth provider.
|
||||
|
||||
Args:
|
||||
email: User's email from OAuth provider
|
||||
provider: OAuth provider name (google, discord)
|
||||
oauth_sub: OAuth subject identifier
|
||||
username: Optional username
|
||||
first_name: User's first name
|
||||
last_name: User's last name
|
||||
avatar_url: URL to user's avatar
|
||||
|
||||
Returns:
|
||||
User instance
|
||||
"""
|
||||
email = email.lower().strip()
|
||||
|
||||
# Check if user exists with this email
|
||||
try:
|
||||
user = User.objects.get(email=email)
|
||||
# Update OAuth info if not set
|
||||
if not user.oauth_provider:
|
||||
user.oauth_provider = provider
|
||||
user.oauth_sub = oauth_sub
|
||||
user.save(update_fields=['oauth_provider', 'oauth_sub'])
|
||||
return user
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Create new user
|
||||
if not username:
|
||||
username = email.split('@')[0]
|
||||
base_username = username
|
||||
counter = 1
|
||||
while User.objects.filter(username=username).exists():
|
||||
username = f"{base_username}{counter}"
|
||||
counter += 1
|
||||
|
||||
user = User.objects.create(
|
||||
email=email,
|
||||
username=username,
|
||||
first_name=first_name,
|
||||
last_name=last_name,
|
||||
avatar_url=avatar_url,
|
||||
oauth_provider=provider,
|
||||
oauth_sub=oauth_sub
|
||||
)
|
||||
|
||||
# No password needed for OAuth users
|
||||
user.set_unusable_password()
|
||||
user.save()
|
||||
|
||||
# Create role and profile
|
||||
UserRole.objects.create(user=user, role='user')
|
||||
UserProfile.objects.create(user=user)
|
||||
|
||||
logger.info(f"OAuth user created: {email} via {provider}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def change_password(user: User, old_password: str, new_password: str) -> bool:
|
||||
"""
|
||||
Change user's password.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
old_password: Current password
|
||||
new_password: New password
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
|
||||
Raises:
|
||||
ValidationError: If old password is incorrect or new password is invalid
|
||||
"""
|
||||
# Check old password
|
||||
if not user.check_password(old_password):
|
||||
raise ValidationError({'old_password': 'Incorrect password.'})
|
||||
|
||||
# Validate new password
|
||||
try:
|
||||
validate_password(new_password, user=user)
|
||||
except ValidationError as e:
|
||||
raise ValidationError({'new_password': e.messages})
|
||||
|
||||
# Set new password
|
||||
user.set_password(new_password)
|
||||
user.save()
|
||||
|
||||
logger.info(f"Password changed for user: {user.email}")
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def reset_password(user: User, new_password: str) -> bool:
|
||||
"""
|
||||
Reset user's password (admin/forgot password flow).
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
new_password: New password
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
|
||||
Raises:
|
||||
ValidationError: If new password is invalid
|
||||
"""
|
||||
# Validate new password
|
||||
try:
|
||||
validate_password(new_password, user=user)
|
||||
except ValidationError as e:
|
||||
raise ValidationError({'password': e.messages})
|
||||
|
||||
# Set new password
|
||||
user.set_password(new_password)
|
||||
user.save()
|
||||
|
||||
logger.info(f"Password reset for user: {user.email}")
|
||||
return True
|
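# Illustrative usage sketch (not part of the original commit): a typical
# register-then-login flow built on AuthenticationService. The credential
# values are placeholders supplied by the caller.
def _example_register_and_login(email: str, password: str) -> Optional[User]:
    """Register a new account, then authenticate with the same credentials."""
    AuthenticationService.register_user(email=email, password=password)
    return AuthenticationService.authenticate_user(email, password)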
||||
|
||||
|
||||
class MFAService:
|
||||
"""Service for handling multi-factor authentication"""
|
||||
|
||||
@staticmethod
|
||||
def enable_totp(user: User, device_name: str = 'default') -> TOTPDevice:
|
||||
"""
|
||||
Enable TOTP-based MFA for user.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
device_name: Name for the TOTP device
|
||||
|
||||
Returns:
|
||||
TOTPDevice instance with QR code data
|
||||
"""
|
||||
# Check if device already exists
|
||||
device = TOTPDevice.objects.filter(
|
||||
user=user,
|
||||
name=device_name
|
||||
).first()
|
||||
|
||||
if not device:
|
||||
device = TOTPDevice.objects.create(
|
||||
user=user,
|
||||
name=device_name,
|
||||
confirmed=False
|
||||
)
|
||||
|
||||
return device
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def confirm_totp(user: User, token: str, device_name: str = 'default') -> bool:
|
||||
"""
|
||||
Confirm TOTP device with verification token.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
token: 6-digit TOTP token
|
||||
device_name: Name of the TOTP device
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
|
||||
Raises:
|
||||
ValidationError: If token is invalid
|
||||
"""
|
||||
device = TOTPDevice.objects.filter(
|
||||
user=user,
|
||||
name=device_name
|
||||
).first()
|
||||
|
||||
if not device:
|
||||
raise ValidationError("TOTP device not found.")
|
||||
|
||||
# Verify token
|
||||
if not device.verify_token(token):
|
||||
raise ValidationError("Invalid verification code.")
|
||||
|
||||
# Confirm device
|
||||
device.confirmed = True
|
||||
device.save()
|
||||
|
||||
# Enable MFA on user
|
||||
user.mfa_enabled = True
|
||||
user.save(update_fields=['mfa_enabled'])
|
||||
|
||||
logger.info(f"MFA enabled for user: {user.email}")
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def verify_totp(user: User, token: str) -> bool:
|
||||
"""
|
||||
Verify TOTP token for authentication.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
token: 6-digit TOTP token
|
||||
|
||||
Returns:
|
||||
True if valid
|
||||
"""
|
||||
device = TOTPDevice.objects.filter(
|
||||
user=user,
|
||||
confirmed=True
|
||||
).first()
|
||||
|
||||
if not device:
|
||||
return False
|
||||
|
||||
return device.verify_token(token)
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def disable_totp(user: User) -> bool:
|
||||
"""
|
||||
Disable TOTP-based MFA for user.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
"""
|
||||
# Delete all TOTP devices
|
||||
TOTPDevice.objects.filter(user=user).delete()
|
||||
|
||||
# Disable MFA on user
|
||||
user.mfa_enabled = False
|
||||
user.save(update_fields=['mfa_enabled'])
|
||||
|
||||
logger.info(f"MFA disabled for user: {user.email}")
|
||||
return True
|
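# Illustrative usage sketch (not part of the original commit): the
# enable -> confirm TOTP flow. `totp_code` would come from the user's
# authenticator app after scanning the provisioning QR code.
def _example_enable_mfa(user: User, totp_code: str) -> bool:
    """Provision a TOTP device, then confirm it with a code from the user."""
    device = MFAService.enable_totp(user)
    _ = device.config_url  # otpauth:// URI to render as a QR code client-side
    return MFAService.confirm_totp(user, totp_code)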
||||
|
||||
|
||||
class RoleService:
|
||||
"""Service for managing user roles and permissions"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def assign_role(
|
||||
user: User,
|
||||
role: str,
|
||||
granted_by: Optional[User] = None
|
||||
) -> UserRole:
|
||||
"""
|
||||
Assign role to user.
|
||||
|
||||
Args:
|
||||
user: User to assign role to
|
||||
role: Role name (user, moderator, admin)
|
||||
granted_by: User granting the role
|
||||
|
||||
Returns:
|
||||
UserRole instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If role is invalid
|
||||
"""
|
||||
valid_roles = ['user', 'moderator', 'admin']
|
||||
if role not in valid_roles:
|
||||
raise ValidationError(f"Invalid role. Must be one of: {', '.join(valid_roles)}")
|
||||
|
||||
# Get or create role
|
||||
user_role, created = UserRole.objects.get_or_create(
|
||||
user=user,
|
||||
defaults={'role': role, 'granted_by': granted_by}
|
||||
)
|
||||
|
||||
if not created and user_role.role != role:
|
||||
user_role.role = role
|
||||
user_role.granted_by = granted_by
|
||||
user_role.granted_at = timezone.now()
|
||||
user_role.save()
|
||||
|
||||
logger.info(f"Role '{role}' assigned to user: {user.email}")
|
||||
return user_role
|
||||
|
||||
@staticmethod
|
||||
def has_role(user: User, role: str) -> bool:
|
||||
"""
|
||||
Check if user has specific role.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
role: Role name to check
|
||||
|
||||
Returns:
|
||||
True if user has the role
|
||||
"""
|
||||
try:
|
||||
user_role = user.role
|
||||
if role == 'moderator':
|
||||
return user_role.is_moderator
|
||||
elif role == 'admin':
|
||||
return user_role.is_admin
|
||||
return user_role.role == role
|
||||
except UserRole.DoesNotExist:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_user_permissions(user: User) -> Dict[str, bool]:
|
||||
"""
|
||||
Get user's permission summary.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
|
||||
Returns:
|
||||
Dictionary of permissions
|
||||
"""
|
||||
try:
|
||||
user_role = user.role
|
||||
is_moderator = user_role.is_moderator
|
||||
is_admin = user_role.is_admin
|
||||
except UserRole.DoesNotExist:
|
||||
is_moderator = False
|
||||
is_admin = False
|
||||
|
||||
return {
|
||||
'can_submit': not user.banned,
|
||||
'can_moderate': is_moderator and not user.banned,
|
||||
'can_admin': is_admin and not user.banned,
|
||||
'can_edit_own': not user.banned,
|
||||
'can_delete_own': not user.banned,
|
||||
}
|
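# Illustrative usage sketch (not part of the original commit): promoting a user
# to moderator and reading back the resulting permission summary.
def _example_promote_to_moderator(user: User, admin_user: User) -> Dict[str, bool]:
    """Grant the moderator role and return the user's permission flags."""
    RoleService.assign_role(user, 'moderator', granted_by=admin_user)
    return RoleService.get_user_permissions(user)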
||||
|
||||
|
||||
class UserManagementService:
|
||||
"""Service for user profile and account management"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def update_profile(
|
||||
user: User,
|
||||
**kwargs
|
||||
) -> User:
|
||||
"""
|
||||
Update user profile information.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
**kwargs: Fields to update
|
||||
|
||||
Returns:
|
||||
Updated User instance
|
||||
"""
|
||||
allowed_fields = [
|
||||
'first_name', 'last_name', 'username',
|
||||
'avatar_url', 'bio'
|
||||
]
|
||||
|
||||
updated_fields = []
|
||||
for field, value in kwargs.items():
|
||||
if field in allowed_fields and hasattr(user, field):
|
||||
setattr(user, field, value)
|
||||
updated_fields.append(field)
|
||||
|
||||
if updated_fields:
|
||||
user.save(update_fields=updated_fields)
|
||||
logger.info(f"Profile updated for user: {user.email}")
|
||||
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def update_preferences(
|
||||
user: User,
|
||||
**kwargs
|
||||
) -> UserProfile:
|
||||
"""
|
||||
Update user preferences.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
**kwargs: Preference fields to update
|
||||
|
||||
Returns:
|
||||
Updated UserProfile instance
|
||||
"""
|
||||
profile = user.profile
|
||||
|
||||
allowed_fields = [
|
||||
'email_notifications',
|
||||
'email_on_submission_approved',
|
||||
'email_on_submission_rejected',
|
||||
'profile_public',
|
||||
'show_email'
|
||||
]
|
||||
|
||||
updated_fields = []
|
||||
for field, value in kwargs.items():
|
||||
if field in allowed_fields and hasattr(profile, field):
|
||||
setattr(profile, field, value)
|
||||
updated_fields.append(field)
|
||||
|
||||
if updated_fields:
|
||||
profile.save(update_fields=updated_fields)
|
||||
logger.info(f"Preferences updated for user: {user.email}")
|
||||
|
||||
return profile
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def ban_user(
|
||||
user: User,
|
||||
reason: str,
|
||||
banned_by: User
|
||||
) -> User:
|
||||
"""
|
||||
Ban a user.
|
||||
|
||||
Args:
|
||||
user: User to ban
|
||||
reason: Reason for ban
|
||||
banned_by: User performing the ban
|
||||
|
||||
Returns:
|
||||
Updated User instance
|
||||
"""
|
||||
user.ban(reason=reason, banned_by=banned_by)
|
||||
logger.warning(f"User banned: {user.email} by {banned_by.email}. Reason: {reason}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def unban_user(user: User) -> User:
|
||||
"""
|
||||
Unban a user.
|
||||
|
||||
Args:
|
||||
user: User to unban
|
||||
|
||||
Returns:
|
||||
Updated User instance
|
||||
"""
|
||||
user.unban()
|
||||
logger.info(f"User unbanned: {user.email}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def get_user_stats(user: User) -> Dict[str, Any]:
|
||||
"""
|
||||
Get user statistics.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
|
||||
Returns:
|
||||
Dictionary of user stats
|
||||
"""
|
||||
profile = user.profile
|
||||
|
||||
return {
|
||||
'total_submissions': profile.total_submissions,
|
||||
'approved_submissions': profile.approved_submissions,
|
||||
'reputation_score': user.reputation_score,
|
||||
'member_since': user.date_joined,
|
||||
'last_active': user.last_login,
|
||||
}
|
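# Illustrative usage sketch (not part of the original commit): a moderation
# action that bans a user and records the acting moderator. The reason string
# is a placeholder.
def _example_ban_for_spam(target: User, moderator: User) -> User:
    """Ban a user for spam submissions and return the updated instance."""
    return UserManagementService.ban_user(
        target, reason="Repeated spam submissions", banned_by=moderator
    )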
||||
343
django-backend/apps/users/tasks.py
Normal file
343
django-backend/apps/users/tasks.py
Normal file
@@ -0,0 +1,343 @@
|
||||
"""
|
||||
Background tasks for user management and notifications.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.core.mail import send_mail
|
||||
from django.template.loader import render_to_string
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def send_welcome_email(self, user_id):
|
||||
"""
|
||||
Send a welcome email to a newly registered user.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'site_url': getattr(settings, 'SITE_URL', 'https://thrillwiki.com'),
|
||||
}
|
||||
|
||||
html_message = render_to_string('emails/welcome.html', context)
|
||||
|
||||
send_mail(
|
||||
subject='Welcome to ThrillWiki! 🎢',
|
||||
message='',
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
|
||||
logger.info(f"Welcome email sent to {user.email}")
|
||||
return f"Welcome email sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending welcome email to user {user_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
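# Illustrative usage sketch (not part of the original commit): enqueueing the
# welcome email asynchronously after registration instead of calling the task
# inline. `.delay()` pushes the task onto the configured Celery broker.
def _example_queue_welcome_email(user_id):
    """Queue the welcome email for background delivery."""
    return send_welcome_email.delay(user_id)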
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def send_password_reset_email(self, user_id, token, reset_url):
|
||||
"""
|
||||
Send a password reset email with a secure token.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
token: Password reset token
|
||||
reset_url: Full URL for password reset
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'reset_url': reset_url,
|
||||
'request_time': timezone.now(),
|
||||
'expiry_hours': 24, # Configurable
|
||||
'site_url': getattr(settings, 'SITE_URL', 'https://thrillwiki.com'),
|
||||
}
|
||||
|
||||
html_message = render_to_string('emails/password_reset.html', context)
|
||||
|
||||
send_mail(
|
||||
subject='Reset Your ThrillWiki Password',
|
||||
message='',
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
|
||||
logger.info(f"Password reset email sent to {user.email}")
|
||||
return f"Password reset email sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending password reset email: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_expired_tokens(self):
|
||||
"""
|
||||
Clean up expired JWT tokens and password reset tokens.
|
||||
|
||||
This task runs daily to remove old tokens from the database.
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from rest_framework_simplejwt.token_blacklist.models import OutstandingToken
|
||||
from django.contrib.auth.tokens import default_token_generator
|
||||
|
||||
try:
|
||||
# Clean up blacklisted JWT tokens older than 7 days
|
||||
cutoff = timezone.now() - timedelta(days=7)
|
||||
|
||||
# Note: Actual implementation depends on token storage strategy
|
||||
# This is a placeholder for the concept
|
||||
|
||||
logger.info("Token cleanup completed")
|
||||
|
||||
return {
|
||||
'jwt_tokens_cleaned': 0,
|
||||
'reset_tokens_cleaned': 0,
|
||||
}
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error cleaning up tokens: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def send_account_notification(self, user_id, notification_type, context_data=None):
|
||||
"""
|
||||
Send a generic account notification email.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
notification_type: Type of notification (e.g., 'security_alert', 'profile_update')
|
||||
context_data: Additional context data for the email
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'notification_type': notification_type,
|
||||
'site_url': getattr(settings, 'SITE_URL', 'https://thrillwiki.com'),
|
||||
}
|
||||
|
||||
if context_data:
|
||||
context.update(context_data)
|
||||
|
||||
# For now, just log (would need specific templates for each type)
|
||||
logger.info(f"Account notification ({notification_type}) for user {user.email}")
|
||||
|
||||
return f"Notification sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending account notification: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_inactive_users(self, days_inactive=365):
|
||||
"""
|
||||
Clean up or flag users who haven't logged in for a long time.
|
||||
|
||||
Args:
|
||||
days_inactive: Number of days of inactivity before flagging (default: 365)
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
cutoff = timezone.now() - timedelta(days=days_inactive)
|
||||
|
||||
inactive_users = User.objects.filter(
|
||||
last_login__lt=cutoff,
|
||||
is_active=True
|
||||
)
|
||||
|
||||
count = inactive_users.count()
|
||||
|
||||
# For now, just log inactive users
|
||||
# In production, you might want to send reactivation emails
|
||||
# or mark accounts for deletion
|
||||
|
||||
logger.info(f"Found {count} inactive users (last login before {cutoff})")
|
||||
|
||||
return {
|
||||
'inactive_count': count,
|
||||
'cutoff_date': cutoff.isoformat(),
|
||||
}
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error cleaning up inactive users: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task
|
||||
def update_user_statistics():
|
||||
"""
|
||||
Update user-related statistics across the database.
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.users.models import User
|
||||
from django.db.models import Count
|
||||
from datetime import timedelta
|
||||
|
||||
try:
|
||||
now = timezone.now()
|
||||
week_ago = now - timedelta(days=7)
|
||||
month_ago = now - timedelta(days=30)
|
||||
|
||||
stats = {
|
||||
'total_users': User.objects.count(),
|
||||
'active_users': User.objects.filter(is_active=True).count(),
|
||||
'new_this_week': User.objects.filter(date_joined__gte=week_ago).count(),
|
||||
'new_this_month': User.objects.filter(date_joined__gte=month_ago).count(),
|
||||
'verified_users': User.objects.filter(emailaddress__verified=True).distinct().count(),  # via allauth's EmailAddress reverse relation; the custom User model has no email_verified field
|
||||
'by_role': dict(
|
||||
User.objects.values('role__role')
|
||||
.annotate(count=Count('id'))
|
||||
.values_list('role__role', 'count')
|
||||
),
|
||||
}
|
||||
|
||||
logger.info(f"User statistics updated: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating user statistics: {str(e)}")
|
||||
raise
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def send_bulk_notification(self, user_ids, subject, message, html_message=None):
|
||||
"""
|
||||
Send bulk email notifications to multiple users.
|
||||
|
||||
This is useful for announcements, feature updates, etc.
|
||||
|
||||
Args:
|
||||
user_ids: List of User IDs
|
||||
subject: Email subject
|
||||
message: Plain text message
|
||||
html_message: HTML version of message (optional)
|
||||
|
||||
Returns:
|
||||
dict: Send statistics
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
users = User.objects.filter(id__in=user_ids, is_active=True)
|
||||
|
||||
sent_count = 0
|
||||
failed_count = 0
|
||||
|
||||
for user in users:
|
||||
try:
|
||||
send_mail(
|
||||
subject=subject,
|
||||
message=message,
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
sent_count += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send to {user.email}: {str(e)}")
|
||||
failed_count += 1
|
||||
continue
|
||||
|
||||
result = {
|
||||
'total': len(user_ids),
|
||||
'sent': sent_count,
|
||||
'failed': failed_count,
|
||||
}
|
||||
|
||||
logger.info(f"Bulk notification sent: {result}")
|
||||
return result
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending bulk notification: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def send_email_verification_reminder(self, user_id):
|
||||
"""
|
||||
Send a reminder to users who haven't verified their email.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
|
||||
Returns:
|
||||
str: Reminder result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
# The custom User model does not define email_verified; check allauth's EmailAddress records instead.
from allauth.account.models import EmailAddress
if EmailAddress.objects.filter(user=user, verified=True).exists():
|
||||
logger.info(f"User {user.email} already verified, skipping reminder")
|
||||
return "User already verified"
|
||||
|
||||
# Send verification reminder
|
||||
logger.info(f"Sending email verification reminder to {user.email}")
|
||||
|
||||
# In production, generate new verification token and send email
|
||||
# For now, just log
|
||||
|
||||
return f"Verification reminder sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending verification reminder: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
0
django-backend/apps/versioning/__init__.py
Normal file
0
django-backend/apps/versioning/__init__.py
Normal file
236
django-backend/apps/versioning/admin.py
Normal file
236
django-backend/apps/versioning/admin.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""
|
||||
Admin interface for versioning models.
|
||||
|
||||
Provides Django admin interface for viewing version history,
|
||||
comparing versions, and managing version records.
|
||||
"""
|
||||
|
||||
from django.contrib import admin
|
||||
from django.utils.html import escape, format_html
from django.utils.safestring import mark_safe
|
||||
from django.urls import reverse
|
||||
from unfold.admin import ModelAdmin
|
||||
|
||||
from apps.versioning.models import EntityVersion
|
||||
|
||||
|
||||
@admin.register(EntityVersion)
|
||||
class EntityVersionAdmin(ModelAdmin):
|
||||
"""
|
||||
Admin interface for EntityVersion model.
|
||||
|
||||
Provides read-only view of version history with search and filtering.
|
||||
"""
|
||||
|
||||
# Display settings
|
||||
list_display = [
|
||||
'version_number',
|
||||
'entity_link',
|
||||
'change_type',
|
||||
'changed_by_link',
|
||||
'submission_link',
|
||||
'changed_field_count',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'change_type',
|
||||
'entity_type',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'entity_id',
|
||||
'comment',
|
||||
'changed_by__email',
|
||||
'changed_by__username',
|
||||
]
|
||||
|
||||
ordering = ['-created']
|
||||
|
||||
date_hierarchy = 'created'
|
||||
|
||||
# Read-only admin (versions should not be modified)
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
'entity_link',
|
||||
'version_number',
|
||||
'change_type',
|
||||
'snapshot_display',
|
||||
'changed_fields_display',
|
||||
'changed_by',
|
||||
'submission',
|
||||
'comment',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Version Information', {
|
||||
'fields': (
|
||||
'id',
|
||||
'version_number',
|
||||
'change_type',
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
('Entity', {
|
||||
'fields': (
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
'entity_link',
|
||||
)
|
||||
}),
|
||||
('Changes', {
|
||||
'fields': (
|
||||
'changed_fields_display',
|
||||
'snapshot_display',
|
||||
)
|
||||
}),
|
||||
('Metadata', {
|
||||
'fields': (
|
||||
'changed_by',
|
||||
'submission',
|
||||
'comment',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
def has_add_permission(self, request):
|
||||
"""Disable adding versions manually."""
|
||||
return False
|
||||
|
||||
def has_delete_permission(self, request, obj=None):
|
||||
"""Disable deleting versions."""
|
||||
return False
|
||||
|
||||
def has_change_permission(self, request, obj=None):
|
||||
"""Only allow viewing versions, not editing."""
|
||||
return False
|
||||
|
||||
def entity_link(self, obj):
|
||||
"""Display link to the entity."""
|
||||
try:
|
||||
entity = obj.entity
|
||||
if entity:
|
||||
# Try to get admin URL for entity
|
||||
admin_url = reverse(
|
||||
f'admin:{obj.entity_type.app_label}_{obj.entity_type.model}_change',
|
||||
args=[entity.pk]
|
||||
)
|
||||
return format_html(
|
||||
'<a href="{}">{}</a>',
|
||||
admin_url,
|
||||
str(entity)
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
return f"{obj.entity_type.model}:{obj.entity_id}"
|
||||
entity_link.short_description = 'Entity'
|
||||
|
||||
def changed_by_link(self, obj):
|
||||
"""Display link to user who made the change."""
|
||||
if obj.changed_by:
|
||||
try:
|
||||
admin_url = reverse(
|
||||
'admin:users_user_change',
|
||||
args=[obj.changed_by.pk]
|
||||
)
|
||||
return format_html(
|
||||
'<a href="{}">{}</a>',
|
||||
admin_url,
|
||||
obj.changed_by.email
|
||||
)
|
||||
except Exception:
|
||||
return obj.changed_by.email
|
||||
return '-'
|
||||
changed_by_link.short_description = 'Changed By'
|
||||
|
||||
def submission_link(self, obj):
|
||||
"""Display link to content submission if applicable."""
|
||||
if obj.submission:
|
||||
try:
|
||||
admin_url = reverse(
|
||||
'admin:moderation_contentsubmission_change',
|
||||
args=[obj.submission.pk]
|
||||
)
|
||||
return format_html(
|
||||
'<a href="{}">#{}</a>',
|
||||
admin_url,
|
||||
obj.submission.pk
|
||||
)
|
||||
except Exception:
|
||||
return str(obj.submission.pk)
|
||||
return '-'
|
||||
submission_link.short_description = 'Submission'
|
||||
|
||||
def changed_field_count(self, obj):
|
||||
"""Display count of changed fields."""
|
||||
count = len(obj.changed_fields)
|
||||
if count == 0:
|
||||
return '-'
|
||||
return f"{count} field{'s' if count != 1 else ''}"
|
||||
changed_field_count.short_description = 'Changed Fields'
|
||||
|
||||
def snapshot_display(self, obj):
|
||||
"""Display snapshot in a formatted way."""
|
||||
import json
|
||||
snapshot = obj.get_snapshot_dict()
|
||||
|
||||
# Format as pretty JSON
|
||||
formatted = json.dumps(snapshot, indent=2, sort_keys=True)
|
||||
|
||||
return format_html(
|
||||
'<pre style="background: #f5f5f5; padding: 10px; border-radius: 4px; overflow-x: auto;">{}</pre>',
|
||||
formatted
|
||||
)
|
||||
snapshot_display.short_description = 'Snapshot'
|
||||
|
||||
def changed_fields_display(self, obj):
|
||||
"""Display changed fields in a formatted way."""
|
||||
if not obj.changed_fields:
|
||||
return format_html('<em>No fields changed</em>')
|
||||
|
||||
html_parts = ['<table style="width: 100%; border-collapse: collapse;">']
|
||||
html_parts.append('<thead><tr style="background: #f5f5f5;">')
|
||||
html_parts.append('<th style="padding: 8px; text-align: left; border: 1px solid #ddd;">Field</th>')
|
||||
html_parts.append('<th style="padding: 8px; text-align: left; border: 1px solid #ddd;">Old Value</th>')
|
||||
html_parts.append('<th style="padding: 8px; text-align: left; border: 1px solid #ddd;">New Value</th>')
|
||||
html_parts.append('</tr></thead><tbody>')
|
||||
|
||||
for field_name, change in obj.changed_fields.items():
|
||||
old_val = change.get('old', '-')
|
||||
new_val = change.get('new', '-')
|
||||
|
||||
# Truncate long values
|
||||
if isinstance(old_val, str) and len(old_val) > 100:
|
||||
old_val = old_val[:97] + '...'
|
||||
if isinstance(new_val, str) and len(new_val) > 100:
|
||||
new_val = new_val[:97] + '...'
|
||||
|
||||
html_parts.append('<tr>')
|
||||
html_parts.append(f'<td style="padding: 8px; border: 1px solid #ddd;"><strong>{escape(field_name)}</strong></td>')
|
||||
html_parts.append(f'<td style="padding: 8px; border: 1px solid #ddd; color: #d32f2f;">{escape(old_val)}</td>')
|
||||
html_parts.append(f'<td style="padding: 8px; border: 1px solid #ddd; color: #388e3c;">{escape(new_val)}</td>')
|
||||
html_parts.append('</tr>')
|
||||
|
||||
html_parts.append('</tbody></table>')
|
||||
|
||||
return mark_safe(''.join(html_parts))
|
||||
changed_fields_display.short_description = 'Changed Fields'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related(
|
||||
'entity_type',
|
||||
'changed_by',
|
||||
'submission',
|
||||
'submission__user'
|
||||
)
|
||||
11
django-backend/apps/versioning/apps.py
Normal file
11
django-backend/apps/versioning/apps.py
Normal file
@@ -0,0 +1,11 @@
|
||||
"""
|
||||
Versioning app configuration.
|
||||
"""
|
||||
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class VersioningConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'apps.versioning'
|
||||
verbose_name = 'Versioning'
|
||||
165
django-backend/apps/versioning/migrations/0001_initial.py
Normal file
165
django-backend/apps/versioning/migrations/0001_initial.py
Normal file
@@ -0,0 +1,165 @@
# Generated by Django 4.2.8 on 2025-11-08 17:51

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import django_lifecycle.mixins
import model_utils.fields
import uuid


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ("contenttypes", "0002_remove_content_type_name"),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("moderation", "0001_initial"),
    ]

    operations = [
        migrations.CreateModel(
            name="EntityVersion",
            fields=[
                (
                    "created",
                    model_utils.fields.AutoCreatedField(
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="created",
                    ),
                ),
                (
                    "modified",
                    model_utils.fields.AutoLastModifiedField(
                        default=django.utils.timezone.now,
                        editable=False,
                        verbose_name="modified",
                    ),
                ),
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                (
                    "entity_id",
                    models.UUIDField(db_index=True, help_text="ID of the entity"),
                ),
                (
                    "version_number",
                    models.PositiveIntegerField(
                        default=1, help_text="Sequential version number for this entity"
                    ),
                ),
                (
                    "change_type",
                    models.CharField(
                        choices=[
                            ("created", "Created"),
                            ("updated", "Updated"),
                            ("deleted", "Deleted"),
                            ("restored", "Restored"),
                        ],
                        db_index=True,
                        help_text="Type of change",
                        max_length=20,
                    ),
                ),
                (
                    "snapshot",
                    models.JSONField(
                        help_text="Complete snapshot of entity state as JSON"
                    ),
                ),
                (
                    "changed_fields",
                    models.JSONField(
                        default=dict,
                        help_text="Dict of changed fields with old/new values: {'field': {'old': ..., 'new': ...}}",
                    ),
                ),
                (
                    "comment",
                    models.TextField(
                        blank=True, help_text="Optional comment about this version"
                    ),
                ),
                (
                    "ip_address",
                    models.GenericIPAddressField(
                        blank=True, help_text="IP address of change origin", null=True
                    ),
                ),
                (
                    "user_agent",
                    models.CharField(
                        blank=True, help_text="User agent string", max_length=500
                    ),
                ),
                (
                    "changed_by",
                    models.ForeignKey(
                        blank=True,
                        help_text="User who made the change",
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="entity_versions",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "entity_type",
                    models.ForeignKey(
                        help_text="Type of entity (Park, Ride, Company, etc.)",
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="entity_versions",
                        to="contenttypes.contenttype",
                    ),
                ),
                (
                    "submission",
                    models.ForeignKey(
                        blank=True,
                        help_text="Submission that caused this version (if applicable)",
                        null=True,
                        on_delete=django.db.models.deletion.SET_NULL,
                        related_name="versions",
                        to="moderation.contentsubmission",
                    ),
                ),
            ],
            options={
                "verbose_name": "Entity Version",
                "verbose_name_plural": "Entity Versions",
                "ordering": ["-created"],
                "indexes": [
                    models.Index(
                        fields=["entity_type", "entity_id", "-created"],
                        name="versioning__entity__8eabd9_idx",
                    ),
                    models.Index(
                        fields=["entity_type", "entity_id", "-version_number"],
                        name="versioning__entity__fe6f1b_idx",
                    ),
                    models.Index(
                        fields=["change_type"], name="versioning__change__17de57_idx"
                    ),
                    models.Index(
                        fields=["changed_by"], name="versioning__changed_39d5fd_idx"
                    ),
                    models.Index(
                        fields=["submission"], name="versioning__submiss_345f6b_idx"
                    ),
                ],
                "unique_together": {("entity_type", "entity_id", "version_number")},
            },
            bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
        ),
    ]
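Once the app is installed, the table and indexes above are created by Django's standard migrate step; a sketch using call_command, equivalent to running `python manage.py migrate versioning` from the shell:

# Apply the versioning app's initial migration programmatically.
from django.core.management import call_command

call_command("migrate", "versioning")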
287
django-backend/apps/versioning/models.py
Normal file
287
django-backend/apps/versioning/models.py
Normal file
@@ -0,0 +1,287 @@
"""
Versioning models for ThrillWiki.

This module provides automatic version tracking for all entities:
- EntityVersion: Generic version model using ContentType
- Full snapshot storage in JSON
- Changed fields tracking with old/new values
- Link to ContentSubmission when changes come from moderation
"""

import json
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey
from django.conf import settings

from apps.core.models import BaseModel


class EntityVersion(BaseModel):
    """
    Generic version tracking for all entities.

    Stores a complete snapshot of the entity state at the time of change,
    along with metadata about what changed and who made the change.
    """

    CHANGE_TYPE_CHOICES = [
        ('created', 'Created'),
        ('updated', 'Updated'),
        ('deleted', 'Deleted'),
        ('restored', 'Restored'),
    ]

    # Entity reference (generic)
    entity_type = models.ForeignKey(
        ContentType,
        on_delete=models.CASCADE,
        related_name='entity_versions',
        help_text="Type of entity (Park, Ride, Company, etc.)"
    )
    entity_id = models.UUIDField(
        db_index=True,
        help_text="ID of the entity"
    )
    entity = GenericForeignKey('entity_type', 'entity_id')

    # Version info
    version_number = models.PositiveIntegerField(
        default=1,
        help_text="Sequential version number for this entity"
    )
    change_type = models.CharField(
        max_length=20,
        choices=CHANGE_TYPE_CHOICES,
        db_index=True,
        help_text="Type of change"
    )

    # Snapshot of entity state
    snapshot = models.JSONField(
        help_text="Complete snapshot of entity state as JSON"
    )

    # Changed fields tracking
    changed_fields = models.JSONField(
        default=dict,
        help_text="Dict of changed fields with old/new values: {'field': {'old': ..., 'new': ...}}"
    )

    # User who made the change
    changed_by = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='entity_versions',
        help_text="User who made the change"
    )

    # Link to ContentSubmission (if change came from moderation)
    submission = models.ForeignKey(
        'moderation.ContentSubmission',
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='versions',
        help_text="Submission that caused this version (if applicable)"
    )

    # Metadata
    comment = models.TextField(
        blank=True,
        help_text="Optional comment about this version"
    )
    ip_address = models.GenericIPAddressField(
        null=True,
        blank=True,
        help_text="IP address of change origin"
    )
    user_agent = models.CharField(
        max_length=500,
        blank=True,
        help_text="User agent string"
    )

    class Meta:
        verbose_name = 'Entity Version'
        verbose_name_plural = 'Entity Versions'
        ordering = ['-created']
        indexes = [
            models.Index(fields=['entity_type', 'entity_id', '-created']),
            models.Index(fields=['entity_type', 'entity_id', '-version_number']),
            models.Index(fields=['change_type']),
            models.Index(fields=['changed_by']),
            models.Index(fields=['submission']),
        ]
        unique_together = [['entity_type', 'entity_id', 'version_number']]

    def __str__(self):
        return f"{self.entity_type.model} v{self.version_number} ({self.change_type})"

    @property
    def entity_name(self):
        """Get display name of the entity."""
        try:
            entity = self.entity
            if entity:
                return str(entity)
        except Exception:
            pass
        return f"{self.entity_type.model}:{self.entity_id}"

    def get_snapshot_dict(self):
        """
        Get snapshot as Python dict.

        Returns:
            dict: Entity snapshot
        """
        if isinstance(self.snapshot, str):
            return json.loads(self.snapshot)
        return self.snapshot

    def get_changed_fields_list(self):
        """
        Get list of changed field names.

        Returns:
            list: Field names that changed
        """
        return list(self.changed_fields.keys())

    def get_field_change(self, field_name):
        """
        Get old and new values for a specific field.

        Args:
            field_name: Name of the field

        Returns:
            dict: {'old': old_value, 'new': new_value} or None if field didn't change
        """
        return self.changed_fields.get(field_name)

    def compare_with(self, other_version):
        """
        Compare this version with another version.

        Args:
            other_version: EntityVersion to compare with

        Returns:
            dict: Comparison result with differences
        """
        if not other_version or self.entity_id != other_version.entity_id:
            return None

        this_snapshot = self.get_snapshot_dict()
        other_snapshot = other_version.get_snapshot_dict()

        differences = {}
        all_keys = set(this_snapshot.keys()) | set(other_snapshot.keys())

        for key in all_keys:
            this_val = this_snapshot.get(key)
            other_val = other_snapshot.get(key)

            if this_val != other_val:
                differences[key] = {
                    'this': this_val,
                    'other': other_val
                }

        return {
            'this_version': self.version_number,
            'other_version': other_version.version_number,
            'differences': differences,
            'changed_field_count': len(differences)
        }

    def get_diff_summary(self):
        """
        Get human-readable summary of changes in this version.

        Returns:
            str: Summary of changes
        """
        if self.change_type == 'created':
            return f"Created {self.entity_name}"

        if self.change_type == 'deleted':
            return f"Deleted {self.entity_name}"

        changed_count = len(self.changed_fields)
        if changed_count == 0:
            return f"No changes to {self.entity_name}"

        field_names = ', '.join(self.get_changed_fields_list()[:3])
        if changed_count > 3:
            field_names += f" and {changed_count - 3} more"

        return f"Updated {field_names}"

    @classmethod
    def get_latest_version_number(cls, entity_type, entity_id):
        """
        Get the latest version number for an entity.

        Args:
            entity_type: ContentType of entity
            entity_id: UUID of entity

        Returns:
            int: Latest version number (0 if no versions exist)
        """
        latest = cls.objects.filter(
            entity_type=entity_type,
            entity_id=entity_id
        ).aggregate(
            max_version=models.Max('version_number')
        )
        return latest['max_version'] or 0

    @classmethod
    def get_history(cls, entity_type, entity_id, limit=50):
        """
        Get version history for an entity.

        Args:
            entity_type: ContentType of entity
            entity_id: UUID of entity
            limit: Maximum number of versions to return

        Returns:
            QuerySet: Ordered list of versions (newest first)
        """
        return cls.objects.filter(
            entity_type=entity_type,
            entity_id=entity_id
        ).select_related(
            'changed_by',
            'submission',
            'submission__user'
        ).order_by('-version_number')[:limit]

    @classmethod
    def get_version_by_number(cls, entity_type, entity_id, version_number):
        """
        Get a specific version by number.

        Args:
            entity_type: ContentType of entity
            entity_id: UUID of entity
            version_number: Version number to retrieve

        Returns:
            EntityVersion or None
        """
        try:
            return cls.objects.get(
                entity_type=entity_type,
                entity_id=entity_id,
                version_number=version_number
            )
        except cls.DoesNotExist:
            return None
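A short usage sketch of the classmethods above; `park` stands in for any versioned entity instance and is an assumption for illustration:

from django.contrib.contenttypes.models import ContentType

from apps.versioning.models import EntityVersion

# Resolve the generic entity reference for a hypothetical `park` instance.
entity_type = ContentType.objects.get_for_model(park)

latest = EntityVersion.get_latest_version_number(entity_type, park.id)  # 0 if unversioned
history = EntityVersion.get_history(entity_type, park.id, limit=10)

for version in history:
    # get_diff_summary() returns e.g. "Updated name, opening_date and 2 more"
    print(version.version_number, version.change_type, version.get_diff_summary())

# Fetch and compare two specific versions of the same entity.
v1 = EntityVersion.get_version_by_number(entity_type, park.id, 1)
v2 = EntityVersion.get_version_by_number(entity_type, park.id, 2)
if v1 and v2:
    diff = v2.compare_with(v1)  # {'differences': {...}, 'changed_field_count': N, ...}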
473
django-backend/apps/versioning/services.py
Normal file
473
django-backend/apps/versioning/services.py
Normal file
@@ -0,0 +1,473 @@
"""
Versioning services for ThrillWiki.

This module provides the business logic for creating and managing entity versions:
- Creating versions automatically via lifecycle hooks
- Generating snapshots and tracking changed fields
- Linking versions to content submissions
- Retrieving version history and diffs
- Restoring previous versions
"""

import json
from decimal import Decimal
from datetime import date, datetime
from django.db import models, transaction
from django.contrib.contenttypes.models import ContentType
from django.core.serializers.json import DjangoJSONEncoder
from django.core.exceptions import ValidationError

from apps.versioning.models import EntityVersion


class VersionService:
    """
    Service class for versioning operations.

    All methods handle automatic version creation and tracking.
    """

    @staticmethod
    @transaction.atomic
    def create_version(
        entity,
        change_type='updated',
        changed_fields=None,
        user=None,
        submission=None,
        comment='',
        ip_address=None,
        user_agent=''
    ):
        """
        Create a version record for an entity.

        This is called automatically by the VersionedModel lifecycle hooks,
        but can also be called manually when needed.

        Args:
            entity: Entity instance (Park, Ride, Company, etc.)
            change_type: Type of change ('created', 'updated', 'deleted', 'restored')
            changed_fields: Dict of dirty fields from DirtyFieldsMixin
            user: User who made the change (optional)
            submission: ContentSubmission that caused this change (optional)
            comment: Optional comment about the change
            ip_address: IP address of the change origin
            user_agent: User agent string

        Returns:
            EntityVersion instance
        """
        # Get ContentType for entity
        entity_type = ContentType.objects.get_for_model(entity)

        # Get next version number
        version_number = EntityVersion.get_latest_version_number(
            entity_type, entity.id
        ) + 1

        # Create snapshot of current entity state
        snapshot = VersionService._create_snapshot(entity)

        # Build changed_fields dict with old/new values
        changed_fields_data = {}
        if changed_fields and change_type == 'updated':
            changed_fields_data = VersionService._build_changed_fields(
                entity, changed_fields
            )

        # Try to get user from submission if not provided
        if not user and submission:
            user = submission.user

        # Create version record
        version = EntityVersion.objects.create(
            entity_type=entity_type,
            entity_id=entity.id,
            version_number=version_number,
            change_type=change_type,
            snapshot=snapshot,
            changed_fields=changed_fields_data,
            changed_by=user,
            submission=submission,
            comment=comment,
            ip_address=ip_address,
            user_agent=user_agent
        )

        return version

    @staticmethod
    def _create_snapshot(entity):
        """
        Create a JSON snapshot of the entity's current state.

        Args:
            entity: Entity instance

        Returns:
            dict: Serializable snapshot of entity
        """
        snapshot = {}

        # Get all model fields
        for field in entity._meta.get_fields():
            # Skip reverse relations
            if field.is_relation and field.many_to_one is False and field.one_to_many is True:
                continue
            if field.is_relation and field.many_to_many is True:
                continue

            field_name = field.name

            try:
                value = getattr(entity, field_name)

                # Handle different field types
                if value is None:
                    snapshot[field_name] = None
                elif isinstance(value, (str, int, float, bool)):
                    snapshot[field_name] = value
                elif isinstance(value, Decimal):
                    snapshot[field_name] = float(value)
                elif isinstance(value, (date, datetime)):
                    snapshot[field_name] = value.isoformat()
                elif isinstance(value, models.Model):
                    # Store FK as ID
                    snapshot[field_name] = str(value.id) if value.id else None
                elif isinstance(value, dict):
                    # JSONField
                    snapshot[field_name] = value
                elif isinstance(value, list):
                    # JSONField array
                    snapshot[field_name] = value
                else:
                    # Try to serialize as string
                    snapshot[field_name] = str(value)
            except Exception:
                # Skip fields that can't be serialized
                continue

        return snapshot

    @staticmethod
    def _build_changed_fields(entity, dirty_fields):
        """
        Build a dict of changed fields with old and new values.

        Args:
            entity: Entity instance
            dirty_fields: Dict from DirtyFieldsMixin.get_dirty_fields()

        Returns:
            dict: Changed fields with old/new values
        """
        changed = {}

        for field_name, old_value in dirty_fields.items():
            try:
                new_value = getattr(entity, field_name)

                # Normalize values for JSON
                old_normalized = VersionService._normalize_value(old_value)
                new_normalized = VersionService._normalize_value(new_value)

                changed[field_name] = {
                    'old': old_normalized,
                    'new': new_normalized
                }
            except Exception:
                continue

        return changed

    @staticmethod
    def _normalize_value(value):
        """
        Normalize a value for JSON serialization.

        Args:
            value: Value to normalize

        Returns:
            Normalized value
        """
        if value is None:
            return None
        elif isinstance(value, (str, int, float, bool)):
            return value
        elif isinstance(value, Decimal):
            return float(value)
        elif isinstance(value, (date, datetime)):
            return value.isoformat()
        elif isinstance(value, models.Model):
            return str(value.id) if value.id else None
        elif isinstance(value, (dict, list)):
            return value
        else:
            return str(value)

    @staticmethod
    def get_version_history(entity, limit=50):
        """
        Get version history for an entity.

        Args:
            entity: Entity instance
            limit: Maximum number of versions to return

        Returns:
            QuerySet: Ordered list of versions (newest first)
        """
        entity_type = ContentType.objects.get_for_model(entity)
        return EntityVersion.get_history(entity_type, entity.id, limit)

    @staticmethod
    def get_version_by_number(entity, version_number):
        """
        Get a specific version by number.

        Args:
            entity: Entity instance
            version_number: Version number to retrieve

        Returns:
            EntityVersion or None
        """
        entity_type = ContentType.objects.get_for_model(entity)
        return EntityVersion.get_version_by_number(entity_type, entity.id, version_number)

    @staticmethod
    def get_latest_version(entity):
        """
        Get the latest version for an entity.

        Args:
            entity: Entity instance

        Returns:
            EntityVersion or None
        """
        entity_type = ContentType.objects.get_for_model(entity)
        return EntityVersion.objects.filter(
            entity_type=entity_type,
            entity_id=entity.id
        ).order_by('-version_number').first()

    @staticmethod
    def compare_versions(version1, version2):
        """
        Compare two versions of the same entity.

        Args:
            version1: First EntityVersion
            version2: Second EntityVersion

        Returns:
            dict: Comparison result with differences
        """
        if version1.entity_id != version2.entity_id:
            raise ValidationError("Versions must be for the same entity")

        return version1.compare_with(version2)

    @staticmethod
    def get_diff_with_current(version):
        """
        Compare a version with the current entity state.

        Args:
            version: EntityVersion to compare

        Returns:
            dict: Differences between version and current state
        """
        entity = version.entity
        if not entity:
            raise ValidationError("Entity no longer exists")

        current_snapshot = VersionService._create_snapshot(entity)
        version_snapshot = version.get_snapshot_dict()

        differences = {}
        all_keys = set(current_snapshot.keys()) | set(version_snapshot.keys())

        for key in all_keys:
            current_val = current_snapshot.get(key)
            version_val = version_snapshot.get(key)

            if current_val != version_val:
                differences[key] = {
                    'current': current_val,
                    'version': version_val
                }

        return {
            'version_number': version.version_number,
            'differences': differences,
            'changed_field_count': len(differences)
        }

    @staticmethod
    @transaction.atomic
    def restore_version(version, user=None, comment=''):
        """
        Restore an entity to a previous version.

        This creates a new version with change_type='restored'.

        Args:
            version: EntityVersion to restore
            user: User performing the restore
            comment: Optional comment about the restore

        Returns:
            EntityVersion: New version created by restore

        Raises:
            ValidationError: If entity doesn't exist
        """
        entity = version.entity
        if not entity:
            raise ValidationError("Entity no longer exists")

        # Get snapshot to restore
        snapshot = version.get_snapshot_dict()

        # Track which fields are changing
        changed_fields = {}

        # Apply snapshot values to entity
        for field_name, value in snapshot.items():
            # Skip metadata fields
            if field_name in ['id', 'created', 'modified']:
                continue

            try:
                # Get current value
                current_value = getattr(entity, field_name, None)
                current_normalized = VersionService._normalize_value(current_value)

                # Check if value is different
                if current_normalized != value:
                    changed_fields[field_name] = {
                        'old': current_normalized,
                        'new': value
                    }

                # Apply restored value
                # Handle special field types
                field = entity._meta.get_field(field_name)

                if isinstance(field, models.ForeignKey):
                    # FK fields need model instance
                    if value:
                        related_model = field.related_model
                        try:
                            related_obj = related_model.objects.get(id=value)
                            setattr(entity, field_name, related_obj)
                        except Exception:
                            pass
                    else:
                        setattr(entity, field_name, None)
                elif isinstance(field, models.DateTimeField):
                    # DateTime fields (checked before DateField, which DateTimeField subclasses)
                    if value:
                        setattr(entity, field_name, datetime.fromisoformat(value))
                    else:
                        setattr(entity, field_name, None)
                elif isinstance(field, models.DateField):
                    # Date fields
                    if value:
                        setattr(entity, field_name, datetime.fromisoformat(value).date())
                    else:
                        setattr(entity, field_name, None)
                elif isinstance(field, models.DecimalField):
                    # Decimal fields
                    if value is not None:
                        setattr(entity, field_name, Decimal(str(value)))
                    else:
                        setattr(entity, field_name, None)
                else:
                    # Regular fields
                    setattr(entity, field_name, value)
            except Exception:
                # Skip fields that can't be restored
                continue

        # Save entity (this will trigger lifecycle hooks)
        # But we need to create the version manually to mark it as 'restored'
        entity.save()

        # Create restore version
        entity_type = ContentType.objects.get_for_model(entity)
        version_number = EntityVersion.get_latest_version_number(
            entity_type, entity.id
        ) + 1

        restored_version = EntityVersion.objects.create(
            entity_type=entity_type,
            entity_id=entity.id,
            version_number=version_number,
            change_type='restored',
            snapshot=VersionService._create_snapshot(entity),
            changed_fields=changed_fields,
            changed_by=user,
            comment=f"Restored from version {version.version_number}. {comment}".strip()
        )

        return restored_version

    @staticmethod
    def get_version_count(entity):
        """
        Get total number of versions for an entity.

        Args:
            entity: Entity instance

        Returns:
            int: Number of versions
        """
        entity_type = ContentType.objects.get_for_model(entity)
        return EntityVersion.objects.filter(
            entity_type=entity_type,
            entity_id=entity.id
        ).count()

    @staticmethod
    def get_versions_by_user(user, limit=50):
        """
        Get versions created by a specific user.

        Args:
            user: User instance
            limit: Maximum number of versions to return

        Returns:
            QuerySet: Versions by user (newest first)
        """
        return EntityVersion.objects.filter(
            changed_by=user
        ).select_related(
            'entity_type',
            'submission'
        ).order_by('-created')[:limit]

    @staticmethod
    def get_versions_by_submission(submission):
        """
        Get all versions created by a content submission.

        Args:
            submission: ContentSubmission instance

        Returns:
            QuerySet: Versions from submission
        """
        return EntityVersion.objects.filter(
            submission=submission
        ).select_related(
            'entity_type',
            'changed_by'
        ).order_by('-created')
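And a sketch of calling the service manually, assuming `ride` uses DirtyFieldsMixin (as the docstrings expect) and `request` is the current HttpRequest; both names are illustrative:

from apps.versioning.services import VersionService

# Capture dirty fields *before* saving, then record the version afterwards.
dirty = ride.get_dirty_fields()
ride.save()

version = VersionService.create_version(
    ride,
    change_type='updated',
    changed_fields=dirty,
    user=request.user,
    comment='Corrected ride statistics',
    ip_address=request.META.get('REMOTE_ADDR'),
    user_agent=request.META.get('HTTP_USER_AGENT', ''),
)

# Roll back to the previous state; this writes a new 'restored' version.
previous = VersionService.get_version_by_number(ride, version.version_number - 1)
if previous:
    VersionService.restore_version(previous, user=request.user, comment='Reverting change')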