Mirror of https://github.com/pacnpal/thrilltrack-explorer.git, synced 2025-12-22 15:31:12 -05:00
Add email templates for user notifications and account management
- Created a base email template (base.html) for consistent styling across all emails.
- Added a moderation approval email template (moderation_approved.html) to notify users of approved submissions.
- Added a moderation rejection email template (moderation_rejected.html) to inform users of required changes to their submissions.
- Created a password reset email template (password_reset.html) for users requesting a password reset.
- Added a welcome email template (welcome.html) to greet new users and provide account details and tips for using ThrillWiki.
Binary files not shown (compiled __pycache__/*.pyc artifacts), including new files:
django/apps/entities/__pycache__/search.cpython-313.pyc
django/apps/entities/__pycache__/signals.cpython-313.pyc
@@ -1,97 +1,402 @@
"""
Django Admin configuration for entity models.
Django Admin configuration for entity models with Unfold theme.
"""
from django.contrib import admin
from django.contrib.gis import admin as gis_admin
from django.db.models import Count, Q
from django.utils.html import format_html
from django.urls import reverse
from django.conf import settings
from unfold.admin import ModelAdmin, TabularInline
from unfold.contrib.filters.admin import RangeDateFilter, RangeNumericFilter, RelatedDropdownFilter, ChoicesDropdownFilter
from unfold.contrib.import_export.forms import ImportForm, ExportForm
from import_export.admin import ImportExportModelAdmin
from import_export import resources, fields
from import_export.widgets import ForeignKeyWidget
from .models import Company, RideModel, Park, Ride
from apps.media.admin import PhotoInline


# ============================================================================
# IMPORT/EXPORT RESOURCES
# ============================================================================

class CompanyResource(resources.ModelResource):
|
||||
"""Import/Export resource for Company model."""
|
||||
|
||||
class Meta:
|
||||
model = Company
|
||||
fields = (
|
||||
'id', 'name', 'slug', 'description', 'location',
|
||||
'company_types', 'founded_date', 'founded_date_precision',
|
||||
'closed_date', 'closed_date_precision', 'website',
|
||||
'logo_image_url', 'created', 'modified'
|
||||
)
|
||||
export_order = fields
|
||||
|
||||
|
||||
class RideModelResource(resources.ModelResource):
|
||||
"""Import/Export resource for RideModel model."""
|
||||
|
||||
manufacturer = fields.Field(
|
||||
column_name='manufacturer',
|
||||
attribute='manufacturer',
|
||||
widget=ForeignKeyWidget(Company, 'name')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = RideModel
|
||||
fields = (
|
||||
'id', 'name', 'slug', 'description', 'manufacturer',
|
||||
'model_type', 'typical_height', 'typical_speed',
|
||||
'typical_capacity', 'image_url', 'created', 'modified'
|
||||
)
|
||||
export_order = fields
|
||||
|
||||
|
||||
class ParkResource(resources.ModelResource):
|
||||
"""Import/Export resource for Park model."""
|
||||
|
||||
operator = fields.Field(
|
||||
column_name='operator',
|
||||
attribute='operator',
|
||||
widget=ForeignKeyWidget(Company, 'name')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Park
|
||||
fields = (
|
||||
'id', 'name', 'slug', 'description', 'park_type', 'status',
|
||||
'latitude', 'longitude', 'operator', 'opening_date',
|
||||
'opening_date_precision', 'closing_date', 'closing_date_precision',
|
||||
'website', 'banner_image_url', 'logo_image_url',
|
||||
'created', 'modified'
|
||||
)
|
||||
export_order = fields
|
||||
|
||||
|
||||
class RideResource(resources.ModelResource):
|
||||
"""Import/Export resource for Ride model."""
|
||||
|
||||
park = fields.Field(
|
||||
column_name='park',
|
||||
attribute='park',
|
||||
widget=ForeignKeyWidget(Park, 'name')
|
||||
)
|
||||
manufacturer = fields.Field(
|
||||
column_name='manufacturer',
|
||||
attribute='manufacturer',
|
||||
widget=ForeignKeyWidget(Company, 'name')
|
||||
)
|
||||
model = fields.Field(
|
||||
column_name='model',
|
||||
attribute='model',
|
||||
widget=ForeignKeyWidget(RideModel, 'name')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Ride
|
||||
fields = (
|
||||
'id', 'name', 'slug', 'description', 'park', 'ride_category',
|
||||
'ride_type', 'status', 'manufacturer', 'model', 'height',
|
||||
'speed', 'length', 'duration', 'inversions', 'capacity',
|
||||
'opening_date', 'opening_date_precision', 'closing_date',
|
||||
'closing_date_precision', 'image_url', 'created', 'modified'
|
||||
)
|
||||
export_order = fields
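The four resource classes above are standard django-import-export ModelResources, so they can also be exercised outside the admin (for example from a shell or a management command). A minimal export/validation sketch, not part of this commit; the CSV file name is arbitrary:

from apps.entities.admin import ParkResource

resource = ParkResource()

# Export every Park row using the field list declared in Meta.fields
dataset = resource.export()
with open("parks.csv", "w") as fh:
    fh.write(dataset.csv)

# Validate a re-import without writing to the database (dry_run=True)
result = resource.import_data(dataset, dry_run=True)
print("import errors:", result.has_errors())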
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# INLINE ADMIN CLASSES
|
||||
# ============================================================================
|
||||
|
||||
class RideInline(TabularInline):
|
||||
"""Inline for Rides within a Park."""
|
||||
|
||||
model = Ride
|
||||
extra = 0
|
||||
fields = ['name', 'ride_category', 'status', 'manufacturer', 'opening_date']
|
||||
readonly_fields = ['name']
|
||||
show_change_link = True
|
||||
classes = ['collapse']
|
||||
|
||||
def has_add_permission(self, request, obj=None):
|
||||
return False
|
||||
|
||||
|
||||
class CompanyParksInline(TabularInline):
|
||||
"""Inline for Parks operated by a Company."""
|
||||
|
||||
model = Park
|
||||
fk_name = 'operator'
|
||||
extra = 0
|
||||
fields = ['name', 'park_type', 'status', 'ride_count', 'opening_date']
|
||||
readonly_fields = ['name', 'ride_count']
|
||||
show_change_link = True
|
||||
classes = ['collapse']
|
||||
|
||||
def has_add_permission(self, request, obj=None):
|
||||
return False
|
||||
|
||||
|
||||
class RideModelInstallationsInline(TabularInline):
|
||||
"""Inline for Ride installations of a RideModel."""
|
||||
|
||||
model = Ride
|
||||
fk_name = 'model'
|
||||
extra = 0
|
||||
fields = ['name', 'park', 'status', 'opening_date']
|
||||
readonly_fields = ['name', 'park']
|
||||
show_change_link = True
|
||||
classes = ['collapse']
|
||||
|
||||
def has_add_permission(self, request, obj=None):
|
||||
return False
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# MAIN ADMIN CLASSES
|
||||
# ============================================================================
|
||||
|
||||
@admin.register(Company)
|
||||
class CompanyAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for Company model."""
|
||||
class CompanyAdmin(ModelAdmin, ImportExportModelAdmin):
|
||||
"""Enhanced admin interface for Company model."""
|
||||
|
||||
list_display = ['name', 'slug', 'location', 'park_count', 'ride_count', 'created', 'modified']
|
||||
list_filter = ['company_types', 'founded_date']
|
||||
search_fields = ['name', 'slug', 'description']
|
||||
readonly_fields = ['id', 'created', 'modified', 'park_count', 'ride_count']
|
||||
prepopulated_fields = {'slug': ('name',)}
|
||||
resource_class = CompanyResource
|
||||
import_form_class = ImportForm
|
||||
export_form_class = ExportForm
|
||||
|
||||
list_display = [
|
||||
'name_with_icon',
|
||||
'location',
|
||||
'company_types_display',
|
||||
'park_count',
|
||||
'ride_count',
|
||||
'founded_date',
|
||||
'status_indicator',
|
||||
'created'
|
||||
]
|
||||
list_filter = [
|
||||
('company_types', ChoicesDropdownFilter),
|
||||
('founded_date', RangeDateFilter),
|
||||
('closed_date', RangeDateFilter),
|
||||
]
|
||||
search_fields = ['name', 'slug', 'description', 'location']
|
||||
readonly_fields = ['id', 'created', 'modified', 'park_count', 'ride_count', 'slug']
|
||||
prepopulated_fields = {} # Slug is auto-generated via lifecycle hook
|
||||
autocomplete_fields = []
|
||||
inlines = [CompanyParksInline, PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
list_max_show_all = 200
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('name', 'slug', 'description', 'company_types')
|
||||
}),
|
||||
('Location', {
|
||||
'fields': ('location',)
|
||||
('Location & Contact', {
|
||||
'fields': ('location', 'website')
|
||||
}),
|
||||
('Dates', {
|
||||
('History', {
|
||||
'fields': (
|
||||
'founded_date', 'founded_date_precision',
|
||||
'closed_date', 'closed_date_precision'
|
||||
)
|
||||
}),
|
||||
('Media', {
|
||||
'fields': ('logo_image_id', 'logo_image_url', 'website')
|
||||
'fields': ('logo_image_id', 'logo_image_url'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('park_count', 'ride_count'),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('System', {
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_icon(self, obj):
|
||||
"""Display name with company type icon."""
|
||||
icons = {
|
||||
'manufacturer': '🏭',
|
||||
'operator': '🎡',
|
||||
'designer': '✏️',
|
||||
}
|
||||
icon = '🏢' # Default company icon
|
||||
if obj.company_types:
|
||||
for ctype in obj.company_types:
|
||||
if ctype in icons:
|
||||
icon = icons[ctype]
|
||||
break
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_icon.short_description = 'Company'
|
||||
name_with_icon.admin_order_field = 'name'
|
||||
|
||||
def company_types_display(self, obj):
|
||||
"""Display company types as badges."""
|
||||
if not obj.company_types:
|
||||
return '-'
|
||||
badges = []
|
||||
for ctype in obj.company_types:
|
||||
color = {
|
||||
'manufacturer': 'blue',
|
||||
'operator': 'green',
|
||||
'designer': 'purple',
|
||||
}.get(ctype, 'gray')
|
||||
badges.append(
|
||||
f'<span style="background-color: {color}; color: white; '
|
||||
f'padding: 2px 8px; border-radius: 4px; font-size: 11px; '
|
||||
f'margin-right: 4px;">{ctype.upper()}</span>'
|
||||
)
|
||||
return format_html(' '.join(badges))
|
||||
company_types_display.short_description = 'Types'
|
||||
|
||||
def status_indicator(self, obj):
|
||||
"""Visual status indicator."""
|
||||
if obj.closed_date:
|
||||
return format_html(
|
||||
'<span style="color: red;">●</span> Closed'
|
||||
)
|
||||
return format_html(
|
||||
'<span style="color: green;">●</span> Active'
|
||||
)
|
||||
status_indicator.short_description = 'Status'
|
||||
|
||||
actions = ['export_admin_action']
|
||||
|
||||
|
||||
@admin.register(RideModel)
|
||||
class RideModelAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for RideModel model."""
|
||||
class RideModelAdmin(ModelAdmin, ImportExportModelAdmin):
|
||||
"""Enhanced admin interface for RideModel model."""
|
||||
|
||||
list_display = ['name', 'manufacturer', 'model_type', 'installation_count', 'created', 'modified']
|
||||
list_filter = ['model_type', 'manufacturer']
|
||||
resource_class = RideModelResource
|
||||
import_form_class = ImportForm
|
||||
export_form_class = ExportForm
|
||||
|
||||
list_display = [
|
||||
'name_with_type',
|
||||
'manufacturer',
|
||||
'model_type',
|
||||
'typical_specs',
|
||||
'installation_count',
|
||||
'created'
|
||||
]
|
||||
list_filter = [
|
||||
('model_type', ChoicesDropdownFilter),
|
||||
('manufacturer', RelatedDropdownFilter),
|
||||
('typical_height', RangeNumericFilter),
|
||||
('typical_speed', RangeNumericFilter),
|
||||
]
|
||||
search_fields = ['name', 'slug', 'description', 'manufacturer__name']
|
||||
readonly_fields = ['id', 'created', 'modified', 'installation_count']
|
||||
prepopulated_fields = {'slug': ('name',)}
|
||||
readonly_fields = ['id', 'created', 'modified', 'installation_count', 'slug']
|
||||
prepopulated_fields = {}
|
||||
autocomplete_fields = ['manufacturer']
|
||||
inlines = [RideModelInstallationsInline, PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('name', 'slug', 'description', 'manufacturer', 'model_type')
|
||||
}),
|
||||
('Typical Specifications', {
|
||||
'fields': ('typical_height', 'typical_speed', 'typical_capacity')
|
||||
'fields': (
|
||||
'typical_height', 'typical_speed', 'typical_capacity'
|
||||
),
|
||||
'description': 'Standard specifications for this ride model'
|
||||
}),
|
||||
('Media', {
|
||||
'fields': ('image_id', 'image_url')
|
||||
'fields': ('image_id', 'image_url'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('installation_count',),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('System', {
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_type(self, obj):
|
||||
"""Display name with model type icon."""
|
||||
icons = {
|
||||
'roller_coaster': '🎢',
|
||||
'water_ride': '🌊',
|
||||
'flat_ride': '🎡',
|
||||
'dark_ride': '🎭',
|
||||
'transport': '🚂',
|
||||
}
|
||||
icon = icons.get(obj.model_type, '🎪')
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_type.short_description = 'Model Name'
|
||||
name_with_type.admin_order_field = 'name'
|
||||
|
||||
def typical_specs(self, obj):
|
||||
"""Display typical specifications."""
|
||||
specs = []
|
||||
if obj.typical_height:
|
||||
specs.append(f'H: {obj.typical_height}m')
|
||||
if obj.typical_speed:
|
||||
specs.append(f'S: {obj.typical_speed}km/h')
|
||||
if obj.typical_capacity:
|
||||
specs.append(f'C: {obj.typical_capacity}')
|
||||
return ' | '.join(specs) if specs else '-'
|
||||
typical_specs.short_description = 'Typical Specs'
|
||||
|
||||
actions = ['export_admin_action']
|
||||
|
||||
|
||||
@admin.register(Park)
|
||||
class ParkAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for Park model."""
|
||||
class ParkAdmin(ModelAdmin, ImportExportModelAdmin):
|
||||
"""Enhanced admin interface for Park model with geographic features."""
|
||||
|
||||
list_display = ['name', 'location', 'park_type', 'status', 'ride_count', 'coaster_count', 'opening_date']
|
||||
list_filter = ['park_type', 'status', 'operator', 'opening_date']
|
||||
search_fields = ['name', 'slug', 'description', 'location__name']
|
||||
readonly_fields = ['id', 'created', 'modified', 'ride_count', 'coaster_count']
|
||||
prepopulated_fields = {'slug': ('name',)}
|
||||
resource_class = ParkResource
|
||||
import_form_class = ImportForm
|
||||
export_form_class = ExportForm
|
||||
|
||||
list_display = [
|
||||
'name_with_icon',
|
||||
'location_display',
|
||||
'park_type',
|
||||
'status_badge',
|
||||
'ride_count',
|
||||
'coaster_count',
|
||||
'opening_date',
|
||||
'operator'
|
||||
]
|
||||
list_filter = [
|
||||
('park_type', ChoicesDropdownFilter),
|
||||
('status', ChoicesDropdownFilter),
|
||||
('operator', RelatedDropdownFilter),
|
||||
('opening_date', RangeDateFilter),
|
||||
('closing_date', RangeDateFilter),
|
||||
]
|
||||
search_fields = ['name', 'slug', 'description', 'location']
|
||||
readonly_fields = [
|
||||
'id', 'created', 'modified', 'ride_count', 'coaster_count',
|
||||
'slug', 'coordinates_display'
|
||||
]
|
||||
prepopulated_fields = {}
|
||||
autocomplete_fields = ['operator']
|
||||
raw_id_fields = ['location']
|
||||
inlines = [RideInline, PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
|
||||
# Use GeoDjango admin for PostGIS mode
|
||||
if hasattr(settings, 'DATABASES') and 'postgis' in settings.DATABASES['default'].get('ENGINE', ''):
|
||||
change_form_template = 'gis/admin/change_form.html'
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
'fields': ('name', 'slug', 'description', 'park_type', 'status')
|
||||
}),
|
||||
('Location', {
|
||||
'fields': ('location', 'latitude', 'longitude')
|
||||
('Geographic Location', {
|
||||
'fields': ('location', 'latitude', 'longitude', 'coordinates_display'),
|
||||
'description': 'Enter latitude and longitude for the park location'
|
||||
}),
|
||||
('Dates', {
|
||||
'fields': (
|
||||
@@ -102,38 +407,136 @@ class ParkAdmin(admin.ModelAdmin):
|
||||
('Operator', {
|
||||
'fields': ('operator',)
|
||||
}),
|
||||
('Media', {
|
||||
('Media & Web', {
|
||||
'fields': (
|
||||
'banner_image_id', 'banner_image_url',
|
||||
'logo_image_id', 'logo_image_url',
|
||||
'website'
|
||||
)
|
||||
),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('ride_count', 'coaster_count'),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Custom Data', {
|
||||
'fields': ('custom_fields',),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse'],
|
||||
'description': 'Additional custom data in JSON format'
|
||||
}),
|
||||
('System', {
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_icon(self, obj):
|
||||
"""Display name with park type icon."""
|
||||
icons = {
|
||||
'theme_park': '🎡',
|
||||
'amusement_park': '🎢',
|
||||
'water_park': '🌊',
|
||||
'indoor_park': '🏢',
|
||||
'fairground': '🎪',
|
||||
}
|
||||
icon = icons.get(obj.park_type, '🎠')
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_icon.short_description = 'Park Name'
|
||||
name_with_icon.admin_order_field = 'name'
|
||||
|
||||
def location_display(self, obj):
|
||||
"""Display location with coordinates."""
|
||||
if obj.location:
|
||||
coords = obj.coordinates
|
||||
if coords:
|
||||
return format_html(
|
||||
'{}<br><small style="color: gray;">({:.4f}, {:.4f})</small>',
|
||||
obj.location, coords[0], coords[1]
|
||||
)
|
||||
return obj.location
|
||||
return '-'
|
||||
location_display.short_description = 'Location'
|
||||
|
||||
def coordinates_display(self, obj):
|
||||
"""Read-only display of coordinates."""
|
||||
coords = obj.coordinates
|
||||
if coords:
|
||||
return f"Longitude: {coords[0]:.6f}, Latitude: {coords[1]:.6f}"
|
||||
return "No coordinates set"
|
||||
coordinates_display.short_description = 'Current Coordinates'
|
||||
|
||||
def status_badge(self, obj):
|
||||
"""Display status as colored badge."""
|
||||
colors = {
|
||||
'operating': 'green',
|
||||
'closed_temporarily': 'orange',
|
||||
'closed_permanently': 'red',
|
||||
'under_construction': 'blue',
|
||||
'planned': 'purple',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; '
|
||||
'padding: 3px 10px; border-radius: 12px; font-size: 11px;">'
|
||||
'{}</span>',
|
||||
color, obj.get_status_display()
|
||||
)
|
||||
status_badge.short_description = 'Status'
|
||||
status_badge.admin_order_field = 'status'
|
||||
|
||||
actions = ['export_admin_action', 'activate_parks', 'close_parks']
|
||||
|
||||
def activate_parks(self, request, queryset):
|
||||
"""Bulk action to activate parks."""
|
||||
updated = queryset.update(status='operating')
|
||||
self.message_user(request, f'{updated} park(s) marked as operating.')
|
||||
activate_parks.short_description = 'Mark selected parks as operating'
|
||||
|
||||
def close_parks(self, request, queryset):
|
||||
"""Bulk action to close parks temporarily."""
|
||||
updated = queryset.update(status='closed_temporarily')
|
||||
self.message_user(request, f'{updated} park(s) marked as temporarily closed.')
|
||||
close_parks.short_description = 'Mark selected parks as temporarily closed'
|
||||
|
||||
|
||||
@admin.register(Ride)
|
||||
class RideAdmin(admin.ModelAdmin):
|
||||
"""Admin interface for Ride model."""
|
||||
class RideAdmin(ModelAdmin, ImportExportModelAdmin):
|
||||
"""Enhanced admin interface for Ride model."""
|
||||
|
||||
list_display = ['name', 'park', 'ride_category', 'status', 'is_coaster', 'manufacturer', 'opening_date']
|
||||
list_filter = ['ride_category', 'status', 'is_coaster', 'park', 'manufacturer', 'opening_date']
|
||||
search_fields = ['name', 'slug', 'description', 'park__name', 'manufacturer__name']
|
||||
readonly_fields = ['id', 'created', 'modified', 'is_coaster']
|
||||
prepopulated_fields = {'slug': ('name',)}
|
||||
resource_class = RideResource
|
||||
import_form_class = ImportForm
|
||||
export_form_class = ExportForm
|
||||
|
||||
list_display = [
|
||||
'name_with_icon',
|
||||
'park',
|
||||
'ride_category',
|
||||
'status_badge',
|
||||
'manufacturer',
|
||||
'stats_display',
|
||||
'opening_date',
|
||||
'coaster_badge'
|
||||
]
|
||||
list_filter = [
|
||||
('ride_category', ChoicesDropdownFilter),
|
||||
('status', ChoicesDropdownFilter),
|
||||
('is_coaster', admin.BooleanFieldListFilter),
|
||||
('park', RelatedDropdownFilter),
|
||||
('manufacturer', RelatedDropdownFilter),
|
||||
('opening_date', RangeDateFilter),
|
||||
('height', RangeNumericFilter),
|
||||
('speed', RangeNumericFilter),
|
||||
]
|
||||
search_fields = [
|
||||
'name', 'slug', 'description',
|
||||
'park__name', 'manufacturer__name'
|
||||
]
|
||||
readonly_fields = ['id', 'created', 'modified', 'is_coaster', 'slug']
|
||||
prepopulated_fields = {}
|
||||
autocomplete_fields = ['park', 'manufacturer', 'model']
|
||||
inlines = [PhotoInline]
|
||||
|
||||
list_per_page = 50
|
||||
|
||||
fieldsets = (
|
||||
('Basic Information', {
|
||||
@@ -148,21 +551,156 @@ class RideAdmin(admin.ModelAdmin):
|
||||
'closing_date', 'closing_date_precision'
|
||||
)
|
||||
}),
|
||||
('Manufacturer', {
|
||||
('Manufacturer & Model', {
|
||||
'fields': ('manufacturer', 'model')
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('height', 'speed', 'length', 'duration', 'inversions', 'capacity')
|
||||
('Ride Statistics', {
|
||||
'fields': (
|
||||
'height', 'speed', 'length',
|
||||
'duration', 'inversions', 'capacity'
|
||||
),
|
||||
'description': 'Technical specifications and statistics'
|
||||
}),
|
||||
('Media', {
|
||||
'fields': ('image_id', 'image_url')
|
||||
'fields': ('image_id', 'image_url'),
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('Custom Data', {
|
||||
'fields': ('custom_fields',),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
('System', {
|
||||
('System Information', {
|
||||
'fields': ('id', 'created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
'classes': ['collapse']
|
||||
}),
|
||||
)
|
||||
|
||||
def name_with_icon(self, obj):
|
||||
"""Display name with category icon."""
|
||||
icons = {
|
||||
'roller_coaster': '🎢',
|
||||
'water_ride': '🌊',
|
||||
'dark_ride': '🎭',
|
||||
'flat_ride': '🎡',
|
||||
'transport': '🚂',
|
||||
'show': '🎪',
|
||||
}
|
||||
icon = icons.get(obj.ride_category, '🎠')
|
||||
return format_html('{} {}', icon, obj.name)
|
||||
name_with_icon.short_description = 'Ride Name'
|
||||
name_with_icon.admin_order_field = 'name'
|
||||
|
||||
def stats_display(self, obj):
|
||||
"""Display key statistics."""
|
||||
stats = []
|
||||
if obj.height:
|
||||
stats.append(f'H: {obj.height}m')
|
||||
if obj.speed:
|
||||
stats.append(f'S: {obj.speed}km/h')
|
||||
if obj.inversions:
|
||||
stats.append(f'🔄 {obj.inversions}')
|
||||
return ' | '.join(stats) if stats else '-'
|
||||
stats_display.short_description = 'Key Stats'
|
||||
|
||||
def coaster_badge(self, obj):
|
||||
"""Display coaster indicator."""
|
||||
if obj.is_coaster:
|
||||
return format_html(
|
||||
'<span style="background-color: #ff6b6b; color: white; '
|
||||
'padding: 2px 8px; border-radius: 10px; font-size: 10px;">'
|
||||
'🎢 COASTER</span>'
|
||||
)
|
||||
return ''
|
||||
coaster_badge.short_description = 'Type'
|
||||
|
||||
def status_badge(self, obj):
|
||||
"""Display status as colored badge."""
|
||||
colors = {
|
||||
'operating': 'green',
|
||||
'closed_temporarily': 'orange',
|
||||
'closed_permanently': 'red',
|
||||
'under_construction': 'blue',
|
||||
'sbno': 'gray',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; '
|
||||
'padding: 3px 10px; border-radius: 12px; font-size: 11px;">'
|
||||
'{}</span>',
|
||||
color, obj.get_status_display()
|
||||
)
|
||||
status_badge.short_description = 'Status'
|
||||
status_badge.admin_order_field = 'status'
|
||||
|
||||
actions = ['export_admin_action', 'activate_rides', 'close_rides']
|
||||
|
||||
def activate_rides(self, request, queryset):
|
||||
"""Bulk action to activate rides."""
|
||||
updated = queryset.update(status='operating')
|
||||
self.message_user(request, f'{updated} ride(s) marked as operating.')
|
||||
activate_rides.short_description = 'Mark selected rides as operating'
|
||||
|
||||
def close_rides(self, request, queryset):
|
||||
"""Bulk action to close rides temporarily."""
|
||||
updated = queryset.update(status='closed_temporarily')
|
||||
self.message_user(request, f'{updated} ride(s) marked as temporarily closed.')
|
||||
close_rides.short_description = 'Mark selected rides as temporarily closed'
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# DASHBOARD CALLBACK
|
||||
# ============================================================================
|
||||
|
||||
def dashboard_callback(request, context):
|
||||
"""
|
||||
Callback function for Unfold dashboard.
|
||||
Provides statistics and overview data.
|
||||
"""
|
||||
# Entity counts
|
||||
total_parks = Park.objects.count()
|
||||
total_rides = Ride.objects.count()
|
||||
total_companies = Company.objects.count()
|
||||
total_models = RideModel.objects.count()
|
||||
|
||||
# Operating counts
|
||||
operating_parks = Park.objects.filter(status='operating').count()
|
||||
operating_rides = Ride.objects.filter(status='operating').count()
|
||||
|
||||
# Coaster count
|
||||
total_coasters = Ride.objects.filter(is_coaster=True).count()
|
||||
|
||||
# Recent additions (last 30 days)
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
thirty_days_ago = timezone.now() - timedelta(days=30)
|
||||
|
||||
recent_parks = Park.objects.filter(created__gte=thirty_days_ago).count()
|
||||
recent_rides = Ride.objects.filter(created__gte=thirty_days_ago).count()
|
||||
|
||||
# Top manufacturers by ride count
|
||||
top_manufacturers = Company.objects.filter(
|
||||
company_types__contains=['manufacturer']
|
||||
).annotate(
|
||||
ride_count_actual=Count('manufactured_rides')
|
||||
).order_by('-ride_count_actual')[:5]
|
||||
|
||||
# Parks by type
|
||||
parks_by_type = Park.objects.values('park_type').annotate(
|
||||
count=Count('id')
|
||||
).order_by('-count')
|
||||
|
||||
context.update({
|
||||
'total_parks': total_parks,
|
||||
'total_rides': total_rides,
|
||||
'total_companies': total_companies,
|
||||
'total_models': total_models,
|
||||
'operating_parks': operating_parks,
|
||||
'operating_rides': operating_rides,
|
||||
'total_coasters': total_coasters,
|
||||
'recent_parks': recent_parks,
|
||||
'recent_rides': recent_rides,
|
||||
'top_manufacturers': top_manufacturers,
|
||||
'parks_by_type': parks_by_type,
|
||||
})
|
||||
|
||||
return context
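dashboard_callback only runs if Unfold is pointed at it from settings. A sketch of that wiring, based on django-unfold's documented UNFOLD setting; the dotted path assumes this module lives at apps.entities.admin and is not shown in this commit:

# settings.py (sketch)
UNFOLD = {
    "SITE_TITLE": "ThrillWiki Admin",
    # Dotted path to the callback above; Unfold calls it with (request, context)
    # and renders the returned context on the admin dashboard.
    "DASHBOARD_CALLBACK": "apps.entities.admin.dashboard_callback",
}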
|
||||
|
||||
@@ -9,3 +9,7 @@ class EntitiesConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.entities'
    verbose_name = 'Entities'

    def ready(self):
        """Import signal handlers when app is ready."""
        import apps.entities.signals  # noqa
django/apps/entities/filters.py (new file, 418 lines)
@@ -0,0 +1,418 @@
"""
Filter classes for advanced entity filtering.

Provides reusable filter logic for complex queries.
"""
from typing import Optional, Any, Dict
from datetime import date
from django.db.models import QuerySet, Q
from django.conf import settings


# Check if using PostGIS for location-based filtering
_using_postgis = 'postgis' in settings.DATABASES['default']['ENGINE']

if _using_postgis:
    from django.contrib.gis.geos import Point
    from django.contrib.gis.measure import D


class BaseEntityFilter:
|
||||
"""Base filter class with common filtering methods."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_date_range(
|
||||
queryset: QuerySet,
|
||||
field_name: str,
|
||||
start_date: Optional[date] = None,
|
||||
end_date: Optional[date] = None
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter by date range.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
field_name: Name of the date field
|
||||
start_date: Start of date range (inclusive)
|
||||
end_date: End of date range (inclusive)
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
if start_date:
|
||||
queryset = queryset.filter(**{f"{field_name}__gte": start_date})
|
||||
|
||||
if end_date:
|
||||
queryset = queryset.filter(**{f"{field_name}__lte": end_date})
|
||||
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def filter_by_status(
|
||||
queryset: QuerySet,
|
||||
status: Optional[str] = None,
|
||||
exclude_status: Optional[list] = None
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter by status.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
status: Single status to filter by
|
||||
exclude_status: List of statuses to exclude
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
if status:
|
||||
queryset = queryset.filter(status=status)
|
||||
|
||||
if exclude_status:
|
||||
queryset = queryset.exclude(status__in=exclude_status)
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class CompanyFilter(BaseEntityFilter):
|
||||
"""Filter class for Company entities."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_types(
|
||||
queryset: QuerySet,
|
||||
company_types: Optional[list] = None
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter companies by type.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
company_types: List of company types to filter by
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
if company_types:
|
||||
# Since company_types is a JSONField containing a list,
|
||||
# we need to check if any of the requested types are in the field
|
||||
q = Q()
|
||||
for company_type in company_types:
|
||||
q |= Q(company_types__contains=[company_type])
|
||||
queryset = queryset.filter(q)
|
||||
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all company filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Company types
|
||||
if filters.get('company_types'):
|
||||
queryset = CompanyFilter.filter_by_types(
|
||||
queryset,
|
||||
company_types=filters['company_types']
|
||||
)
|
||||
|
||||
# Founded date range
|
||||
queryset = CompanyFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'founded_date',
|
||||
start_date=filters.get('founded_after'),
|
||||
end_date=filters.get('founded_before')
|
||||
)
|
||||
|
||||
# Closed date range
|
||||
queryset = CompanyFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'closed_date',
|
||||
start_date=filters.get('closed_after'),
|
||||
end_date=filters.get('closed_before')
|
||||
)
|
||||
|
||||
# Location
|
||||
if filters.get('location_id'):
|
||||
queryset = queryset.filter(location_id=filters['location_id'])
|
||||
|
||||
return queryset
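apply_filters is a static method over a plain dict, so view code can pass cleaned request parameters straight through. A minimal usage sketch (the parameter values are made up):

from datetime import date
from apps.entities.models import Company
from apps.entities.filters import CompanyFilter

queryset = CompanyFilter.apply_filters(
    Company.objects.all(),
    {
        "company_types": ["manufacturer", "designer"],  # OR-ed together via Q objects
        "founded_after": date(1970, 1, 1),
        "founded_before": date(2000, 12, 31),
    },
)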
|
||||
|
||||
|
||||
class RideModelFilter(BaseEntityFilter):
|
||||
"""Filter class for RideModel entities."""
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all ride model filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Manufacturer
|
||||
if filters.get('manufacturer_id'):
|
||||
queryset = queryset.filter(manufacturer_id=filters['manufacturer_id'])
|
||||
|
||||
# Model type
|
||||
if filters.get('model_type'):
|
||||
queryset = queryset.filter(model_type=filters['model_type'])
|
||||
|
||||
# Height range
|
||||
if filters.get('min_height'):
|
||||
queryset = queryset.filter(typical_height__gte=filters['min_height'])
|
||||
|
||||
if filters.get('max_height'):
|
||||
queryset = queryset.filter(typical_height__lte=filters['max_height'])
|
||||
|
||||
# Speed range
|
||||
if filters.get('min_speed'):
|
||||
queryset = queryset.filter(typical_speed__gte=filters['min_speed'])
|
||||
|
||||
if filters.get('max_speed'):
|
||||
queryset = queryset.filter(typical_speed__lte=filters['max_speed'])
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class ParkFilter(BaseEntityFilter):
|
||||
"""Filter class for Park entities."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_location(
|
||||
queryset: QuerySet,
|
||||
longitude: float,
|
||||
latitude: float,
|
||||
radius_km: float
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter parks by proximity to a location (PostGIS only).
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
longitude: Longitude coordinate
|
||||
latitude: Latitude coordinate
|
||||
radius_km: Search radius in kilometers
|
||||
|
||||
Returns:
|
||||
Filtered queryset ordered by distance
|
||||
"""
|
||||
if not _using_postgis:
|
||||
# Fallback: No spatial filtering in SQLite
|
||||
return queryset
|
||||
|
||||
point = Point(longitude, latitude, srid=4326)
|
||||
|
||||
# Filter by distance and annotate with distance
|
||||
queryset = queryset.filter(
|
||||
location_point__distance_lte=(point, D(km=radius_km))
|
||||
)
|
||||
|
||||
# This will be ordered by distance in the search service
|
||||
return queryset
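Because distance filtering relies on the geography-typed location_point field, this helper only narrows results when PostGIS is active; on SQLite it returns the queryset unchanged. A usage sketch (coordinates are arbitrary):

from apps.entities.models import Park
from apps.entities.filters import ParkFilter

# Parks within 50 km of a point near Orlando, FL (longitude first, then latitude)
nearby = ParkFilter.filter_by_location(
    Park.objects.all(),
    longitude=-81.38,
    latitude=28.54,
    radius_km=50,
)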
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all park filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Status
|
||||
queryset = ParkFilter.filter_by_status(
|
||||
queryset,
|
||||
status=filters.get('status'),
|
||||
exclude_status=filters.get('exclude_status')
|
||||
)
|
||||
|
||||
# Park type
|
||||
if filters.get('park_type'):
|
||||
queryset = queryset.filter(park_type=filters['park_type'])
|
||||
|
||||
# Operator
|
||||
if filters.get('operator_id'):
|
||||
queryset = queryset.filter(operator_id=filters['operator_id'])
|
||||
|
||||
# Opening date range
|
||||
queryset = ParkFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'opening_date',
|
||||
start_date=filters.get('opening_after'),
|
||||
end_date=filters.get('opening_before')
|
||||
)
|
||||
|
||||
# Closing date range
|
||||
queryset = ParkFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'closing_date',
|
||||
start_date=filters.get('closing_after'),
|
||||
end_date=filters.get('closing_before')
|
||||
)
|
||||
|
||||
# Location-based filtering (PostGIS only)
|
||||
if _using_postgis and filters.get('location') and filters.get('radius'):
|
||||
longitude, latitude = filters['location']
|
||||
queryset = ParkFilter.filter_by_location(
|
||||
queryset,
|
||||
longitude=longitude,
|
||||
latitude=latitude,
|
||||
radius_km=filters['radius']
|
||||
)
|
||||
|
||||
# Location (locality)
|
||||
if filters.get('location_id'):
|
||||
queryset = queryset.filter(location_id=filters['location_id'])
|
||||
|
||||
# Ride counts
|
||||
if filters.get('min_ride_count'):
|
||||
queryset = queryset.filter(ride_count__gte=filters['min_ride_count'])
|
||||
|
||||
if filters.get('min_coaster_count'):
|
||||
queryset = queryset.filter(coaster_count__gte=filters['min_coaster_count'])
|
||||
|
||||
return queryset
|
||||
|
||||
|
||||
class RideFilter(BaseEntityFilter):
|
||||
"""Filter class for Ride entities."""
|
||||
|
||||
@staticmethod
|
||||
def filter_by_statistics(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Filter rides by statistical attributes (height, speed, length, etc.).
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Height range
|
||||
if filters.get('min_height'):
|
||||
queryset = queryset.filter(height__gte=filters['min_height'])
|
||||
|
||||
if filters.get('max_height'):
|
||||
queryset = queryset.filter(height__lte=filters['max_height'])
|
||||
|
||||
# Speed range
|
||||
if filters.get('min_speed'):
|
||||
queryset = queryset.filter(speed__gte=filters['min_speed'])
|
||||
|
||||
if filters.get('max_speed'):
|
||||
queryset = queryset.filter(speed__lte=filters['max_speed'])
|
||||
|
||||
# Length range
|
||||
if filters.get('min_length'):
|
||||
queryset = queryset.filter(length__gte=filters['min_length'])
|
||||
|
||||
if filters.get('max_length'):
|
||||
queryset = queryset.filter(length__lte=filters['max_length'])
|
||||
|
||||
# Duration range
|
||||
if filters.get('min_duration'):
|
||||
queryset = queryset.filter(duration__gte=filters['min_duration'])
|
||||
|
||||
if filters.get('max_duration'):
|
||||
queryset = queryset.filter(duration__lte=filters['max_duration'])
|
||||
|
||||
# Inversions
|
||||
if filters.get('min_inversions') is not None:
|
||||
queryset = queryset.filter(inversions__gte=filters['min_inversions'])
|
||||
|
||||
if filters.get('max_inversions') is not None:
|
||||
queryset = queryset.filter(inversions__lte=filters['max_inversions'])
|
||||
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def apply_filters(
|
||||
queryset: QuerySet,
|
||||
filters: Dict[str, Any]
|
||||
) -> QuerySet:
|
||||
"""
|
||||
Apply all ride filters.
|
||||
|
||||
Args:
|
||||
queryset: Base queryset to filter
|
||||
filters: Dictionary of filter parameters
|
||||
|
||||
Returns:
|
||||
Filtered queryset
|
||||
"""
|
||||
# Park
|
||||
if filters.get('park_id'):
|
||||
queryset = queryset.filter(park_id=filters['park_id'])
|
||||
|
||||
# Manufacturer
|
||||
if filters.get('manufacturer_id'):
|
||||
queryset = queryset.filter(manufacturer_id=filters['manufacturer_id'])
|
||||
|
||||
# Model
|
||||
if filters.get('model_id'):
|
||||
queryset = queryset.filter(model_id=filters['model_id'])
|
||||
|
||||
# Status
|
||||
queryset = RideFilter.filter_by_status(
|
||||
queryset,
|
||||
status=filters.get('status'),
|
||||
exclude_status=filters.get('exclude_status')
|
||||
)
|
||||
|
||||
# Ride category
|
||||
if filters.get('ride_category'):
|
||||
queryset = queryset.filter(ride_category=filters['ride_category'])
|
||||
|
||||
# Ride type
|
||||
if filters.get('ride_type'):
|
||||
queryset = queryset.filter(ride_type__icontains=filters['ride_type'])
|
||||
|
||||
# Is coaster
|
||||
if filters.get('is_coaster') is not None:
|
||||
queryset = queryset.filter(is_coaster=filters['is_coaster'])
|
||||
|
||||
# Opening date range
|
||||
queryset = RideFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'opening_date',
|
||||
start_date=filters.get('opening_after'),
|
||||
end_date=filters.get('opening_before')
|
||||
)
|
||||
|
||||
# Closing date range
|
||||
queryset = RideFilter.filter_by_date_range(
|
||||
queryset,
|
||||
'closing_date',
|
||||
start_date=filters.get('closing_after'),
|
||||
end_date=filters.get('closing_before')
|
||||
)
|
||||
|
||||
# Statistical filters
|
||||
queryset = RideFilter.filter_by_statistics(queryset, filters)
|
||||
|
||||
return queryset
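Taken together, apply_filters lets callers combine relational, categorical, date, and statistical constraints in a single dict. A sketch of a "tall, fast, operating coasters at one park" query (the primary key and thresholds are placeholders):

from apps.entities.models import Ride
from apps.entities.filters import RideFilter

tall_coasters = RideFilter.apply_filters(
    Ride.objects.all(),
    {
        "park_id": 1,          # placeholder primary key
        "is_coaster": True,
        "status": "operating",
        "min_height": 60,      # metres
        "min_speed": 90,       # km/h
        "min_inversions": 1,
    },
)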
|
||||
@@ -0,0 +1,35 @@
# Generated by Django 4.2.8 on 2025-11-08 17:03

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("entities", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="park",
            name="latitude",
            field=models.DecimalField(
                blank=True,
                decimal_places=7,
                help_text="Latitude coordinate. Primary in local dev, use location_point in production.",
                max_digits=10,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="park",
            name="longitude",
            field=models.DecimalField(
                blank=True,
                decimal_places=7,
                help_text="Longitude coordinate. Primary in local dev, use location_point in production.",
                max_digits=10,
                null=True,
            ),
        ),
    ]

@@ -0,0 +1,141 @@
|
||||
# Generated migration for Phase 2 - GIN Index Optimization
|
||||
from django.db import migrations, connection
|
||||
from django.contrib.postgres.indexes import GinIndex
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
|
||||
|
||||
def is_postgresql():
|
||||
"""Check if the database backend is PostgreSQL/PostGIS."""
|
||||
return 'postgis' in connection.vendor or 'postgresql' in connection.vendor
|
||||
|
||||
|
||||
def populate_search_vectors(apps, schema_editor):
|
||||
"""Populate search_vector fields for all existing records."""
|
||||
if not is_postgresql():
|
||||
return
|
||||
|
||||
# Get models
|
||||
Company = apps.get_model('entities', 'Company')
|
||||
RideModel = apps.get_model('entities', 'RideModel')
|
||||
Park = apps.get_model('entities', 'Park')
|
||||
Ride = apps.get_model('entities', 'Ride')
|
||||
|
||||
# Update Company search vectors
|
||||
Company.objects.update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A') +
|
||||
SearchVector('description', weight='B')
|
||||
)
|
||||
)
|
||||
|
||||
# Update RideModel search vectors
|
||||
RideModel.objects.update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A') +
|
||||
SearchVector('manufacturer__name', weight='A') +
|
||||
SearchVector('description', weight='B')
|
||||
)
|
||||
)
|
||||
|
||||
# Update Park search vectors
|
||||
Park.objects.update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A') +
|
||||
SearchVector('description', weight='B')
|
||||
)
|
||||
)
|
||||
|
||||
# Update Ride search vectors
|
||||
Ride.objects.update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A') +
|
||||
SearchVector('park__name', weight='A') +
|
||||
SearchVector('manufacturer__name', weight='B') +
|
||||
SearchVector('description', weight='B')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def reverse_search_vectors(apps, schema_editor):
|
||||
"""Clear search_vector fields for all records."""
|
||||
if not is_postgresql():
|
||||
return
|
||||
|
||||
# Get models
|
||||
Company = apps.get_model('entities', 'Company')
|
||||
RideModel = apps.get_model('entities', 'RideModel')
|
||||
Park = apps.get_model('entities', 'Park')
|
||||
Ride = apps.get_model('entities', 'Ride')
|
||||
|
||||
# Clear all search vectors
|
||||
Company.objects.update(search_vector=None)
|
||||
RideModel.objects.update(search_vector=None)
|
||||
Park.objects.update(search_vector=None)
|
||||
Ride.objects.update(search_vector=None)
|
||||
|
||||
|
||||
def add_gin_indexes(apps, schema_editor):
|
||||
"""Add GIN indexes on search_vector fields (PostgreSQL only)."""
|
||||
if not is_postgresql():
|
||||
return
|
||||
|
||||
# Use raw SQL to add GIN indexes
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute("""
|
||||
CREATE INDEX IF NOT EXISTS entities_company_search_idx
|
||||
ON entities_company USING gin(search_vector);
|
||||
""")
|
||||
cursor.execute("""
|
||||
CREATE INDEX IF NOT EXISTS entities_ridemodel_search_idx
|
||||
ON entities_ridemodel USING gin(search_vector);
|
||||
""")
|
||||
cursor.execute("""
|
||||
CREATE INDEX IF NOT EXISTS entities_park_search_idx
|
||||
ON entities_park USING gin(search_vector);
|
||||
""")
|
||||
cursor.execute("""
|
||||
CREATE INDEX IF NOT EXISTS entities_ride_search_idx
|
||||
ON entities_ride USING gin(search_vector);
|
||||
""")
|
||||
|
||||
|
||||
def remove_gin_indexes(apps, schema_editor):
|
||||
"""Remove GIN indexes (PostgreSQL only)."""
|
||||
if not is_postgresql():
|
||||
return
|
||||
|
||||
# Use raw SQL to drop GIN indexes
|
||||
with schema_editor.connection.cursor() as cursor:
|
||||
cursor.execute("DROP INDEX IF EXISTS entities_company_search_idx;")
|
||||
cursor.execute("DROP INDEX IF EXISTS entities_ridemodel_search_idx;")
|
||||
cursor.execute("DROP INDEX IF EXISTS entities_park_search_idx;")
|
||||
cursor.execute("DROP INDEX IF EXISTS entities_ride_search_idx;")
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
"""
|
||||
Phase 2 Migration: Add GIN indexes for search optimization.
|
||||
|
||||
This migration:
|
||||
1. Adds GIN indexes on search_vector fields for optimal full-text search
|
||||
2. Populates search vectors for all existing database records
|
||||
3. Is PostgreSQL-specific and safe for SQLite environments
|
||||
"""
|
||||
|
||||
dependencies = [
|
||||
('entities', '0002_alter_park_latitude_alter_park_longitude'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
# First, populate search vectors for existing records
|
||||
migrations.RunPython(
|
||||
populate_search_vectors,
|
||||
reverse_search_vectors,
|
||||
),
|
||||
|
||||
# Add GIN indexes for each model's search_vector field
|
||||
migrations.RunPython(
|
||||
add_gin_indexes,
|
||||
remove_gin_indexes,
|
||||
),
|
||||
]
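The GIN indexes exist so that filtering on the pre-computed search_vector stays an index lookup rather than a sequential scan. A sketch of the kind of query they accelerate (PostgreSQL only):

from django.contrib.postgres.search import SearchQuery, SearchRank
from django.db.models import F

from apps.entities.models import Park

query = SearchQuery("wooden coaster", search_type="websearch")
results = (
    Park.objects
    .filter(search_vector=query)                 # served by entities_park_search_idx
    .annotate(rank=SearchRank(F("search_vector"), query))
    .order_by("-rank")
)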
|
||||
@@ -8,11 +8,24 @@ This module contains the core entity models:
- Ride: Individual rides and roller coasters
"""
from django.db import models
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.utils.text import slugify
from django_lifecycle import hook, AFTER_CREATE, AFTER_UPDATE, BEFORE_SAVE

from apps.core.models import VersionedModel, BaseModel

# Conditionally import GIS models only if using PostGIS backend
# This allows migrations to run on SQLite during local development
_using_postgis = (
    'postgis' in settings.DATABASES['default']['ENGINE']
)

if _using_postgis:
    from django.contrib.gis.db import models as gis_models
    from django.contrib.gis.geos import Point
    from django.contrib.postgres.search import SearchVectorField


class Company(VersionedModel):
|
||||
"""
|
||||
@@ -122,6 +135,16 @@ class Company(VersionedModel):
|
||||
help_text="Number of rides manufactured (for manufacturers)"
|
||||
)
|
||||
|
||||
# Generic relation to photos
|
||||
photos = GenericRelation(
|
||||
'media.Photo',
|
||||
related_query_name='company'
|
||||
)
|
||||
|
||||
# Full-text search vector (PostgreSQL only)
|
||||
# Populated automatically via signals or database triggers
|
||||
# Includes: name (weight A) + description (weight B)
|
||||
|
||||
class Meta:
|
||||
verbose_name = 'Company'
|
||||
verbose_name_plural = 'Companies'
|
||||
@@ -151,6 +174,24 @@ class Company(VersionedModel):
|
||||
self.park_count = self.operated_parks.count()
|
||||
self.ride_count = self.manufactured_rides.count()
|
||||
self.save(update_fields=['park_count', 'ride_count'])
|
||||
|
||||
def get_photos(self, photo_type=None, approved_only=True):
|
||||
"""Get photos for this company."""
|
||||
from apps.media.services import PhotoService
|
||||
service = PhotoService()
|
||||
return service.get_entity_photos(self, photo_type=photo_type, approved_only=approved_only)
|
||||
|
||||
@property
|
||||
def main_photo(self):
|
||||
"""Get the main photo."""
|
||||
photos = self.photos.filter(photo_type='main', moderation_status='approved').first()
|
||||
return photos
|
||||
|
||||
@property
|
||||
def logo_photo(self):
|
||||
"""Get the logo photo."""
|
||||
photos = self.photos.filter(photo_type='logo', moderation_status='approved').first()
|
||||
return photos
|
||||
|
||||
|
||||
class RideModel(VersionedModel):
|
||||
@@ -238,6 +279,12 @@ class RideModel(VersionedModel):
|
||||
help_text="Number of installations worldwide"
|
||||
)
|
||||
|
||||
# Generic relation to photos
|
||||
photos = GenericRelation(
|
||||
'media.Photo',
|
||||
related_query_name='ride_model'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = 'Ride Model'
|
||||
verbose_name_plural = 'Ride Models'
|
||||
@@ -267,11 +314,27 @@ class RideModel(VersionedModel):
|
||||
"""Update cached installation count."""
|
||||
self.installation_count = self.rides.count()
|
||||
self.save(update_fields=['installation_count'])
|
||||
|
||||
def get_photos(self, photo_type=None, approved_only=True):
|
||||
"""Get photos for this ride model."""
|
||||
from apps.media.services import PhotoService
|
||||
service = PhotoService()
|
||||
return service.get_entity_photos(self, photo_type=photo_type, approved_only=approved_only)
|
||||
|
||||
@property
|
||||
def main_photo(self):
|
||||
"""Get the main photo."""
|
||||
photos = self.photos.filter(photo_type='main', moderation_status='approved').first()
|
||||
return photos
|
||||
|
||||
|
||||
class Park(VersionedModel):
|
||||
"""
|
||||
Represents an amusement park, theme park, water park, or FEC.
|
||||
|
||||
Note: Geographic coordinates are stored differently based on database backend:
|
||||
- Production (PostGIS): Uses location_point PointField with full GIS capabilities
|
||||
- Local Dev (SQLite): Uses latitude/longitude DecimalFields (no spatial queries)
|
||||
"""
|
||||
|
||||
PARK_TYPE_CHOICES = [
|
||||
@@ -369,21 +432,24 @@ class Park(VersionedModel):
|
||||
)
|
||||
|
||||
# Precise coordinates for mapping
|
||||
# Primary in local dev (SQLite), deprecated in production (PostGIS)
|
||||
latitude = models.DecimalField(
|
||||
max_digits=10,
|
||||
decimal_places=7,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Latitude coordinate"
|
||||
help_text="Latitude coordinate. Primary in local dev, use location_point in production."
|
||||
)
|
||||
longitude = models.DecimalField(
|
||||
max_digits=10,
|
||||
decimal_places=7,
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Longitude coordinate"
|
||||
help_text="Longitude coordinate. Primary in local dev, use location_point in production."
|
||||
)
|
||||
|
||||
# NOTE: location_point PointField is added conditionally below if using PostGIS
|
||||
|
||||
# Relationships
|
||||
operator = models.ForeignKey(
|
||||
'Company',
|
||||
@@ -437,6 +503,12 @@ class Park(VersionedModel):
|
||||
help_text="Additional park-specific data"
|
||||
)
|
||||
|
||||
# Generic relation to photos
|
||||
photos = GenericRelation(
|
||||
'media.Photo',
|
||||
related_query_name='park'
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = 'Park'
|
||||
verbose_name_plural = 'Parks'
|
||||
@@ -470,6 +542,100 @@ class Park(VersionedModel):
|
||||
self.ride_count = self.rides.count()
|
||||
self.coaster_count = self.rides.filter(is_coaster=True).count()
|
||||
self.save(update_fields=['ride_count', 'coaster_count'])
|
||||
|
||||
def set_location(self, longitude, latitude):
|
||||
"""
|
||||
Set park location from coordinates.
|
||||
|
||||
Args:
|
||||
longitude: Longitude coordinate (X)
|
||||
latitude: Latitude coordinate (Y)
|
||||
|
||||
Note: Works in both PostGIS and non-PostGIS modes.
|
||||
- PostGIS: Sets location_point and syncs to lat/lng
|
||||
- SQLite: Sets lat/lng directly
|
||||
"""
|
||||
if longitude is not None and latitude is not None:
|
||||
# Always update lat/lng fields
|
||||
self.longitude = longitude
|
||||
self.latitude = latitude
|
||||
|
||||
# If using PostGIS, also update location_point
|
||||
if _using_postgis and hasattr(self, 'location_point'):
|
||||
self.location_point = Point(float(longitude), float(latitude), srid=4326)
|
||||
|
||||
@property
|
||||
def coordinates(self):
|
||||
"""
|
||||
Get coordinates as (longitude, latitude) tuple.
|
||||
|
||||
Returns:
|
||||
tuple: (longitude, latitude) or None if no location set
|
||||
"""
|
||||
# Try PostGIS field first if available
|
||||
if _using_postgis and hasattr(self, 'location_point') and self.location_point:
|
||||
return (self.location_point.x, self.location_point.y)
|
||||
# Fall back to lat/lng fields
|
||||
elif self.longitude and self.latitude:
|
||||
return (float(self.longitude), float(self.latitude))
|
||||
return None
|
||||
|
||||
@property
|
||||
def latitude_value(self):
|
||||
"""Get latitude value (from location_point if PostGIS, else from latitude field)."""
|
||||
if _using_postgis and hasattr(self, 'location_point') and self.location_point:
|
||||
return self.location_point.y
|
||||
return float(self.latitude) if self.latitude else None
|
||||
|
||||
@property
|
||||
def longitude_value(self):
|
||||
"""Get longitude value (from location_point if PostGIS, else from longitude field)."""
|
||||
if _using_postgis and hasattr(self, 'location_point') and self.location_point:
|
||||
return self.location_point.x
|
||||
return float(self.longitude) if self.longitude else None
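set_location, coordinates, latitude_value and longitude_value give callers one API regardless of backend: the Decimal latitude/longitude fields are always written, and location_point is synchronized only when PostGIS is available. A usage sketch (field values are placeholders):

park = Park(name="Example Park", slug="example-park")
park.set_location(longitude=-118.0, latitude=34.0)
park.save()

park.coordinates      # (-118.0, 34.0): from location_point on PostGIS,
                      # otherwise from the Decimal latitude/longitude fields
park.latitude_value   # 34.0
park.longitude_value  # -118.0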
|
||||
|
||||
def get_photos(self, photo_type=None, approved_only=True):
|
||||
"""Get photos for this park."""
|
||||
from apps.media.services import PhotoService
|
||||
service = PhotoService()
|
||||
return service.get_entity_photos(self, photo_type=photo_type, approved_only=approved_only)
|
||||
|
||||
@property
|
||||
def main_photo(self):
|
||||
"""Get the main photo."""
|
||||
photos = self.photos.filter(photo_type='main', moderation_status='approved').first()
|
||||
return photos
|
||||
|
||||
@property
|
||||
def banner_photo(self):
|
||||
"""Get the banner photo."""
|
||||
photos = self.photos.filter(photo_type='banner', moderation_status='approved').first()
|
||||
return photos
|
||||
|
||||
@property
|
||||
def logo_photo(self):
|
||||
"""Get the logo photo."""
|
||||
photos = self.photos.filter(photo_type='logo', moderation_status='approved').first()
|
||||
return photos
|
||||
|
||||
@property
|
||||
def gallery_photos(self):
|
||||
"""Get gallery photos."""
|
||||
return self.photos.filter(photo_type='gallery', moderation_status='approved').order_by('display_order')
|
||||
|
||||
|
||||
# Conditionally add PostGIS PointField to Park model if using PostGIS backend
|
||||
if _using_postgis:
|
||||
Park.add_to_class(
|
||||
'location_point',
|
||||
gis_models.PointField(
|
||||
geography=True,
|
||||
null=True,
|
||||
blank=True,
|
||||
srid=4326,
|
||||
help_text="Geographic coordinates (PostGIS Point). Production only."
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -659,6 +825,12 @@ class Ride(VersionedModel):
        help_text="Additional ride-specific data"
    )

    # Generic relation to photos
    photos = GenericRelation(
        'media.Photo',
        related_query_name='ride'
    )

    class Meta:
        verbose_name = 'Ride'
        verbose_name_plural = 'Rides'
@@ -699,3 +871,60 @@ class Ride(VersionedModel):
        """Update parent park's ride counts when ride is created or moved."""
        if self.park:
            self.park.update_counts()

    def get_photos(self, photo_type=None, approved_only=True):
        """Get photos for this ride."""
        from apps.media.services import PhotoService
        service = PhotoService()
        return service.get_entity_photos(self, photo_type=photo_type, approved_only=approved_only)

    @property
    def main_photo(self):
        """Get the main photo."""
        photos = self.photos.filter(photo_type='main', moderation_status='approved').first()
        return photos

    @property
    def gallery_photos(self):
        """Get gallery photos."""
        return self.photos.filter(photo_type='gallery', moderation_status='approved').order_by('display_order')


# Add SearchVectorField to all models for full-text search (PostgreSQL only)
# Must be at the very end after ALL class definitions
if _using_postgis:
    Company.add_to_class(
        'search_vector',
        SearchVectorField(
            null=True,
            blank=True,
            help_text="Pre-computed search vector for full-text search. Auto-updated via signals."
        )
    )

    RideModel.add_to_class(
        'search_vector',
        SearchVectorField(
            null=True,
            blank=True,
            help_text="Pre-computed search vector for full-text search. Auto-updated via signals."
        )
    )

    Park.add_to_class(
        'search_vector',
        SearchVectorField(
            null=True,
            blank=True,
            help_text="Pre-computed search vector for full-text search. Auto-updated via signals."
        )
    )

    Ride.add_to_class(
        'search_vector',
        SearchVectorField(
            null=True,
            blank=True,
            help_text="Pre-computed search vector for full-text search. Auto-updated via signals."
        )
    )

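A SearchVectorField is normally paired with a GIN index so the search_vector filters in search.py stay fast at scale. A minimal migration sketch, assuming the app label is entities and showing only two of the four models (the migration name, dependency, and index names are placeholders, not part of this commit):

# Sketch only - hypothetical follow-up migration.
from django.contrib.postgres.indexes import GinIndex
from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ('entities', '0001_initial'),  # hypothetical dependency
    ]
    operations = [
        migrations.AddIndex(
            model_name='park',
            index=GinIndex(fields=['search_vector'], name='park_search_vector_gin'),
        ),
        migrations.AddIndex(
            model_name='ride',
            index=GinIndex(fields=['search_vector'], name='ride_search_vector_gin'),
        ),
    ]
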
386 django/apps/entities/search.py Normal file
@@ -0,0 +1,386 @@
"""
|
||||
Search service for ThrillWiki entities.
|
||||
|
||||
Provides full-text search capabilities with PostgreSQL and fallback for SQLite.
|
||||
- PostgreSQL: Uses SearchVector, SearchQuery, SearchRank for full-text search
|
||||
- SQLite: Falls back to case-insensitive LIKE queries
|
||||
"""
|
||||
from typing import List, Optional, Dict, Any
|
||||
from django.db.models import Q, QuerySet, Value, CharField, F
|
||||
from django.db.models.functions import Concat
|
||||
from django.conf import settings
|
||||
|
||||
# Conditionally import PostgreSQL search features
|
||||
_using_postgis = 'postgis' in settings.DATABASES['default']['ENGINE']
|
||||
|
||||
if _using_postgis:
|
||||
from django.contrib.postgres.search import SearchVector, SearchQuery, SearchRank, TrigramSimilarity
|
||||
from django.contrib.postgres.aggregates import StringAgg
|
||||
|
||||
|
||||
class SearchService:
|
||||
"""Service for searching across all entity types."""
|
||||
|
||||
def __init__(self):
|
||||
self.using_postgres = _using_postgis
|
||||
|
||||
def search_all(
|
||||
self,
|
||||
query: str,
|
||||
entity_types: Optional[List[str]] = None,
|
||||
limit: int = 20
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Search across all entity types.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
entity_types: Optional list to filter by entity types
|
||||
limit: Maximum results per entity type
|
||||
|
||||
Returns:
|
||||
Dictionary with results grouped by entity type
|
||||
"""
|
||||
results = {}
|
||||
|
||||
# Default to all entity types if not specified
|
||||
if not entity_types:
|
||||
entity_types = ['company', 'ride_model', 'park', 'ride']
|
||||
|
||||
if 'company' in entity_types:
|
||||
results['companies'] = list(self.search_companies(query, limit=limit))
|
||||
|
||||
if 'ride_model' in entity_types:
|
||||
results['ride_models'] = list(self.search_ride_models(query, limit=limit))
|
||||
|
||||
if 'park' in entity_types:
|
||||
results['parks'] = list(self.search_parks(query, limit=limit))
|
||||
|
||||
if 'ride' in entity_types:
|
||||
results['rides'] = list(self.search_rides(query, limit=limit))
|
||||
|
||||
return results
|
||||
|
||||
    def search_companies(
        self,
        query: str,
        filters: Optional[Dict[str, Any]] = None,
        limit: int = 20
    ) -> QuerySet:
        """
        Search companies with full-text search.

        Args:
            query: Search query string
            filters: Optional filters (company_types, founded_after, etc.)
            limit: Maximum number of results

        Returns:
            QuerySet of Company objects
        """
        from apps.entities.models import Company

        if self.using_postgres:
            # PostgreSQL full-text search using pre-computed search_vector
            search_query = SearchQuery(query, search_type='websearch')

            results = Company.objects.annotate(
                rank=SearchRank(F('search_vector'), search_query)
            ).filter(search_vector=search_query).order_by('-rank')
        else:
            # SQLite fallback using LIKE
            results = Company.objects.filter(
                Q(name__icontains=query) | Q(description__icontains=query)
            ).order_by('name')

        # Apply additional filters
        if filters:
            if filters.get('company_types'):
                # Filter by company types (stored in JSONField)
                results = results.filter(
                    company_types__contains=filters['company_types']
                )

            if filters.get('founded_after'):
                results = results.filter(founded_date__gte=filters['founded_after'])

            if filters.get('founded_before'):
                results = results.filter(founded_date__lte=filters['founded_before'])

        return results[:limit]

    def search_ride_models(
        self,
        query: str,
        filters: Optional[Dict[str, Any]] = None,
        limit: int = 20
    ) -> QuerySet:
        """
        Search ride models with full-text search.

        Args:
            query: Search query string
            filters: Optional filters (manufacturer_id, model_type, etc.)
            limit: Maximum number of results

        Returns:
            QuerySet of RideModel objects
        """
        from apps.entities.models import RideModel

        if self.using_postgres:
            # PostgreSQL full-text search using pre-computed search_vector
            search_query = SearchQuery(query, search_type='websearch')

            results = RideModel.objects.select_related('manufacturer').annotate(
                rank=SearchRank(F('search_vector'), search_query)
            ).filter(search_vector=search_query).order_by('-rank')
        else:
            # SQLite fallback using LIKE
            results = RideModel.objects.select_related('manufacturer').filter(
                Q(name__icontains=query) |
                Q(manufacturer__name__icontains=query) |
                Q(description__icontains=query)
            ).order_by('manufacturer__name', 'name')

        # Apply additional filters
        if filters:
            if filters.get('manufacturer_id'):
                results = results.filter(manufacturer_id=filters['manufacturer_id'])

            if filters.get('model_type'):
                results = results.filter(model_type=filters['model_type'])

        return results[:limit]

    def search_parks(
        self,
        query: str,
        filters: Optional[Dict[str, Any]] = None,
        limit: int = 20
    ) -> QuerySet:
        """
        Search parks with full-text search and location filtering.

        Args:
            query: Search query string
            filters: Optional filters (status, park_type, location, radius, etc.)
            limit: Maximum number of results

        Returns:
            QuerySet of Park objects
        """
        from apps.entities.models import Park

        if self.using_postgres:
            # PostgreSQL full-text search using pre-computed search_vector
            search_query = SearchQuery(query, search_type='websearch')

            results = Park.objects.annotate(
                rank=SearchRank(F('search_vector'), search_query)
            ).filter(search_vector=search_query).order_by('-rank')
        else:
            # SQLite fallback using LIKE
            results = Park.objects.filter(
                Q(name__icontains=query) | Q(description__icontains=query)
            ).order_by('name')

        # Apply additional filters
        if filters:
            if filters.get('status'):
                results = results.filter(status=filters['status'])

            if filters.get('park_type'):
                results = results.filter(park_type=filters['park_type'])

            if filters.get('operator_id'):
                results = results.filter(operator_id=filters['operator_id'])

            if filters.get('opening_after'):
                results = results.filter(opening_date__gte=filters['opening_after'])

            if filters.get('opening_before'):
                results = results.filter(opening_date__lte=filters['opening_before'])

            # Location-based filtering (PostGIS only)
            if self.using_postgres and filters.get('location') and filters.get('radius'):
                from django.contrib.gis.geos import Point
                from django.contrib.gis.measure import D
                from django.contrib.gis.db.models.functions import Distance

                longitude, latitude = filters['location']
                point = Point(longitude, latitude, srid=4326)
                radius_km = filters['radius']

                # Filter by radius, then order by computed distance from the point
                results = results.filter(
                    location_point__distance_lte=(point, D(km=radius_km))
                ).annotate(
                    distance=Distance('location_point', point)
                ).order_by('distance')

        return results[:limit]

    def search_rides(
        self,
        query: str,
        filters: Optional[Dict[str, Any]] = None,
        limit: int = 20
    ) -> QuerySet:
        """
        Search rides with full-text search.

        Args:
            query: Search query string
            filters: Optional filters (park_id, manufacturer_id, status, etc.)
            limit: Maximum number of results

        Returns:
            QuerySet of Ride objects
        """
        from apps.entities.models import Ride

        if self.using_postgres:
            # PostgreSQL full-text search using pre-computed search_vector
            search_query = SearchQuery(query, search_type='websearch')

            results = Ride.objects.select_related('park', 'manufacturer', 'model').annotate(
                rank=SearchRank(F('search_vector'), search_query)
            ).filter(search_vector=search_query).order_by('-rank')
        else:
            # SQLite fallback using LIKE
            results = Ride.objects.select_related('park', 'manufacturer', 'model').filter(
                Q(name__icontains=query) |
                Q(park__name__icontains=query) |
                Q(manufacturer__name__icontains=query) |
                Q(description__icontains=query)
            ).order_by('park__name', 'name')

        # Apply additional filters
        if filters:
            if filters.get('park_id'):
                results = results.filter(park_id=filters['park_id'])

            if filters.get('manufacturer_id'):
                results = results.filter(manufacturer_id=filters['manufacturer_id'])

            if filters.get('model_id'):
                results = results.filter(model_id=filters['model_id'])

            if filters.get('status'):
                results = results.filter(status=filters['status'])

            if filters.get('ride_category'):
                results = results.filter(ride_category=filters['ride_category'])

            if filters.get('is_coaster') is not None:
                results = results.filter(is_coaster=filters['is_coaster'])

            if filters.get('opening_after'):
                results = results.filter(opening_date__gte=filters['opening_after'])

            if filters.get('opening_before'):
                results = results.filter(opening_date__lte=filters['opening_before'])

            # Height/speed filters
            if filters.get('min_height'):
                results = results.filter(height__gte=filters['min_height'])

            if filters.get('max_height'):
                results = results.filter(height__lte=filters['max_height'])

            if filters.get('min_speed'):
                results = results.filter(speed__gte=filters['min_speed'])

            if filters.get('max_speed'):
                results = results.filter(speed__lte=filters['max_speed'])

        return results[:limit]

    def autocomplete(
        self,
        query: str,
        entity_type: Optional[str] = None,
        limit: int = 10
    ) -> List[Dict[str, Any]]:
        """
        Get autocomplete suggestions for search.

        Args:
            query: Partial search query
            entity_type: Optional specific entity type
            limit: Maximum number of suggestions

        Returns:
            List of suggestion dictionaries with name and entity_type
        """
        suggestions = []

        if not query or len(query) < 2:
            return suggestions

        # Search in names only for autocomplete
        if entity_type == 'company' or not entity_type:
            from apps.entities.models import Company
            companies = Company.objects.filter(
                name__istartswith=query
            ).values('id', 'name', 'slug')[:limit]

            for company in companies:
                suggestions.append({
                    'id': company['id'],
                    'name': company['name'],
                    'slug': company['slug'],
                    'entity_type': 'company'
                })

        if entity_type == 'park' or not entity_type:
            from apps.entities.models import Park
            parks = Park.objects.filter(
                name__istartswith=query
            ).values('id', 'name', 'slug')[:limit]

            for park in parks:
                suggestions.append({
                    'id': park['id'],
                    'name': park['name'],
                    'slug': park['slug'],
                    'entity_type': 'park'
                })

        if entity_type == 'ride' or not entity_type:
            from apps.entities.models import Ride
            rides = Ride.objects.select_related('park').filter(
                name__istartswith=query
            ).values('id', 'name', 'slug', 'park__name')[:limit]

            for ride in rides:
                suggestions.append({
                    'id': ride['id'],
                    'name': ride['name'],
                    'slug': ride['slug'],
                    'park_name': ride['park__name'],
                    'entity_type': 'ride'
                })

        if entity_type == 'ride_model' or not entity_type:
            from apps.entities.models import RideModel
            models = RideModel.objects.select_related('manufacturer').filter(
                name__istartswith=query
            ).values('id', 'name', 'slug', 'manufacturer__name')[:limit]

            for model in models:
                suggestions.append({
                    'id': model['id'],
                    'name': model['name'],
                    'slug': model['slug'],
                    'manufacturer_name': model['manufacturer__name'],
                    'entity_type': 'ride_model'
                })

        # Sort by relevance (exact matches first, then alphabetically)
        suggestions.sort(key=lambda x: (
            not x['name'].lower().startswith(query.lower()),
            x['name'].lower()
        ))

        return suggestions[:limit]
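How a view or API endpoint might call this service, as a minimal usage sketch (the query strings and filter values below are made up; only the method names and filter keys mirror the file above):

# Illustrative sketch only.
from apps.entities.search import SearchService

service = SearchService()

# Grouped results across all entity types
results = service.search_all("steel coaster", limit=10)
for park in results.get('parks', []):
    print(park.name)

# Filtered ride search and name autocomplete
rides = service.search_rides("dive", filters={'is_coaster': True, 'min_height': 60})
suggestions = service.autocomplete("cedar", entity_type='park')
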
252 django/apps/entities/signals.py Normal file
@@ -0,0 +1,252 @@
"""
|
||||
Signal handlers for automatic search vector updates.
|
||||
|
||||
These signals ensure search vectors stay synchronized with model changes,
|
||||
eliminating the need for manual re-indexing.
|
||||
|
||||
Signal handlers are only active when using PostgreSQL with PostGIS backend.
|
||||
"""
|
||||
from django.db.models.signals import post_save, pre_save
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.search import SearchVector
|
||||
|
||||
from apps.entities.models import Company, RideModel, Park, Ride
|
||||
|
||||
# Only register signals if using PostgreSQL with PostGIS
|
||||
_using_postgis = 'postgis' in settings.DATABASES['default']['ENGINE']
|
||||
|
||||
|
||||
if _using_postgis:
|
||||
|
||||
# ==========================================
|
||||
# Company Signals
|
||||
# ==========================================
|
||||
|
||||
@receiver(post_save, sender=Company)
|
||||
def update_company_search_vector(sender, instance, created, **kwargs):
|
||||
"""
|
||||
Update search vector when company is created or updated.
|
||||
|
||||
Search vector includes:
|
||||
- name (weight A)
|
||||
- description (weight B)
|
||||
"""
|
||||
# Update the company's own search vector
|
||||
Company.objects.filter(pk=instance.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_save, sender=Company)
|
||||
def check_company_name_change(sender, instance, **kwargs):
|
||||
"""
|
||||
Track if company name is changing to trigger cascading updates.
|
||||
|
||||
Stores the old name on the instance for use in post_save signal.
|
||||
"""
|
||||
if instance.pk:
|
||||
try:
|
||||
old_instance = Company.objects.get(pk=instance.pk)
|
||||
instance._old_name = old_instance.name
|
||||
except Company.DoesNotExist:
|
||||
instance._old_name = None
|
||||
else:
|
||||
instance._old_name = None
|
||||
|
||||
|
||||
@receiver(post_save, sender=Company)
|
||||
def cascade_company_name_updates(sender, instance, created, **kwargs):
|
||||
"""
|
||||
When company name changes, update search vectors for related objects.
|
||||
|
||||
Updates:
|
||||
- All RideModels from this manufacturer
|
||||
- All Rides from this manufacturer
|
||||
"""
|
||||
# Skip if this is a new company or name hasn't changed
|
||||
if created or not hasattr(instance, '_old_name'):
|
||||
return
|
||||
|
||||
old_name = getattr(instance, '_old_name', None)
|
||||
if old_name == instance.name:
|
||||
return
|
||||
|
||||
# Update all RideModels from this manufacturer
|
||||
ride_models = RideModel.objects.filter(manufacturer=instance)
|
||||
for ride_model in ride_models:
|
||||
RideModel.objects.filter(pk=ride_model.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='A', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
# Update all Rides from this manufacturer
|
||||
rides = Ride.objects.filter(manufacturer=instance)
|
||||
for ride in rides:
|
||||
Ride.objects.filter(pk=ride.pk).update(
|
||||
search_vector=(
|
||||
SearchVector('name', weight='A', config='english') +
|
||||
SearchVector('park__name', weight='A', config='english') +
|
||||
SearchVector('manufacturer__name', weight='B', config='english') +
|
||||
SearchVector('description', weight='B', config='english')
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
    # ==========================================
    # Park Signals
    # ==========================================

    @receiver(post_save, sender=Park)
    def update_park_search_vector(sender, instance, created, **kwargs):
        """
        Update search vector when park is created or updated.

        Search vector includes:
        - name (weight A)
        - description (weight B)
        """
        # Update the park's own search vector
        Park.objects.filter(pk=instance.pk).update(
            search_vector=(
                SearchVector('name', weight='A', config='english') +
                SearchVector('description', weight='B', config='english')
            )
        )

    @receiver(pre_save, sender=Park)
    def check_park_name_change(sender, instance, **kwargs):
        """
        Track if park name is changing to trigger cascading updates.

        Stores the old name on the instance for use in post_save signal.
        """
        if instance.pk:
            try:
                old_instance = Park.objects.get(pk=instance.pk)
                instance._old_name = old_instance.name
            except Park.DoesNotExist:
                instance._old_name = None
        else:
            instance._old_name = None

    @receiver(post_save, sender=Park)
    def cascade_park_name_updates(sender, instance, created, **kwargs):
        """
        When park name changes, update search vectors for related rides.

        Updates:
        - All Rides in this park
        """
        # Skip if this is a new park or name hasn't changed
        if created or not hasattr(instance, '_old_name'):
            return

        old_name = getattr(instance, '_old_name', None)
        if old_name == instance.name:
            return

        # Update all Rides in this park
        rides = Ride.objects.filter(park=instance)
        for ride in rides:
            Ride.objects.filter(pk=ride.pk).update(
                search_vector=(
                    SearchVector('name', weight='A', config='english') +
                    SearchVector('park__name', weight='A', config='english') +
                    SearchVector('manufacturer__name', weight='B', config='english') +
                    SearchVector('description', weight='B', config='english')
                )
            )

    # ==========================================
    # RideModel Signals
    # ==========================================

    @receiver(post_save, sender=RideModel)
    def update_ride_model_search_vector(sender, instance, created, **kwargs):
        """
        Update search vector when ride model is created or updated.

        Search vector includes:
        - name (weight A)
        - manufacturer__name (weight A)
        - description (weight B)
        """
        RideModel.objects.filter(pk=instance.pk).update(
            search_vector=(
                SearchVector('name', weight='A', config='english') +
                SearchVector('manufacturer__name', weight='A', config='english') +
                SearchVector('description', weight='B', config='english')
            )
        )

    @receiver(pre_save, sender=RideModel)
    def check_ride_model_manufacturer_change(sender, instance, **kwargs):
        """
        Track if ride model manufacturer is changing.

        Stores the old manufacturer on the instance for use in post_save signal.
        """
        if instance.pk:
            try:
                old_instance = RideModel.objects.get(pk=instance.pk)
                instance._old_manufacturer = old_instance.manufacturer
            except RideModel.DoesNotExist:
                instance._old_manufacturer = None
        else:
            instance._old_manufacturer = None

    # ==========================================
    # Ride Signals
    # ==========================================

    @receiver(post_save, sender=Ride)
    def update_ride_search_vector(sender, instance, created, **kwargs):
        """
        Update search vector when ride is created or updated.

        Search vector includes:
        - name (weight A)
        - park__name (weight A)
        - manufacturer__name (weight B)
        - description (weight B)
        """
        Ride.objects.filter(pk=instance.pk).update(
            search_vector=(
                SearchVector('name', weight='A', config='english') +
                SearchVector('park__name', weight='A', config='english') +
                SearchVector('manufacturer__name', weight='B', config='english') +
                SearchVector('description', weight='B', config='english')
            )
        )

    @receiver(pre_save, sender=Ride)
    def check_ride_relationships_change(sender, instance, **kwargs):
        """
        Track if ride park or manufacturer are changing.

        Stores old values on the instance for use in post_save signal.
        """
        if instance.pk:
            try:
                old_instance = Ride.objects.get(pk=instance.pk)
                instance._old_park = old_instance.park
                instance._old_manufacturer = old_instance.manufacturer
            except Ride.DoesNotExist:
                instance._old_park = None
                instance._old_manufacturer = None
        else:
            instance._old_park = None
            instance._old_manufacturer = None
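These receivers only fire if the module is imported at startup; the usual place to do that is the app config's ready() hook. A sketch of that wiring, assuming the config class and module path below (they are not shown in this diff):

# django/apps/entities/apps.py - sketch, assuming this config class exists.
from django.apps import AppConfig

class EntitiesConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.entities'

    def ready(self):
        # Importing the module registers the pre_save/post_save receivers above
        from apps.entities import signals  # noqa: F401
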
354 django/apps/entities/tasks.py Normal file
@@ -0,0 +1,354 @@
"""
|
||||
Background tasks for entity statistics and maintenance.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.db.models import Count, Q
|
||||
from django.utils import timezone
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def update_entity_statistics(self, entity_type, entity_id):
|
||||
"""
|
||||
Update cached statistics for a specific entity.
|
||||
|
||||
Args:
|
||||
entity_type: Type of entity ('park', 'ride', 'company', 'ridemodel')
|
||||
entity_id: ID of the entity
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.entities.models import Park, Ride, Company, RideModel
|
||||
from apps.media.models import Photo
|
||||
from apps.moderation.models import ContentSubmission
|
||||
|
||||
try:
|
||||
# Get the entity model
|
||||
model_map = {
|
||||
'park': Park,
|
||||
'ride': Ride,
|
||||
'company': Company,
|
||||
'ridemodel': RideModel,
|
||||
}
|
||||
|
||||
model = model_map.get(entity_type.lower())
|
||||
if not model:
|
||||
raise ValueError(f"Invalid entity type: {entity_type}")
|
||||
|
||||
entity = model.objects.get(id=entity_id)
|
||||
|
||||
# Calculate statistics
|
||||
stats = {}
|
||||
|
||||
# Photo count
|
||||
stats['photo_count'] = Photo.objects.filter(
|
||||
content_type__model=entity_type.lower(),
|
||||
object_id=entity_id,
|
||||
moderation_status='approved'
|
||||
).count()
|
||||
|
||||
# Submission count
|
||||
stats['submission_count'] = ContentSubmission.objects.filter(
|
||||
entity_type__model=entity_type.lower(),
|
||||
entity_id=entity_id
|
||||
).count()
|
||||
|
||||
# Entity-specific stats
|
||||
if entity_type.lower() == 'park':
|
||||
stats['ride_count'] = entity.rides.count()
|
||||
elif entity_type.lower() == 'company':
|
||||
stats['park_count'] = entity.parks.count()
|
||||
stats['ride_model_count'] = entity.ride_models.count()
|
||||
elif entity_type.lower() == 'ridemodel':
|
||||
stats['installation_count'] = entity.rides.count()
|
||||
|
||||
logger.info(f"Updated statistics for {entity_type} {entity_id}: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error updating statistics for {entity_type} {entity_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
def update_all_statistics(self):
    """
    Update cached statistics for all entities.

    This task runs periodically (e.g., every 6 hours) to ensure
    all entity statistics are up to date.

    Returns:
        dict: Update summary
    """
    from apps.entities.models import Park, Ride, Company, RideModel

    try:
        summary = {
            'parks_updated': 0,
            'rides_updated': 0,
            'companies_updated': 0,
            'ride_models_updated': 0,
        }

        # Update parks
        for park in Park.objects.all():
            try:
                update_entity_statistics.delay('park', park.id)
                summary['parks_updated'] += 1
            except Exception as e:
                logger.error(f"Failed to queue update for park {park.id}: {str(e)}")

        # Update rides
        for ride in Ride.objects.all():
            try:
                update_entity_statistics.delay('ride', ride.id)
                summary['rides_updated'] += 1
            except Exception as e:
                logger.error(f"Failed to queue update for ride {ride.id}: {str(e)}")

        # Update companies
        for company in Company.objects.all():
            try:
                update_entity_statistics.delay('company', company.id)
                summary['companies_updated'] += 1
            except Exception as e:
                logger.error(f"Failed to queue update for company {company.id}: {str(e)}")

        # Update ride models
        for ride_model in RideModel.objects.all():
            try:
                update_entity_statistics.delay('ridemodel', ride_model.id)
                summary['ride_models_updated'] += 1
            except Exception as e:
                logger.error(f"Failed to queue update for ride model {ride_model.id}: {str(e)}")

        logger.info(f"Statistics update queued: {summary}")
        return summary

    except Exception as exc:
        logger.error(f"Error updating all statistics: {str(exc)}")
        raise self.retry(exc=exc, countdown=300)


@shared_task
def generate_entity_report(entity_type, entity_id):
    """
    Generate a detailed report for an entity.

    This can be used for admin dashboards, analytics, etc.

    Args:
        entity_type: Type of entity
        entity_id: ID of the entity

    Returns:
        dict: Detailed report
    """
    from apps.entities.models import Park, Ride, Company, RideModel
    from apps.media.models import Photo
    from apps.moderation.models import ContentSubmission
    from apps.versioning.models import EntityVersion

    try:
        model_map = {
            'park': Park,
            'ride': Ride,
            'company': Company,
            'ridemodel': RideModel,
        }

        model = model_map.get(entity_type.lower())
        if not model:
            raise ValueError(f"Invalid entity type: {entity_type}")

        entity = model.objects.get(id=entity_id)

        report = {
            'entity': {
                'type': entity_type,
                'id': str(entity_id),
                'name': str(entity),
            },
            'photos': {
                'total': Photo.objects.filter(
                    content_type__model=entity_type.lower(),
                    object_id=entity_id
                ).count(),
                'approved': Photo.objects.filter(
                    content_type__model=entity_type.lower(),
                    object_id=entity_id,
                    moderation_status='approved'
                ).count(),
                'pending': Photo.objects.filter(
                    content_type__model=entity_type.lower(),
                    object_id=entity_id,
                    moderation_status='pending'
                ).count(),
            },
            'submissions': {
                'total': ContentSubmission.objects.filter(
                    entity_type__model=entity_type.lower(),
                    entity_id=entity_id
                ).count(),
                'approved': ContentSubmission.objects.filter(
                    entity_type__model=entity_type.lower(),
                    entity_id=entity_id,
                    status='approved'
                ).count(),
                'pending': ContentSubmission.objects.filter(
                    entity_type__model=entity_type.lower(),
                    entity_id=entity_id,
                    status='pending'
                ).count(),
            },
            'versions': EntityVersion.objects.filter(
                content_type__model=entity_type.lower(),
                object_id=entity_id
            ).count(),
        }

        logger.info(f"Generated report for {entity_type} {entity_id}")
        return report

    except Exception as e:
        logger.error(f"Error generating report: {str(e)}")
        raise


@shared_task(bind=True, max_retries=2)
def cleanup_duplicate_entities(self):
    """
    Detect and flag potential duplicate entities.

    This helps maintain database quality by identifying
    entities that might be duplicates based on name similarity.

    Returns:
        dict: Duplicate detection results
    """
    from apps.entities.models import Park, Ride, Company, RideModel

    try:
        # This is a simplified implementation
        # In production, you'd want more sophisticated duplicate detection

        results = {
            'parks_flagged': 0,
            'rides_flagged': 0,
            'companies_flagged': 0,
        }

        logger.info(f"Duplicate detection completed: {results}")
        return results

    except Exception as exc:
        logger.error(f"Error detecting duplicates: {str(exc)}")
        raise self.retry(exc=exc, countdown=300)


@shared_task
def calculate_global_statistics():
    """
    Calculate global statistics across all entities.

    Returns:
        dict: Global statistics
    """
    from apps.entities.models import Park, Ride, Company, RideModel
    from apps.media.models import Photo
    from apps.moderation.models import ContentSubmission
    from apps.users.models import User

    try:
        stats = {
            'entities': {
                'parks': Park.objects.count(),
                'rides': Ride.objects.count(),
                'companies': Company.objects.count(),
                'ride_models': RideModel.objects.count(),
            },
            'photos': {
                'total': Photo.objects.count(),
                'approved': Photo.objects.filter(moderation_status='approved').count(),
            },
            'submissions': {
                'total': ContentSubmission.objects.count(),
                'pending': ContentSubmission.objects.filter(status='pending').count(),
            },
            'users': {
                'total': User.objects.count(),
                'active': User.objects.filter(is_active=True).count(),
            },
            'timestamp': timezone.now().isoformat(),
        }

        logger.info(f"Global statistics calculated: {stats}")
        return stats

    except Exception as e:
        logger.error(f"Error calculating global statistics: {str(e)}")
        raise


@shared_task(bind=True, max_retries=2)
def validate_entity_data(self, entity_type, entity_id):
    """
    Validate entity data integrity and flag issues.

    Args:
        entity_type: Type of entity
        entity_id: ID of the entity

    Returns:
        dict: Validation results
    """
    from apps.entities.models import Park, Ride, Company, RideModel

    try:
        model_map = {
            'park': Park,
            'ride': Ride,
            'company': Company,
            'ridemodel': RideModel,
        }

        model = model_map.get(entity_type.lower())
        if not model:
            raise ValueError(f"Invalid entity type: {entity_type}")

        entity = model.objects.get(id=entity_id)

        issues = []

        # Check for missing required fields
        if not entity.name or entity.name.strip() == '':
            issues.append('Missing or empty name')

        # Entity-specific validation
        if entity_type.lower() == 'park' and not entity.country:
            issues.append('Missing country')

        if entity_type.lower() == 'ride' and not entity.park:
            issues.append('Missing park association')

        result = {
            'entity': f"{entity_type} {entity_id}",
            'valid': len(issues) == 0,
            'issues': issues,
        }

        if issues:
            logger.warning(f"Validation issues for {entity_type} {entity_id}: {issues}")
        else:
            logger.info(f"Validation passed for {entity_type} {entity_id}")

        return result

    except Exception as exc:
        logger.error(f"Error validating {entity_type} {entity_id}: {str(exc)}")
        raise self.retry(exc=exc, countdown=300)
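The recurring tasks above (update_all_statistics, cleanup_duplicate_entities, calculate_global_statistics) still need a schedule. A Celery beat configuration sketch, assuming the tasks are importable under apps.entities.tasks and that Celery is already configured for the project; the schedule names and intervals are illustrative only:

# settings.py - illustrative beat schedule, not part of this commit.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'update-all-entity-statistics': {
        'task': 'apps.entities.tasks.update_all_statistics',
        'schedule': crontab(minute=0, hour='*/6'),  # every 6 hours, per the docstring
    },
    'calculate-global-statistics': {
        'task': 'apps.entities.tasks.calculate_global_statistics',
        'schedule': crontab(minute=30, hour=1),
    },
    'cleanup-duplicate-entities': {
        'task': 'apps.entities.tasks.cleanup_duplicate_entities',
        'schedule': crontab(minute=0, hour=2, day_of_week='sunday'),
    },
}
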
Binary file not shown.
BIN django/apps/media/__pycache__/services.cpython-313.pyc Normal file
Binary file not shown.
BIN django/apps/media/__pycache__/validators.cpython-313.pyc Normal file
Binary file not shown.
@@ -2,16 +2,20 @@
Django Admin configuration for media models.
"""
from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.db.models import Count, Q
from .models import Photo


@admin.register(Photo)
class PhotoAdmin(admin.ModelAdmin):
    """Admin interface for Photo model."""
    """Admin interface for Photo model with enhanced features."""

    list_display = [
        'title', 'cloudflare_image_id', 'photo_type', 'moderation_status',
        'is_approved', 'uploaded_by', 'created'
        'thumbnail_preview', 'title', 'photo_type', 'moderation_status',
        'entity_info', 'uploaded_by', 'dimensions', 'file_size_display', 'created'
    ]
    list_filter = [
        'moderation_status', 'is_approved', 'photo_type',
@@ -62,7 +66,79 @@ class PhotoAdmin(admin.ModelAdmin):
        }),
    )

    actions = ['approve_photos', 'reject_photos', 'flag_photos']
    date_hierarchy = 'created'
    actions = ['approve_photos', 'reject_photos', 'flag_photos', 'make_featured', 'remove_featured']

    def get_queryset(self, request):
        """Optimize queryset with select_related."""
        qs = super().get_queryset(request)
        return qs.select_related(
            'uploaded_by', 'moderated_by', 'content_type'
        )

    def thumbnail_preview(self, obj):
        """Display thumbnail preview in list view."""
        if obj.cloudflare_url:
            # Use thumbnail variant for preview
            from apps.media.services import CloudFlareService
            cf = CloudFlareService()
            thumbnail_url = cf.get_image_url(obj.cloudflare_image_id, 'thumbnail')

            return format_html(
                '<img src="{}" style="width: 60px; height: 60px; object-fit: cover; border-radius: 4px;" />',
                thumbnail_url
            )
        return "-"
    thumbnail_preview.short_description = "Preview"

    def entity_info(self, obj):
        """Display entity information."""
        if obj.content_type and obj.object_id:
            entity = obj.content_object
            if entity:
                entity_type = obj.content_type.model
                entity_name = getattr(entity, 'name', str(entity))
                return format_html(
                    '<strong>{}</strong><br/><small>{}</small>',
                    entity_name,
                    entity_type.upper()
                )
        return format_html('<em style="color: #999;">Not attached</em>')
    entity_info.short_description = "Entity"

    def dimensions(self, obj):
        """Display image dimensions."""
        if obj.width and obj.height:
            return f"{obj.width}×{obj.height}"
        return "-"
    dimensions.short_description = "Size"

    def file_size_display(self, obj):
        """Display file size in human-readable format."""
        if obj.file_size:
            size_kb = obj.file_size / 1024
            if size_kb > 1024:
                return f"{size_kb / 1024:.1f} MB"
            return f"{size_kb:.1f} KB"
        return "-"
    file_size_display.short_description = "File Size"

    def changelist_view(self, request, extra_context=None):
        """Add statistics to changelist."""
        extra_context = extra_context or {}

        # Get photo statistics
        stats = Photo.objects.aggregate(
            total=Count('id'),
            pending=Count('id', filter=Q(moderation_status='pending')),
            approved=Count('id', filter=Q(moderation_status='approved')),
            rejected=Count('id', filter=Q(moderation_status='rejected')),
            flagged=Count('id', filter=Q(moderation_status='flagged')),
        )

        extra_context['photo_stats'] = stats

        return super().changelist_view(request, extra_context)

    def approve_photos(self, request, queryset):
        """Bulk approve selected photos."""
@@ -90,3 +166,41 @@ class PhotoAdmin(admin.ModelAdmin):
            count += 1
        self.message_user(request, f"{count} photo(s) flagged for review.")
    flag_photos.short_description = "Flag selected photos"

    def make_featured(self, request, queryset):
        """Mark selected photos as featured."""
        count = queryset.update(is_featured=True)
        self.message_user(request, f"{count} photo(s) marked as featured.")
    make_featured.short_description = "Mark as featured"

    def remove_featured(self, request, queryset):
        """Remove featured status from selected photos."""
        count = queryset.update(is_featured=False)
        self.message_user(request, f"{count} photo(s) removed from featured.")
    remove_featured.short_description = "Remove featured status"


# Inline admin for use in entity admin pages
class PhotoInline(GenericTabularInline):
    """Inline admin for photos in entity pages."""
    model = Photo
    ct_field = 'content_type'
    ct_fk_field = 'object_id'
    extra = 0
    fields = ['thumbnail_preview', 'title', 'photo_type', 'moderation_status', 'display_order']
    readonly_fields = ['thumbnail_preview']
    can_delete = True

    def thumbnail_preview(self, obj):
        """Display thumbnail preview in inline."""
        if obj.cloudflare_url:
            from apps.media.services import CloudFlareService
            cf = CloudFlareService()
            thumbnail_url = cf.get_image_url(obj.cloudflare_image_id, 'thumbnail')

            return format_html(
                '<img src="{}" style="width: 40px; height: 40px; object-fit: cover; border-radius: 4px;" />',
                thumbnail_url
            )
        return "-"
    thumbnail_preview.short_description = "Preview"

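PhotoInline is intended to be attached to the entity admin classes; a minimal sketch of how that attachment might look (the ParkAdmin below is illustrative only, the real entity admins live elsewhere in this commit):

# Sketch only - attaching the generic photo inline to an entity admin.
from django.contrib import admin
from apps.entities.models import Park
from apps.media.admin import PhotoInline

class ParkAdmin(admin.ModelAdmin):  # illustrative stand-in for the project's ParkAdmin
    inlines = [PhotoInline]
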
492 django/apps/media/services.py Normal file
@@ -0,0 +1,492 @@
"""
|
||||
Media services for photo upload, management, and CloudFlare Images integration.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import mimetypes
|
||||
import os
|
||||
from io import BytesIO
|
||||
from typing import Optional, Dict, Any, List
|
||||
from django.conf import settings
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
|
||||
from django.db import transaction
|
||||
from django.db.models import Model
|
||||
|
||||
import requests
|
||||
from PIL import Image
|
||||
|
||||
from apps.media.models import Photo
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CloudFlareError(Exception):
|
||||
"""Base exception for CloudFlare API errors."""
|
||||
pass
|
||||
|
||||
|
||||
class CloudFlareService:
|
||||
"""
|
||||
Service for interacting with CloudFlare Images API.
|
||||
|
||||
Provides image upload, deletion, and URL generation with automatic
|
||||
fallback to mock mode when CloudFlare credentials are not configured.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.account_id = settings.CLOUDFLARE_ACCOUNT_ID
|
||||
self.api_token = settings.CLOUDFLARE_IMAGE_TOKEN
|
||||
self.delivery_hash = settings.CLOUDFLARE_IMAGE_HASH
|
||||
|
||||
# Enable mock mode if CloudFlare is not configured
|
||||
self.mock_mode = not all([self.account_id, self.api_token, self.delivery_hash])
|
||||
|
||||
if self.mock_mode:
|
||||
logger.warning("CloudFlare Images not configured - using mock mode")
|
||||
|
||||
self.base_url = f"https://api.cloudflare.com/client/v4/accounts/{self.account_id}/images/v1"
|
||||
self.headers = {"Authorization": f"Bearer {self.api_token}"}
|
||||
|
||||
def upload_image(
|
||||
self,
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
metadata: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Upload an image to CloudFlare Images.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
metadata: Optional metadata dictionary
|
||||
|
||||
Returns:
|
||||
Dict containing:
|
||||
- id: CloudFlare image ID
|
||||
- url: CDN URL for the image
|
||||
- variants: Available image variants
|
||||
|
||||
Raises:
|
||||
CloudFlareError: If upload fails
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return self._mock_upload(file, metadata)
|
||||
|
||||
try:
|
||||
# Prepare the file for upload
|
||||
file.seek(0) # Reset file pointer
|
||||
|
||||
# Prepare multipart form data
|
||||
files = {
|
||||
'file': (file.name, file.read(), file.content_type)
|
||||
}
|
||||
|
||||
# Add metadata if provided
|
||||
data = {}
|
||||
if metadata:
|
||||
data['metadata'] = str(metadata)
|
||||
|
||||
# Make API request
|
||||
response = requests.post(
|
||||
self.base_url,
|
||||
headers=self.headers,
|
||||
files=files,
|
||||
data=data,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
if not result.get('success'):
|
||||
raise CloudFlareError(f"Upload failed: {result.get('errors', [])}")
|
||||
|
||||
image_data = result['result']
|
||||
|
||||
return {
|
||||
'id': image_data['id'],
|
||||
'url': self._get_cdn_url(image_data['id']),
|
||||
'variants': image_data.get('variants', []),
|
||||
'uploaded': image_data.get('uploaded'),
|
||||
}
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"CloudFlare upload failed: {str(e)}")
|
||||
raise CloudFlareError(f"Failed to upload image: {str(e)}")
|
||||
|
||||
def delete_image(self, image_id: str) -> bool:
|
||||
"""
|
||||
Delete an image from CloudFlare Images.
|
||||
|
||||
Args:
|
||||
image_id: The CloudFlare image ID
|
||||
|
||||
Returns:
|
||||
True if deletion was successful
|
||||
|
||||
Raises:
|
||||
CloudFlareError: If deletion fails
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return self._mock_delete(image_id)
|
||||
|
||||
try:
|
||||
url = f"{self.base_url}/{image_id}"
|
||||
response = requests.delete(
|
||||
url,
|
||||
headers=self.headers,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
return result.get('success', False)
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"CloudFlare deletion failed: {str(e)}")
|
||||
raise CloudFlareError(f"Failed to delete image: {str(e)}")
|
||||
|
||||
def get_image_url(self, image_id: str, variant: str = "public") -> str:
|
||||
"""
|
||||
Generate a CloudFlare CDN URL for an image.
|
||||
|
||||
Args:
|
||||
image_id: The CloudFlare image ID
|
||||
variant: Image variant (public, thumbnail, banner, etc.)
|
||||
|
||||
Returns:
|
||||
CDN URL for the image
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return self._mock_url(image_id, variant)
|
||||
|
||||
return self._get_cdn_url(image_id, variant)
|
||||
|
||||
def get_image_variants(self, image_id: str) -> List[str]:
|
||||
"""
|
||||
Get available variants for an image.
|
||||
|
||||
Args:
|
||||
image_id: The CloudFlare image ID
|
||||
|
||||
Returns:
|
||||
List of available variant names
|
||||
"""
|
||||
if self.mock_mode:
|
||||
return ['public', 'thumbnail', 'banner']
|
||||
|
||||
try:
|
||||
url = f"{self.base_url}/{image_id}"
|
||||
response = requests.get(
|
||||
url,
|
||||
headers=self.headers,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
if result.get('success'):
|
||||
return list(result['result'].get('variants', []))
|
||||
|
||||
return []
|
||||
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger.error(f"Failed to get variants: {str(e)}")
|
||||
return []
|
||||
|
||||
def _get_cdn_url(self, image_id: str, variant: str = "public") -> str:
|
||||
"""Generate CloudFlare CDN URL."""
|
||||
return f"https://imagedelivery.net/{self.delivery_hash}/{image_id}/{variant}"
|
||||
|
||||
# Mock methods for development without CloudFlare
|
||||
|
||||
def _mock_upload(self, file, metadata) -> Dict[str, Any]:
|
||||
"""Mock upload for development."""
|
||||
import uuid
|
||||
mock_id = str(uuid.uuid4())
|
||||
|
||||
logger.info(f"[MOCK] Uploaded image: {file.name} -> {mock_id}")
|
||||
|
||||
return {
|
||||
'id': mock_id,
|
||||
'url': self._mock_url(mock_id),
|
||||
'variants': ['public', 'thumbnail', 'banner'],
|
||||
'uploaded': 'mock',
|
||||
}
|
||||
|
||||
def _mock_delete(self, image_id: str) -> bool:
|
||||
"""Mock deletion for development."""
|
||||
logger.info(f"[MOCK] Deleted image: {image_id}")
|
||||
return True
|
||||
|
||||
def _mock_url(self, image_id: str, variant: str = "public") -> str:
|
||||
"""Generate mock URL for development."""
|
||||
return f"https://placehold.co/800x600/png?text={image_id[:8]}"
|
||||
|
||||
|
||||
class PhotoService:
|
||||
"""
|
||||
Service for managing Photo objects with CloudFlare integration.
|
||||
|
||||
Handles photo creation, attachment to entities, moderation,
|
||||
and gallery management.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.cloudflare = CloudFlareService()
|
||||
|
||||
def create_photo(
|
||||
self,
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
user,
|
||||
entity: Optional[Model] = None,
|
||||
photo_type: str = "gallery",
|
||||
title: str = "",
|
||||
description: str = "",
|
||||
credit: str = "",
|
||||
is_visible: bool = True,
|
||||
) -> Photo:
|
||||
"""
|
||||
Create a new photo with CloudFlare upload.
|
||||
|
||||
Args:
|
||||
file: Uploaded file object
|
||||
user: User uploading the photo
|
||||
entity: Optional entity to attach photo to
|
||||
photo_type: Type of photo (main, gallery, banner, etc.)
|
||||
title: Photo title
|
||||
description: Photo description
|
||||
credit: Photo credit/attribution
|
||||
is_visible: Whether photo is visible
|
||||
|
||||
Returns:
|
||||
Created Photo instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
CloudFlareError: If upload fails
|
||||
"""
|
||||
# Get image dimensions
|
||||
dimensions = self._get_image_dimensions(file)
|
||||
|
||||
# Upload to CloudFlare
|
||||
upload_result = self.cloudflare.upload_image(
|
||||
file,
|
||||
metadata={
|
||||
'uploaded_by': str(user.id),
|
||||
'photo_type': photo_type,
|
||||
}
|
||||
)
|
||||
|
||||
# Create Photo instance
|
||||
with transaction.atomic():
|
||||
photo = Photo.objects.create(
|
||||
cloudflare_image_id=upload_result['id'],
|
||||
cloudflare_url=upload_result['url'],
|
||||
uploaded_by=user,
|
||||
photo_type=photo_type,
|
||||
title=title or file.name,
|
||||
description=description,
|
||||
credit=credit,
|
||||
width=dimensions['width'],
|
||||
height=dimensions['height'],
|
||||
file_size=file.size,
|
||||
mime_type=file.content_type,
|
||||
is_visible=is_visible,
|
||||
moderation_status='pending',
|
||||
)
|
||||
|
||||
# Attach to entity if provided
|
||||
if entity:
|
||||
self.attach_to_entity(photo, entity)
|
||||
|
||||
logger.info(f"Photo created: {photo.id} by user {user.id}")
|
||||
|
||||
# Trigger async post-processing
|
||||
try:
|
||||
from apps.media.tasks import process_uploaded_image
|
||||
process_uploaded_image.delay(photo.id)
|
||||
except Exception as e:
|
||||
# Don't fail the upload if async task fails to queue
|
||||
logger.warning(f"Failed to queue photo processing task: {str(e)}")
|
||||
|
||||
return photo
|
||||
|
||||
def attach_to_entity(self, photo: Photo, entity: Model) -> None:
|
||||
"""
|
||||
Attach a photo to an entity.
|
||||
|
||||
Args:
|
||||
photo: Photo instance
|
||||
entity: Entity to attach to (Park, Ride, Company, etc.)
|
||||
"""
|
||||
content_type = ContentType.objects.get_for_model(entity)
|
||||
photo.content_type = content_type
|
||||
photo.object_id = entity.pk
|
||||
photo.save(update_fields=['content_type', 'object_id'])
|
||||
|
||||
logger.info(f"Photo {photo.id} attached to {content_type.model} {entity.pk}")
|
||||
|
||||
def detach_from_entity(self, photo: Photo) -> None:
|
||||
"""
|
||||
Detach a photo from its entity.
|
||||
|
||||
Args:
|
||||
photo: Photo instance
|
||||
"""
|
||||
photo.content_type = None
|
||||
photo.object_id = None
|
||||
photo.save(update_fields=['content_type', 'object_id'])
|
||||
|
||||
logger.info(f"Photo {photo.id} detached from entity")
|
||||
|
||||
def moderate_photo(
|
||||
self,
|
||||
photo: Photo,
|
||||
status: str,
|
||||
moderator,
|
||||
notes: str = ""
|
||||
) -> Photo:
|
||||
"""
|
||||
Moderate a photo (approve/reject/flag).
|
||||
|
||||
Args:
|
||||
photo: Photo instance
|
||||
status: New status (approved, rejected, flagged)
|
||||
moderator: User performing moderation
|
||||
notes: Moderation notes
|
||||
|
||||
Returns:
|
||||
Updated Photo instance
|
||||
"""
|
||||
with transaction.atomic():
|
||||
photo.moderation_status = status
|
||||
photo.moderated_by = moderator
|
||||
photo.moderation_notes = notes
|
||||
|
||||
if status == 'approved':
|
||||
photo.approve()
|
||||
elif status == 'rejected':
|
||||
photo.reject()
|
||||
elif status == 'flagged':
|
||||
photo.flag()
|
||||
|
||||
photo.save()
|
||||
|
||||
logger.info(
|
||||
f"Photo {photo.id} moderated: {status} by user {moderator.id}"
|
||||
)
|
||||
|
||||
return photo
|
||||
|
||||
def reorder_photos(
|
||||
self,
|
||||
entity: Model,
|
||||
photo_ids: List[int],
|
||||
photo_type: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Reorder photos for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity whose photos to reorder
|
||||
photo_ids: List of photo IDs in desired order
|
||||
photo_type: Optional photo type filter
|
||||
"""
|
||||
content_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
with transaction.atomic():
|
||||
for order, photo_id in enumerate(photo_ids):
|
||||
filters = {
|
||||
'id': photo_id,
|
||||
'content_type': content_type,
|
||||
'object_id': entity.pk,
|
||||
}
|
||||
|
||||
if photo_type:
|
||||
filters['photo_type'] = photo_type
|
||||
|
||||
Photo.objects.filter(**filters).update(display_order=order)
|
||||
|
||||
logger.info(f"Reordered {len(photo_ids)} photos for {content_type.model} {entity.pk}")
|
||||
|
||||
def get_entity_photos(
|
||||
self,
|
||||
entity: Model,
|
||||
photo_type: Optional[str] = None,
|
||||
approved_only: bool = True
|
||||
) -> List[Photo]:
|
||||
"""
|
||||
Get photos for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity to get photos for
|
||||
photo_type: Optional photo type filter
|
||||
approved_only: Whether to return only approved photos
|
||||
|
||||
Returns:
|
||||
List of Photo instances ordered by display_order
|
||||
"""
|
||||
content_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
queryset = Photo.objects.filter(
|
||||
content_type=content_type,
|
||||
object_id=entity.pk,
|
||||
)
|
||||
|
||||
if photo_type:
|
||||
queryset = queryset.filter(photo_type=photo_type)
|
||||
|
||||
if approved_only:
|
||||
queryset = queryset.approved()
|
||||
|
||||
return list(queryset.order_by('display_order', '-created_at'))
|
||||
|
||||
def delete_photo(self, photo: Photo, delete_from_cloudflare: bool = True) -> None:
|
||||
"""
|
||||
Delete a photo.
|
||||
|
||||
Args:
|
||||
photo: Photo instance to delete
|
||||
delete_from_cloudflare: Whether to also delete from CloudFlare
|
||||
"""
|
||||
cloudflare_id = photo.cloudflare_image_id
|
||||
|
||||
with transaction.atomic():
|
||||
photo.delete()
|
||||
|
||||
# Delete from CloudFlare after DB deletion succeeds
|
||||
if delete_from_cloudflare and cloudflare_id:
|
||||
try:
|
||||
self.cloudflare.delete_image(cloudflare_id)
|
||||
except CloudFlareError as e:
|
||||
logger.error(f"Failed to delete from CloudFlare: {str(e)}")
|
||||
# Don't raise - photo is already deleted from DB
|
||||
|
||||
logger.info(f"Photo deleted: {cloudflare_id}")
|
||||
|
||||
def _get_image_dimensions(
|
||||
self,
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile
|
||||
) -> Dict[str, int]:
|
||||
"""
|
||||
Extract image dimensions from uploaded file.
|
||||
|
||||
Args:
|
||||
file: Uploaded file object
|
||||
|
||||
Returns:
|
||||
Dict with 'width' and 'height' keys
|
||||
"""
|
||||
try:
|
||||
file.seek(0)
|
||||
image = Image.open(file)
|
||||
width, height = image.size
|
||||
file.seek(0) # Reset for later use
|
||||
|
||||
return {'width': width, 'height': height}
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to get image dimensions: {str(e)}")
|
||||
return {'width': 0, 'height': 0}
|
||||
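A usage sketch for PhotoService.create_photo from a request handler; the view function, request fields, and error handling are assumptions, only the service calls and CloudFlareError mirror services.py above:

# Illustrative sketch only.
from apps.media.services import PhotoService, CloudFlareError

def handle_photo_upload(request, park):
    """Upload a gallery photo for a park and attach it via the generic relation."""
    service = PhotoService()
    try:
        photo = service.create_photo(
            file=request.FILES['image'],
            user=request.user,
            entity=park,
            photo_type='gallery',
            title=request.POST.get('title', ''),
        )
    except CloudFlareError:
        return None  # surface the upload failure to the caller
    return photo
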
219 django/apps/media/tasks.py Normal file
@@ -0,0 +1,219 @@
"""
|
||||
Background tasks for media processing and management.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def process_uploaded_image(self, photo_id):
|
||||
"""
|
||||
Process an uploaded image asynchronously.
|
||||
|
||||
This task runs after a photo is uploaded to perform additional
|
||||
processing like metadata extraction, validation, etc.
|
||||
|
||||
Args:
|
||||
photo_id: ID of the Photo to process
|
||||
|
||||
Returns:
|
||||
str: Processing result message
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
|
||||
try:
|
||||
photo = Photo.objects.get(id=photo_id)
|
||||
|
||||
# Log processing start
|
||||
logger.info(f"Processing photo {photo_id}: {photo.title}")
|
||||
|
||||
# Additional processing could include:
|
||||
# - Generating additional thumbnails
|
||||
# - Extracting EXIF data
|
||||
# - Running image quality checks
|
||||
# - Updating photo metadata
|
||||
|
||||
# For now, just log that processing is complete
|
||||
logger.info(f"Photo {photo_id} processed successfully")
|
||||
|
||||
return f"Photo {photo_id} processed successfully"
|
||||
|
||||
except Photo.DoesNotExist:
|
||||
logger.error(f"Photo {photo_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error processing photo {photo_id}: {str(exc)}")
|
||||
# Retry with exponential backoff
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_rejected_photos(self, days_old=30):
|
||||
"""
|
||||
Clean up photos that have been rejected for more than N days.
|
||||
|
||||
This task runs periodically (e.g., weekly) to remove old rejected
|
||||
photos and free up storage space.
|
||||
|
||||
Args:
|
||||
days_old: Number of days after rejection to delete (default: 30)
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from apps.media.services import PhotoService
|
||||
|
||||
try:
|
||||
cutoff_date = timezone.now() - timedelta(days=days_old)
|
||||
|
||||
# Find rejected photos older than cutoff
|
||||
old_rejected = Photo.objects.filter(
|
||||
moderation_status='rejected',
|
||||
moderated_at__lt=cutoff_date
|
||||
)
|
||||
|
||||
count = old_rejected.count()
|
||||
logger.info(f"Found {count} rejected photos to cleanup")
|
||||
|
||||
# Delete each photo
|
||||
photo_service = PhotoService()
|
||||
deleted_count = 0
|
||||
|
||||
for photo in old_rejected:
|
||||
try:
|
||||
photo_service.delete_photo(photo, delete_from_cloudflare=True)
|
||||
deleted_count += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete photo {photo.id}: {str(e)}")
|
||||
continue
|
||||
|
||||
result = {
|
||||
'found': count,
|
||||
'deleted': deleted_count,
|
||||
'failed': count - deleted_count,
|
||||
'cutoff_date': cutoff_date.isoformat()
|
||||
}
|
||||
|
||||
logger.info(f"Cleanup complete: {result}")
|
||||
return result
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error during photo cleanup: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300) # Retry after 5 minutes
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def generate_photo_thumbnails(self, photo_id, variants=None):
|
||||
"""
|
||||
Generate thumbnails for a photo on demand.
|
||||
|
||||
This can be used to regenerate thumbnails if the original
|
||||
is updated or if new variants are needed.
|
||||
|
||||
Args:
|
||||
photo_id: ID of the Photo
|
||||
variants: List of variant names to generate (None = all)
|
||||
|
||||
Returns:
|
||||
dict: Generated variants and their URLs
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from apps.media.services import CloudFlareService
|
||||
|
||||
try:
|
||||
photo = Photo.objects.get(id=photo_id)
|
||||
cloudflare = CloudFlareService()
|
||||
|
||||
if variants is None:
|
||||
variants = ['public', 'thumbnail', 'banner']
|
||||
|
||||
result = {}
|
||||
for variant in variants:
|
||||
url = cloudflare.get_image_url(photo.cloudflare_image_id, variant)
|
||||
result[variant] = url
|
||||
|
||||
logger.info(f"Generated thumbnails for photo {photo_id}: {variants}")
|
||||
return result
|
||||
|
||||
except Photo.DoesNotExist:
|
||||
logger.error(f"Photo {photo_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error generating thumbnails for photo {photo_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_orphaned_cloudflare_images(self):
|
||||
"""
|
||||
Clean up CloudFlare images that no longer have database records.
|
||||
|
||||
This task helps prevent storage bloat by removing images that
|
||||
were uploaded but their database records were deleted.
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from apps.media.services import CloudFlareService
|
||||
|
||||
try:
|
||||
cloudflare = CloudFlareService()
|
||||
|
||||
# In a real implementation, you would:
|
||||
# 1. Get list of all images from CloudFlare API
|
||||
# 2. Check which ones don't have Photo records
|
||||
# 3. Delete the orphaned images
|
||||
|
||||
# For now, just log that the task ran
|
||||
logger.info("Orphaned image cleanup task completed (not implemented in mock mode)")
|
||||
|
||||
return {
|
||||
'checked': 0,
|
||||
'orphaned': 0,
|
||||
'deleted': 0
|
||||
}
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error during orphaned image cleanup: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task
|
||||
def update_photo_statistics():
|
||||
"""
|
||||
Update photo-related statistics across the database.
|
||||
|
||||
This task can update cached counts, generate reports, etc.
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.media.models import Photo
|
||||
from django.db.models import Count
|
||||
|
||||
try:
|
||||
stats = {
|
||||
'total_photos': Photo.objects.count(),
|
||||
'pending': Photo.objects.filter(moderation_status='pending').count(),
|
||||
'approved': Photo.objects.filter(moderation_status='approved').count(),
|
||||
'rejected': Photo.objects.filter(moderation_status='rejected').count(),
|
||||
'flagged': Photo.objects.filter(moderation_status='flagged').count(),
|
||||
'by_type': dict(
|
||||
Photo.objects.values('photo_type').annotate(count=Count('id'))
|
||||
.values_list('photo_type', 'count')
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(f"Photo statistics updated: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating photo statistics: {str(e)}")
|
||||
raise
|
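The cleanup and statistics tasks above are written as periodic jobs, but nothing in this diff schedules them. A minimal sketch of how they might be wired up with Celery beat is shown below, assuming the project loads Celery configuration from Django settings with the CELERY_ namespace; the schedule times are placeholders, not part of this commit.

# Illustrative beat schedule; not part of this diff. Task names follow the
# default shared_task naming (module path + function name).
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    'cleanup-rejected-photos': {
        'task': 'apps.media.tasks.cleanup_rejected_photos',
        'schedule': crontab(hour=3, minute=0, day_of_week='sunday'),  # weekly
        'kwargs': {'days_old': 30},
    },
    'cleanup-orphaned-cloudflare-images': {
        'task': 'apps.media.tasks.cleanup_orphaned_cloudflare_images',
        'schedule': crontab(hour=4, minute=0, day_of_week='sunday'),
    },
    'update-photo-statistics': {
        'task': 'apps.media.tasks.update_photo_statistics',
        'schedule': crontab(minute=0, hour='*/6'),  # every six hours
    },
}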
||||
django/apps/media/validators.py (new file, 195 lines)
@@ -0,0 +1,195 @@
|
||||
"""
|
||||
Validators for image uploads.
|
||||
"""
|
||||
|
||||
import magic
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.files.uploadedfile import InMemoryUploadedFile, TemporaryUploadedFile
|
||||
from PIL import Image
|
||||
from typing import Optional
|
||||
|
||||
|
||||
# Allowed file types
|
||||
ALLOWED_MIME_TYPES = [
|
||||
'image/jpeg',
|
||||
'image/jpg',
|
||||
'image/png',
|
||||
'image/webp',
|
||||
'image/gif',
|
||||
]
|
||||
|
||||
ALLOWED_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.webp', '.gif']
|
||||
|
||||
# Size limits (in bytes)
|
||||
MAX_FILE_SIZE = 10 * 1024 * 1024 # 10 MB
|
||||
MIN_FILE_SIZE = 1024 # 1 KB
|
||||
|
||||
# Dimension limits
|
||||
MIN_WIDTH = 100
|
||||
MIN_HEIGHT = 100
|
||||
MAX_WIDTH = 8000
|
||||
MAX_HEIGHT = 8000
|
||||
|
||||
# Aspect ratio limits (for specific photo types)
|
||||
ASPECT_RATIO_LIMITS = {
|
||||
'banner': {'min': 2.0, 'max': 4.0}, # Wide banners
|
||||
'logo': {'min': 0.5, 'max': 2.0}, # Square-ish logos
|
||||
}
|
||||
|
||||
|
||||
def validate_image_file_type(file: InMemoryUploadedFile | TemporaryUploadedFile) -> None:
|
||||
"""
|
||||
Validate that the uploaded file is an allowed image type.
|
||||
|
||||
Uses python-magic to detect actual file type, not just extension.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
|
||||
Raises:
|
||||
ValidationError: If file type is not allowed
|
||||
"""
|
||||
# Check file extension
|
||||
file_ext = None
|
||||
if hasattr(file, 'name') and file.name:
|
||||
file_ext = '.' + file.name.split('.')[-1].lower()
|
||||
if file_ext not in ALLOWED_EXTENSIONS:
|
||||
raise ValidationError(
|
||||
f"File extension {file_ext} not allowed. "
|
||||
f"Allowed extensions: {', '.join(ALLOWED_EXTENSIONS)}"
|
||||
)
|
||||
|
||||
# Check MIME type from content type
|
||||
if hasattr(file, 'content_type'):
|
||||
if file.content_type not in ALLOWED_MIME_TYPES:
|
||||
raise ValidationError(
|
||||
f"File type {file.content_type} not allowed. "
|
||||
f"Allowed types: {', '.join(ALLOWED_MIME_TYPES)}"
|
||||
)
|
||||
|
||||
# Verify actual file content using python-magic
|
||||
try:
|
||||
file.seek(0)
|
||||
mime = magic.from_buffer(file.read(2048), mime=True)
|
||||
file.seek(0)
|
||||
|
||||
if mime not in ALLOWED_MIME_TYPES:
|
||||
raise ValidationError(
|
||||
f"File content type {mime} does not match allowed types. "
|
||||
"File may be corrupted or incorrectly labeled."
|
||||
)
|
||||
except Exception as e:
|
||||
# If magic fails, we already validated content_type above
|
||||
pass
|
||||
|
||||
|
||||
def validate_image_file_size(file: InMemoryUploadedFile | TemporaryUploadedFile) -> None:
|
||||
"""
|
||||
Validate that the file size is within allowed limits.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
|
||||
Raises:
|
||||
ValidationError: If file size is not within limits
|
||||
"""
|
||||
file_size = file.size
|
||||
|
||||
if file_size < MIN_FILE_SIZE:
|
||||
raise ValidationError(
|
||||
f"File size is too small. Minimum: {MIN_FILE_SIZE / 1024:.0f} KB"
|
||||
)
|
||||
|
||||
if file_size > MAX_FILE_SIZE:
|
||||
raise ValidationError(
|
||||
f"File size is too large. Maximum: {MAX_FILE_SIZE / (1024 * 1024):.0f} MB"
|
||||
)
|
||||
|
||||
|
||||
def validate_image_dimensions(
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
photo_type: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Validate image dimensions and aspect ratio.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
photo_type: Optional photo type for specific validation
|
||||
|
||||
Raises:
|
||||
ValidationError: If dimensions are not within limits
|
||||
"""
|
||||
try:
|
||||
file.seek(0)
|
||||
image = Image.open(file)
|
||||
width, height = image.size
|
||||
file.seek(0)
|
||||
except Exception as e:
|
||||
raise ValidationError(f"Could not read image dimensions: {str(e)}")
|
||||
|
||||
# Check minimum dimensions
|
||||
if width < MIN_WIDTH or height < MIN_HEIGHT:
|
||||
raise ValidationError(
|
||||
f"Image dimensions too small. Minimum: {MIN_WIDTH}x{MIN_HEIGHT}px, "
|
||||
f"got: {width}x{height}px"
|
||||
)
|
||||
|
||||
# Check maximum dimensions
|
||||
if width > MAX_WIDTH or height > MAX_HEIGHT:
|
||||
raise ValidationError(
|
||||
f"Image dimensions too large. Maximum: {MAX_WIDTH}x{MAX_HEIGHT}px, "
|
||||
f"got: {width}x{height}px"
|
||||
)
|
||||
|
||||
# Check aspect ratio for specific photo types
|
||||
if photo_type and photo_type in ASPECT_RATIO_LIMITS:
|
||||
aspect_ratio = width / height
|
||||
limits = ASPECT_RATIO_LIMITS[photo_type]
|
||||
|
||||
if aspect_ratio < limits['min'] or aspect_ratio > limits['max']:
|
||||
raise ValidationError(
|
||||
f"Invalid aspect ratio for {photo_type}. "
|
||||
f"Expected ratio between {limits['min']:.2f} and {limits['max']:.2f}, "
|
||||
f"got: {aspect_ratio:.2f}"
|
||||
)
|
||||
|
||||
|
||||
def validate_image(
|
||||
file: InMemoryUploadedFile | TemporaryUploadedFile,
|
||||
photo_type: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
Run all image validations.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
photo_type: Optional photo type for specific validation
|
||||
|
||||
Raises:
|
||||
ValidationError: If any validation fails
|
||||
"""
|
||||
validate_image_file_type(file)
|
||||
validate_image_file_size(file)
|
||||
validate_image_dimensions(file, photo_type)
|
||||
|
||||
|
||||
def validate_image_content_safety(file: InMemoryUploadedFile | TemporaryUploadedFile) -> None:
|
||||
"""
|
||||
Placeholder for content safety validation.
|
||||
|
||||
This could integrate with services like:
|
||||
- AWS Rekognition
|
||||
- Google Cloud Vision
|
||||
- Azure Content Moderator
|
||||
|
||||
For now, this is a no-op, but it provides an extension point for future integration.
|
||||
|
||||
Args:
|
||||
file: The uploaded file object
|
||||
|
||||
Raises:
|
||||
ValidationError: If content is deemed unsafe
|
||||
"""
|
||||
# TODO: Integrate with content moderation API
|
||||
pass
|
||||
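The validators above are standalone functions rather than Django field validators, so callers are expected to invoke them explicitly. A hedged sketch of how an upload form might use them follows; the PhotoUploadForm name and its fields are hypothetical and not part of this diff.

# Hypothetical usage sketch; PhotoUploadForm does not exist in this commit.
from django import forms
from apps.media.validators import validate_image


class PhotoUploadForm(forms.Form):
    image = forms.FileField()
    photo_type = forms.ChoiceField(
        choices=[('banner', 'Banner'), ('logo', 'Logo'), ('general', 'General')],
        required=False,
    )

    def clean(self):
        cleaned = super().clean()
        image = cleaned.get('image')
        if image:
            # Raises ValidationError on bad type, size, dimensions or aspect ratio
            validate_image(image, photo_type=cleaned.get('photo_type') or None)
        return cleaned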
BIN django/apps/moderation/__pycache__/admin.cpython-313.pyc (new file, binary, not shown)
BIN django/apps/moderation/__pycache__/services.cpython-313.pyc (new file, binary, not shown)
django/apps/moderation/admin.py (new file, 424 lines)
@@ -0,0 +1,424 @@
|
||||
"""
|
||||
Django admin for moderation models.
|
||||
"""
|
||||
from django.contrib import admin
|
||||
from django.utils.html import format_html
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from unfold.admin import ModelAdmin
|
||||
from unfold.decorators import display
|
||||
|
||||
from apps.moderation.models import ContentSubmission, SubmissionItem, ModerationLock
|
||||
|
||||
|
||||
@admin.register(ContentSubmission)
|
||||
class ContentSubmissionAdmin(ModelAdmin):
|
||||
"""Admin for ContentSubmission model."""
|
||||
|
||||
list_display = [
|
||||
'title_with_icon',
|
||||
'status_badge',
|
||||
'entity_info',
|
||||
'user',
|
||||
'items_summary',
|
||||
'locked_info',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'status',
|
||||
'submission_type',
|
||||
'entity_type',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'title',
|
||||
'description',
|
||||
'user__email',
|
||||
'user__username',
|
||||
]
|
||||
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'status',
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Submission Info', {
|
||||
'fields': (
|
||||
'id',
|
||||
'title',
|
||||
'description',
|
||||
'submission_type',
|
||||
'status',
|
||||
)
|
||||
}),
|
||||
('Entity', {
|
||||
'fields': (
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
)
|
||||
}),
|
||||
('User Info', {
|
||||
'fields': (
|
||||
'user',
|
||||
'source',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
)
|
||||
}),
|
||||
('Review Info', {
|
||||
'fields': (
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'rejection_reason',
|
||||
)
|
||||
}),
|
||||
('Metadata', {
|
||||
'fields': (
|
||||
'metadata',
|
||||
'created',
|
||||
'modified',
|
||||
),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='Title', ordering='title')
|
||||
def title_with_icon(self, obj):
|
||||
"""Display title with submission type icon."""
|
||||
icons = {
|
||||
'create': '➕',
|
||||
'update': '✏️',
|
||||
'delete': '🗑️',
|
||||
}
|
||||
icon = icons.get(obj.submission_type, '📝')
|
||||
return f"{icon} {obj.title}"
|
||||
|
||||
@display(description='Status', ordering='status')
|
||||
def status_badge(self, obj):
|
||||
"""Display colored status badge."""
|
||||
colors = {
|
||||
'draft': 'gray',
|
||||
'pending': 'blue',
|
||||
'reviewing': 'orange',
|
||||
'approved': 'green',
|
||||
'rejected': 'red',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 3px 8px; '
|
||||
'border-radius: 3px; font-size: 11px; font-weight: bold;">{}</span>',
|
||||
color,
|
||||
obj.get_status_display()
|
||||
)
|
||||
|
||||
@display(description='Entity')
|
||||
def entity_info(self, obj):
|
||||
"""Display entity type and ID."""
|
||||
return f"{obj.entity_type.model} #{str(obj.entity_id)[:8]}"
|
||||
|
||||
@display(description='Items')
|
||||
def items_summary(self, obj):
|
||||
"""Display item counts."""
|
||||
total = obj.get_items_count()
|
||||
approved = obj.get_approved_items_count()
|
||||
rejected = obj.get_rejected_items_count()
|
||||
pending = total - approved - rejected
|
||||
|
||||
return format_html(
|
||||
'<span title="Pending">{}</span> / '
|
||||
'<span style="color: green;" title="Approved">{}</span> / '
|
||||
'<span style="color: red;" title="Rejected">{}</span>',
|
||||
pending, approved, rejected
|
||||
)
|
||||
|
||||
@display(description='Lock Status')
|
||||
def locked_info(self, obj):
|
||||
"""Display lock information."""
|
||||
if obj.locked_by:
|
||||
is_expired = not obj.is_locked()
|
||||
status = '🔓 Expired' if is_expired else '🔒 Locked'
|
||||
return f"{status} by {obj.locked_by.email}"
|
||||
return '✅ Unlocked'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related(
|
||||
'user',
|
||||
'entity_type',
|
||||
'locked_by',
|
||||
'reviewed_by'
|
||||
).prefetch_related('items')
|
||||
|
||||
|
||||
class SubmissionItemInline(admin.TabularInline):
|
||||
"""Inline admin for submission items."""
|
||||
model = SubmissionItem
|
||||
extra = 0
|
||||
fields = [
|
||||
'field_label',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'change_type',
|
||||
'status',
|
||||
'reviewed_by',
|
||||
]
|
||||
readonly_fields = [
|
||||
'field_label',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'change_type',
|
||||
'status',
|
||||
'reviewed_by',
|
||||
]
|
||||
can_delete = False
|
||||
|
||||
def has_add_permission(self, request, obj=None):
|
||||
return False
|
||||
|
||||
|
||||
@admin.register(SubmissionItem)
|
||||
class SubmissionItemAdmin(ModelAdmin):
|
||||
"""Admin for SubmissionItem model."""
|
||||
|
||||
list_display = [
|
||||
'field_label',
|
||||
'submission_title',
|
||||
'change_type_badge',
|
||||
'status_badge',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'reviewed_by',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'status',
|
||||
'change_type',
|
||||
'is_required',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'field_name',
|
||||
'field_label',
|
||||
'submission__title',
|
||||
]
|
||||
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'submission',
|
||||
'field_name',
|
||||
'field_label',
|
||||
'old_value',
|
||||
'new_value',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
'status',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Item Info', {
|
||||
'fields': (
|
||||
'id',
|
||||
'submission',
|
||||
'field_name',
|
||||
'field_label',
|
||||
'change_type',
|
||||
'is_required',
|
||||
'order',
|
||||
)
|
||||
}),
|
||||
('Values', {
|
||||
'fields': (
|
||||
'old_value',
|
||||
'new_value',
|
||||
'old_value_display',
|
||||
'new_value_display',
|
||||
)
|
||||
}),
|
||||
('Review Info', {
|
||||
'fields': (
|
||||
'status',
|
||||
'reviewed_by',
|
||||
'reviewed_at',
|
||||
'rejection_reason',
|
||||
)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': (
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='Submission')
|
||||
def submission_title(self, obj):
|
||||
"""Display submission title with link."""
|
||||
url = reverse('admin:moderation_contentsubmission_change', args=[obj.submission.id])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.submission.title)
|
||||
|
||||
@display(description='Type', ordering='change_type')
|
||||
def change_type_badge(self, obj):
|
||||
"""Display colored change type badge."""
|
||||
colors = {
|
||||
'add': 'green',
|
||||
'modify': 'blue',
|
||||
'remove': 'red',
|
||||
}
|
||||
color = colors.get(obj.change_type, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 2px 6px; '
|
||||
'border-radius: 3px; font-size: 10px;">{}</span>',
|
||||
color,
|
||||
obj.get_change_type_display()
|
||||
)
|
||||
|
||||
@display(description='Status', ordering='status')
|
||||
def status_badge(self, obj):
|
||||
"""Display colored status badge."""
|
||||
colors = {
|
||||
'pending': 'orange',
|
||||
'approved': 'green',
|
||||
'rejected': 'red',
|
||||
}
|
||||
color = colors.get(obj.status, 'gray')
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 2px 6px; '
|
||||
'border-radius: 3px; font-size: 10px;">{}</span>',
|
||||
color,
|
||||
obj.get_status_display()
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('submission', 'reviewed_by')
|
||||
|
||||
|
||||
@admin.register(ModerationLock)
|
||||
class ModerationLockAdmin(ModelAdmin):
|
||||
"""Admin for ModerationLock model."""
|
||||
|
||||
list_display = [
|
||||
'submission_title',
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
'status_indicator',
|
||||
'lock_duration',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'is_active',
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'submission__title',
|
||||
'locked_by__email',
|
||||
'locked_by__username',
|
||||
]
|
||||
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'submission',
|
||||
'locked_by',
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
'is_active',
|
||||
'released_at',
|
||||
'lock_duration',
|
||||
'is_expired_display',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Lock Info', {
|
||||
'fields': (
|
||||
'id',
|
||||
'submission',
|
||||
'locked_by',
|
||||
'is_active',
|
||||
)
|
||||
}),
|
||||
('Timing', {
|
||||
'fields': (
|
||||
'locked_at',
|
||||
'expires_at',
|
||||
'released_at',
|
||||
'lock_duration',
|
||||
'is_expired_display',
|
||||
)
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': (
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description='Submission')
|
||||
def submission_title(self, obj):
|
||||
"""Display submission title with link."""
|
||||
url = reverse('admin:moderation_contentsubmission_change', args=[obj.submission.id])
|
||||
return format_html('<a href="{}">{}</a>', url, obj.submission.title)
|
||||
|
||||
@display(description='Status')
|
||||
def status_indicator(self, obj):
|
||||
"""Display lock status."""
|
||||
if not obj.is_active:
|
||||
return format_html(
|
||||
'<span style="color: gray;">🔓 Released</span>'
|
||||
)
|
||||
elif obj.is_expired():
|
||||
return format_html(
|
||||
'<span style="color: orange;">⏰ Expired</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="color: green;">🔒 Active</span>'
|
||||
)
|
||||
|
||||
@display(description='Duration')
|
||||
def lock_duration(self, obj):
|
||||
"""Display lock duration."""
|
||||
if obj.released_at:
|
||||
duration = obj.released_at - obj.locked_at
|
||||
else:
|
||||
duration = timezone.now() - obj.locked_at
|
||||
|
||||
minutes = int(duration.total_seconds() / 60)
|
||||
return f"{minutes} minutes"
|
||||
|
||||
@display(description='Expired?')
|
||||
def is_expired_display(self, obj):
|
||||
"""Display if lock is expired."""
|
||||
if not obj.is_active:
|
||||
return 'N/A (Released)'
|
||||
return 'Yes' if obj.is_expired() else 'No'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('submission', 'locked_by')
|
||||
django/apps/moderation/migrations/0001_initial.py (new file, 454 lines)
@@ -0,0 +1,454 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 17:40
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_fsm
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="ContentSubmission",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
django_fsm.FSMField(
|
||||
choices=[
|
||||
("draft", "Draft"),
|
||||
("pending", "Pending Review"),
|
||||
("reviewing", "Under Review"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
],
|
||||
db_index=True,
|
||||
default="draft",
|
||||
help_text="Current submission state (managed by FSM)",
|
||||
max_length=20,
|
||||
protected=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity_id",
|
||||
models.UUIDField(help_text="ID of the entity being modified"),
|
||||
),
|
||||
(
|
||||
"submission_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("create", "Create"),
|
||||
("update", "Update"),
|
||||
("delete", "Delete"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of operation (create, update, delete)",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"title",
|
||||
models.CharField(
|
||||
help_text="Brief description of changes", max_length=255
|
||||
),
|
||||
),
|
||||
(
|
||||
"description",
|
||||
models.TextField(
|
||||
blank=True, help_text="Detailed description of changes"
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
help_text="When the submission was locked for review",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_at",
|
||||
models.DateTimeField(
|
||||
blank=True,
|
||||
help_text="When the submission was reviewed",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"rejection_reason",
|
||||
models.TextField(
|
||||
blank=True, help_text="Reason for rejection (if rejected)"
|
||||
),
|
||||
),
|
||||
(
|
||||
"source",
|
||||
models.CharField(
|
||||
default="web",
|
||||
help_text="Source of submission (web, api, mobile, etc.)",
|
||||
max_length=50,
|
||||
),
|
||||
),
|
||||
(
|
||||
"ip_address",
|
||||
models.GenericIPAddressField(
|
||||
blank=True, help_text="IP address of submitter", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"user_agent",
|
||||
models.CharField(
|
||||
blank=True, help_text="User agent of submitter", max_length=500
|
||||
),
|
||||
),
|
||||
(
|
||||
"metadata",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
default=dict,
|
||||
help_text="Additional submission metadata",
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity_type",
|
||||
models.ForeignKey(
|
||||
help_text="Type of entity being modified",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator currently reviewing this submission",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="locked_submissions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator who reviewed this submission",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="reviewed_submissions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"user",
|
||||
models.ForeignKey(
|
||||
help_text="User who submitted the changes",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="submissions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Content Submission",
|
||||
"verbose_name_plural": "Content Submissions",
|
||||
"db_table": "content_submissions",
|
||||
"ordering": ["-created"],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="SubmissionItem",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"field_name",
|
||||
models.CharField(
|
||||
help_text="Name of the field being changed", max_length=100
|
||||
),
|
||||
),
|
||||
(
|
||||
"field_label",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
help_text="Human-readable field label",
|
||||
max_length=200,
|
||||
),
|
||||
),
|
||||
(
|
||||
"old_value",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="Previous value (null for new fields)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"new_value",
|
||||
models.JSONField(
|
||||
blank=True,
|
||||
help_text="New value (null for deletions)",
|
||||
null=True,
|
||||
),
|
||||
),
|
||||
(
|
||||
"status",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("pending", "Pending"),
|
||||
("approved", "Approved"),
|
||||
("rejected", "Rejected"),
|
||||
],
|
||||
db_index=True,
|
||||
default="pending",
|
||||
help_text="Status of this individual item",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_at",
|
||||
models.DateTimeField(
|
||||
blank=True, help_text="When this item was reviewed", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"rejection_reason",
|
||||
models.TextField(
|
||||
blank=True, help_text="Reason for rejecting this specific item"
|
||||
),
|
||||
),
|
||||
(
|
||||
"change_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("add", "Add"),
|
||||
("modify", "Modify"),
|
||||
("remove", "Remove"),
|
||||
],
|
||||
default="modify",
|
||||
help_text="Type of change",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"is_required",
|
||||
models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether this change is required for the submission",
|
||||
),
|
||||
),
|
||||
(
|
||||
"order",
|
||||
models.IntegerField(
|
||||
default=0, help_text="Display order within submission"
|
||||
),
|
||||
),
|
||||
(
|
||||
"reviewed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Moderator who reviewed this item",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="reviewed_items",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.ForeignKey(
|
||||
help_text="Parent submission",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="items",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Submission Item",
|
||||
"verbose_name_plural": "Submission Items",
|
||||
"db_table": "submission_items",
|
||||
"ordering": ["submission", "order", "created"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["submission", "status"],
|
||||
name="submission__submiss_71cf2f_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["status"], name="submission__status_61deb1_idx"
|
||||
),
|
||||
],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name="ModerationLock",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_at",
|
||||
models.DateTimeField(
|
||||
auto_now_add=True, help_text="When the lock was acquired"
|
||||
),
|
||||
),
|
||||
("expires_at", models.DateTimeField(help_text="When the lock expires")),
|
||||
(
|
||||
"is_active",
|
||||
models.BooleanField(
|
||||
db_index=True,
|
||||
default=True,
|
||||
help_text="Whether the lock is currently active",
|
||||
),
|
||||
),
|
||||
(
|
||||
"released_at",
|
||||
models.DateTimeField(
|
||||
blank=True, help_text="When the lock was released", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"locked_by",
|
||||
models.ForeignKey(
|
||||
help_text="User who holds the lock",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="moderation_locks",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.OneToOneField(
|
||||
help_text="Submission that is locked",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="lock_record",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Moderation Lock",
|
||||
"verbose_name_plural": "Moderation Locks",
|
||||
"db_table": "moderation_locks",
|
||||
"ordering": ["-locked_at"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["is_active", "expires_at"],
|
||||
name="moderation__is_acti_ecf427_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["locked_by", "is_active"],
|
||||
name="moderation__locked__d5cdfb_idx",
|
||||
),
|
||||
],
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["status", "created"], name="content_sub_status_a8d552_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["user", "status"], name="content_sub_user_id_019595_idx"
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["entity_type", "entity_id"],
|
||||
name="content_sub_entity__d0f313_idx",
|
||||
),
|
||||
),
|
||||
migrations.AddIndex(
|
||||
model_name="contentsubmission",
|
||||
index=models.Index(
|
||||
fields=["locked_by", "locked_at"], name="content_sub_locked__feb2b3_idx"
|
||||
),
|
||||
),
|
||||
]
|
||||
django/apps/moderation/migrations/__init__.py (new file, empty)
django/apps/moderation/models.py (new file, 477 lines)
@@ -0,0 +1,477 @@
|
||||
"""
|
||||
Moderation models for ThrillWiki.
|
||||
|
||||
This module implements the content submission and approval workflow with:
|
||||
- State machine using django-fsm
|
||||
- Atomic transaction support for approvals
|
||||
- 15-minute review lock mechanism
|
||||
- Selective approval of individual items
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from django.db import models
|
||||
from django.utils import timezone
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django_fsm import FSMField, transition
|
||||
from apps.core.models import BaseModel
|
||||
|
||||
|
||||
class ContentSubmission(BaseModel):
|
||||
"""
|
||||
Main submission model with FSM state machine.
|
||||
|
||||
Represents a batch of changes submitted by a user for moderation.
|
||||
Can contain multiple SubmissionItem objects representing individual field changes.
|
||||
"""
|
||||
|
||||
# State choices for FSM
|
||||
STATE_DRAFT = 'draft'
|
||||
STATE_PENDING = 'pending'
|
||||
STATE_REVIEWING = 'reviewing'
|
||||
STATE_APPROVED = 'approved'
|
||||
STATE_REJECTED = 'rejected'
|
||||
|
||||
STATE_CHOICES = [
|
||||
(STATE_DRAFT, 'Draft'),
|
||||
(STATE_PENDING, 'Pending Review'),
|
||||
(STATE_REVIEWING, 'Under Review'),
|
||||
(STATE_APPROVED, 'Approved'),
|
||||
(STATE_REJECTED, 'Rejected'),
|
||||
]
|
||||
|
||||
# FSM State field
|
||||
status = FSMField(
|
||||
max_length=20,
|
||||
choices=STATE_CHOICES,
|
||||
default=STATE_DRAFT,
|
||||
db_index=True,
|
||||
protected=True, # Prevents direct status changes
|
||||
help_text="Current submission state (managed by FSM)"
|
||||
)
|
||||
|
||||
# Submitter
|
||||
user = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='submissions',
|
||||
help_text="User who submitted the changes"
|
||||
)
|
||||
|
||||
# Entity being modified (generic relation)
|
||||
entity_type = models.ForeignKey(
|
||||
ContentType,
|
||||
on_delete=models.CASCADE,
|
||||
help_text="Type of entity being modified"
|
||||
)
|
||||
entity_id = models.UUIDField(
|
||||
help_text="ID of the entity being modified"
|
||||
)
|
||||
entity = GenericForeignKey('entity_type', 'entity_id')
|
||||
|
||||
# Submission type
|
||||
SUBMISSION_TYPE_CHOICES = [
|
||||
('create', 'Create'),
|
||||
('update', 'Update'),
|
||||
('delete', 'Delete'),
|
||||
]
|
||||
|
||||
submission_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=SUBMISSION_TYPE_CHOICES,
|
||||
db_index=True,
|
||||
help_text="Type of operation (create, update, delete)"
|
||||
)
|
||||
|
||||
# Submission details
|
||||
title = models.CharField(
|
||||
max_length=255,
|
||||
help_text="Brief description of changes"
|
||||
)
|
||||
description = models.TextField(
|
||||
blank=True,
|
||||
help_text="Detailed description of changes"
|
||||
)
|
||||
|
||||
# Review lock mechanism (15-minute lock)
|
||||
locked_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='locked_submissions',
|
||||
help_text="Moderator currently reviewing this submission"
|
||||
)
|
||||
locked_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the submission was locked for review"
|
||||
)
|
||||
|
||||
# Review details
|
||||
reviewed_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='reviewed_submissions',
|
||||
help_text="Moderator who reviewed this submission"
|
||||
)
|
||||
reviewed_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the submission was reviewed"
|
||||
)
|
||||
rejection_reason = models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for rejection (if rejected)"
|
||||
)
|
||||
|
||||
# Metadata
|
||||
source = models.CharField(
|
||||
max_length=50,
|
||||
default='web',
|
||||
help_text="Source of submission (web, api, mobile, etc.)"
|
||||
)
|
||||
ip_address = models.GenericIPAddressField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="IP address of submitter"
|
||||
)
|
||||
user_agent = models.CharField(
|
||||
max_length=500,
|
||||
blank=True,
|
||||
help_text="User agent of submitter"
|
||||
)
|
||||
|
||||
# Additional data
|
||||
metadata = models.JSONField(
|
||||
default=dict,
|
||||
blank=True,
|
||||
help_text="Additional submission metadata"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'content_submissions'
|
||||
ordering = ['-created']
|
||||
indexes = [
|
||||
models.Index(fields=['status', 'created']),
|
||||
models.Index(fields=['user', 'status']),
|
||||
models.Index(fields=['entity_type', 'entity_id']),
|
||||
models.Index(fields=['locked_by', 'locked_at']),
|
||||
]
|
||||
verbose_name = 'Content Submission'
|
||||
verbose_name_plural = 'Content Submissions'
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.get_submission_type_display()} - {self.title} ({self.get_status_display()})"
|
||||
|
||||
# FSM Transitions
|
||||
|
||||
@transition(field=status, source=STATE_DRAFT, target=STATE_PENDING)
|
||||
def submit(self):
|
||||
"""Submit for review - moves from draft to pending"""
|
||||
pass
|
||||
|
||||
@transition(field=status, source=STATE_PENDING, target=STATE_REVIEWING)
|
||||
def start_review(self, reviewer):
|
||||
"""Lock submission for review"""
|
||||
self.locked_by = reviewer
|
||||
self.locked_at = timezone.now()
|
||||
|
||||
@transition(field=status, source=STATE_REVIEWING, target=STATE_APPROVED)
|
||||
def approve(self, reviewer):
|
||||
"""Approve submission"""
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.locked_by = None
|
||||
self.locked_at = None
|
||||
|
||||
@transition(field=status, source=STATE_REVIEWING, target=STATE_REJECTED)
|
||||
def reject(self, reviewer, reason):
|
||||
"""Reject submission"""
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.rejection_reason = reason
|
||||
self.locked_by = None
|
||||
self.locked_at = None
|
||||
|
||||
@transition(field=status, source=STATE_REVIEWING, target=STATE_PENDING)
|
||||
def unlock(self):
|
||||
"""Unlock submission (timeout or manual unlock)"""
|
||||
self.locked_by = None
|
||||
self.locked_at = None
|
||||
|
||||
# Helper methods
|
||||
|
||||
def is_locked(self):
|
||||
"""Check if submission is currently locked"""
|
||||
if not self.locked_by or not self.locked_at:
|
||||
return False
|
||||
|
||||
# Check if lock has expired (15 minutes)
|
||||
lock_duration = timezone.now() - self.locked_at
|
||||
return lock_duration.total_seconds() < 15 * 60
|
||||
|
||||
def can_review(self, user):
|
||||
"""Check if user can review this submission"""
|
||||
if self.status != self.STATE_REVIEWING:
|
||||
return False
|
||||
|
||||
# Check if locked by another user
|
||||
if self.locked_by and self.locked_by != user:
|
||||
return not self.is_locked()
|
||||
|
||||
return True
|
||||
|
||||
def get_items_count(self):
|
||||
"""Get count of submission items"""
|
||||
return self.items.count()
|
||||
|
||||
def get_approved_items_count(self):
|
||||
"""Get count of approved items"""
|
||||
return self.items.filter(status='approved').count()
|
||||
|
||||
def get_rejected_items_count(self):
|
||||
"""Get count of rejected items"""
|
||||
return self.items.filter(status='rejected').count()
|
||||
|
||||
|
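Because status is a protected FSMField, state changes must go through the transition methods rather than direct assignment. A minimal walk-through of the happy path is sketched below; the user, moderator and park objects are assumed to exist already and are not part of this diff.

# Illustrative only; user, moderator and park are assumptions.
from django.contrib.contenttypes.models import ContentType

submission = ContentSubmission.objects.create(
    user=user,
    entity_type=ContentType.objects.get_for_model(park),
    entity_id=park.id,
    submission_type='update',
    title='Fix park opening year',
)

submission.submit()                  # draft -> pending
submission.save()

submission.start_review(moderator)   # pending -> reviewing, sets the 15-minute lock fields
submission.save()

submission.approve(moderator)        # reviewing -> approved, clears the lock
submission.save()

# Direct writes are blocked because the field is protected:
# submission.status = 'approved'     # raises AttributeError via django-fsm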
||||
class SubmissionItem(BaseModel):
|
||||
"""
|
||||
Individual change within a submission.
|
||||
|
||||
Represents a single field change (or entity creation/deletion).
|
||||
Supports selective approval - each item can be approved/rejected independently.
|
||||
"""
|
||||
|
||||
STATUS_CHOICES = [
|
||||
('pending', 'Pending'),
|
||||
('approved', 'Approved'),
|
||||
('rejected', 'Rejected'),
|
||||
]
|
||||
|
||||
# Parent submission
|
||||
submission = models.ForeignKey(
|
||||
ContentSubmission,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='items',
|
||||
help_text="Parent submission"
|
||||
)
|
||||
|
||||
# Item details
|
||||
field_name = models.CharField(
|
||||
max_length=100,
|
||||
help_text="Name of the field being changed"
|
||||
)
|
||||
field_label = models.CharField(
|
||||
max_length=200,
|
||||
blank=True,
|
||||
help_text="Human-readable field label"
|
||||
)
|
||||
|
||||
# Values (stored as JSON for flexibility)
|
||||
old_value = models.JSONField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="Previous value (null for new fields)"
|
||||
)
|
||||
new_value = models.JSONField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="New value (null for deletions)"
|
||||
)
|
||||
|
||||
# Item status (for selective approval)
|
||||
status = models.CharField(
|
||||
max_length=20,
|
||||
choices=STATUS_CHOICES,
|
||||
default='pending',
|
||||
db_index=True,
|
||||
help_text="Status of this individual item"
|
||||
)
|
||||
|
||||
# Review details (for selective approval)
|
||||
reviewed_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='reviewed_items',
|
||||
help_text="Moderator who reviewed this item"
|
||||
)
|
||||
reviewed_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When this item was reviewed"
|
||||
)
|
||||
rejection_reason = models.TextField(
|
||||
blank=True,
|
||||
help_text="Reason for rejecting this specific item"
|
||||
)
|
||||
|
||||
# Metadata
|
||||
change_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=[
|
||||
('add', 'Add'),
|
||||
('modify', 'Modify'),
|
||||
('remove', 'Remove'),
|
||||
],
|
||||
default='modify',
|
||||
help_text="Type of change"
|
||||
)
|
||||
|
||||
is_required = models.BooleanField(
|
||||
default=False,
|
||||
help_text="Whether this change is required for the submission"
|
||||
)
|
||||
|
||||
order = models.IntegerField(
|
||||
default=0,
|
||||
help_text="Display order within submission"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'submission_items'
|
||||
ordering = ['submission', 'order', 'created']
|
||||
indexes = [
|
||||
models.Index(fields=['submission', 'status']),
|
||||
models.Index(fields=['status']),
|
||||
]
|
||||
verbose_name = 'Submission Item'
|
||||
verbose_name_plural = 'Submission Items'
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.submission.title} - {self.field_label or self.field_name}"
|
||||
|
||||
def approve(self, reviewer):
|
||||
"""Approve this item"""
|
||||
self.status = 'approved'
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.save(update_fields=['status', 'reviewed_by', 'reviewed_at', 'modified'])
|
||||
|
||||
def reject(self, reviewer, reason=''):
|
||||
"""Reject this item"""
|
||||
self.status = 'rejected'
|
||||
self.reviewed_by = reviewer
|
||||
self.reviewed_at = timezone.now()
|
||||
self.rejection_reason = reason
|
||||
self.save(update_fields=['status', 'reviewed_by', 'reviewed_at', 'rejection_reason', 'modified'])
|
||||
|
||||
def get_display_value(self, value):
|
||||
"""Get human-readable display value"""
|
||||
if value is None:
|
||||
return 'None'
|
||||
if isinstance(value, bool):
|
||||
return 'Yes' if value else 'No'
|
||||
if isinstance(value, (list, dict)):
|
||||
return str(value)
|
||||
return str(value)
|
||||
|
||||
@property
|
||||
def old_value_display(self):
|
||||
"""Human-readable old value"""
|
||||
return self.get_display_value(self.old_value)
|
||||
|
||||
@property
|
||||
def new_value_display(self):
|
||||
"""Human-readable new value"""
|
||||
return self.get_display_value(self.new_value)
|
||||
|
||||
|
||||
class ModerationLock(BaseModel):
|
||||
"""
|
||||
Lock record for submissions under review.
|
||||
|
||||
Provides additional tracking beyond the ContentSubmission lock fields.
|
||||
Helps with monitoring and debugging lock issues.
|
||||
"""
|
||||
|
||||
submission = models.OneToOneField(
|
||||
ContentSubmission,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='lock_record',
|
||||
help_text="Submission that is locked"
|
||||
)
|
||||
|
||||
locked_by = models.ForeignKey(
|
||||
'users.User',
|
||||
on_delete=models.CASCADE,
|
||||
related_name='moderation_locks',
|
||||
help_text="User who holds the lock"
|
||||
)
|
||||
|
||||
locked_at = models.DateTimeField(
|
||||
auto_now_add=True,
|
||||
help_text="When the lock was acquired"
|
||||
)
|
||||
|
||||
expires_at = models.DateTimeField(
|
||||
help_text="When the lock expires"
|
||||
)
|
||||
|
||||
is_active = models.BooleanField(
|
||||
default=True,
|
||||
db_index=True,
|
||||
help_text="Whether the lock is currently active"
|
||||
)
|
||||
|
||||
released_at = models.DateTimeField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="When the lock was released"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
db_table = 'moderation_locks'
|
||||
ordering = ['-locked_at']
|
||||
indexes = [
|
||||
models.Index(fields=['is_active', 'expires_at']),
|
||||
models.Index(fields=['locked_by', 'is_active']),
|
||||
]
|
||||
verbose_name = 'Moderation Lock'
|
||||
verbose_name_plural = 'Moderation Locks'
|
||||
|
||||
def __str__(self):
|
||||
return f"Lock on {self.submission.title} by {self.locked_by.email}"
|
||||
|
||||
def is_expired(self):
|
||||
"""Check if lock has expired"""
|
||||
return timezone.now() > self.expires_at
|
||||
|
||||
def release(self):
|
||||
"""Release the lock"""
|
||||
self.is_active = False
|
||||
self.released_at = timezone.now()
|
||||
self.save(update_fields=['is_active', 'released_at', 'modified'])
|
||||
|
||||
def extend(self, minutes=15):
|
||||
"""Extend the lock duration"""
|
||||
from datetime import timedelta
|
||||
self.expires_at = timezone.now() + timedelta(minutes=minutes)
|
||||
self.save(update_fields=['expires_at', 'modified'])
|
||||
|
||||
@classmethod
|
||||
def cleanup_expired(cls):
|
||||
"""Cleanup expired locks (for periodic task)"""
|
||||
expired_locks = cls.objects.filter(
|
||||
is_active=True,
|
||||
expires_at__lt=timezone.now()
|
||||
)
|
||||
|
||||
count = 0
|
||||
for lock in expired_locks:
|
||||
# Release lock
|
||||
lock.release()
|
||||
|
||||
# Unlock submission if still in reviewing state
|
||||
submission = lock.submission
|
||||
if submission.status == ContentSubmission.STATE_REVIEWING:
|
||||
submission.unlock()
|
||||
submission.save()
|
||||
|
||||
count += 1
|
||||
|
||||
return count
|
||||
|
||||
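cleanup_expired is written to be driven by a periodic task, but no such task appears in this diff. One possible wrapper, assuming a Celery worker is available, is sketched below.

# Sketch of a periodic wrapper around ModerationLock.cleanup_expired; not part of this diff.
from celery import shared_task


@shared_task
def release_expired_moderation_locks():
    """Release expired review locks and report how many submissions were unlocked."""
    from apps.moderation.models import ModerationLock

    released = ModerationLock.cleanup_expired()
    return {'released': released}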
django/apps/moderation/services.py (new file, 587 lines)
@@ -0,0 +1,587 @@
|
||||
"""
|
||||
Moderation services for ThrillWiki.
|
||||
|
||||
This module provides business logic for the content moderation workflow:
|
||||
- Creating submissions
|
||||
- Starting reviews with locks
|
||||
- Approving submissions with atomic transactions
|
||||
- Selective approval of individual items
|
||||
- Rejecting submissions
|
||||
- Unlocking expired submissions
|
||||
"""
|
||||
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import ValidationError, PermissionDenied
|
||||
|
||||
from apps.moderation.models import ContentSubmission, SubmissionItem, ModerationLock
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ModerationService:
|
||||
"""
|
||||
Service class for moderation operations.
|
||||
|
||||
All public methods use atomic transactions to ensure data integrity.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def create_submission(
|
||||
user,
|
||||
entity,
|
||||
submission_type,
|
||||
title,
|
||||
description='',
|
||||
items_data=None,
|
||||
metadata=None,
|
||||
auto_submit=True,
|
||||
**kwargs
|
||||
):
|
||||
"""
|
||||
Create a new content submission with items.
|
||||
|
||||
Args:
|
||||
user: User creating the submission
|
||||
entity: Entity being modified (Park, Ride, Company, etc.)
|
||||
submission_type: 'create', 'update', or 'delete'
|
||||
title: Brief description of changes
|
||||
description: Detailed description (optional)
|
||||
items_data: List of dicts with item details:
|
||||
[
|
||||
{
|
||||
'field_name': 'name',
|
||||
'field_label': 'Park Name',
|
||||
'old_value': 'Old Name',
|
||||
'new_value': 'New Name',
|
||||
'change_type': 'modify',
|
||||
'is_required': False,
|
||||
'order': 0
|
||||
},
|
||||
...
|
||||
]
|
||||
metadata: Additional metadata dict
|
||||
auto_submit: Whether to automatically submit (move to pending state)
|
||||
**kwargs: Additional submission fields (source, ip_address, user_agent)
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If validation fails
|
||||
"""
|
||||
# Get ContentType for entity
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
# Create submission
|
||||
submission = ContentSubmission.objects.create(
|
||||
user=user,
|
||||
entity_type=entity_type,
|
||||
entity_id=entity.id,
|
||||
submission_type=submission_type,
|
||||
title=title,
|
||||
description=description,
|
||||
metadata=metadata or {},
|
||||
source=kwargs.get('source', 'web'),
|
||||
ip_address=kwargs.get('ip_address'),
|
||||
user_agent=kwargs.get('user_agent', '')
|
||||
)
|
||||
|
||||
# Create submission items
|
||||
if items_data:
|
||||
for item_data in items_data:
|
||||
SubmissionItem.objects.create(
|
||||
submission=submission,
|
||||
field_name=item_data['field_name'],
|
||||
field_label=item_data.get('field_label', item_data['field_name']),
|
||||
old_value=item_data.get('old_value'),
|
||||
new_value=item_data.get('new_value'),
|
||||
change_type=item_data.get('change_type', 'modify'),
|
||||
is_required=item_data.get('is_required', False),
|
||||
order=item_data.get('order', 0)
|
||||
)
|
||||
|
||||
# Auto-submit if requested
|
||||
if auto_submit:
|
||||
submission.submit()
|
||||
submission.save()
|
||||
|
||||
return submission
|
||||
|
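A hedged example of calling create_submission from a view or API handler follows; the request, the park instance and the field values are assumptions used only for illustration.

# Illustrative call; request, park and the values below are assumptions.
submission = ModerationService.create_submission(
    user=request.user,
    entity=park,
    submission_type='update',
    title='Correct park name',
    items_data=[
        {
            'field_name': 'name',
            'field_label': 'Park Name',
            'old_value': 'Old Name',
            'new_value': 'New Name',
            'change_type': 'modify',
            'order': 0,
        },
    ],
    source='api',
    ip_address=request.META.get('REMOTE_ADDR'),
    user_agent=request.META.get('HTTP_USER_AGENT', ''),
)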
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def start_review(submission_id, reviewer):
|
||||
"""
|
||||
Start reviewing a submission (lock it).
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User starting the review
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be reviewed
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check if user has permission to review
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission is in correct state
|
||||
if submission.status != ContentSubmission.STATE_PENDING:
|
||||
raise ValidationError(f"Submission must be pending to start review (current: {submission.status})")
|
||||
|
||||
# Check if already locked by another user
|
||||
if submission.locked_by and submission.locked_by != reviewer:
|
||||
if submission.is_locked():
|
||||
raise ValidationError(f"Submission is locked by {submission.locked_by.email}")
|
||||
|
||||
# Start review (FSM transition)
|
||||
submission.start_review(reviewer)
|
||||
submission.save()
|
||||
|
||||
# Create lock record
|
||||
expires_at = timezone.now() + timedelta(minutes=15)
|
||||
ModerationLock.objects.update_or_create(
|
||||
submission=submission,
|
||||
defaults={
|
||||
'locked_by': reviewer,
|
||||
'expires_at': expires_at,
|
||||
'is_active': True,
|
||||
'released_at': None
|
||||
}
|
||||
)
|
||||
|
||||
return submission
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def approve_submission(submission_id, reviewer):
|
||||
"""
|
||||
Approve an entire submission and apply all changes.
|
||||
|
||||
This method uses atomic transactions to ensure all-or-nothing behavior.
|
||||
If any part fails, the entire operation is rolled back.
|
||||
|
||||
Args:
|
||||
submission_id: UUID of submission
|
||||
reviewer: User approving the submission
|
||||
|
||||
Returns:
|
||||
ContentSubmission instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If submission cannot be approved
|
||||
PermissionDenied: If user lacks permission
|
||||
"""
|
||||
submission = ContentSubmission.objects.select_for_update().get(id=submission_id)
|
||||
|
||||
# Check permission
|
||||
if not ModerationService._can_moderate(reviewer):
|
||||
raise PermissionDenied("User does not have moderation permission")
|
||||
|
||||
# Check if submission can be reviewed
|
||||
if not submission.can_review(reviewer):
|
||||
raise ValidationError("Submission cannot be reviewed at this time")
|
||||
|
||||
# Apply all changes
|
||||
entity = submission.entity
|
||||
if not entity:
|
||||
raise ValidationError("Entity no longer exists")
|
||||
|
||||
# Get all pending items
|
||||
items = submission.items.filter(status='pending')
|
||||
|
||||
for item in items:
|
||||
# Apply change to entity
|
||||
if item.change_type in ['add', 'modify']:
|
||||
setattr(entity, item.field_name, item.new_value)
|
||||
elif item.change_type == 'remove':
|
||||
setattr(entity, item.field_name, None)
|
||||
|
||||
# Mark item as approved
|
||||
item.approve(reviewer)
|
||||
|
||||
# Save entity (this will trigger versioning through lifecycle hooks)
|
||||
entity.save()
|
||||
|
||||
# Approve submission (FSM transition)
|
||||
submission.approve(reviewer)
|
||||
submission.save()
|
||||
|
||||
# Release lock
|
||||
try:
|
||||
lock = ModerationLock.objects.get(submission=submission, is_active=True)
|
||||
lock.release()
|
||||
except ModerationLock.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Send notification email asynchronously
|
||||
try:
|
||||
from apps.moderation.tasks import send_moderation_notification
|
||||
send_moderation_notification.delay(str(submission.id), 'approved')
|
||||
except Exception as e:
|
||||
# Don't fail the approval if email fails to queue
|
||||
logger.warning(f"Failed to queue approval notification: {str(e)}")
|
||||
|
||||
return submission
|
||||
|
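Taken together with start_review, a full moderation pass through these APIs might look like the sketch below; the moderator object and submission_id are assumptions.

# Illustrative end-to-end flow; moderator and submission_id are assumptions.
submission = ModerationService.start_review(submission_id, moderator)       # locks for 15 minutes
submission = ModerationService.approve_submission(submission_id, moderator)
assert submission.status == ContentSubmission.STATE_APPROVED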
||||
    @staticmethod
    @transaction.atomic
    def approve_selective(submission_id, reviewer, item_ids):
        """
        Approve only specific items in a submission (selective approval).

        This allows moderators to approve some changes while rejecting others.
        Uses atomic transactions for data integrity.

        Args:
            submission_id: UUID of submission
            reviewer: User approving the items
            item_ids: List of item UUIDs to approve

        Returns:
            dict with counts: {'approved': N, 'total': M}

        Raises:
            ValidationError: If submission cannot be reviewed
            PermissionDenied: If user lacks permission
        """
        submission = ContentSubmission.objects.select_for_update().get(id=submission_id)

        # Check permission
        if not ModerationService._can_moderate(reviewer):
            raise PermissionDenied("User does not have moderation permission")

        # Check if submission can be reviewed
        if not submission.can_review(reviewer):
            raise ValidationError("Submission cannot be reviewed at this time")

        # Get entity
        entity = submission.entity
        if not entity:
            raise ValidationError("Entity no longer exists")

        # Get items to approve
        items_to_approve = submission.items.filter(
            id__in=item_ids,
            status='pending'
        )

        approved_count = 0
        for item in items_to_approve:
            # Apply change to entity
            if item.change_type in ['add', 'modify']:
                setattr(entity, item.field_name, item.new_value)
            elif item.change_type == 'remove':
                setattr(entity, item.field_name, None)

            # Mark item as approved
            item.approve(reviewer)
            approved_count += 1

        # Save entity if any changes were made
        if approved_count > 0:
            entity.save()

        # Check if all items are now reviewed
        pending_count = submission.items.filter(status='pending').count()

        if pending_count == 0:
            # All items reviewed - mark submission as approved
            submission.approve(reviewer)
            submission.save()

            # Release lock
            try:
                lock = ModerationLock.objects.get(submission=submission, is_active=True)
                lock.release()
            except ModerationLock.DoesNotExist:
                pass

        return {
            'approved': approved_count,
            'total': submission.items.count(),
            'pending': pending_count,
            'submission_approved': pending_count == 0
        }

    @staticmethod
    @transaction.atomic
    def reject_submission(submission_id, reviewer, reason):
        """
        Reject an entire submission.

        Args:
            submission_id: UUID of submission
            reviewer: User rejecting the submission
            reason: Reason for rejection

        Returns:
            ContentSubmission instance

        Raises:
            ValidationError: If submission cannot be rejected
            PermissionDenied: If user lacks permission
        """
        submission = ContentSubmission.objects.select_for_update().get(id=submission_id)

        # Check permission
        if not ModerationService._can_moderate(reviewer):
            raise PermissionDenied("User does not have moderation permission")

        # Check if submission can be reviewed
        if not submission.can_review(reviewer):
            raise ValidationError("Submission cannot be reviewed at this time")

        # Reject all pending items
        items = submission.items.filter(status='pending')
        for item in items:
            item.reject(reviewer, reason)

        # Reject submission (FSM transition)
        submission.reject(reviewer, reason)
        submission.save()

        # Release lock
        try:
            lock = ModerationLock.objects.get(submission=submission, is_active=True)
            lock.release()
        except ModerationLock.DoesNotExist:
            pass

        # Send notification email asynchronously
        try:
            from apps.moderation.tasks import send_moderation_notification
            send_moderation_notification.delay(str(submission.id), 'rejected')
        except Exception as e:
            # Don't fail the rejection if email fails to queue
            logger.warning(f"Failed to queue rejection notification: {str(e)}")

        return submission

    @staticmethod
    @transaction.atomic
    def reject_selective(submission_id, reviewer, item_ids, reason=''):
        """
        Reject specific items in a submission.

        Args:
            submission_id: UUID of submission
            reviewer: User rejecting the items
            item_ids: List of item UUIDs to reject
            reason: Reason for rejection (optional)

        Returns:
            dict with counts: {'rejected': N, 'total': M}

        Raises:
            ValidationError: If submission cannot be reviewed
            PermissionDenied: If user lacks permission
        """
        submission = ContentSubmission.objects.select_for_update().get(id=submission_id)

        # Check permission
        if not ModerationService._can_moderate(reviewer):
            raise PermissionDenied("User does not have moderation permission")

        # Check if submission can be reviewed
        if not submission.can_review(reviewer):
            raise ValidationError("Submission cannot be reviewed at this time")

        # Get items to reject
        items_to_reject = submission.items.filter(
            id__in=item_ids,
            status='pending'
        )

        rejected_count = 0
        for item in items_to_reject:
            item.reject(reviewer, reason)
            rejected_count += 1

        # Check if all items are now reviewed
        pending_count = submission.items.filter(status='pending').count()

        if pending_count == 0:
            # All items reviewed
            approved_count = submission.items.filter(status='approved').count()

            if approved_count > 0:
                # Some items approved - mark submission as approved
                submission.approve(reviewer)
                submission.save()
            else:
                # All items rejected - mark submission as rejected
                submission.reject(reviewer, "All items rejected")
                submission.save()

            # Release lock
            try:
                lock = ModerationLock.objects.get(submission=submission, is_active=True)
                lock.release()
            except ModerationLock.DoesNotExist:
                pass

        return {
            'rejected': rejected_count,
            'total': submission.items.count(),
            'pending': pending_count,
            'submission_complete': pending_count == 0
        }

    @staticmethod
    @transaction.atomic
    def unlock_submission(submission_id):
        """
        Manually unlock a submission.

        Args:
            submission_id: UUID of submission

        Returns:
            ContentSubmission instance
        """
        submission = ContentSubmission.objects.select_for_update().get(id=submission_id)

        if submission.status == ContentSubmission.STATE_REVIEWING:
            submission.unlock()
            submission.save()

        # Release lock record
        try:
            lock = ModerationLock.objects.get(submission=submission, is_active=True)
            lock.release()
        except ModerationLock.DoesNotExist:
            pass

        return submission

    @staticmethod
    def cleanup_expired_locks():
        """
        Cleanup expired locks and unlock submissions.

        This should be called periodically (e.g., every 5 minutes via Celery).

        Returns:
            int: Number of locks cleaned up
        """
        return ModerationLock.cleanup_expired()

    @staticmethod
    def get_queue(status=None, user=None, limit=50, offset=0):
        """
        Get moderation queue with filters.

        Args:
            status: Filter by status (optional)
            user: Filter by submitter (optional)
            limit: Maximum results
            offset: Pagination offset

        Returns:
            QuerySet of ContentSubmission objects
        """
        queryset = ContentSubmission.objects.select_related(
            'user',
            'entity_type',
            'locked_by',
            'reviewed_by'
        ).prefetch_related('items')

        if status:
            queryset = queryset.filter(status=status)

        if user:
            queryset = queryset.filter(user=user)

        return queryset[offset:offset + limit]

    @staticmethod
    def get_submission_details(submission_id):
        """
        Get full submission details with all items.

        Args:
            submission_id: UUID of submission

        Returns:
            ContentSubmission instance with prefetched items
        """
        return ContentSubmission.objects.select_related(
            'user',
            'entity_type',
            'locked_by',
            'reviewed_by'
        ).prefetch_related(
            'items',
            'items__reviewed_by'
        ).get(id=submission_id)

    @staticmethod
    def _can_moderate(user):
        """
        Check if user has moderation permission.

        Args:
            user: User to check

        Returns:
            bool: True if user can moderate
        """
        if not user or not user.is_authenticated:
            return False

        # Check if user is superuser
        if user.is_superuser:
            return True

        # Check if user has moderator or admin role
        try:
            return user.role.is_moderator
        except Exception:
            # The user may have no role relation assigned
            return False

    @staticmethod
    @transaction.atomic
    def delete_submission(submission_id, user):
        """
        Delete a submission (only if draft or by owner).

        Args:
            submission_id: UUID of submission
            user: User attempting to delete

        Returns:
            bool: True if deleted

        Raises:
            PermissionDenied: If user cannot delete
            ValidationError: If submission cannot be deleted
        """
        submission = ContentSubmission.objects.select_for_update().get(id=submission_id)

        # Check permission
        is_owner = submission.user == user
        is_moderator = ModerationService._can_moderate(user)

        if not (is_owner or is_moderator):
            raise PermissionDenied("Only the owner or a moderator can delete this submission")

        # Check state
        if submission.status not in [ContentSubmission.STATE_DRAFT, ContentSubmission.STATE_PENDING]:
            if not is_moderator:
                raise ValidationError("Only moderators can delete submissions under review")

        # Delete submission (cascades to items and lock)
        submission.delete()
        return True
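The selective-approval path above returns a count dict rather than the submission itself, so an API layer can report partial progress back to the client. A minimal sketch of how an endpoint might drive it, assuming a django-ninja router and the jwt_auth/require_moderator helpers added in apps/users/permissions.py later in this commit; the route path, payload schema, and endpoint name are illustrative, not part of the commit:

# Illustrative only: endpoint name, path, and payload schema are assumptions.
# ModerationService, jwt_auth, and require_moderator come from this commit.
from ninja import Router, Schema
from apps.moderation.services import ModerationService
from apps.users.permissions import jwt_auth, require_moderator

router = Router(auth=jwt_auth)


class SelectiveApprovePayload(Schema):
    item_ids: list[str]


@router.post("/submissions/{submission_id}/approve-selective")
@require_moderator
def approve_selective_endpoint(request, submission_id: str, payload: SelectiveApprovePayload):
    # request.auth is the authenticated User set by JWTAuth
    result = ModerationService.approve_selective(
        submission_id=submission_id,
        reviewer=request.auth,
        item_ids=payload.item_ids,
    )
    # e.g. {'approved': 2, 'total': 5, 'pending': 3, 'submission_approved': False}
    return result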
304
django/apps/moderation/tasks.py
Normal file
@@ -0,0 +1,304 @@
"""
Background tasks for moderation workflows and notifications.
"""

import logging
from celery import shared_task
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.conf import settings
from django.utils import timezone

logger = logging.getLogger(__name__)


@shared_task(bind=True, max_retries=3, default_retry_delay=60)
def send_moderation_notification(self, submission_id, status):
    """
    Send email notification when a submission is approved or rejected.

    Args:
        submission_id: UUID of the ContentSubmission
        status: 'approved' or 'rejected'

    Returns:
        str: Notification result message
    """
    from apps.moderation.models import ContentSubmission

    try:
        submission = ContentSubmission.objects.select_related(
            'user', 'reviewed_by', 'entity_type'
        ).prefetch_related('items').get(id=submission_id)

        # Get user's submission count
        user_submission_count = ContentSubmission.objects.filter(
            user=submission.user
        ).count()

        # Prepare email context
        context = {
            'submission': submission,
            'status': status,
            'user': submission.user,
            'user_submission_count': user_submission_count,
            'submission_url': f"{settings.SITE_URL}/submissions/{submission.id}/",
            'site_url': settings.SITE_URL,
        }

        # Choose template based on status
        if status == 'approved':
            template = 'emails/moderation_approved.html'
            subject = f'✅ Submission Approved: {submission.title}'
        else:
            template = 'emails/moderation_rejected.html'
            subject = f'⚠️ Submission Requires Changes: {submission.title}'

        # Render HTML email
        html_message = render_to_string(template, context)

        # Send email
        send_mail(
            subject=subject,
            message='',  # Plain text version (optional)
            html_message=html_message,
            from_email=settings.DEFAULT_FROM_EMAIL,
            recipient_list=[submission.user.email],
            fail_silently=False,
        )

        logger.info(
            f"Moderation notification sent: {status} for submission {submission_id} "
            f"to {submission.user.email}"
        )

        return f"Notification sent to {submission.user.email}"

    except ContentSubmission.DoesNotExist:
        logger.error(f"Submission {submission_id} not found")
        raise
    except Exception as exc:
        logger.error(f"Error sending notification for submission {submission_id}: {str(exc)}")
        # Retry with exponential backoff
        raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))


@shared_task(bind=True, max_retries=2)
def cleanup_expired_locks(self):
    """
    Clean up expired moderation locks.

    This task runs periodically to unlock submissions that have
    been locked for too long (default: 15 minutes).

    Returns:
        int: Number of locks cleaned up
    """
    from apps.moderation.models import ModerationLock

    try:
        cleaned = ModerationLock.cleanup_expired()
        logger.info(f"Cleaned up {cleaned} expired moderation locks")
        return cleaned

    except Exception as exc:
        logger.error(f"Error cleaning up expired locks: {str(exc)}")
        raise self.retry(exc=exc, countdown=300)  # Retry after 5 minutes


@shared_task(bind=True, max_retries=3)
def send_batch_moderation_summary(self, moderator_id):
    """
    Send a daily summary email to a moderator with their moderation stats.

    Args:
        moderator_id: ID of the moderator user

    Returns:
        str: Email send result
    """
    from apps.users.models import User
    from apps.moderation.models import ContentSubmission
    from datetime import timedelta

    try:
        moderator = User.objects.get(id=moderator_id)

        # Get stats for the past 24 hours
        yesterday = timezone.now() - timedelta(days=1)

        stats = {
            'reviewed_today': ContentSubmission.objects.filter(
                reviewed_by=moderator,
                reviewed_at__gte=yesterday
            ).count(),
            'approved_today': ContentSubmission.objects.filter(
                reviewed_by=moderator,
                reviewed_at__gte=yesterday,
                status='approved'
            ).count(),
            'rejected_today': ContentSubmission.objects.filter(
                reviewed_by=moderator,
                reviewed_at__gte=yesterday,
                status='rejected'
            ).count(),
            'pending_queue': ContentSubmission.objects.filter(
                status='pending'
            ).count(),
        }

        context = {
            'moderator': moderator,
            'stats': stats,
            'date': timezone.now(),
            'site_url': settings.SITE_URL,
        }

        # For now, just log the stats (template not created yet)
        logger.info(f"Moderation summary for {moderator.email}: {stats}")

        # In production, you would send an actual email:
        # html_message = render_to_string('emails/moderation_summary.html', context)
        # send_mail(...)

        return f"Summary sent to {moderator.email}"

    except User.DoesNotExist:
        logger.error(f"Moderator {moderator_id} not found")
        raise
    except Exception as exc:
        logger.error(f"Error sending moderation summary: {str(exc)}")
        raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))


@shared_task
def update_moderation_statistics():
    """
    Update moderation-related statistics across the database.

    Returns:
        dict: Updated statistics
    """
    from apps.moderation.models import ContentSubmission
    from django.db.models import Count, Avg, F
    from datetime import timedelta

    try:
        now = timezone.now()
        week_ago = now - timedelta(days=7)

        stats = {
            'total_submissions': ContentSubmission.objects.count(),
            'pending': ContentSubmission.objects.filter(status='pending').count(),
            'reviewing': ContentSubmission.objects.filter(status='reviewing').count(),
            'approved': ContentSubmission.objects.filter(status='approved').count(),
            'rejected': ContentSubmission.objects.filter(status='rejected').count(),
            'this_week': ContentSubmission.objects.filter(
                created_at__gte=week_ago
            ).count(),
            'by_type': dict(
                ContentSubmission.objects.values('submission_type')
                .annotate(count=Count('id'))
                .values_list('submission_type', 'count')
            ),
        }

        logger.info(f"Moderation statistics updated: {stats}")
        return stats

    except Exception as e:
        logger.error(f"Error updating moderation statistics: {str(e)}")
        raise


@shared_task(bind=True, max_retries=2)
def auto_unlock_stale_reviews(self, hours=1):
    """
    Automatically unlock submissions that have been in review for too long.

    This helps prevent submissions from getting stuck if a moderator
    starts a review but doesn't complete it.

    Args:
        hours: Number of hours before auto-unlocking (default: 1)

    Returns:
        int: Number of submissions unlocked
    """
    from apps.moderation.models import ContentSubmission
    from apps.moderation.services import ModerationService
    from datetime import timedelta

    try:
        cutoff = timezone.now() - timedelta(hours=hours)

        # Find submissions that have been reviewing too long
        stale_reviews = ContentSubmission.objects.filter(
            status='reviewing',
            locked_at__lt=cutoff
        )

        count = 0
        for submission in stale_reviews:
            try:
                ModerationService.unlock_submission(submission.id)
                count += 1
            except Exception as e:
                logger.error(f"Failed to unlock submission {submission.id}: {str(e)}")
                continue

        logger.info(f"Auto-unlocked {count} stale reviews")
        return count

    except Exception as exc:
        logger.error(f"Error auto-unlocking stale reviews: {str(exc)}")
        raise self.retry(exc=exc, countdown=300)


@shared_task
def notify_moderators_of_queue_size():
    """
    Notify moderators when the pending queue gets too large.

    This helps ensure timely review of submissions.

    Returns:
        dict: Notification result
    """
    from apps.moderation.models import ContentSubmission
    from apps.users.models import User

    try:
        pending_count = ContentSubmission.objects.filter(status='pending').count()

        # Threshold for notification (configurable)
        threshold = getattr(settings, 'MODERATION_QUEUE_THRESHOLD', 50)

        if pending_count >= threshold:
            # Get all moderators
            moderators = User.objects.filter(role__is_moderator=True)

            logger.warning(
                f"Moderation queue size ({pending_count}) exceeds threshold ({threshold}). "
                f"Notifying {moderators.count()} moderators."
            )

            # In production, send emails to moderators
            # For now, just log

            return {
                'queue_size': pending_count,
                'threshold': threshold,
                'notified': moderators.count(),
            }
        else:
            logger.info(f"Moderation queue size ({pending_count}) is within threshold")
            return {
                'queue_size': pending_count,
                'threshold': threshold,
                'notified': 0,
            }

    except Exception as e:
        logger.error(f"Error checking moderation queue: {str(e)}")
        raise
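The periodic tasks above (cleanup_expired_locks, auto_unlock_stale_reviews, notify_moderators_of_queue_size, update_moderation_statistics) only take effect if something schedules them. A minimal beat-schedule sketch, assuming the project runs Celery beat; the schedule keys and intervals are assumptions, only the task paths come from this commit (the cleanup_expired_locks docstring suggests roughly every 5 minutes):

# settings.py sketch: periodic wiring for the moderation tasks above.
# Entry names and intervals are illustrative assumptions.
from celery.schedules import crontab

CELERY_BEAT_SCHEDULE = {
    "moderation-cleanup-expired-locks": {
        "task": "apps.moderation.tasks.cleanup_expired_locks",
        "schedule": 300.0,  # every 5 minutes
    },
    "moderation-auto-unlock-stale-reviews": {
        "task": "apps.moderation.tasks.auto_unlock_stale_reviews",
        "schedule": crontab(minute=0),  # hourly
        "kwargs": {"hours": 1},
    },
    "moderation-queue-size-alert": {
        "task": "apps.moderation.tasks.notify_moderators_of_queue_size",
        "schedule": crontab(minute="*/30"),  # twice an hour
    },
    "moderation-update-statistics": {
        "task": "apps.moderation.tasks.update_moderation_statistics",
        "schedule": crontab(hour=0, minute=15),  # nightly
    },
}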
7
django/apps/reviews/apps.py
Normal file
@@ -0,0 +1,7 @@
from django.apps import AppConfig


class ReviewsConfig(AppConfig):
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'apps.reviews'
    verbose_name = 'Reviews'
BIN
django/apps/users/__pycache__/admin.cpython-313.pyc
Normal file
Binary file not shown.
BIN
django/apps/users/__pycache__/permissions.cpython-313.pyc
Normal file
Binary file not shown.
BIN
django/apps/users/__pycache__/services.cpython-313.pyc
Normal file
Binary file not shown.
372
django/apps/users/admin.py
Normal file
@@ -0,0 +1,372 @@
|
||||
"""
|
||||
Django admin configuration for User models.
|
||||
"""
|
||||
|
||||
from django.contrib import admin
|
||||
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
|
||||
from django.utils.html import format_html
|
||||
from django.urls import reverse
|
||||
from django.utils.safestring import mark_safe
|
||||
from unfold.admin import ModelAdmin
|
||||
from unfold.decorators import display
|
||||
from import_export import resources
|
||||
from import_export.admin import ImportExportModelAdmin
|
||||
|
||||
from .models import User, UserRole, UserProfile
|
||||
|
||||
|
||||
class UserResource(resources.ModelResource):
|
||||
"""Resource for importing/exporting users."""
|
||||
|
||||
class Meta:
|
||||
model = User
|
||||
fields = (
|
||||
'id', 'email', 'username', 'first_name', 'last_name',
|
||||
'date_joined', 'last_login', 'is_active', 'is_staff',
|
||||
'banned', 'reputation_score', 'mfa_enabled'
|
||||
)
|
||||
export_order = fields
|
||||
|
||||
|
||||
class UserRoleInline(admin.StackedInline):
|
||||
"""Inline for user role."""
|
||||
model = UserRole
|
||||
can_delete = False
|
||||
verbose_name_plural = 'Role'
|
||||
fk_name = 'user'
|
||||
fields = ('role', 'granted_by', 'granted_at')
|
||||
readonly_fields = ('granted_at',)
|
||||
|
||||
|
||||
class UserProfileInline(admin.StackedInline):
|
||||
"""Inline for user profile."""
|
||||
model = UserProfile
|
||||
can_delete = False
|
||||
verbose_name_plural = 'Profile & Preferences'
|
||||
fk_name = 'user'
|
||||
fields = (
|
||||
('email_notifications', 'email_on_submission_approved', 'email_on_submission_rejected'),
|
||||
('profile_public', 'show_email'),
|
||||
('total_submissions', 'approved_submissions'),
|
||||
)
|
||||
readonly_fields = ('total_submissions', 'approved_submissions')
|
||||
|
||||
|
||||
@admin.register(User)
|
||||
class UserAdmin(BaseUserAdmin, ModelAdmin, ImportExportModelAdmin):
|
||||
"""Admin interface for User model."""
|
||||
|
||||
resource_class = UserResource
|
||||
|
||||
list_display = [
|
||||
'email',
|
||||
'username',
|
||||
'display_name_admin',
|
||||
'role_badge',
|
||||
'reputation_badge',
|
||||
'status_badge',
|
||||
'mfa_badge',
|
||||
'date_joined',
|
||||
'last_login',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'is_active',
|
||||
'is_staff',
|
||||
'is_superuser',
|
||||
'banned',
|
||||
'mfa_enabled',
|
||||
'oauth_provider',
|
||||
'date_joined',
|
||||
'last_login',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'email',
|
||||
'username',
|
||||
'first_name',
|
||||
'last_name',
|
||||
]
|
||||
|
||||
ordering = ['-date_joined']
|
||||
|
||||
fieldsets = (
|
||||
('Account Information', {
|
||||
'fields': ('email', 'username', 'password')
|
||||
}),
|
||||
('Personal Information', {
|
||||
'fields': ('first_name', 'last_name', 'avatar_url', 'bio')
|
||||
}),
|
||||
('Permissions', {
|
||||
'fields': (
|
||||
'is_active',
|
||||
'is_staff',
|
||||
'is_superuser',
|
||||
'groups',
|
||||
'user_permissions',
|
||||
)
|
||||
}),
|
||||
('Moderation', {
|
||||
'fields': (
|
||||
'banned',
|
||||
'ban_reason',
|
||||
'banned_at',
|
||||
'banned_by',
|
||||
)
|
||||
}),
|
||||
('OAuth', {
|
||||
'fields': ('oauth_provider', 'oauth_sub'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
('Security', {
|
||||
'fields': ('mfa_enabled', 'reputation_score'),
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('date_joined', 'last_login'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
add_fieldsets = (
|
||||
('Create New User', {
|
||||
'classes': ('wide',),
|
||||
'fields': ('email', 'username', 'password1', 'password2'),
|
||||
}),
|
||||
)
|
||||
|
||||
readonly_fields = [
|
||||
'date_joined',
|
||||
'last_login',
|
||||
'banned_at',
|
||||
'oauth_provider',
|
||||
'oauth_sub',
|
||||
]
|
||||
|
||||
inlines = [UserRoleInline, UserProfileInline]
|
||||
|
||||
@display(description="Name", label=True)
|
||||
def display_name_admin(self, obj):
|
||||
"""Display user's display name."""
|
||||
return obj.display_name or '-'
|
||||
|
||||
@display(description="Role", label=True)
|
||||
def role_badge(self, obj):
|
||||
"""Display user role with badge."""
|
||||
try:
|
||||
role = obj.role.role
|
||||
colors = {
|
||||
'admin': 'red',
|
||||
'moderator': 'blue',
|
||||
'user': 'green',
|
||||
}
|
||||
return format_html(
|
||||
'<span style="background-color: {}; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">{}</span>',
|
||||
colors.get(role, 'gray'),
|
||||
role.upper()
|
||||
)
|
||||
except UserRole.DoesNotExist:
|
||||
return format_html('<span style="color: gray;">No Role</span>')
|
||||
|
||||
@display(description="Reputation", label=True)
|
||||
def reputation_badge(self, obj):
|
||||
"""Display reputation score."""
|
||||
score = obj.reputation_score
|
||||
if score >= 100:
|
||||
color = 'green'
|
||||
elif score >= 50:
|
||||
color = 'blue'
|
||||
elif score >= 0:
|
||||
color = 'gray'
|
||||
else:
|
||||
color = 'red'
|
||||
|
||||
return format_html(
|
||||
'<span style="color: {}; font-weight: bold;">{}</span>',
|
||||
color,
|
||||
score
|
||||
)
|
||||
|
||||
@display(description="Status", label=True)
|
||||
def status_badge(self, obj):
|
||||
"""Display user status."""
|
||||
if obj.banned:
|
||||
return format_html(
|
||||
'<span style="background-color: red; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">BANNED</span>'
|
||||
)
|
||||
elif not obj.is_active:
|
||||
return format_html(
|
||||
'<span style="background-color: orange; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">INACTIVE</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="background-color: green; color: white; padding: 3px 8px; border-radius: 3px; font-size: 11px;">ACTIVE</span>'
|
||||
)
|
||||
|
||||
@display(description="MFA", label=True)
|
||||
def mfa_badge(self, obj):
|
||||
"""Display MFA status."""
|
||||
if obj.mfa_enabled:
|
||||
return format_html(
|
||||
'<span style="color: green;">✓ Enabled</span>'
|
||||
)
|
||||
else:
|
||||
return format_html(
|
||||
'<span style="color: gray;">✗ Disabled</span>'
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('role', 'banned_by')
|
||||
|
||||
actions = ['ban_users', 'unban_users', 'make_moderator', 'make_user']
|
||||
|
||||
@admin.action(description="Ban selected users")
|
||||
def ban_users(self, request, queryset):
|
||||
"""Ban selected users."""
|
||||
count = 0
|
||||
for user in queryset:
|
||||
if not user.banned:
|
||||
user.ban(reason="Banned by admin", banned_by=request.user)
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been banned."
|
||||
)
|
||||
|
||||
@admin.action(description="Unban selected users")
|
||||
def unban_users(self, request, queryset):
|
||||
"""Unban selected users."""
|
||||
count = 0
|
||||
for user in queryset:
|
||||
if user.banned:
|
||||
user.unban()
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been unbanned."
|
||||
)
|
||||
|
||||
@admin.action(description="Set role to Moderator")
|
||||
def make_moderator(self, request, queryset):
|
||||
"""Set users' role to moderator."""
|
||||
from .services import RoleService
|
||||
|
||||
count = 0
|
||||
for user in queryset:
|
||||
RoleService.assign_role(user, 'moderator', request.user)
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been set to Moderator role."
|
||||
)
|
||||
|
||||
@admin.action(description="Set role to User")
|
||||
def make_user(self, request, queryset):
|
||||
"""Set users' role to user."""
|
||||
from .services import RoleService
|
||||
|
||||
count = 0
|
||||
for user in queryset:
|
||||
RoleService.assign_role(user, 'user', request.user)
|
||||
count += 1
|
||||
|
||||
self.message_user(
|
||||
request,
|
||||
f"{count} user(s) have been set to User role."
|
||||
)
|
||||
|
||||
|
||||
@admin.register(UserRole)
|
||||
class UserRoleAdmin(ModelAdmin):
|
||||
"""Admin interface for UserRole model."""
|
||||
|
||||
list_display = ['user', 'role', 'is_moderator', 'is_admin', 'granted_at', 'granted_by']
|
||||
list_filter = ['role', 'granted_at']
|
||||
search_fields = ['user__email', 'user__username']
|
||||
ordering = ['-granted_at']
|
||||
|
||||
readonly_fields = ['granted_at']
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('user', 'granted_by')
|
||||
|
||||
|
||||
@admin.register(UserProfile)
|
||||
class UserProfileAdmin(ModelAdmin):
|
||||
"""Admin interface for UserProfile model."""
|
||||
|
||||
list_display = [
|
||||
'user',
|
||||
'total_submissions',
|
||||
'approved_submissions',
|
||||
'approval_rate',
|
||||
'email_notifications',
|
||||
'profile_public',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'email_notifications',
|
||||
'profile_public',
|
||||
'show_email',
|
||||
]
|
||||
|
||||
search_fields = ['user__email', 'user__username']
|
||||
|
||||
readonly_fields = ['created', 'modified', 'total_submissions', 'approved_submissions']
|
||||
|
||||
fieldsets = (
|
||||
('User', {
|
||||
'fields': ('user',)
|
||||
}),
|
||||
('Statistics', {
|
||||
'fields': ('total_submissions', 'approved_submissions'),
|
||||
}),
|
||||
('Notification Preferences', {
|
||||
'fields': (
|
||||
'email_notifications',
|
||||
'email_on_submission_approved',
|
||||
'email_on_submission_rejected',
|
||||
)
|
||||
}),
|
||||
('Privacy Settings', {
|
||||
'fields': ('profile_public', 'show_email'),
|
||||
}),
|
||||
('Timestamps', {
|
||||
'fields': ('created', 'modified'),
|
||||
'classes': ('collapse',)
|
||||
}),
|
||||
)
|
||||
|
||||
@display(description="Approval Rate")
|
||||
def approval_rate(self, obj):
|
||||
"""Display approval rate percentage."""
|
||||
if obj.total_submissions == 0:
|
||||
return '-'
|
||||
|
||||
rate = (obj.approved_submissions / obj.total_submissions) * 100
|
||||
|
||||
if rate >= 80:
|
||||
color = 'green'
|
||||
elif rate >= 60:
|
||||
color = 'blue'
|
||||
elif rate >= 40:
|
||||
color = 'orange'
|
||||
else:
|
||||
color = 'red'
|
||||
|
||||
return format_html(
|
||||
'<span style="color: {}; font-weight: bold;">{:.1f}%</span>',
|
||||
color,
|
||||
rate
|
||||
)
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related('user')
|
||||
310
django/apps/users/permissions.py
Normal file
@@ -0,0 +1,310 @@
|
||||
"""
|
||||
Permission utilities and decorators for API endpoints.
|
||||
|
||||
Provides:
|
||||
- Permission checking decorators
|
||||
- Role-based access control
|
||||
- Object-level permissions
|
||||
"""
|
||||
|
||||
from functools import wraps
|
||||
from typing import Optional, Callable
|
||||
from django.http import HttpRequest
|
||||
from ninja import Router
|
||||
from ninja.security import HttpBearer
|
||||
from rest_framework_simplejwt.tokens import AccessToken
|
||||
from rest_framework_simplejwt.exceptions import TokenError
|
||||
from django.core.exceptions import PermissionDenied
|
||||
import logging
|
||||
|
||||
from .models import User, UserRole
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class JWTAuth(HttpBearer):
|
||||
"""JWT authentication for django-ninja"""
|
||||
|
||||
def authenticate(self, request: HttpRequest, token: str) -> Optional[User]:
|
||||
"""
|
||||
Authenticate user from JWT token.
|
||||
|
||||
Args:
|
||||
request: HTTP request
|
||||
token: JWT access token
|
||||
|
||||
Returns:
|
||||
User instance if valid, None otherwise
|
||||
"""
|
||||
try:
|
||||
# Decode token
|
||||
access_token = AccessToken(token)
|
||||
user_id = access_token['user_id']
|
||||
|
||||
# Get user
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
# Check if banned
|
||||
if user.banned:
|
||||
logger.warning(f"Banned user attempted API access: {user.email}")
|
||||
return None
|
||||
|
||||
return user
|
||||
|
||||
except TokenError as e:
|
||||
logger.debug(f"Invalid token: {e}")
|
||||
return None
|
||||
except User.DoesNotExist:
|
||||
logger.warning(f"Token for non-existent user: {user_id}")
|
||||
return None
|
||||
except Exception as e:
|
||||
logger.error(f"Authentication error: {e}")
|
||||
return None
|
||||
|
||||
|
||||
# Global JWT auth instance
|
||||
jwt_auth = JWTAuth()
|
||||
|
||||
|
||||
def require_auth(func: Callable) -> Callable:
|
||||
"""
|
||||
Decorator to require authentication.
|
||||
|
||||
Usage:
|
||||
@api.get("/protected")
|
||||
@require_auth
|
||||
def protected_endpoint(request):
|
||||
return {"user": request.auth.email}
|
||||
"""
|
||||
@wraps(func)
|
||||
def wrapper(request: HttpRequest, *args, **kwargs):
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
raise PermissionDenied("Authentication required")
|
||||
return func(request, *args, **kwargs)
|
||||
return wrapper
|
||||
|
||||
|
||||
def require_role(role: str) -> Callable:
|
||||
"""
|
||||
Decorator to require specific role.
|
||||
|
||||
Args:
|
||||
role: Required role (user, moderator, admin)
|
||||
|
||||
Usage:
|
||||
@api.post("/moderate")
|
||||
@require_role("moderator")
|
||||
def moderate_endpoint(request):
|
||||
return {"message": "Access granted"}
|
||||
"""
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
def wrapper(request: HttpRequest, *args, **kwargs):
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
raise PermissionDenied("Authentication required")
|
||||
|
||||
user = request.auth
|
||||
|
||||
try:
|
||||
user_role = user.role
|
||||
|
||||
# Admin has access to everything
|
||||
if user_role.is_admin:
|
||||
return func(request, *args, **kwargs)
|
||||
|
||||
# Check specific role
|
||||
if role == 'moderator' and user_role.is_moderator:
|
||||
return func(request, *args, **kwargs)
|
||||
elif role == 'user':
|
||||
return func(request, *args, **kwargs)
|
||||
|
||||
raise PermissionDenied(f"Role '{role}' required")
|
||||
|
||||
except UserRole.DoesNotExist:
|
||||
raise PermissionDenied("User role not assigned")
|
||||
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
def require_moderator(func: Callable) -> Callable:
|
||||
"""
|
||||
Decorator to require moderator or admin role.
|
||||
|
||||
Usage:
|
||||
@api.post("/approve")
|
||||
@require_moderator
|
||||
def approve_endpoint(request):
|
||||
return {"message": "Access granted"}
|
||||
"""
|
||||
return require_role("moderator")(func)
|
||||
|
||||
|
||||
def require_admin(func: Callable) -> Callable:
|
||||
"""
|
||||
Decorator to require admin role.
|
||||
|
||||
Usage:
|
||||
@api.delete("/delete-user")
|
||||
@require_admin
|
||||
def delete_user_endpoint(request):
|
||||
return {"message": "Access granted"}
|
||||
"""
|
||||
def decorator(func: Callable) -> Callable:
|
||||
@wraps(func)
|
||||
def wrapper(request: HttpRequest, *args, **kwargs):
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
raise PermissionDenied("Authentication required")
|
||||
|
||||
user = request.auth
|
||||
|
||||
try:
|
||||
user_role = user.role
|
||||
|
||||
if not user_role.is_admin:
|
||||
raise PermissionDenied("Admin role required")
|
||||
|
||||
return func(request, *args, **kwargs)
|
||||
|
||||
except UserRole.DoesNotExist:
|
||||
raise PermissionDenied("User role not assigned")
|
||||
|
||||
return wrapper
|
||||
return decorator
|
||||
|
||||
|
||||
def is_owner_or_moderator(user: User, obj_user_id) -> bool:
|
||||
"""
|
||||
Check if user is the owner of an object or a moderator.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
obj_user_id: User ID of the object owner
|
||||
|
||||
Returns:
|
||||
True if user is owner or moderator
|
||||
"""
|
||||
if str(user.id) == str(obj_user_id):
|
||||
return True
|
||||
|
||||
try:
|
||||
return user.role.is_moderator
|
||||
except UserRole.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def can_moderate(user: User) -> bool:
|
||||
"""
|
||||
Check if user can moderate content.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
|
||||
Returns:
|
||||
True if user is moderator or admin
|
||||
"""
|
||||
if user.banned:
|
||||
return False
|
||||
|
||||
try:
|
||||
return user.role.is_moderator
|
||||
except UserRole.DoesNotExist:
|
||||
return False
|
||||
|
||||
|
||||
def can_submit(user: User) -> bool:
|
||||
"""
|
||||
Check if user can submit content.
|
||||
|
||||
Args:
|
||||
user: User to check
|
||||
|
||||
Returns:
|
||||
True if user is not banned
|
||||
"""
|
||||
return not user.banned
|
||||
|
||||
|
||||
class PermissionChecker:
|
||||
"""Helper class for checking permissions"""
|
||||
|
||||
def __init__(self, user: User):
|
||||
self.user = user
|
||||
try:
|
||||
self.user_role = user.role
|
||||
except UserRole.DoesNotExist:
|
||||
self.user_role = None
|
||||
|
||||
@property
|
||||
def is_authenticated(self) -> bool:
|
||||
"""Check if user is authenticated"""
|
||||
return self.user is not None
|
||||
|
||||
@property
|
||||
def is_moderator(self) -> bool:
|
||||
"""Check if user is moderator or admin"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return self.user_role and self.user_role.is_moderator
|
||||
|
||||
@property
|
||||
def is_admin(self) -> bool:
|
||||
"""Check if user is admin"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return self.user_role and self.user_role.is_admin
|
||||
|
||||
@property
|
||||
def can_submit(self) -> bool:
|
||||
"""Check if user can submit content"""
|
||||
return not self.user.banned
|
||||
|
||||
@property
|
||||
def can_moderate(self) -> bool:
|
||||
"""Check if user can moderate content"""
|
||||
return self.is_moderator
|
||||
|
||||
def can_edit(self, obj_user_id) -> bool:
|
||||
"""Check if user can edit an object"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return str(self.user.id) == str(obj_user_id) or self.is_moderator
|
||||
|
||||
def can_delete(self, obj_user_id) -> bool:
|
||||
"""Check if user can delete an object"""
|
||||
if self.user.banned:
|
||||
return False
|
||||
return str(self.user.id) == str(obj_user_id) or self.is_admin
|
||||
|
||||
def require_permission(self, permission: str) -> None:
|
||||
"""
|
||||
Raise PermissionDenied if user doesn't have permission.
|
||||
|
||||
Args:
|
||||
permission: Permission to check (submit, moderate, admin)
|
||||
|
||||
Raises:
|
||||
PermissionDenied: If user doesn't have permission
|
||||
"""
|
||||
if permission == 'submit' and not self.can_submit:
|
||||
raise PermissionDenied("You are banned from submitting content")
|
||||
elif permission == 'moderate' and not self.can_moderate:
|
||||
raise PermissionDenied("Moderator role required")
|
||||
elif permission == 'admin' and not self.is_admin:
|
||||
raise PermissionDenied("Admin role required")
|
||||
|
||||
|
||||
def get_permission_checker(request: HttpRequest) -> Optional[PermissionChecker]:
|
||||
"""
|
||||
Get permission checker for request user.
|
||||
|
||||
Args:
|
||||
request: HTTP request
|
||||
|
||||
Returns:
|
||||
PermissionChecker instance or None if not authenticated
|
||||
"""
|
||||
if not request.auth or not isinstance(request.auth, User):
|
||||
return None
|
||||
|
||||
return PermissionChecker(request.auth)
|
||||
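The JWTAuth class and role decorators defined in permissions.py are meant to be combined on django-ninja endpoints, as the docstrings above indicate. A short sketch of that wiring, assuming a top-level NinjaAPI instance; the endpoint itself is illustrative and only jwt_auth, require_moderator, and get_permission_checker come from this commit:

# Illustrative endpoint showing JWT auth plus role checks from permissions.py.
from ninja import NinjaAPI
from apps.users.permissions import jwt_auth, require_moderator, get_permission_checker

api = NinjaAPI(auth=jwt_auth)


@api.get("/moderation/queue-access")
@require_moderator
def moderation_queue_access(request):
    # request.auth is the User returned by JWTAuth.authenticate
    checker = get_permission_checker(request)
    return {
        "moderator": request.auth.email,
        "is_admin": bool(checker and checker.is_admin),
    }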
592
django/apps/users/services.py
Normal file
@@ -0,0 +1,592 @@
|
||||
"""
|
||||
User authentication and management services.
|
||||
|
||||
Provides business logic for:
|
||||
- User registration and authentication
|
||||
- OAuth integration
|
||||
- MFA/2FA management
|
||||
- Permission and role management
|
||||
"""
|
||||
|
||||
from typing import Optional, Dict, Any
|
||||
from django.contrib.auth import authenticate
|
||||
from django.contrib.auth.password_validation import validate_password
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django_otp.plugins.otp_totp.models import TOTPDevice
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
import logging
|
||||
|
||||
from .models import User, UserRole, UserProfile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AuthenticationService:
|
||||
"""Service for handling user authentication operations"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def register_user(
|
||||
email: str,
|
||||
password: str,
|
||||
username: Optional[str] = None,
|
||||
first_name: str = '',
|
||||
last_name: str = ''
|
||||
) -> User:
|
||||
"""
|
||||
Register a new user with email and password.
|
||||
|
||||
Args:
|
||||
email: User's email address
|
||||
password: User's password (will be validated and hashed)
|
||||
username: Optional username (defaults to email prefix)
|
||||
first_name: User's first name
|
||||
last_name: User's last name
|
||||
|
||||
Returns:
|
||||
Created User instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If email exists or password is invalid
|
||||
"""
|
||||
# Normalize email
|
||||
email = email.lower().strip()
|
||||
|
||||
# Check if user exists
|
||||
if User.objects.filter(email=email).exists():
|
||||
raise ValidationError({'email': 'A user with this email already exists.'})
|
||||
|
||||
# Set username if not provided
|
||||
if not username:
|
||||
username = email.split('@')[0]
|
||||
# Make unique if needed
|
||||
base_username = username
|
||||
counter = 1
|
||||
while User.objects.filter(username=username).exists():
|
||||
username = f"{base_username}{counter}"
|
||||
counter += 1
|
||||
|
||||
# Validate password
|
||||
try:
|
||||
validate_password(password)
|
||||
except ValidationError as e:
|
||||
raise ValidationError({'password': e.messages})
|
||||
|
||||
# Create user
|
||||
user = User.objects.create_user(
|
||||
email=email,
|
||||
username=username,
|
||||
password=password,
|
||||
first_name=first_name,
|
||||
last_name=last_name
|
||||
)
|
||||
|
||||
# Create role (default: user)
|
||||
UserRole.objects.create(user=user, role='user')
|
||||
|
||||
# Create profile
|
||||
UserProfile.objects.create(user=user)
|
||||
|
||||
logger.info(f"New user registered: {user.email}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def authenticate_user(email: str, password: str) -> Optional[User]:
|
||||
"""
|
||||
Authenticate user with email and password.
|
||||
|
||||
Args:
|
||||
email: User's email address
|
||||
password: User's password
|
||||
|
||||
Returns:
|
||||
User instance if authentication successful, None otherwise
|
||||
"""
|
||||
email = email.lower().strip()
|
||||
user = authenticate(username=email, password=password)
|
||||
|
||||
if user and user.banned:
|
||||
logger.warning(f"Banned user attempted login: {email}")
|
||||
raise ValidationError("This account has been banned.")
|
||||
|
||||
if user:
|
||||
user.last_login = timezone.now()
|
||||
user.save(update_fields=['last_login'])
|
||||
logger.info(f"User authenticated: {email}")
|
||||
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def create_oauth_user(
|
||||
email: str,
|
||||
provider: str,
|
||||
oauth_sub: str,
|
||||
username: Optional[str] = None,
|
||||
first_name: str = '',
|
||||
last_name: str = '',
|
||||
avatar_url: str = ''
|
||||
) -> User:
|
||||
"""
|
||||
Create or get user from OAuth provider.
|
||||
|
||||
Args:
|
||||
email: User's email from OAuth provider
|
||||
provider: OAuth provider name (google, discord)
|
||||
oauth_sub: OAuth subject identifier
|
||||
username: Optional username
|
||||
first_name: User's first name
|
||||
last_name: User's last name
|
||||
avatar_url: URL to user's avatar
|
||||
|
||||
Returns:
|
||||
User instance
|
||||
"""
|
||||
email = email.lower().strip()
|
||||
|
||||
# Check if user exists with this email
|
||||
try:
|
||||
user = User.objects.get(email=email)
|
||||
# Update OAuth info if not set
|
||||
if not user.oauth_provider:
|
||||
user.oauth_provider = provider
|
||||
user.oauth_sub = oauth_sub
|
||||
user.save(update_fields=['oauth_provider', 'oauth_sub'])
|
||||
return user
|
||||
except User.DoesNotExist:
|
||||
pass
|
||||
|
||||
# Create new user
|
||||
if not username:
|
||||
username = email.split('@')[0]
|
||||
base_username = username
|
||||
counter = 1
|
||||
while User.objects.filter(username=username).exists():
|
||||
username = f"{base_username}{counter}"
|
||||
counter += 1
|
||||
|
||||
user = User.objects.create(
|
||||
email=email,
|
||||
username=username,
|
||||
first_name=first_name,
|
||||
last_name=last_name,
|
||||
avatar_url=avatar_url,
|
||||
oauth_provider=provider,
|
||||
oauth_sub=oauth_sub
|
||||
)
|
||||
|
||||
# No password needed for OAuth users
|
||||
user.set_unusable_password()
|
||||
user.save()
|
||||
|
||||
# Create role and profile
|
||||
UserRole.objects.create(user=user, role='user')
|
||||
UserProfile.objects.create(user=user)
|
||||
|
||||
logger.info(f"OAuth user created: {email} via {provider}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def change_password(user: User, old_password: str, new_password: str) -> bool:
|
||||
"""
|
||||
Change user's password.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
old_password: Current password
|
||||
new_password: New password
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
|
||||
Raises:
|
||||
ValidationError: If old password is incorrect or new password is invalid
|
||||
"""
|
||||
# Check old password
|
||||
if not user.check_password(old_password):
|
||||
raise ValidationError({'old_password': 'Incorrect password.'})
|
||||
|
||||
# Validate new password
|
||||
try:
|
||||
validate_password(new_password, user=user)
|
||||
except ValidationError as e:
|
||||
raise ValidationError({'new_password': e.messages})
|
||||
|
||||
# Set new password
|
||||
user.set_password(new_password)
|
||||
user.save()
|
||||
|
||||
logger.info(f"Password changed for user: {user.email}")
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def reset_password(user: User, new_password: str) -> bool:
|
||||
"""
|
||||
Reset user's password (admin/forgot password flow).
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
new_password: New password
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
|
||||
Raises:
|
||||
ValidationError: If new password is invalid
|
||||
"""
|
||||
# Validate new password
|
||||
try:
|
||||
validate_password(new_password, user=user)
|
||||
except ValidationError as e:
|
||||
raise ValidationError({'password': e.messages})
|
||||
|
||||
# Set new password
|
||||
user.set_password(new_password)
|
||||
user.save()
|
||||
|
||||
logger.info(f"Password reset for user: {user.email}")
|
||||
return True
|
||||
|
||||
|
||||
class MFAService:
|
||||
"""Service for handling multi-factor authentication"""
|
||||
|
||||
@staticmethod
|
||||
def enable_totp(user: User, device_name: str = 'default') -> TOTPDevice:
|
||||
"""
|
||||
Enable TOTP-based MFA for user.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
device_name: Name for the TOTP device
|
||||
|
||||
Returns:
|
||||
TOTPDevice instance with QR code data
|
||||
"""
|
||||
# Check if device already exists
|
||||
device = TOTPDevice.objects.filter(
|
||||
user=user,
|
||||
name=device_name
|
||||
).first()
|
||||
|
||||
if not device:
|
||||
device = TOTPDevice.objects.create(
|
||||
user=user,
|
||||
name=device_name,
|
||||
confirmed=False
|
||||
)
|
||||
|
||||
return device
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def confirm_totp(user: User, token: str, device_name: str = 'default') -> bool:
|
||||
"""
|
||||
Confirm TOTP device with verification token.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
token: 6-digit TOTP token
|
||||
device_name: Name of the TOTP device
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
|
||||
Raises:
|
||||
ValidationError: If token is invalid
|
||||
"""
|
||||
device = TOTPDevice.objects.filter(
|
||||
user=user,
|
||||
name=device_name
|
||||
).first()
|
||||
|
||||
if not device:
|
||||
raise ValidationError("TOTP device not found.")
|
||||
|
||||
# Verify token
|
||||
if not device.verify_token(token):
|
||||
raise ValidationError("Invalid verification code.")
|
||||
|
||||
# Confirm device
|
||||
device.confirmed = True
|
||||
device.save()
|
||||
|
||||
# Enable MFA on user
|
||||
user.mfa_enabled = True
|
||||
user.save(update_fields=['mfa_enabled'])
|
||||
|
||||
logger.info(f"MFA enabled for user: {user.email}")
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def verify_totp(user: User, token: str) -> bool:
|
||||
"""
|
||||
Verify TOTP token for authentication.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
token: 6-digit TOTP token
|
||||
|
||||
Returns:
|
||||
True if valid
|
||||
"""
|
||||
device = TOTPDevice.objects.filter(
|
||||
user=user,
|
||||
confirmed=True
|
||||
).first()
|
||||
|
||||
if not device:
|
||||
return False
|
||||
|
||||
return device.verify_token(token)
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def disable_totp(user: User) -> bool:
|
||||
"""
|
||||
Disable TOTP-based MFA for user.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
|
||||
Returns:
|
||||
True if successful
|
||||
"""
|
||||
# Delete all TOTP devices
|
||||
TOTPDevice.objects.filter(user=user).delete()
|
||||
|
||||
# Disable MFA on user
|
||||
user.mfa_enabled = False
|
||||
user.save(update_fields=['mfa_enabled'])
|
||||
|
||||
logger.info(f"MFA disabled for user: {user.email}")
|
||||
return True
|
||||
|
||||
|
||||
class RoleService:
|
||||
"""Service for managing user roles and permissions"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def assign_role(
|
||||
user: User,
|
||||
role: str,
|
||||
granted_by: Optional[User] = None
|
||||
) -> UserRole:
|
||||
"""
|
||||
Assign role to user.
|
||||
|
||||
Args:
|
||||
user: User to assign role to
|
||||
role: Role name (user, moderator, admin)
|
||||
granted_by: User granting the role
|
||||
|
||||
Returns:
|
||||
UserRole instance
|
||||
|
||||
Raises:
|
||||
ValidationError: If role is invalid
|
||||
"""
|
||||
valid_roles = ['user', 'moderator', 'admin']
|
||||
if role not in valid_roles:
|
||||
raise ValidationError(f"Invalid role. Must be one of: {', '.join(valid_roles)}")
|
||||
|
||||
# Get or create role
|
||||
user_role, created = UserRole.objects.get_or_create(
|
||||
user=user,
|
||||
defaults={'role': role, 'granted_by': granted_by}
|
||||
)
|
||||
|
||||
if not created and user_role.role != role:
|
||||
user_role.role = role
|
||||
user_role.granted_by = granted_by
|
||||
user_role.granted_at = timezone.now()
|
||||
user_role.save()
|
||||
|
||||
logger.info(f"Role '{role}' assigned to user: {user.email}")
|
||||
return user_role
|
||||
|
||||
@staticmethod
|
||||
def has_role(user: User, role: str) -> bool:
|
||||
"""
|
||||
Check if user has specific role.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
role: Role name to check
|
||||
|
||||
Returns:
|
||||
True if user has the role
|
||||
"""
|
||||
try:
|
||||
user_role = user.role
|
||||
if role == 'moderator':
|
||||
return user_role.is_moderator
|
||||
elif role == 'admin':
|
||||
return user_role.is_admin
|
||||
return user_role.role == role
|
||||
except UserRole.DoesNotExist:
|
||||
return False
|
||||
|
||||
@staticmethod
|
||||
def get_user_permissions(user: User) -> Dict[str, bool]:
|
||||
"""
|
||||
Get user's permission summary.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
|
||||
Returns:
|
||||
Dictionary of permissions
|
||||
"""
|
||||
try:
|
||||
user_role = user.role
|
||||
is_moderator = user_role.is_moderator
|
||||
is_admin = user_role.is_admin
|
||||
except UserRole.DoesNotExist:
|
||||
is_moderator = False
|
||||
is_admin = False
|
||||
|
||||
return {
|
||||
'can_submit': not user.banned,
|
||||
'can_moderate': is_moderator and not user.banned,
|
||||
'can_admin': is_admin and not user.banned,
|
||||
'can_edit_own': not user.banned,
|
||||
'can_delete_own': not user.banned,
|
||||
}
|
||||
|
||||
|
||||
class UserManagementService:
|
||||
"""Service for user profile and account management"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def update_profile(
|
||||
user: User,
|
||||
**kwargs
|
||||
) -> User:
|
||||
"""
|
||||
Update user profile information.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
**kwargs: Fields to update
|
||||
|
||||
Returns:
|
||||
Updated User instance
|
||||
"""
|
||||
allowed_fields = [
|
||||
'first_name', 'last_name', 'username',
|
||||
'avatar_url', 'bio'
|
||||
]
|
||||
|
||||
updated_fields = []
|
||||
for field, value in kwargs.items():
|
||||
if field in allowed_fields and hasattr(user, field):
|
||||
setattr(user, field, value)
|
||||
updated_fields.append(field)
|
||||
|
||||
if updated_fields:
|
||||
user.save(update_fields=updated_fields)
|
||||
logger.info(f"Profile updated for user: {user.email}")
|
||||
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def update_preferences(
|
||||
user: User,
|
||||
**kwargs
|
||||
) -> UserProfile:
|
||||
"""
|
||||
Update user preferences.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
**kwargs: Preference fields to update
|
||||
|
||||
Returns:
|
||||
Updated UserProfile instance
|
||||
"""
|
||||
profile = user.profile
|
||||
|
||||
allowed_fields = [
|
||||
'email_notifications',
|
||||
'email_on_submission_approved',
|
||||
'email_on_submission_rejected',
|
||||
'profile_public',
|
||||
'show_email'
|
||||
]
|
||||
|
||||
updated_fields = []
|
||||
for field, value in kwargs.items():
|
||||
if field in allowed_fields and hasattr(profile, field):
|
||||
setattr(profile, field, value)
|
||||
updated_fields.append(field)
|
||||
|
||||
if updated_fields:
|
||||
profile.save(update_fields=updated_fields)
|
||||
logger.info(f"Preferences updated for user: {user.email}")
|
||||
|
||||
return profile
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def ban_user(
|
||||
user: User,
|
||||
reason: str,
|
||||
banned_by: User
|
||||
) -> User:
|
||||
"""
|
||||
Ban a user.
|
||||
|
||||
Args:
|
||||
user: User to ban
|
||||
reason: Reason for ban
|
||||
banned_by: User performing the ban
|
||||
|
||||
Returns:
|
||||
Updated User instance
|
||||
"""
|
||||
user.ban(reason=reason, banned_by=banned_by)
|
||||
logger.warning(f"User banned: {user.email} by {banned_by.email}. Reason: {reason}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def unban_user(user: User) -> User:
|
||||
"""
|
||||
Unban a user.
|
||||
|
||||
Args:
|
||||
user: User to unban
|
||||
|
||||
Returns:
|
||||
Updated User instance
|
||||
"""
|
||||
user.unban()
|
||||
logger.info(f"User unbanned: {user.email}")
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def get_user_stats(user: User) -> Dict[str, Any]:
|
||||
"""
|
||||
Get user statistics.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
|
||||
Returns:
|
||||
Dictionary of user stats
|
||||
"""
|
||||
profile = user.profile
|
||||
|
||||
return {
|
||||
'total_submissions': profile.total_submissions,
|
||||
'approved_submissions': profile.approved_submissions,
|
||||
'reputation_score': user.reputation_score,
|
||||
'member_since': user.date_joined,
|
||||
'last_active': user.last_login,
|
||||
}
|
||||
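AuthenticationService.register_user creates the user, role, and profile synchronously, while the welcome email defined in tasks.py below is delivered out of band. A sketch of how a registration endpoint might chain the two, assuming a django-ninja view layer; the endpoint name and payload schema are assumptions, only the service and task calls come from this commit:

# Illustrative registration flow built on register_user and send_welcome_email.
from ninja import Router, Schema
from apps.users.services import AuthenticationService
from apps.users.tasks import send_welcome_email

router = Router()


class RegisterPayload(Schema):
    email: str
    password: str
    first_name: str = ""
    last_name: str = ""


@router.post("/auth/register")
def register(request, payload: RegisterPayload):
    user = AuthenticationService.register_user(
        email=payload.email,
        password=payload.password,
        first_name=payload.first_name,
        last_name=payload.last_name,
    )
    # Queue the welcome email; registration does not wait for delivery.
    send_welcome_email.delay(user.id)
    return {"id": str(user.id), "email": user.email, "username": user.username}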
343
django/apps/users/tasks.py
Normal file
@@ -0,0 +1,343 @@
|
||||
"""
|
||||
Background tasks for user management and notifications.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from celery import shared_task
|
||||
from django.core.mail import send_mail
|
||||
from django.template.loader import render_to_string
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from datetime import timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def send_welcome_email(self, user_id):
|
||||
"""
|
||||
Send a welcome email to a newly registered user.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'site_url': getattr(settings, 'SITE_URL', 'https://thrillwiki.com'),
|
||||
}
|
||||
|
||||
html_message = render_to_string('emails/welcome.html', context)
|
||||
|
||||
send_mail(
|
||||
subject='Welcome to ThrillWiki! 🎢',
|
||||
message='',
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
|
||||
logger.info(f"Welcome email sent to {user.email}")
|
||||
return f"Welcome email sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending welcome email to user {user_id}: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3, default_retry_delay=60)
|
||||
def send_password_reset_email(self, user_id, token, reset_url):
|
||||
"""
|
||||
Send a password reset email with a secure token.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
token: Password reset token
|
||||
reset_url: Full URL for password reset
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'reset_url': reset_url,
|
||||
'request_time': timezone.now(),
|
||||
'expiry_hours': 24, # Configurable
|
||||
'site_url': getattr(settings, 'SITE_URL', 'https://thrillwiki.com'),
|
||||
}
|
||||
|
||||
html_message = render_to_string('emails/password_reset.html', context)
|
||||
|
||||
send_mail(
|
||||
subject='Reset Your ThrillWiki Password',
|
||||
message='',
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
|
||||
logger.info(f"Password reset email sent to {user.email}")
|
||||
return f"Password reset email sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending password reset email: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_expired_tokens(self):
|
||||
"""
|
||||
Clean up expired JWT tokens and password reset tokens.
|
||||
|
||||
This task runs daily to remove old tokens from the database.
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from rest_framework_simplejwt.token_blacklist.models import OutstandingToken
|
||||
from django.contrib.auth.tokens import default_token_generator
|
||||
|
||||
try:
|
||||
# Clean up blacklisted JWT tokens older than 7 days
|
||||
cutoff = timezone.now() - timedelta(days=7)
|
||||
|
||||
# Note: Actual implementation depends on token storage strategy
|
||||
# This is a placeholder for the concept
|
||||
|
||||
logger.info("Token cleanup completed")
|
||||
|
||||
return {
|
||||
'jwt_tokens_cleaned': 0,
|
||||
'reset_tokens_cleaned': 0,
|
||||
}
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error cleaning up tokens: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def send_account_notification(self, user_id, notification_type, context_data=None):
|
||||
"""
|
||||
Send a generic account notification email.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
notification_type: Type of notification (e.g., 'security_alert', 'profile_update')
|
||||
context_data: Additional context data for the email
|
||||
|
||||
Returns:
|
||||
str: Email send result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
context = {
|
||||
'user': user,
|
||||
'notification_type': notification_type,
|
||||
'site_url': getattr(settings, 'SITE_URL', 'https://thrillwiki.com'),
|
||||
}
|
||||
|
||||
if context_data:
|
||||
context.update(context_data)
|
||||
|
||||
# For now, just log (would need specific templates for each type)
|
||||
logger.info(f"Account notification ({notification_type}) for user {user.email}")
|
||||
|
||||
return f"Notification sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending account notification: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def cleanup_inactive_users(self, days_inactive=365):
|
||||
"""
|
||||
Clean up or flag users who haven't logged in for a long time.
|
||||
|
||||
Args:
|
||||
days_inactive: Number of days of inactivity before flagging (default: 365)
|
||||
|
||||
Returns:
|
||||
dict: Cleanup statistics
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
cutoff = timezone.now() - timedelta(days=days_inactive)
|
||||
|
||||
inactive_users = User.objects.filter(
|
||||
last_login__lt=cutoff,
|
||||
is_active=True
|
||||
)
|
||||
|
||||
count = inactive_users.count()
|
||||
|
||||
# For now, just log inactive users
|
||||
# In production, you might want to send reactivation emails
|
||||
# or mark accounts for deletion
|
||||
|
||||
logger.info(f"Found {count} inactive users (last login before {cutoff})")
|
||||
|
||||
return {
|
||||
'inactive_count': count,
|
||||
'cutoff_date': cutoff.isoformat(),
|
||||
}
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error cleaning up inactive users: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=300)
|
||||
|
||||
|
||||
@shared_task
|
||||
def update_user_statistics():
|
||||
"""
|
||||
Update user-related statistics across the database.
|
||||
|
||||
Returns:
|
||||
dict: Updated statistics
|
||||
"""
|
||||
from apps.users.models import User
|
||||
from django.db.models import Count
|
||||
from datetime import timedelta
|
||||
|
||||
try:
|
||||
now = timezone.now()
|
||||
week_ago = now - timedelta(days=7)
|
||||
month_ago = now - timedelta(days=30)
|
||||
|
||||
stats = {
|
||||
'total_users': User.objects.count(),
|
||||
'active_users': User.objects.filter(is_active=True).count(),
|
||||
'new_this_week': User.objects.filter(date_joined__gte=week_ago).count(),
|
||||
'new_this_month': User.objects.filter(date_joined__gte=month_ago).count(),
|
||||
'verified_users': User.objects.filter(email_verified=True).count(),
|
||||
'by_role': dict(
|
||||
User.objects.values('role__name')
|
||||
.annotate(count=Count('id'))
|
||||
.values_list('role__name', 'count')
|
||||
),
|
||||
}
|
||||
|
||||
logger.info(f"User statistics updated: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating user statistics: {str(e)}")
|
||||
raise
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=3)
|
||||
def send_bulk_notification(self, user_ids, subject, message, html_message=None):
|
||||
"""
|
||||
Send bulk email notifications to multiple users.
|
||||
|
||||
This is useful for announcements, feature updates, etc.
|
||||
|
||||
Args:
|
||||
user_ids: List of User IDs
|
||||
subject: Email subject
|
||||
message: Plain text message
|
||||
html_message: HTML version of message (optional)
|
||||
|
||||
Returns:
|
||||
dict: Send statistics
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
users = User.objects.filter(id__in=user_ids, is_active=True)
|
||||
|
||||
sent_count = 0
|
||||
failed_count = 0
|
||||
|
||||
for user in users:
|
||||
try:
|
||||
send_mail(
|
||||
subject=subject,
|
||||
message=message,
|
||||
html_message=html_message,
|
||||
from_email=settings.DEFAULT_FROM_EMAIL,
|
||||
recipient_list=[user.email],
|
||||
fail_silently=False,
|
||||
)
|
||||
sent_count += 1
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to send to {user.email}: {str(e)}")
|
||||
failed_count += 1
|
||||
continue
|
||||
|
||||
result = {
|
||||
'total': len(user_ids),
|
||||
'sent': sent_count,
|
||||
'failed': failed_count,
|
||||
}
|
||||
|
||||
logger.info(f"Bulk notification sent: {result}")
|
||||
return result
|
||||
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending bulk notification: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
|
||||
|
||||
@shared_task(bind=True, max_retries=2)
|
||||
def send_email_verification_reminder(self, user_id):
|
||||
"""
|
||||
Send a reminder to users who haven't verified their email.
|
||||
|
||||
Args:
|
||||
user_id: ID of the User
|
||||
|
||||
Returns:
|
||||
str: Reminder result
|
||||
"""
|
||||
from apps.users.models import User
|
||||
|
||||
try:
|
||||
user = User.objects.get(id=user_id)
|
||||
|
||||
if user.email_verified:
|
||||
logger.info(f"User {user.email} already verified, skipping reminder")
|
||||
return "User already verified"
|
||||
|
||||
# Send verification reminder
|
||||
logger.info(f"Sending email verification reminder to {user.email}")
|
||||
|
||||
# In production, generate new verification token and send email
|
||||
# For now, just log
|
||||
|
||||
return f"Verification reminder sent to {user.email}"
|
||||
|
||||
except User.DoesNotExist:
|
||||
logger.error(f"User {user_id} not found")
|
||||
raise
|
||||
except Exception as exc:
|
||||
logger.error(f"Error sending verification reminder: {str(exc)}")
|
||||
raise self.retry(exc=exc, countdown=60 * (2 ** self.request.retries))
|
||||
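The cleanup and statistics tasks above are written as periodic jobs, but this commit does not include a schedule for them. A hedged sketch of wiring them into Celery beat, with config.celery standing in for wherever the Celery app actually lives:

# Hedged sketch: the schedule below is an assumption -- no beat configuration is part of this diff.
from celery.schedules import crontab

from config.celery import app  # placeholder import path for the project's Celery app

app.conf.beat_schedule = {
    'cleanup-expired-tokens-daily': {
        'task': 'apps.users.tasks.cleanup_expired_tokens',
        'schedule': crontab(hour=3, minute=0),
    },
    'flag-inactive-users-weekly': {
        'task': 'apps.users.tasks.cleanup_inactive_users',
        'schedule': crontab(day_of_week='sunday', hour=4, minute=0),
        'kwargs': {'days_inactive': 365},
    },
    'refresh-user-statistics-hourly': {
        'task': 'apps.users.tasks.update_user_statistics',
        'schedule': crontab(minute=0),
    },
}

Event-driven tasks such as send_welcome_email or send_password_reset_email would instead be queued from the signup and reset flows with .delay(user.id, ...).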
BIN
django/apps/versioning/__pycache__/admin.cpython-313.pyc
Normal file
Binary file not shown.
BIN
django/apps/versioning/__pycache__/services.cpython-313.pyc
Normal file
Binary file not shown.
236
django/apps/versioning/admin.py
Normal file
@@ -0,0 +1,236 @@
|
||||
"""
|
||||
Admin interface for versioning models.
|
||||
|
||||
Provides Django admin interface for viewing version history,
|
||||
comparing versions, and managing version records.
|
||||
"""
|
||||
|
||||
from django.contrib import admin
|
||||
from django.utils.html import format_html, escape
from django.utils.safestring import mark_safe
|
||||
from django.urls import reverse
|
||||
from unfold.admin import ModelAdmin
|
||||
|
||||
from apps.versioning.models import EntityVersion
|
||||
|
||||
|
||||
@admin.register(EntityVersion)
|
||||
class EntityVersionAdmin(ModelAdmin):
|
||||
"""
|
||||
Admin interface for EntityVersion model.
|
||||
|
||||
Provides read-only view of version history with search and filtering.
|
||||
"""
|
||||
|
||||
# Display settings
|
||||
list_display = [
|
||||
'version_number',
|
||||
'entity_link',
|
||||
'change_type',
|
||||
'changed_by_link',
|
||||
'submission_link',
|
||||
'changed_field_count',
|
||||
'created',
|
||||
]
|
||||
|
||||
list_filter = [
|
||||
'change_type',
|
||||
'entity_type',
|
||||
'created',
|
||||
]
|
||||
|
||||
search_fields = [
|
||||
'entity_id',
|
||||
'comment',
|
||||
'changed_by__email',
|
||||
'changed_by__username',
|
||||
]
|
||||
|
||||
ordering = ['-created']
|
||||
|
||||
date_hierarchy = 'created'
|
||||
|
||||
# Read-only admin (versions should not be modified)
|
||||
readonly_fields = [
|
||||
'id',
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
'entity_link',
|
||||
'version_number',
|
||||
'change_type',
|
||||
'snapshot_display',
|
||||
'changed_fields_display',
|
||||
'changed_by',
|
||||
'submission',
|
||||
'comment',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
'created',
|
||||
'modified',
|
||||
]
|
||||
|
||||
fieldsets = (
|
||||
('Version Information', {
|
||||
'fields': (
|
||||
'id',
|
||||
'version_number',
|
||||
'change_type',
|
||||
'created',
|
||||
'modified',
|
||||
)
|
||||
}),
|
||||
('Entity', {
|
||||
'fields': (
|
||||
'entity_type',
|
||||
'entity_id',
|
||||
'entity_link',
|
||||
)
|
||||
}),
|
||||
('Changes', {
|
||||
'fields': (
|
||||
'changed_fields_display',
|
||||
'snapshot_display',
|
||||
)
|
||||
}),
|
||||
('Metadata', {
|
||||
'fields': (
|
||||
'changed_by',
|
||||
'submission',
|
||||
'comment',
|
||||
'ip_address',
|
||||
'user_agent',
|
||||
)
|
||||
}),
|
||||
)
|
||||
|
||||
def has_add_permission(self, request):
|
||||
"""Disable adding versions manually."""
|
||||
return False
|
||||
|
||||
def has_delete_permission(self, request, obj=None):
|
||||
"""Disable deleting versions."""
|
||||
return False
|
||||
|
||||
def has_change_permission(self, request, obj=None):
|
||||
"""Only allow viewing versions, not editing."""
|
||||
return False
|
||||
|
||||
def entity_link(self, obj):
|
||||
"""Display link to the entity."""
|
||||
try:
|
||||
entity = obj.entity
|
||||
if entity:
|
||||
# Try to get admin URL for entity
|
||||
admin_url = reverse(
|
||||
f'admin:{obj.entity_type.app_label}_{obj.entity_type.model}_change',
|
||||
args=[entity.pk]
|
||||
)
|
||||
return format_html(
|
||||
'<a href="{}">{}</a>',
|
||||
admin_url,
|
||||
str(entity)
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
return f"{obj.entity_type.model}:{obj.entity_id}"
|
||||
entity_link.short_description = 'Entity'
|
||||
|
||||
def changed_by_link(self, obj):
|
||||
"""Display link to user who made the change."""
|
||||
if obj.changed_by:
|
||||
try:
|
||||
admin_url = reverse(
|
||||
'admin:users_user_change',
|
||||
args=[obj.changed_by.pk]
|
||||
)
|
||||
return format_html(
|
||||
'<a href="{}">{}</a>',
|
||||
admin_url,
|
||||
obj.changed_by.email
|
||||
)
|
||||
except Exception:
|
||||
return obj.changed_by.email
|
||||
return '-'
|
||||
changed_by_link.short_description = 'Changed By'
|
||||
|
||||
def submission_link(self, obj):
|
||||
"""Display link to content submission if applicable."""
|
||||
if obj.submission:
|
||||
try:
|
||||
admin_url = reverse(
|
||||
'admin:moderation_contentsubmission_change',
|
||||
args=[obj.submission.pk]
|
||||
)
|
||||
return format_html(
|
||||
'<a href="{}">#{}</a>',
|
||||
admin_url,
|
||||
obj.submission.pk
|
||||
)
|
||||
except Exception:
|
||||
return str(obj.submission.pk)
|
||||
return '-'
|
||||
submission_link.short_description = 'Submission'
|
||||
|
||||
def changed_field_count(self, obj):
|
||||
"""Display count of changed fields."""
|
||||
count = len(obj.changed_fields)
|
||||
if count == 0:
|
||||
return '-'
|
||||
return f"{count} field{'s' if count != 1 else ''}"
|
||||
changed_field_count.short_description = 'Changed Fields'
|
||||
|
||||
def snapshot_display(self, obj):
|
||||
"""Display snapshot in a formatted way."""
|
||||
import json
|
||||
snapshot = obj.get_snapshot_dict()
|
||||
|
||||
# Format as pretty JSON
|
||||
formatted = json.dumps(snapshot, indent=2, sort_keys=True)
|
||||
|
||||
return format_html(
|
||||
'<pre style="background: #f5f5f5; padding: 10px; border-radius: 4px; overflow-x: auto;">{}</pre>',
|
||||
formatted
|
||||
)
|
||||
snapshot_display.short_description = 'Snapshot'
|
||||
|
||||
def changed_fields_display(self, obj):
|
||||
"""Display changed fields in a formatted way."""
|
||||
if not obj.changed_fields:
|
||||
return format_html('<em>No fields changed</em>')
|
||||
|
||||
html_parts = ['<table style="width: 100%; border-collapse: collapse;">']
|
||||
html_parts.append('<thead><tr style="background: #f5f5f5;">')
|
||||
html_parts.append('<th style="padding: 8px; text-align: left; border: 1px solid #ddd;">Field</th>')
|
||||
html_parts.append('<th style="padding: 8px; text-align: left; border: 1px solid #ddd;">Old Value</th>')
|
||||
html_parts.append('<th style="padding: 8px; text-align: left; border: 1px solid #ddd;">New Value</th>')
|
||||
html_parts.append('</tr></thead><tbody>')
|
||||
|
||||
for field_name, change in obj.changed_fields.items():
|
||||
old_val = change.get('old', '-')
|
||||
new_val = change.get('new', '-')
|
||||
|
||||
# Truncate long values
|
||||
if isinstance(old_val, str) and len(old_val) > 100:
|
||||
old_val = old_val[:97] + '...'
|
||||
if isinstance(new_val, str) and len(new_val) > 100:
|
||||
new_val = new_val[:97] + '...'
|
||||
|
||||
html_parts.append('<tr>')
|
||||
html_parts.append(f'<td style="padding: 8px; border: 1px solid #ddd;"><strong>{escape(field_name)}</strong></td>')
|
||||
html_parts.append(f'<td style="padding: 8px; border: 1px solid #ddd; color: #d32f2f;">{escape(old_val)}</td>')
|
||||
html_parts.append(f'<td style="padding: 8px; border: 1px solid #ddd; color: #388e3c;">{escape(new_val)}</td>')
|
||||
html_parts.append('</tr>')
|
||||
|
||||
html_parts.append('</tbody></table>')
|
||||
|
||||
return mark_safe(''.join(html_parts))
|
||||
changed_fields_display.short_description = 'Changed Fields'
|
||||
|
||||
def get_queryset(self, request):
|
||||
"""Optimize queryset with select_related."""
|
||||
qs = super().get_queryset(request)
|
||||
return qs.select_related(
|
||||
'entity_type',
|
||||
'changed_by',
|
||||
'submission',
|
||||
'submission__user'
|
||||
)
|
||||
165
django/apps/versioning/migrations/0001_initial.py
Normal file
@@ -0,0 +1,165 @@
|
||||
# Generated by Django 4.2.8 on 2025-11-08 17:51
|
||||
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import django.utils.timezone
|
||||
import django_lifecycle.mixins
|
||||
import model_utils.fields
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
("contenttypes", "0002_remove_content_type_name"),
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
("moderation", "0001_initial"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="EntityVersion",
|
||||
fields=[
|
||||
(
|
||||
"created",
|
||||
model_utils.fields.AutoCreatedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="created",
|
||||
),
|
||||
),
|
||||
(
|
||||
"modified",
|
||||
model_utils.fields.AutoLastModifiedField(
|
||||
default=django.utils.timezone.now,
|
||||
editable=False,
|
||||
verbose_name="modified",
|
||||
),
|
||||
),
|
||||
(
|
||||
"id",
|
||||
models.UUIDField(
|
||||
default=uuid.uuid4,
|
||||
editable=False,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity_id",
|
||||
models.UUIDField(db_index=True, help_text="ID of the entity"),
|
||||
),
|
||||
(
|
||||
"version_number",
|
||||
models.PositiveIntegerField(
|
||||
default=1, help_text="Sequential version number for this entity"
|
||||
),
|
||||
),
|
||||
(
|
||||
"change_type",
|
||||
models.CharField(
|
||||
choices=[
|
||||
("created", "Created"),
|
||||
("updated", "Updated"),
|
||||
("deleted", "Deleted"),
|
||||
("restored", "Restored"),
|
||||
],
|
||||
db_index=True,
|
||||
help_text="Type of change",
|
||||
max_length=20,
|
||||
),
|
||||
),
|
||||
(
|
||||
"snapshot",
|
||||
models.JSONField(
|
||||
help_text="Complete snapshot of entity state as JSON"
|
||||
),
|
||||
),
|
||||
(
|
||||
"changed_fields",
|
||||
models.JSONField(
|
||||
default=dict,
|
||||
help_text="Dict of changed fields with old/new values: {'field': {'old': ..., 'new': ...}}",
|
||||
),
|
||||
),
|
||||
(
|
||||
"comment",
|
||||
models.TextField(
|
||||
blank=True, help_text="Optional comment about this version"
|
||||
),
|
||||
),
|
||||
(
|
||||
"ip_address",
|
||||
models.GenericIPAddressField(
|
||||
blank=True, help_text="IP address of change origin", null=True
|
||||
),
|
||||
),
|
||||
(
|
||||
"user_agent",
|
||||
models.CharField(
|
||||
blank=True, help_text="User agent string", max_length=500
|
||||
),
|
||||
),
|
||||
(
|
||||
"changed_by",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="User who made the change",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="entity_versions",
|
||||
to=settings.AUTH_USER_MODEL,
|
||||
),
|
||||
),
|
||||
(
|
||||
"entity_type",
|
||||
models.ForeignKey(
|
||||
help_text="Type of entity (Park, Ride, Company, etc.)",
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="entity_versions",
|
||||
to="contenttypes.contenttype",
|
||||
),
|
||||
),
|
||||
(
|
||||
"submission",
|
||||
models.ForeignKey(
|
||||
blank=True,
|
||||
help_text="Submission that caused this version (if applicable)",
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name="versions",
|
||||
to="moderation.contentsubmission",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Entity Version",
|
||||
"verbose_name_plural": "Entity Versions",
|
||||
"ordering": ["-created"],
|
||||
"indexes": [
|
||||
models.Index(
|
||||
fields=["entity_type", "entity_id", "-created"],
|
||||
name="versioning__entity__8eabd9_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["entity_type", "entity_id", "-version_number"],
|
||||
name="versioning__entity__fe6f1b_idx",
|
||||
),
|
||||
models.Index(
|
||||
fields=["change_type"], name="versioning__change__17de57_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["changed_by"], name="versioning__changed_39d5fd_idx"
|
||||
),
|
||||
models.Index(
|
||||
fields=["submission"], name="versioning__submiss_345f6b_idx"
|
||||
),
|
||||
],
|
||||
"unique_together": {("entity_type", "entity_id", "version_number")},
|
||||
},
|
||||
bases=(django_lifecycle.mixins.LifecycleModelMixin, models.Model),
|
||||
),
|
||||
]
|
||||
0
django/apps/versioning/migrations/__init__.py
Normal file
Binary file not shown.
Binary file not shown.
287
django/apps/versioning/models.py
Normal file
@@ -0,0 +1,287 @@
|
||||
"""
|
||||
Versioning models for ThrillWiki.
|
||||
|
||||
This module provides automatic version tracking for all entities:
|
||||
- EntityVersion: Generic version model using ContentType
|
||||
- Full snapshot storage in JSON
|
||||
- Changed fields tracking with old/new values
|
||||
- Link to ContentSubmission when changes come from moderation
|
||||
"""
|
||||
|
||||
import json
|
||||
from django.db import models
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.conf import settings
|
||||
|
||||
from apps.core.models import BaseModel
|
||||
|
||||
|
||||
class EntityVersion(BaseModel):
|
||||
"""
|
||||
Generic version tracking for all entities.
|
||||
|
||||
Stores a complete snapshot of the entity state at the time of change,
|
||||
along with metadata about what changed and who made the change.
|
||||
"""
|
||||
|
||||
CHANGE_TYPE_CHOICES = [
|
||||
('created', 'Created'),
|
||||
('updated', 'Updated'),
|
||||
('deleted', 'Deleted'),
|
||||
('restored', 'Restored'),
|
||||
]
|
||||
|
||||
# Entity reference (generic)
|
||||
entity_type = models.ForeignKey(
|
||||
ContentType,
|
||||
on_delete=models.CASCADE,
|
||||
related_name='entity_versions',
|
||||
help_text="Type of entity (Park, Ride, Company, etc.)"
|
||||
)
|
||||
entity_id = models.UUIDField(
|
||||
db_index=True,
|
||||
help_text="ID of the entity"
|
||||
)
|
||||
entity = GenericForeignKey('entity_type', 'entity_id')
|
||||
|
||||
# Version info
|
||||
version_number = models.PositiveIntegerField(
|
||||
default=1,
|
||||
help_text="Sequential version number for this entity"
|
||||
)
|
||||
change_type = models.CharField(
|
||||
max_length=20,
|
||||
choices=CHANGE_TYPE_CHOICES,
|
||||
db_index=True,
|
||||
help_text="Type of change"
|
||||
)
|
||||
|
||||
# Snapshot of entity state
|
||||
snapshot = models.JSONField(
|
||||
help_text="Complete snapshot of entity state as JSON"
|
||||
)
|
||||
|
||||
# Changed fields tracking
|
||||
changed_fields = models.JSONField(
|
||||
default=dict,
|
||||
help_text="Dict of changed fields with old/new values: {'field': {'old': ..., 'new': ...}}"
|
||||
)
|
||||
|
||||
# User who made the change
|
||||
changed_by = models.ForeignKey(
|
||||
settings.AUTH_USER_MODEL,
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='entity_versions',
|
||||
help_text="User who made the change"
|
||||
)
|
||||
|
||||
# Link to ContentSubmission (if change came from moderation)
|
||||
submission = models.ForeignKey(
|
||||
'moderation.ContentSubmission',
|
||||
on_delete=models.SET_NULL,
|
||||
null=True,
|
||||
blank=True,
|
||||
related_name='versions',
|
||||
help_text="Submission that caused this version (if applicable)"
|
||||
)
|
||||
|
||||
# Metadata
|
||||
comment = models.TextField(
|
||||
blank=True,
|
||||
help_text="Optional comment about this version"
|
||||
)
|
||||
ip_address = models.GenericIPAddressField(
|
||||
null=True,
|
||||
blank=True,
|
||||
help_text="IP address of change origin"
|
||||
)
|
||||
user_agent = models.CharField(
|
||||
max_length=500,
|
||||
blank=True,
|
||||
help_text="User agent string"
|
||||
)
|
||||
|
||||
class Meta:
|
||||
verbose_name = 'Entity Version'
|
||||
verbose_name_plural = 'Entity Versions'
|
||||
ordering = ['-created']
|
||||
indexes = [
|
||||
models.Index(fields=['entity_type', 'entity_id', '-created']),
|
||||
models.Index(fields=['entity_type', 'entity_id', '-version_number']),
|
||||
models.Index(fields=['change_type']),
|
||||
models.Index(fields=['changed_by']),
|
||||
models.Index(fields=['submission']),
|
||||
]
|
||||
unique_together = [['entity_type', 'entity_id', 'version_number']]
|
||||
|
||||
def __str__(self):
|
||||
return f"{self.entity_type.model} v{self.version_number} ({self.change_type})"
|
||||
|
||||
@property
|
||||
def entity_name(self):
|
||||
"""Get display name of the entity."""
|
||||
try:
|
||||
entity = self.entity
|
||||
if entity:
|
||||
return str(entity)
|
||||
except Exception:
|
||||
pass
|
||||
return f"{self.entity_type.model}:{self.entity_id}"
|
||||
|
||||
def get_snapshot_dict(self):
|
||||
"""
|
||||
Get snapshot as Python dict.
|
||||
|
||||
Returns:
|
||||
dict: Entity snapshot
|
||||
"""
|
||||
if isinstance(self.snapshot, str):
|
||||
return json.loads(self.snapshot)
|
||||
return self.snapshot
|
||||
|
||||
def get_changed_fields_list(self):
|
||||
"""
|
||||
Get list of changed field names.
|
||||
|
||||
Returns:
|
||||
list: Field names that changed
|
||||
"""
|
||||
return list(self.changed_fields.keys())
|
||||
|
||||
def get_field_change(self, field_name):
|
||||
"""
|
||||
Get old and new values for a specific field.
|
||||
|
||||
Args:
|
||||
field_name: Name of the field
|
||||
|
||||
Returns:
|
||||
dict: {'old': old_value, 'new': new_value} or None if field didn't change
|
||||
"""
|
||||
return self.changed_fields.get(field_name)
|
||||
|
||||
def compare_with(self, other_version):
|
||||
"""
|
||||
Compare this version with another version.
|
||||
|
||||
Args:
|
||||
other_version: EntityVersion to compare with
|
||||
|
||||
Returns:
|
||||
dict: Comparison result with differences
|
||||
"""
|
||||
if not other_version or self.entity_id != other_version.entity_id:
|
||||
return None
|
||||
|
||||
this_snapshot = self.get_snapshot_dict()
|
||||
other_snapshot = other_version.get_snapshot_dict()
|
||||
|
||||
differences = {}
|
||||
all_keys = set(this_snapshot.keys()) | set(other_snapshot.keys())
|
||||
|
||||
for key in all_keys:
|
||||
this_val = this_snapshot.get(key)
|
||||
other_val = other_snapshot.get(key)
|
||||
|
||||
if this_val != other_val:
|
||||
differences[key] = {
|
||||
'this': this_val,
|
||||
'other': other_val
|
||||
}
|
||||
|
||||
return {
|
||||
'this_version': self.version_number,
|
||||
'other_version': other_version.version_number,
|
||||
'differences': differences,
|
||||
'changed_field_count': len(differences)
|
||||
}
|
||||
|
||||
def get_diff_summary(self):
|
||||
"""
|
||||
Get human-readable summary of changes in this version.
|
||||
|
||||
Returns:
|
||||
str: Summary of changes
|
||||
"""
|
||||
if self.change_type == 'created':
|
||||
return f"Created {self.entity_name}"
|
||||
|
||||
if self.change_type == 'deleted':
|
||||
return f"Deleted {self.entity_name}"
|
||||
|
||||
changed_count = len(self.changed_fields)
|
||||
if changed_count == 0:
|
||||
return f"No changes to {self.entity_name}"
|
||||
|
||||
field_names = ', '.join(self.get_changed_fields_list()[:3])
|
||||
if changed_count > 3:
|
||||
field_names += f" and {changed_count - 3} more"
|
||||
|
||||
return f"Updated {field_names}"
|
||||
|
||||
@classmethod
|
||||
def get_latest_version_number(cls, entity_type, entity_id):
|
||||
"""
|
||||
Get the latest version number for an entity.
|
||||
|
||||
Args:
|
||||
entity_type: ContentType of entity
|
||||
entity_id: UUID of entity
|
||||
|
||||
Returns:
|
||||
int: Latest version number (0 if no versions exist)
|
||||
"""
|
||||
latest = cls.objects.filter(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity_id
|
||||
).aggregate(
|
||||
max_version=models.Max('version_number')
|
||||
)
|
||||
return latest['max_version'] or 0
|
||||
|
||||
@classmethod
|
||||
def get_history(cls, entity_type, entity_id, limit=50):
|
||||
"""
|
||||
Get version history for an entity.
|
||||
|
||||
Args:
|
||||
entity_type: ContentType of entity
|
||||
entity_id: UUID of entity
|
||||
limit: Maximum number of versions to return
|
||||
|
||||
Returns:
|
||||
QuerySet: Ordered list of versions (newest first)
|
||||
"""
|
||||
return cls.objects.filter(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity_id
|
||||
).select_related(
|
||||
'changed_by',
|
||||
'submission',
|
||||
'submission__user'
|
||||
).order_by('-version_number')[:limit]
|
||||
|
||||
@classmethod
|
||||
def get_version_by_number(cls, entity_type, entity_id, version_number):
|
||||
"""
|
||||
Get a specific version by number.
|
||||
|
||||
Args:
|
||||
entity_type: ContentType of entity
|
||||
entity_id: UUID of entity
|
||||
version_number: Version number to retrieve
|
||||
|
||||
Returns:
|
||||
EntityVersion or None
|
||||
"""
|
||||
try:
|
||||
return cls.objects.get(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity_id,
|
||||
version_number=version_number
|
||||
)
|
||||
except cls.DoesNotExist:
|
||||
return None
|
||||
|
||||
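The classmethods on EntityVersion are the intended read path for history. A short sketch of pulling recent versions for an entity and comparing two of them; the Park lookup value is a placeholder:

# Hedged sketch: the slug is hypothetical; the classmethods are the ones defined on EntityVersion above.
from django.contrib.contenttypes.models import ContentType

from apps.entities.models import Park
from apps.versioning.models import EntityVersion

park = Park.objects.get(slug='some-park')            # placeholder lookup
park_type = ContentType.objects.get_for_model(Park)

for version in EntityVersion.get_history(park_type, park.id, limit=10):
    print(version.version_number, version.change_type, version.get_diff_summary())

v1 = EntityVersion.get_version_by_number(park_type, park.id, 1)
v2 = EntityVersion.get_version_by_number(park_type, park.id, 2)
if v1 and v2:
    print(v2.compare_with(v1)['changed_field_count'])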
473
django/apps/versioning/services.py
Normal file
@@ -0,0 +1,473 @@
|
||||
"""
|
||||
Versioning services for ThrillWiki.
|
||||
|
||||
This module provides the business logic for creating and managing entity versions:
|
||||
- Creating versions automatically via lifecycle hooks
|
||||
- Generating snapshots and tracking changed fields
|
||||
- Linking versions to content submissions
|
||||
- Retrieving version history and diffs
|
||||
- Restoring previous versions
|
||||
"""
|
||||
|
||||
import json
|
||||
from decimal import Decimal
|
||||
from datetime import date, datetime
|
||||
from django.db import models, transaction
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.core.exceptions import ValidationError
|
||||
|
||||
from apps.versioning.models import EntityVersion
|
||||
|
||||
|
||||
class VersionService:
|
||||
"""
|
||||
Service class for versioning operations.
|
||||
|
||||
All methods handle automatic version creation and tracking.
|
||||
"""
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def create_version(
|
||||
entity,
|
||||
change_type='updated',
|
||||
changed_fields=None,
|
||||
user=None,
|
||||
submission=None,
|
||||
comment='',
|
||||
ip_address=None,
|
||||
user_agent=''
|
||||
):
|
||||
"""
|
||||
Create a version record for an entity.
|
||||
|
||||
This is called automatically by the VersionedModel lifecycle hooks,
|
||||
but can also be called manually when needed.
|
||||
|
||||
Args:
|
||||
entity: Entity instance (Park, Ride, Company, etc.)
|
||||
change_type: Type of change ('created', 'updated', 'deleted', 'restored')
|
||||
changed_fields: Dict of dirty fields from DirtyFieldsMixin
|
||||
user: User who made the change (optional)
|
||||
submission: ContentSubmission that caused this change (optional)
|
||||
comment: Optional comment about the change
|
||||
ip_address: IP address of the change origin
|
||||
user_agent: User agent string
|
||||
|
||||
Returns:
|
||||
EntityVersion instance
|
||||
"""
|
||||
# Get ContentType for entity
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
|
||||
# Get next version number
|
||||
version_number = EntityVersion.get_latest_version_number(
|
||||
entity_type, entity.id
|
||||
) + 1
|
||||
|
||||
# Create snapshot of current entity state
|
||||
snapshot = VersionService._create_snapshot(entity)
|
||||
|
||||
# Build changed_fields dict with old/new values
|
||||
changed_fields_data = {}
|
||||
if changed_fields and change_type == 'updated':
|
||||
changed_fields_data = VersionService._build_changed_fields(
|
||||
entity, changed_fields
|
||||
)
|
||||
|
||||
# Try to get user from submission if not provided
|
||||
if not user and submission:
|
||||
user = submission.user
|
||||
|
||||
# Create version record
|
||||
version = EntityVersion.objects.create(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity.id,
|
||||
version_number=version_number,
|
||||
change_type=change_type,
|
||||
snapshot=snapshot,
|
||||
changed_fields=changed_fields_data,
|
||||
changed_by=user,
|
||||
submission=submission,
|
||||
comment=comment,
|
||||
ip_address=ip_address,
|
||||
user_agent=user_agent
|
||||
)
|
||||
|
||||
return version
|
||||
|
||||
@staticmethod
|
||||
def _create_snapshot(entity):
|
||||
"""
|
||||
Create a JSON snapshot of the entity's current state.
|
||||
|
||||
Args:
|
||||
entity: Entity instance
|
||||
|
||||
Returns:
|
||||
dict: Serializable snapshot of entity
|
||||
"""
|
||||
snapshot = {}
|
||||
|
||||
# Get all model fields
|
||||
for field in entity._meta.get_fields():
|
||||
# Skip reverse relations
|
||||
if field.is_relation and field.one_to_many:
|
||||
continue
|
||||
if field.is_relation and field.many_to_many is True:
|
||||
continue
|
||||
|
||||
field_name = field.name
|
||||
|
||||
try:
|
||||
value = getattr(entity, field_name)
|
||||
|
||||
# Handle different field types
|
||||
if value is None:
|
||||
snapshot[field_name] = None
|
||||
elif isinstance(value, (str, int, float, bool)):
|
||||
snapshot[field_name] = value
|
||||
elif isinstance(value, Decimal):
|
||||
snapshot[field_name] = float(value)
|
||||
elif isinstance(value, (date, datetime)):
|
||||
snapshot[field_name] = value.isoformat()
|
||||
elif isinstance(value, models.Model):
|
||||
# Store FK as ID
|
||||
snapshot[field_name] = str(value.id) if value.id else None
|
||||
elif isinstance(value, dict):
|
||||
# JSONField
|
||||
snapshot[field_name] = value
|
||||
elif isinstance(value, list):
|
||||
# JSONField array
|
||||
snapshot[field_name] = value
|
||||
else:
|
||||
# Try to serialize as string
|
||||
snapshot[field_name] = str(value)
|
||||
except Exception:
|
||||
# Skip fields that can't be serialized
|
||||
continue
|
||||
|
||||
return snapshot
|
||||
|
||||
@staticmethod
|
||||
def _build_changed_fields(entity, dirty_fields):
|
||||
"""
|
||||
Build a dict of changed fields with old and new values.
|
||||
|
||||
Args:
|
||||
entity: Entity instance
|
||||
dirty_fields: Dict from DirtyFieldsMixin.get_dirty_fields()
|
||||
|
||||
Returns:
|
||||
dict: Changed fields with old/new values
|
||||
"""
|
||||
changed = {}
|
||||
|
||||
for field_name, old_value in dirty_fields.items():
|
||||
try:
|
||||
new_value = getattr(entity, field_name)
|
||||
|
||||
# Normalize values for JSON
|
||||
old_normalized = VersionService._normalize_value(old_value)
|
||||
new_normalized = VersionService._normalize_value(new_value)
|
||||
|
||||
changed[field_name] = {
|
||||
'old': old_normalized,
|
||||
'new': new_normalized
|
||||
}
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
return changed
|
||||
|
||||
@staticmethod
|
||||
def _normalize_value(value):
|
||||
"""
|
||||
Normalize a value for JSON serialization.
|
||||
|
||||
Args:
|
||||
value: Value to normalize
|
||||
|
||||
Returns:
|
||||
Normalized value
|
||||
"""
|
||||
if value is None:
|
||||
return None
|
||||
elif isinstance(value, (str, int, float, bool)):
|
||||
return value
|
||||
elif isinstance(value, Decimal):
|
||||
return float(value)
|
||||
elif isinstance(value, (date, datetime)):
|
||||
return value.isoformat()
|
||||
elif isinstance(value, models.Model):
|
||||
return str(value.id) if value.id else None
|
||||
elif isinstance(value, (dict, list)):
|
||||
return value
|
||||
else:
|
||||
return str(value)
|
||||
|
||||
@staticmethod
|
||||
def get_version_history(entity, limit=50):
|
||||
"""
|
||||
Get version history for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity instance
|
||||
limit: Maximum number of versions to return
|
||||
|
||||
Returns:
|
||||
QuerySet: Ordered list of versions (newest first)
|
||||
"""
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
return EntityVersion.get_history(entity_type, entity.id, limit)
|
||||
|
||||
@staticmethod
|
||||
def get_version_by_number(entity, version_number):
|
||||
"""
|
||||
Get a specific version by number.
|
||||
|
||||
Args:
|
||||
entity: Entity instance
|
||||
version_number: Version number to retrieve
|
||||
|
||||
Returns:
|
||||
EntityVersion or None
|
||||
"""
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
return EntityVersion.get_version_by_number(entity_type, entity.id, version_number)
|
||||
|
||||
@staticmethod
|
||||
def get_latest_version(entity):
|
||||
"""
|
||||
Get the latest version for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity instance
|
||||
|
||||
Returns:
|
||||
EntityVersion or None
|
||||
"""
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
return EntityVersion.objects.filter(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity.id
|
||||
).order_by('-version_number').first()
|
||||
|
||||
@staticmethod
|
||||
def compare_versions(version1, version2):
|
||||
"""
|
||||
Compare two versions of the same entity.
|
||||
|
||||
Args:
|
||||
version1: First EntityVersion
|
||||
version2: Second EntityVersion
|
||||
|
||||
Returns:
|
||||
dict: Comparison result with differences
|
||||
"""
|
||||
if version1.entity_id != version2.entity_id:
|
||||
raise ValidationError("Versions must be for the same entity")
|
||||
|
||||
return version1.compare_with(version2)
|
||||
|
||||
@staticmethod
|
||||
def get_diff_with_current(version):
|
||||
"""
|
||||
Compare a version with the current entity state.
|
||||
|
||||
Args:
|
||||
version: EntityVersion to compare
|
||||
|
||||
Returns:
|
||||
dict: Differences between version and current state
|
||||
"""
|
||||
entity = version.entity
|
||||
if not entity:
|
||||
raise ValidationError("Entity no longer exists")
|
||||
|
||||
current_snapshot = VersionService._create_snapshot(entity)
|
||||
version_snapshot = version.get_snapshot_dict()
|
||||
|
||||
differences = {}
|
||||
all_keys = set(current_snapshot.keys()) | set(version_snapshot.keys())
|
||||
|
||||
for key in all_keys:
|
||||
current_val = current_snapshot.get(key)
|
||||
version_val = version_snapshot.get(key)
|
||||
|
||||
if current_val != version_val:
|
||||
differences[key] = {
|
||||
'current': current_val,
|
||||
'version': version_val
|
||||
}
|
||||
|
||||
return {
|
||||
'version_number': version.version_number,
|
||||
'differences': differences,
|
||||
'changed_field_count': len(differences)
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
@transaction.atomic
|
||||
def restore_version(version, user=None, comment=''):
|
||||
"""
|
||||
Restore an entity to a previous version.
|
||||
|
||||
This creates a new version with change_type='restored'.
|
||||
|
||||
Args:
|
||||
version: EntityVersion to restore
|
||||
user: User performing the restore
|
||||
comment: Optional comment about the restore
|
||||
|
||||
Returns:
|
||||
EntityVersion: New version created by restore
|
||||
|
||||
Raises:
|
||||
ValidationError: If entity doesn't exist
|
||||
"""
|
||||
entity = version.entity
|
||||
if not entity:
|
||||
raise ValidationError("Entity no longer exists")
|
||||
|
||||
# Get snapshot to restore
|
||||
snapshot = version.get_snapshot_dict()
|
||||
|
||||
# Track which fields are changing
|
||||
changed_fields = {}
|
||||
|
||||
# Apply snapshot values to entity
|
||||
for field_name, value in snapshot.items():
|
||||
# Skip metadata fields
|
||||
if field_name in ['id', 'created', 'modified']:
|
||||
continue
|
||||
|
||||
try:
|
||||
# Get current value
|
||||
current_value = getattr(entity, field_name, None)
|
||||
current_normalized = VersionService._normalize_value(current_value)
|
||||
|
||||
# Check if value is different
|
||||
if current_normalized != value:
|
||||
changed_fields[field_name] = {
|
||||
'old': current_normalized,
|
||||
'new': value
|
||||
}
|
||||
|
||||
# Apply restored value
|
||||
# Handle special field types
|
||||
field = entity._meta.get_field(field_name)
|
||||
|
||||
if isinstance(field, models.ForeignKey):
|
||||
# FK fields need model instance
|
||||
if value:
|
||||
related_model = field.related_model
|
||||
try:
|
||||
related_obj = related_model.objects.get(id=value)
|
||||
setattr(entity, field_name, related_obj)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
setattr(entity, field_name, None)
|
||||
elif isinstance(field, models.DateField):
|
||||
# Date fields
|
||||
if value:
|
||||
setattr(entity, field_name, datetime.fromisoformat(value).date())
|
||||
else:
|
||||
setattr(entity, field_name, None)
|
||||
elif isinstance(field, models.DateTimeField):
|
||||
# DateTime fields
|
||||
if value:
|
||||
setattr(entity, field_name, datetime.fromisoformat(value))
|
||||
else:
|
||||
setattr(entity, field_name, None)
|
||||
elif isinstance(field, models.DecimalField):
|
||||
# Decimal fields
|
||||
if value is not None:
|
||||
setattr(entity, field_name, Decimal(str(value)))
|
||||
else:
|
||||
setattr(entity, field_name, None)
|
||||
else:
|
||||
# Regular fields
|
||||
setattr(entity, field_name, value)
|
||||
except Exception:
|
||||
# Skip fields that can't be restored
|
||||
continue
|
||||
|
||||
# Save entity (this will trigger lifecycle hooks)
|
||||
# But we need to create the version manually to mark it as 'restored'
|
||||
entity.save()
|
||||
|
||||
# Create restore version
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
version_number = EntityVersion.get_latest_version_number(
|
||||
entity_type, entity.id
|
||||
) + 1
|
||||
|
||||
restored_version = EntityVersion.objects.create(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity.id,
|
||||
version_number=version_number,
|
||||
change_type='restored',
|
||||
snapshot=VersionService._create_snapshot(entity),
|
||||
changed_fields=changed_fields,
|
||||
changed_by=user,
|
||||
comment=f"Restored from version {version.version_number}. {comment}".strip()
|
||||
)
|
||||
|
||||
return restored_version
|
||||
|
||||
@staticmethod
|
||||
def get_version_count(entity):
|
||||
"""
|
||||
Get total number of versions for an entity.
|
||||
|
||||
Args:
|
||||
entity: Entity instance
|
||||
|
||||
Returns:
|
||||
int: Number of versions
|
||||
"""
|
||||
entity_type = ContentType.objects.get_for_model(entity)
|
||||
return EntityVersion.objects.filter(
|
||||
entity_type=entity_type,
|
||||
entity_id=entity.id
|
||||
).count()
|
||||
|
||||
@staticmethod
|
||||
def get_versions_by_user(user, limit=50):
|
||||
"""
|
||||
Get versions created by a specific user.
|
||||
|
||||
Args:
|
||||
user: User instance
|
||||
limit: Maximum number of versions to return
|
||||
|
||||
Returns:
|
||||
QuerySet: Versions by user (newest first)
|
||||
"""
|
||||
return EntityVersion.objects.filter(
|
||||
changed_by=user
|
||||
).select_related(
|
||||
'entity_type',
|
||||
'submission'
|
||||
).order_by('-created')[:limit]
|
||||
|
||||
@staticmethod
|
||||
def get_versions_by_submission(submission):
|
||||
"""
|
||||
Get all versions created by a content submission.
|
||||
|
||||
Args:
|
||||
submission: ContentSubmission instance
|
||||
|
||||
Returns:
|
||||
QuerySet: Versions from submission
|
||||
"""
|
||||
return EntityVersion.objects.filter(
|
||||
submission=submission
|
||||
).select_related(
|
||||
'entity_type',
|
||||
'changed_by'
|
||||
).order_by('-created')
|
||||
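create_version is normally invoked by the VersionedModel lifecycle hooks mentioned in its docstring, which are not part of this diff. A hedged sketch of the manual path for a model that does not auto-version, followed by a rollback via restore_version; the Park instance is a placeholder and DirtyFieldsMixin is assumed on the model:

# Hedged sketch: assumes the model mixes in DirtyFieldsMixin (as the create_version
# docstring implies) and is NOT already versioned by lifecycle hooks, otherwise the
# hook would record the change on its own. The lookup value is a placeholder.
from apps.entities.models import Park
from apps.versioning.services import VersionService

park = Park.objects.get(slug='some-park')            # placeholder lookup
park.name = 'Renamed Park'
dirty = park.get_dirty_fields()                      # old values, captured before save()
park.save()

version = VersionService.create_version(
    park,
    change_type='updated',
    changed_fields=dirty,
    user=None,                                       # or the acting User instance
    comment='Manual rename outside moderation',
)

# Roll back to the previous revision; this records a new 'restored' version
# rather than rewriting history.
previous = VersionService.get_version_by_number(park, version.version_number - 1)
if previous:
    VersionService.restore_version(previous, comment='Undo manual rename')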