
Performance Documentation

Performance Architecture

Caching Strategy

Cache Layers

CACHES = {
    'default': {
        # django-redis backend (the CLIENT_CLASS / parser / pool OPTIONS below are django-redis specific)
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://127.0.0.1:6379/1',
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
            'PARSER_CLASS': 'redis.connection.HiredisParser',
            'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
            'CONNECTION_POOL_CLASS_KWARGS': {
                'max_connections': 50,
                'timeout': 20,
            },
        },
    },
}

Cache Patterns

# View caching
from django.shortcuts import render
from django.views.decorators.cache import cache_page

@cache_page(60 * 15)  # cache the rendered response for 15 minutes
def park_list(request):
    parks = Park.objects.all()
    return render(request, 'parks/list.html', {'parks': parks})
# (On class-based views, wrap the decorator with method_decorator instead.)

# Template fragment caching (in the template)
{% load cache %}
{% cache 300 park_detail park.id %}
    ... expensive template logic ...
{% endcache %}

# Low-level cache API
from django.core.cache import cache

def get_park_stats(park_id):
    cache_key = f'park_stats:{park_id}'
    stats = cache.get(cache_key)
    if stats is None:
        stats = calculate_park_stats(park_id)
        cache.set(cache_key, stats, timeout=3600)  # cache for one hour
    return stats

Database Optimization

Query Optimization

# Efficient querying patterns
from django.db import models
from django.db.models import Avg, Count

class ParkQuerySet(models.QuerySet):
    def with_stats(self):
        return (
            self.annotate(
                ride_count=Count('rides'),
                avg_rating=Avg('reviews__rating'),
            )
            .select_related('owner')
            .prefetch_related('rides', 'areas')
        )

# Indexes
class Park(models.Model):
    class Meta:
        indexes = [
            models.Index(fields=['slug']),
            models.Index(fields=['status', 'created_at']),
            models.Index(fields=['location_id', 'status'])
        ]
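
The custom queryset above only becomes usable once it is attached to the model's manager. A minimal sketch, assuming Park exposes it via as_manager() (this wiring is not shown elsewhere in the file):

class Park(models.Model):
    # ... fields and the Meta.indexes shown above ...
    objects = ParkQuerySet.as_manager()  # assumed wiring

# Usage: a single query carries the annotations and prefetched relations
parks = Park.objects.with_stats()
for park in parks:
    print(park.name, park.ride_count, park.avg_rating)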

Database Configuration

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'thrillwiki',
        'CONN_MAX_AGE': 60,
        'OPTIONS': {
            # Server-side timeouts (milliseconds), passed as libpq startup options
            'options': '-c statement_timeout=3000 -c idle_in_transaction_session_timeout=3000',
        },
        'ATOMIC_REQUESTS': False,
        'CONN_HEALTH_CHECKS': True,
    }
}

Asset Optimization

Static File Handling

# WhiteNoise configuration
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'

# WhiteNoise is configured through individual WHITENOISE_* settings;
# compression itself is handled by the storage backend above.
WHITENOISE_ALLOW_ALL_ORIGINS = False
WHITENOISE_MAX_AGE = 31536000  # 1 year

Media Optimization

import os

from PIL import Image

def optimize_image(image_path):
    with Image.open(image_path) as img:
        # Convert to WebP
        webp_path = f"{os.path.splitext(image_path)[0]}.webp"
        img.save(webp_path, 'WebP', quality=85, method=6)
        
        # Create thumbnails
        sizes = [(800, 600), (400, 300)]
        for size in sizes:
            thumb = img.copy()
            thumb.thumbnail(size)
            thumb_path = f"{os.path.splitext(image_path)[0]}_{size[0]}x{size[1]}.webp"
            thumb.save(thumb_path, 'WebP', quality=85, method=6)

Performance Monitoring

Application Monitoring

APM Configuration

MIDDLEWARE = [
    'django_prometheus.middleware.PrometheusBeforeMiddleware',
    # ... other middleware ...
    'django_prometheus.middleware.PrometheusAfterMiddleware',
]

PROMETHEUS_METRICS = {
    'scrape_interval': 15,
    'namespace': 'thrillwiki',
    'metrics_path': '/metrics',
}
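
The /metrics endpoint itself is exposed through django-prometheus' URLconf; a minimal sketch of the root urls.py wiring (the surrounding routes are placeholders):

# urls.py -- expose /metrics for the Prometheus scraper
from django.urls import include, path

urlpatterns = [
    # ... existing routes ...
    path('', include('django_prometheus.urls')),
]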

Custom Metrics

from prometheus_client import Counter, Histogram

# Request metrics
http_requests_total = Counter(
    'http_requests_total',
    'Total HTTP requests',
    ['method', 'endpoint', 'status']
)

# Response time metrics
response_time = Histogram(
    'response_time_seconds',
    'Response time in seconds',
    ['endpoint']
)
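
These collectors only report data once request handling records into them; a minimal sketch of updating both from a middleware hook (the middleware class and label choices are illustrative assumptions):

import time

class MetricsMiddleware:
    # Hypothetical middleware recording into the collectors defined above
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        start = time.time()
        response = self.get_response(request)
        http_requests_total.labels(
            method=request.method,
            endpoint=request.path,
            status=response.status_code,
        ).inc()
        response_time.labels(endpoint=request.path).observe(time.time() - start)
        return response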

Performance Logging

Logging Configuration

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'performance': {
            'level': 'INFO',
            'class': 'logging.handlers.TimedRotatingFileHandler',
            'filename': 'logs/performance.log',
            'when': 'midnight',
            'interval': 1,
            'backupCount': 30,
        }
    },
    'loggers': {
        'performance': {
            'handlers': ['performance'],
            'level': 'INFO',
            'propagate': False,
        }
    }
}

Performance Logging Middleware

import logging
import time


class PerformanceMiddleware:
    def __init__(self, get_response):
        self.get_response = get_response
        self.logger = logging.getLogger('performance')
    
    def __call__(self, request):
        start_time = time.time()
        response = self.get_response(request)
        duration = time.time() - start_time
        
        self.logger.info({
            'path': request.path,
            'method': request.method,
            'duration': duration,
            'status': response.status_code
        })
        
        return response
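
The middleware only runs once it is registered; a short sketch, assuming the class lives in a core.middleware module (the dotted path is an assumption):

MIDDLEWARE = [
    # ... other middleware ...
    'core.middleware.PerformanceMiddleware',  # assumed import path for the class above
]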

Scaling Strategy

Application Scaling

Asynchronous Tasks

# Celery configuration
CELERY_BROKER_URL = 'redis://localhost:6379/2'
CELERY_RESULT_BACKEND = 'redis://localhost:6379/3'

CELERY_TASK_ROUTES = {
    'media.tasks.process_image': {'queue': 'media'},
    'analytics.tasks.update_stats': {'queue': 'analytics'},
}

# Task definition
from celery import shared_task

@shared_task(rate_limit='100/m')
def process_image(image_id):
    image = Image.objects.get(id=image_id)
    optimize_image(image.file.path)
    create_thumbnails(image)
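
Callers enqueue the task instead of running it inline; a minimal usage sketch (the upload handler is hypothetical):

# Hypothetical upload handler: hand image processing off to a Celery worker
def handle_uploaded_image(image):
    process_image.delay(image.id)  # routed to the 'media' queue via CELERY_TASK_ROUTES
    # or override routing explicitly:
    # process_image.apply_async(args=[image.id], queue='media')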

Load Balancing

# Nginx configuration
upstream thrillwiki {
    least_conn;  # Least connections algorithm
    server backend1.thrillwiki.com:8000;
    server backend2.thrillwiki.com:8000;
    server backend3.thrillwiki.com:8000;
    
    keepalive 32;
}

server {
    listen 80;
    server_name thrillwiki.com;
    
    location / {
        proxy_pass http://thrillwiki;
        proxy_http_version 1.1;
        proxy_set_header Connection "";
    }
}

Database Scaling

Read Replicas

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'thrillwiki',
        # Primary DB configuration
    },
    'replica1': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'thrillwiki',
        # Read replica configuration
    }
}

DATABASE_ROUTERS = ['core.db.PrimaryReplicaRouter']
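
A minimal sketch of the referenced core.db.PrimaryReplicaRouter (the project's actual implementation may differ):

import random

class PrimaryReplicaRouter:
    """Send writes to the primary and spread reads across replicas."""

    replicas = ['replica1']

    def db_for_read(self, model, **hints):
        return random.choice(self.replicas)

    def db_for_write(self, model, **hints):
        return 'default'

    def allow_relation(self, obj1, obj2, **hints):
        # All aliases point at the same logical database
        return True

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        # Run migrations only against the primary
        return db == 'default'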

Connection Pooling

# Django DB configuration when connecting through PgBouncer
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'thrillwiki',
        # Point HOST/PORT at the PgBouncer instance rather than PostgreSQL directly
        'CONN_MAX_AGE': 0,                    # let PgBouncer own connection reuse
        'DISABLE_SERVER_SIDE_CURSORS': True,  # required for transaction pooling mode
        'OPTIONS': {
            'application_name': 'thrillwiki',
        },
        # In-process pool settings (only read by a pooling backend such as
        # django-db-connection-pool, not by Django's built-in postgresql backend)
        'POOL_OPTIONS': {
            'POOL_SIZE': 20,
            'MAX_OVERFLOW': 10,
            'RECYCLE': 300,
        },
    }
}

Caching Strategy

Multi-layer Caching

# Cache configuration with a primary and a read-replica alias
CACHES = {
    'default': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://primary:6379/1',
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
        }
    },
    'replica': {
        'BACKEND': 'django_redis.cache.RedisCache',
        'LOCATION': 'redis://replica:6379/1',
        'OPTIONS': {
            'CLIENT_CLASS': 'django_redis.client.DefaultClient',
        }
    }
}
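
The 'replica' alias is not consulted automatically; a small sketch of a read helper that tries the replica first and falls back to the primary (the helper name is an assumption):

from django.core.cache import caches

def cache_get_with_fallback(key):
    # Prefer the read replica; fall back to the primary if it is unavailable or misses
    try:
        value = caches['replica'].get(key)
    except Exception:
        value = None
    if value is None:
        value = caches['default'].get(key)
    return value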

Cache Invalidation

from django.core.cache import cache


class CacheInvalidationMixin:
    def save(self, *args, **kwargs):
        # Invalidate related caches
        cache_keys = self.get_cache_keys()
        cache.delete_many(cache_keys)
        super().save(*args, **kwargs)
    
    def get_cache_keys(self):
        # Return list of related cache keys
        return [
            f'park:{self.pk}',
            f'park_stats:{self.pk}',
            'park_list'
        ]
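
A short usage sketch, assuming the mixin is applied to the Park model (per-model code would normally override get_cache_keys):

# Hypothetical application of the mixin
class Park(CacheInvalidationMixin, models.Model):
    ...

# Saving now clears the related keys before the row is written
park = Park.objects.get(pk=1)
park.name = 'Updated name'
park.save()  # deletes park:<pk>, park_stats:<pk> and park_list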

Performance Bottlenecks

Known Issues

  1. N+1 Query Patterns
# Bad pattern
for park in Park.objects.all():
    print(park.rides.count())  # Causes N+1 queries

# Solution
parks = Park.objects.annotate(
    ride_count=Count('rides')
).all()
  2. Memory Leaks
# Unbounded in-memory caches leak memory in long-running workers
class LongRunningTask:
    def __init__(self):
        self.cache = {}

    def process(self, items):
        # Clear the cache periodically so it cannot grow without bound
        if len(self.cache) > 1000:
            self.cache.clear()
        # ... process items, storing intermediate results in self.cache ...

Performance Tips

  1. Query Optimization
# Use exists() instead of fetching rows just to check for existence
if Park.objects.filter(slug=slug).exists():
    ...  # a park with this slug already exists

# Use values() when only a few fields are needed (avoids model instantiation)
parks = Park.objects.values('id', 'name')
  2. Bulk Operations
# Use bulk_create() to insert many rows in a single query
Park.objects.bulk_create([
    Park(name='Park 1'),
    Park(name='Park 2')
])

# Use a queryset update() for bulk field changes
# (for per-row values, see the bulk_update() sketch below)
Park.objects.filter(status='CLOSED').update(
    status='OPERATING'
)
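
When each row needs a different value, bulk_update() writes the changes back in batched queries instead of one query per object; a minimal sketch:

# Modify instances in memory, then persist the changed field in bulk
parks = list(Park.objects.filter(status='CLOSED'))
for park in parks:
    park.status = 'OPERATING'
Park.objects.bulk_update(parks, ['status'])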