feat: Implement initial schema and add various API, service, and management command enhancements across the application.

pacnpal
2026-01-01 15:13:01 -05:00
parent c95f99ca10
commit b243b17af7
413 changed files with 11164 additions and 17433 deletions

View File

@@ -42,16 +42,12 @@ DEBUG = config("DEBUG", default=True, cast=bool)
# Allowed hosts (comma-separated in .env)
ALLOWED_HOSTS = config(
"ALLOWED_HOSTS",
default="localhost,127.0.0.1",
cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
"ALLOWED_HOSTS", default="localhost,127.0.0.1", cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
)
# CSRF trusted origins (comma-separated in .env)
CSRF_TRUSTED_ORIGINS = config(
"CSRF_TRUSTED_ORIGINS",
default="",
cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
"CSRF_TRUSTED_ORIGINS", default="", cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
)
# =============================================================================

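Note (not part of the commit): the comma-separated settings above all lean on python-decouple's cast hook. A minimal sketch of how that lambda behaves on raw .env values, with illustrative hostnames:

def parse_csv(v: str) -> list[str]:
    # Same logic as the cast= lambda in the diff: split on commas, trim, drop empties.
    return [s.strip() for s in v.split(",") if s.strip()]

assert parse_csv("localhost,127.0.0.1") == ["localhost", "127.0.0.1"]
assert parse_csv(" thrillwiki.com , www.thrillwiki.com ,") == ["thrillwiki.com", "www.thrillwiki.com"]
assert parse_csv("") == []  # an empty default yields an empty list, not [""]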
View File

@@ -149,10 +149,7 @@ LOGGING = {
},
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
"format": (
"%(levelname)s %(asctime)s %(module)s %(process)d "
"%(thread)d %(message)s"
),
"format": ("%(levelname)s %(asctime)s %(module)s %(process)d " "%(thread)d %(message)s"),
},
},
"handlers": {

View File

@@ -20,16 +20,10 @@ from .base import * # noqa: F401,F403
DEBUG = False
# Allowed hosts must be explicitly set in production
ALLOWED_HOSTS = config(
"ALLOWED_HOSTS",
cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
)
ALLOWED_HOSTS = config("ALLOWED_HOSTS", cast=lambda v: [s.strip() for s in v.split(",") if s.strip()])
# CSRF trusted origins for production
CSRF_TRUSTED_ORIGINS = config(
"CSRF_TRUSTED_ORIGINS",
cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
)
CSRF_TRUSTED_ORIGINS = config("CSRF_TRUSTED_ORIGINS", cast=lambda v: [s.strip() for s in v.split(",") if s.strip()])
# =============================================================================
# Security Settings for Production
@@ -75,9 +69,7 @@ if redis_url:
"PARSER_CLASS": "redis.connection.HiredisParser",
"CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
"CONNECTION_POOL_CLASS_KWARGS": {
"max_connections": config(
"REDIS_MAX_CONNECTIONS", default=100, cast=int
),
"max_connections": config("REDIS_MAX_CONNECTIONS", default=100, cast=int),
"timeout": 20,
"socket_keepalive": True,
"retry_on_timeout": True,
@@ -119,9 +111,7 @@ if redis_url:
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# Update STORAGES for Django 4.2+
STORAGES["staticfiles"]["BACKEND"] = ( # noqa: F405
"whitenoise.storage.CompressedManifestStaticFilesStorage"
)
STORAGES["staticfiles"]["BACKEND"] = "whitenoise.storage.CompressedManifestStaticFilesStorage" # noqa: F405
# =============================================================================
# Production REST Framework Settings
@@ -148,8 +138,7 @@ LOGGING = {
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
"format": (
"%(levelname)s %(asctime)s %(module)s %(process)d "
"%(thread)d %(message)s %(pathname)s %(lineno)d"
"%(levelname)s %(asctime)s %(module)s %(process)d " "%(thread)d %(message)s %(pathname)s %(lineno)d"
),
},
"simple": {
@@ -257,9 +246,7 @@ if SENTRY_DSN:
RedisIntegration(),
],
environment=config("SENTRY_ENVIRONMENT", default="production"),
traces_sample_rate=config(
"SENTRY_TRACES_SAMPLE_RATE", default=0.1, cast=float
),
traces_sample_rate=config("SENTRY_TRACES_SAMPLE_RATE", default=0.1, cast=float),
send_default_pii=False, # Don't send PII to Sentry
attach_stacktrace=True,
)

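For context, the Sentry fragment above reduces to a call like the following. This is a hedged sketch: the dsn= argument and the SENTRY_DSN lookup are assumptions, since the surrounding lines are not shown in this hunk.

import sentry_sdk
from decouple import config
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration

SENTRY_DSN = config("SENTRY_DSN", default="")  # assumption: read from the environment

if SENTRY_DSN:
    sentry_sdk.init(
        dsn=SENTRY_DSN,
        integrations=[DjangoIntegration(), RedisIntegration()],
        environment=config("SENTRY_ENVIRONMENT", default="production"),
        # Sample 10% of transactions by default to keep tracing overhead low
        traces_sample_rate=config("SENTRY_TRACES_SAMPLE_RATE", default=0.1, cast=float),
        send_default_pii=False,  # never forward user PII to Sentry
        attach_stacktrace=True,
    )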
View File

@@ -46,15 +46,13 @@ CACHES = {
# Connection pooling for better performance
"CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
"CONNECTION_POOL_CLASS_KWARGS": {
"max_connections": config(
"REDIS_MAX_CONNECTIONS", default=100, cast=int
),
"max_connections": config("REDIS_MAX_CONNECTIONS", default=100, cast=int),
"timeout": config("REDIS_CONNECTION_TIMEOUT", default=20, cast=int),
"socket_keepalive": True,
"socket_keepalive_options": {
1: 1, # TCP_KEEPIDLE: Start keepalive after 1s idle
2: 1, # TCP_KEEPINTVL: Send probes every 1s
3: 3, # TCP_KEEPCNT: Close after 3 failed probes
1: 1, # TCP_KEEPIDLE: Start keepalive after 1s idle
2: 1, # TCP_KEEPINTVL: Send probes every 1s
3: 3, # TCP_KEEPCNT: Close after 3 failed probes
},
"retry_on_timeout": True,
"health_check_interval": 30,
@@ -62,14 +60,11 @@ CACHES = {
# Compress cached data to save memory
"COMPRESSOR": "django_redis.compressors.zlib.ZlibCompressor",
# Graceful degradation if Redis is unavailable
"IGNORE_EXCEPTIONS": config(
"REDIS_IGNORE_EXCEPTIONS", default=True, cast=bool
),
"IGNORE_EXCEPTIONS": config("REDIS_IGNORE_EXCEPTIONS", default=True, cast=bool),
},
"KEY_PREFIX": config("CACHE_KEY_PREFIX", default="thrillwiki"),
"VERSION": 1,
},
# Session cache - separate for security isolation
# Uses a different Redis database (db 2)
"sessions": {
@@ -80,16 +75,13 @@ CACHES = {
"PARSER_CLASS": "redis.connection.HiredisParser",
"CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
"CONNECTION_POOL_CLASS_KWARGS": {
"max_connections": config(
"REDIS_SESSIONS_MAX_CONNECTIONS", default=50, cast=int
),
"max_connections": config("REDIS_SESSIONS_MAX_CONNECTIONS", default=50, cast=int),
"timeout": 10,
"socket_keepalive": True,
},
},
"KEY_PREFIX": "sessions",
},
# API cache - high concurrency for API responses
# Uses a different Redis database (db 3)
"api": {
@@ -100,9 +92,7 @@ CACHES = {
"PARSER_CLASS": "redis.connection.HiredisParser",
"CONNECTION_POOL_CLASS": "redis.BlockingConnectionPool",
"CONNECTION_POOL_CLASS_KWARGS": {
"max_connections": config(
"REDIS_API_MAX_CONNECTIONS", default=100, cast=int
),
"max_connections": config("REDIS_API_MAX_CONNECTIONS", default=100, cast=int),
"timeout": 15,
"socket_keepalive": True,
"retry_on_timeout": True,
@@ -126,14 +116,10 @@ SESSION_CACHE_ALIAS = "sessions"
SESSION_COOKIE_AGE = config("SESSION_COOKIE_AGE", default=3600, cast=int)
# Update session on each request (sliding expiry)
SESSION_SAVE_EVERY_REQUEST = config(
"SESSION_SAVE_EVERY_REQUEST", default=True, cast=bool
)
SESSION_SAVE_EVERY_REQUEST = config("SESSION_SAVE_EVERY_REQUEST", default=True, cast=bool)
# Session persists until cookie expires (not browser close)
SESSION_EXPIRE_AT_BROWSER_CLOSE = config(
"SESSION_EXPIRE_AT_BROWSER_CLOSE", default=False, cast=bool
)
SESSION_EXPIRE_AT_BROWSER_CLOSE = config("SESSION_EXPIRE_AT_BROWSER_CLOSE", default=False, cast=bool)
# =============================================================================
# Cache Middleware Settings
@@ -141,6 +127,4 @@ SESSION_EXPIRE_AT_BROWSER_CLOSE = config(
# For Django's cache middleware (UpdateCacheMiddleware/FetchFromCacheMiddleware)
CACHE_MIDDLEWARE_SECONDS = config("CACHE_MIDDLEWARE_SECONDS", default=300, cast=int)
CACHE_MIDDLEWARE_KEY_PREFIX = config(
"CACHE_MIDDLEWARE_KEY_PREFIX", default="thrillwiki"
)
CACHE_MIDDLEWARE_KEY_PREFIX = config("CACHE_MIDDLEWARE_KEY_PREFIX", default="thrillwiki")

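The cache aliases configured above ("default", "sessions", "api") are addressed through Django's standard cache framework. A brief usage sketch; the keys and values are illustrative, not from this commit:

from django.core.cache import cache, caches

# "default" alias - general-purpose caching
cache.set("park:123:summary", {"name": "Cedar Point"}, timeout=300)

# "api" alias - separate Redis database and connection pool for API responses
api_cache = caches["api"]
payload = api_cache.get("v1:coasters:top10")
if payload is None:
    payload = {"results": []}  # stand-in for an expensive query
    api_cache.set("v1:coasters:top10", payload, timeout=60)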
View File

@@ -26,10 +26,7 @@ from decouple import config
# =============================================================================
# Parse DATABASE_URL environment variable into Django database settings
DATABASE_URL = config(
"DATABASE_URL",
default="postgis://thrillwiki_user:thrillwiki@localhost:5432/thrillwiki_test_db"
)
DATABASE_URL = config("DATABASE_URL", default="postgis://thrillwiki_user:thrillwiki@localhost:5432/thrillwiki_test_db")
# Parse the database URL
db_config = dj_database_url.parse(DATABASE_URL)
@@ -84,14 +81,8 @@ if "postgis" in DATABASE_URL or "postgresql" in DATABASE_URL:
# macOS with Homebrew (default)
# Linux: /usr/lib/x86_64-linux-gnu/libgdal.so
# Docker: Usually handled by the image
GDAL_LIBRARY_PATH = config(
"GDAL_LIBRARY_PATH",
default="/opt/homebrew/lib/libgdal.dylib"
)
GEOS_LIBRARY_PATH = config(
"GEOS_LIBRARY_PATH",
default="/opt/homebrew/lib/libgeos_c.dylib"
)
GDAL_LIBRARY_PATH = config("GDAL_LIBRARY_PATH", default="/opt/homebrew/lib/libgdal.dylib")
GEOS_LIBRARY_PATH = config("GEOS_LIBRARY_PATH", default="/opt/homebrew/lib/libgeos_c.dylib")
# =============================================================================
# Read Replica Configuration (Optional)

View File

@@ -21,10 +21,7 @@ from decouple import config
# - ForwardEmail: django_forwardemail.backends.ForwardEmailBackend (production)
# - SMTP: django.core.mail.backends.smtp.EmailBackend (custom SMTP)
EMAIL_BACKEND = config(
"EMAIL_BACKEND",
default="django_forwardemail.backends.ForwardEmailBackend"
)
EMAIL_BACKEND = config("EMAIL_BACKEND", default="django_forwardemail.backends.ForwardEmailBackend")
# =============================================================================
# ForwardEmail Configuration
@@ -32,10 +29,7 @@ EMAIL_BACKEND = config(
# ForwardEmail is a privacy-focused email service that supports custom domains
# https://forwardemail.net/
FORWARD_EMAIL_BASE_URL = config(
"FORWARD_EMAIL_BASE_URL",
default="https://api.forwardemail.net"
)
FORWARD_EMAIL_BASE_URL = config("FORWARD_EMAIL_BASE_URL", default="https://api.forwardemail.net")
FORWARD_EMAIL_API_KEY = config("FORWARD_EMAIL_API_KEY", default="")
FORWARD_EMAIL_DOMAIN = config("FORWARD_EMAIL_DOMAIN", default="")
@@ -62,10 +56,7 @@ EMAIL_HOST_PASSWORD = config("EMAIL_HOST_PASSWORD", default="")
EMAIL_TIMEOUT = config("EMAIL_TIMEOUT", default=30, cast=int)
# Default from email address
DEFAULT_FROM_EMAIL = config(
"DEFAULT_FROM_EMAIL",
default="ThrillWiki <noreply@thrillwiki.com>"
)
DEFAULT_FROM_EMAIL = config("DEFAULT_FROM_EMAIL", default="ThrillWiki <noreply@thrillwiki.com>")
# =============================================================================
# Email Subject Prefix

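Whichever backend EMAIL_BACKEND resolves to (console, ForwardEmail, or SMTP), application code stays backend-agnostic. A minimal sketch with an illustrative recipient address:

from django.core.mail import send_mail

send_mail(
    subject="Welcome to ThrillWiki",
    message="Thanks for signing up.",
    from_email=None,  # None falls back to DEFAULT_FROM_EMAIL
    recipient_list=["rider@example.com"],
    fail_silently=False,
)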
View File

@@ -46,10 +46,7 @@ LOGGING_FORMATTERS = {
# JSON format for production - machine parseable for log aggregation
"json": {
"()": "pythonjsonlogger.jsonlogger.JsonFormatter",
"format": (
"%(levelname)s %(asctime)s %(module)s %(process)d "
"%(thread)d %(message)s"
),
"format": ("%(levelname)s %(asctime)s %(module)s %(process)d " "%(thread)d %(message)s"),
},
# Simple format for console output
"simple": {

View File

@@ -82,15 +82,11 @@ REST_FRAMEWORK = {
CORS_ALLOW_CREDENTIALS = True
# Allow all origins (not recommended for production)
CORS_ALLOW_ALL_ORIGINS = config(
"CORS_ALLOW_ALL_ORIGINS", default=False, cast=bool
)
CORS_ALLOW_ALL_ORIGINS = config("CORS_ALLOW_ALL_ORIGINS", default=False, cast=bool)
# Specific allowed origins (comma-separated)
CORS_ALLOWED_ORIGINS = config(
"CORS_ALLOWED_ORIGINS",
default="",
cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
"CORS_ALLOWED_ORIGINS", default="", cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
)
# Allowed HTTP headers for CORS requests
@@ -129,33 +125,27 @@ CORS_EXPOSE_HEADERS = [
# API Rate Limiting
# =============================================================================
API_RATE_LIMIT_PER_MINUTE = config(
"API_RATE_LIMIT_PER_MINUTE", default=60, cast=int
)
API_RATE_LIMIT_PER_HOUR = config(
"API_RATE_LIMIT_PER_HOUR", default=1000, cast=int
)
API_RATE_LIMIT_PER_MINUTE = config("API_RATE_LIMIT_PER_MINUTE", default=60, cast=int)
API_RATE_LIMIT_PER_HOUR = config("API_RATE_LIMIT_PER_HOUR", default=1000, cast=int)
# =============================================================================
# SimpleJWT Settings
# =============================================================================
# JWT token configuration for authentication
# Import SECRET_KEY for signing tokens
# This will be set by base.py before this module is imported
def get_secret_key():
"""Get SECRET_KEY lazily to avoid circular imports."""
return config("SECRET_KEY")
SIMPLE_JWT = {
# Token lifetimes
# Short access tokens (15 min) provide better security
"ACCESS_TOKEN_LIFETIME": timedelta(
minutes=config("JWT_ACCESS_TOKEN_LIFETIME_MINUTES", default=15, cast=int)
),
"REFRESH_TOKEN_LIFETIME": timedelta(
days=config("JWT_REFRESH_TOKEN_LIFETIME_DAYS", default=7, cast=int)
),
"ACCESS_TOKEN_LIFETIME": timedelta(minutes=config("JWT_ACCESS_TOKEN_LIFETIME_MINUTES", default=15, cast=int)),
"REFRESH_TOKEN_LIFETIME": timedelta(days=config("JWT_REFRESH_TOKEN_LIFETIME_DAYS", default=7, cast=int)),
# Token rotation and blacklisting
# Rotate refresh tokens on each use and blacklist old ones
"ROTATE_REFRESH_TOKENS": True,
@@ -177,9 +167,7 @@ SIMPLE_JWT = {
# User identification
"USER_ID_FIELD": "id",
"USER_ID_CLAIM": "user_id",
"USER_AUTHENTICATION_RULE": (
"rest_framework_simplejwt.authentication.default_user_authentication_rule"
),
"USER_AUTHENTICATION_RULE": ("rest_framework_simplejwt.authentication.default_user_authentication_rule"),
# Token classes
"AUTH_TOKEN_CLASSES": ("rest_framework_simplejwt.tokens.AccessToken",),
"TOKEN_TYPE_CLAIM": "token_type",
@@ -211,9 +199,7 @@ REST_AUTH = {
# SameSite cookie attribute (Lax is compatible with OAuth flows)
"JWT_AUTH_SAMESITE": "Lax",
"JWT_AUTH_RETURN_EXPIRATION": True,
"JWT_TOKEN_CLAIMS_SERIALIZER": (
"rest_framework_simplejwt.serializers.TokenObtainPairSerializer"
),
"JWT_TOKEN_CLAIMS_SERIALIZER": ("rest_framework_simplejwt.serializers.TokenObtainPairSerializer"),
}
# =============================================================================

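API_RATE_LIMIT_PER_MINUTE and API_RATE_LIMIT_PER_HOUR are plain integers. One common way to feed such values into DRF's built-in throttling is sketched below; this wiring is an assumption and is not shown in the hunk, and the throttle keys would be merged into the existing REST_FRAMEWORK dict rather than replacing it.

from decouple import config

API_RATE_LIMIT_PER_MINUTE = config("API_RATE_LIMIT_PER_MINUTE", default=60, cast=int)
API_RATE_LIMIT_PER_HOUR = config("API_RATE_LIMIT_PER_HOUR", default=1000, cast=int)

REST_FRAMEWORK = {
    # ...other keys as configured earlier in this module...
    "DEFAULT_THROTTLE_CLASSES": [
        "rest_framework.throttling.AnonRateThrottle",
        "rest_framework.throttling.UserRateThrottle",
    ],
    "DEFAULT_THROTTLE_RATES": {
        "anon": f"{API_RATE_LIMIT_PER_MINUTE}/min",  # e.g. "60/min"
        "user": f"{API_RATE_LIMIT_PER_HOUR}/hour",   # e.g. "1000/hour"
    },
}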
View File

@@ -31,17 +31,13 @@ logger = logging.getLogger("security")
# =============================================================================
# Enable secret rotation checking (set to True in production)
SECRET_ROTATION_ENABLED = config(
"SECRET_ROTATION_ENABLED", default=False, cast=bool
)
SECRET_ROTATION_ENABLED = config("SECRET_ROTATION_ENABLED", default=False, cast=bool)
# Secret version for tracking rotations
SECRET_KEY_VERSION = config("SECRET_KEY_VERSION", default="1")
# Secret expiry warning threshold (days before expiry to start warning)
SECRET_EXPIRY_WARNING_DAYS = config(
"SECRET_EXPIRY_WARNING_DAYS", default=30, cast=int
)
SECRET_EXPIRY_WARNING_DAYS = config("SECRET_EXPIRY_WARNING_DAYS", default=30, cast=int)
# =============================================================================
# Required Secrets Registry
@@ -104,10 +100,7 @@ def validate_secret_strength(name: str, value: str, min_length: int = 10) -> bool:
return False
if len(value) < min_length:
logger.error(
f"Secret '{name}' is too short ({len(value)} chars, "
f"minimum {min_length})"
)
logger.error(f"Secret '{name}' is too short ({len(value)} chars, " f"minimum {min_length})")
return False
# Check for placeholder values
@@ -123,9 +116,7 @@ def validate_secret_strength(name: str, value: str, min_length: int = 10) -> bool:
value_lower = value.lower()
for pattern in placeholder_patterns:
if pattern in value_lower:
logger.warning(
f"Secret '{name}' appears to contain a placeholder value"
)
logger.warning(f"Secret '{name}' appears to contain a placeholder value")
return False
return True
@@ -148,9 +139,7 @@ def validate_secret_key(secret_key: str) -> bool:
bool: True if valid, False otherwise
"""
if len(secret_key) < 50:
logger.error(
f"SECRET_KEY is too short ({len(secret_key)} chars, minimum 50)"
)
logger.error(f"SECRET_KEY is too short ({len(secret_key)} chars, minimum 50)")
return False
has_upper = any(c.isupper() for c in secret_key)
@@ -159,10 +148,7 @@ def validate_secret_key(secret_key: str) -> bool:
has_special = any(not c.isalnum() for c in secret_key)
if not all([has_upper, has_lower, has_digit, has_special]):
logger.warning(
"SECRET_KEY should contain uppercase, lowercase, digits, "
"and special characters"
)
logger.warning("SECRET_KEY should contain uppercase, lowercase, digits, " "and special characters")
# Don't fail, just warn - some generated keys may not have all
return True
@@ -193,7 +179,7 @@ def get_secret(
value = config(name, default=default)
except UndefinedValueError:
if required:
raise ValueError(f"Required secret '{name}' is not set")
raise ValueError(f"Required secret '{name}' is not set") from None
return default
if value and min_length > 0 and not validate_secret_strength(name, value, min_length):
@@ -231,7 +217,7 @@ def validate_required_secrets(raise_on_error: bool = False) -> list[str]:
msg = f"Required secret '{name}' is not set: {rules['description']}"
errors.append(msg)
if raise_on_error:
raise ValueError(msg)
raise ValueError(msg) from None
return errors
@@ -257,9 +243,7 @@ def check_secret_expiry() -> list[str]:
version = int(SECRET_KEY_VERSION)
# If version is very old, suggest rotation
if version < 2:
warnings_list.append(
"SECRET_KEY version is old. Consider rotating secrets."
)
warnings_list.append("SECRET_KEY version is old. Consider rotating secrets.")
except ValueError:
pass
@@ -316,8 +300,7 @@ class EnvironmentSecretProvider(SecretProvider):
def set_secret(self, name: str, value: str) -> bool:
"""Environment variables are read-only at runtime."""
logger.warning(
f"Cannot set secret '{name}' in environment provider. "
"Update your .env file or environment variables."
f"Cannot set secret '{name}' in environment provider. " "Update your .env file or environment variables."
)
return False
@@ -385,4 +368,4 @@ def run_startup_validation() -> None:
raise ValueError("SECRET_KEY does not meet security requirements")
except UndefinedValueError:
if not debug_mode:
raise ValueError("SECRET_KEY is required in production")
raise ValueError("SECRET_KEY is required in production") from None

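The validators above are easy to exercise with Django's own key generator. A small sketch; the import path for the helpers is an assumption, since file names are not shown on this page.

from django.core.management.utils import get_random_secret_key

# Assumption: the helpers live in a settings/secrets module importable like this.
from thrillwiki.settings.secrets import validate_secret_key, validate_secret_strength

candidate = get_random_secret_key()    # 50-character random key
print(validate_secret_key(candidate))  # True (length ok; may log a warning about character mix)
print(validate_secret_strength("FORWARD_EMAIL_API_KEY", "changeme"))  # False: too short / placeholder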
View File

@@ -35,15 +35,11 @@ TURNSTILE_VERIFY_URL = config(
# X-XSS-Protection: Enables browser's built-in XSS filter
# Note: Modern browsers are deprecating this in favor of CSP, but it's still
# useful for older browsers
SECURE_BROWSER_XSS_FILTER = config(
"SECURE_BROWSER_XSS_FILTER", default=True, cast=bool
)
SECURE_BROWSER_XSS_FILTER = config("SECURE_BROWSER_XSS_FILTER", default=True, cast=bool)
# X-Content-Type-Options: Prevents MIME type sniffing attacks
# When True, adds "X-Content-Type-Options: nosniff" header
SECURE_CONTENT_TYPE_NOSNIFF = config(
"SECURE_CONTENT_TYPE_NOSNIFF", default=True, cast=bool
)
SECURE_CONTENT_TYPE_NOSNIFF = config("SECURE_CONTENT_TYPE_NOSNIFF", default=True, cast=bool)
# X-Frame-Options: Protects against clickjacking attacks
# DENY = Never allow framing (most secure)
@@ -53,24 +49,18 @@ X_FRAME_OPTIONS = config("X_FRAME_OPTIONS", default="DENY")
# Referrer-Policy: Controls how much referrer information is sent
# strict-origin-when-cross-origin = Send full URL for same-origin,
# only origin for cross-origin, nothing for downgrade
SECURE_REFERRER_POLICY = config(
"SECURE_REFERRER_POLICY", default="strict-origin-when-cross-origin"
)
SECURE_REFERRER_POLICY = config("SECURE_REFERRER_POLICY", default="strict-origin-when-cross-origin")
# Cross-Origin-Opener-Policy: Prevents cross-origin attacks via window references
# same-origin = Document can only be accessed by windows from same origin
SECURE_CROSS_ORIGIN_OPENER_POLICY = config(
"SECURE_CROSS_ORIGIN_OPENER_POLICY", default="same-origin"
)
SECURE_CROSS_ORIGIN_OPENER_POLICY = config("SECURE_CROSS_ORIGIN_OPENER_POLICY", default="same-origin")
# =============================================================================
# HSTS (HTTP Strict Transport Security) Configuration
# =============================================================================
# Include subdomains in HSTS policy
SECURE_HSTS_INCLUDE_SUBDOMAINS = config(
"SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True, cast=bool
)
SECURE_HSTS_INCLUDE_SUBDOMAINS = config("SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True, cast=bool)
# HSTS max-age in seconds (31536000 = 1 year, recommended minimum)
SECURE_HSTS_SECONDS = config("SECURE_HSTS_SECONDS", default=31536000, cast=int)
@@ -82,9 +72,7 @@ SECURE_HSTS_PRELOAD = config("SECURE_HSTS_PRELOAD", default=False, cast=bool)
# URLs exempt from SSL redirect (e.g., health checks)
# Format: comma-separated list of URL patterns
SECURE_REDIRECT_EXEMPT = config(
"SECURE_REDIRECT_EXEMPT",
default="",
cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
"SECURE_REDIRECT_EXEMPT", default="", cast=lambda v: [s.strip() for s in v.split(",") if s.strip()]
)
# Redirect all HTTP requests to HTTPS
@@ -93,9 +81,7 @@ SECURE_SSL_REDIRECT = config("SECURE_SSL_REDIRECT", default=False, cast=bool)
# Header used by proxy to indicate HTTPS
# Common values: ('HTTP_X_FORWARDED_PROTO', 'https')
_proxy_ssl_header = config("SECURE_PROXY_SSL_HEADER", default="")
SECURE_PROXY_SSL_HEADER = (
tuple(_proxy_ssl_header.split(",")) if _proxy_ssl_header else None
)
SECURE_PROXY_SSL_HEADER = tuple(_proxy_ssl_header.split(",")) if _proxy_ssl_header else None
# =============================================================================
# Session Cookie Security
@@ -143,9 +129,7 @@ AUTHENTICATION_BACKENDS = [
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": (
"django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
),
"NAME": ("django.contrib.auth.password_validation.UserAttributeSimilarityValidator"),
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",

View File

@@ -37,19 +37,13 @@ STATIC_ROOT = BASE_DIR / "staticfiles"
# WhiteNoise serves static files efficiently without a separate web server
# Compression quality for Brotli/Gzip (1-100, higher = better but slower)
WHITENOISE_COMPRESSION_QUALITY = config(
"WHITENOISE_COMPRESSION_QUALITY", default=90, cast=int
)
WHITENOISE_COMPRESSION_QUALITY = config("WHITENOISE_COMPRESSION_QUALITY", default=90, cast=int)
# Cache max-age for static files (1 year for immutable content)
WHITENOISE_MAX_AGE = config(
"WHITENOISE_MAX_AGE", default=31536000, cast=int
)
WHITENOISE_MAX_AGE = config("WHITENOISE_MAX_AGE", default=31536000, cast=int)
# Don't fail on missing manifest entries (graceful degradation)
WHITENOISE_MANIFEST_STRICT = config(
"WHITENOISE_MANIFEST_STRICT", default=False, cast=bool
)
WHITENOISE_MANIFEST_STRICT = config("WHITENOISE_MANIFEST_STRICT", default=False, cast=bool)
# Additional MIME types
WHITENOISE_MIMETYPES = {
@@ -59,11 +53,26 @@ WHITENOISE_MIMETYPES = {
# Skip compressing already compressed formats
WHITENOISE_SKIP_COMPRESS_EXTENSIONS = [
"jpg", "jpeg", "png", "gif", "webp", # Images
"zip", "gz", "tgz", "bz2", "tbz", "xz", "br", # Archives
"swf", "flv", # Flash
"woff", "woff2", # Fonts
"mp3", "mp4", "ogg", "webm", # Media
"jpg",
"jpeg",
"png",
"gif",
"webp", # Images
"zip",
"gz",
"tgz",
"bz2",
"tbz",
"xz",
"br", # Archives
"swf",
"flv", # Flash
"woff",
"woff2", # Fonts
"mp3",
"mp4",
"ogg",
"webm", # Media
]
# =============================================================================
@@ -103,20 +112,14 @@ STORAGES = {
# Maximum size (in bytes) of file to upload into memory (2.5MB)
# Files larger than this are written to disk
FILE_UPLOAD_MAX_MEMORY_SIZE = config(
"FILE_UPLOAD_MAX_MEMORY_SIZE", default=2621440, cast=int
)
FILE_UPLOAD_MAX_MEMORY_SIZE = config("FILE_UPLOAD_MAX_MEMORY_SIZE", default=2621440, cast=int)
# Maximum size (in bytes) of request data (10MB)
# This limits the total size of POST request body
DATA_UPLOAD_MAX_MEMORY_SIZE = config(
"DATA_UPLOAD_MAX_MEMORY_SIZE", default=10485760, cast=int
)
DATA_UPLOAD_MAX_MEMORY_SIZE = config("DATA_UPLOAD_MAX_MEMORY_SIZE", default=10485760, cast=int)
# Maximum number of GET/POST parameters (1000)
DATA_UPLOAD_MAX_NUMBER_FIELDS = config(
"DATA_UPLOAD_MAX_NUMBER_FIELDS", default=1000, cast=int
)
DATA_UPLOAD_MAX_NUMBER_FIELDS = config("DATA_UPLOAD_MAX_NUMBER_FIELDS", default=1000, cast=int)
# File upload permissions (0o644 = rw-r--r--)
FILE_UPLOAD_PERMISSIONS = 0o644

View File

@@ -33,9 +33,7 @@ ACCOUNT_SIGNUP_FIELDS = ["email*", "username*", "password1*", "password2*"]
ACCOUNT_LOGIN_METHODS = {"email", "username"}
# Email verification settings
ACCOUNT_EMAIL_VERIFICATION = config(
"ACCOUNT_EMAIL_VERIFICATION", default="mandatory"
)
ACCOUNT_EMAIL_VERIFICATION = config("ACCOUNT_EMAIL_VERIFICATION", default="mandatory")
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_CHANGE = True
ACCOUNT_EMAIL_VERIFICATION_SUPPORTS_RESEND = True
@@ -114,12 +112,8 @@ CELERY_BROKER_URL = config("REDIS_URL", default="redis://localhost:6379/1")
CELERY_RESULT_BACKEND = config("REDIS_URL", default="redis://localhost:6379/1")
# Task settings for test environments
CELERY_TASK_ALWAYS_EAGER = config(
"CELERY_TASK_ALWAYS_EAGER", default=False, cast=bool
)
CELERY_TASK_EAGER_PROPAGATES = config(
"CELERY_TASK_EAGER_PROPAGATES", default=False, cast=bool
)
CELERY_TASK_ALWAYS_EAGER = config("CELERY_TASK_ALWAYS_EAGER", default=False, cast=bool)
CELERY_TASK_EAGER_PROPAGATES = config("CELERY_TASK_EAGER_PROPAGATES", default=False, cast=bool)
# =============================================================================
# Health Check Configuration
@@ -165,16 +159,10 @@ CLOUDFLARE_IMAGES = {
"DEFAULT_VARIANT": config("CLOUDFLARE_IMAGES_DEFAULT_VARIANT", default="public"),
"UPLOAD_TIMEOUT": config("CLOUDFLARE_IMAGES_UPLOAD_TIMEOUT", default=300, cast=int),
"WEBHOOK_SECRET": config("CLOUDFLARE_IMAGES_WEBHOOK_SECRET", default=""),
"CLEANUP_EXPIRED_HOURS": config(
"CLOUDFLARE_IMAGES_CLEANUP_HOURS", default=24, cast=int
),
"MAX_FILE_SIZE": config(
"CLOUDFLARE_IMAGES_MAX_FILE_SIZE", default=10 * 1024 * 1024, cast=int
),
"CLEANUP_EXPIRED_HOURS": config("CLOUDFLARE_IMAGES_CLEANUP_HOURS", default=24, cast=int),
"MAX_FILE_SIZE": config("CLOUDFLARE_IMAGES_MAX_FILE_SIZE", default=10 * 1024 * 1024, cast=int),
"ALLOWED_FORMATS": ["jpeg", "png", "gif", "webp"],
"REQUIRE_SIGNED_URLS": config(
"CLOUDFLARE_IMAGES_REQUIRE_SIGNED_URLS", default=False, cast=bool
),
"REQUIRE_SIGNED_URLS": config("CLOUDFLARE_IMAGES_REQUIRE_SIGNED_URLS", default=False, cast=bool),
"DEFAULT_METADATA": {},
}
@@ -183,21 +171,13 @@ CLOUDFLARE_IMAGES = {
# =============================================================================
# Settings for the road trip planning service using OpenStreetMap
ROADTRIP_CACHE_TIMEOUT = config(
"ROADTRIP_CACHE_TIMEOUT", default=3600 * 24, cast=int
) # 24 hours for geocoding
ROADTRIP_ROUTE_CACHE_TIMEOUT = config(
"ROADTRIP_ROUTE_CACHE_TIMEOUT", default=3600 * 6, cast=int
) # 6 hours for routes
ROADTRIP_CACHE_TIMEOUT = config("ROADTRIP_CACHE_TIMEOUT", default=3600 * 24, cast=int) # 24 hours for geocoding
ROADTRIP_ROUTE_CACHE_TIMEOUT = config("ROADTRIP_ROUTE_CACHE_TIMEOUT", default=3600 * 6, cast=int) # 6 hours for routes
ROADTRIP_MAX_REQUESTS_PER_SECOND = config(
"ROADTRIP_MAX_REQUESTS_PER_SECOND", default=1, cast=int
) # Respect OSM rate limits
ROADTRIP_USER_AGENT = config(
"ROADTRIP_USER_AGENT", default="ThrillWiki/1.0 (https://thrillwiki.com)"
)
ROADTRIP_REQUEST_TIMEOUT = config(
"ROADTRIP_REQUEST_TIMEOUT", default=10, cast=int
) # seconds
ROADTRIP_USER_AGENT = config("ROADTRIP_USER_AGENT", default="ThrillWiki/1.0 (https://thrillwiki.com)")
ROADTRIP_REQUEST_TIMEOUT = config("ROADTRIP_REQUEST_TIMEOUT", default=10, cast=int) # seconds
ROADTRIP_MAX_RETRIES = config("ROADTRIP_MAX_RETRIES", default=3, cast=int)
ROADTRIP_BACKOFF_FACTOR = config("ROADTRIP_BACKOFF_FACTOR", default=2, cast=int)
@@ -206,9 +186,7 @@ ROADTRIP_BACKOFF_FACTOR = config("ROADTRIP_BACKOFF_FACTOR", default=2, cast=int)
# =============================================================================
# django-autocomplete-light settings
AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = config(
"AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", default=False, cast=bool
)
AUTOCOMPLETE_BLOCK_UNAUTHENTICATED = config("AUTOCOMPLETE_BLOCK_UNAUTHENTICATED", default=False, cast=bool)
# =============================================================================
# Frontend Configuration
@@ -226,7 +204,5 @@ TURNSTILE_SECRET = config("TURNSTILE_SECRET", default="")
# Skip Turnstile validation in development if keys not set
TURNSTILE_SKIP_VALIDATION = config(
"TURNSTILE_SKIP_VALIDATION",
default=not TURNSTILE_SECRET, # Skip if no secret
cast=bool
"TURNSTILE_SKIP_VALIDATION", default=not TURNSTILE_SECRET, cast=bool # Skip if no secret
)

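TURNSTILE_SECRET and TURNSTILE_SKIP_VALIDATION feed a server-side check against Cloudflare's siteverify endpoint (TURNSTILE_VERIFY_URL above). A hedged sketch of that call; the project's actual helper is not shown in this commit, and the function name here is illustrative.

import requests
from django.conf import settings

def verify_turnstile(token: str, remote_ip: str | None = None) -> bool:
    """Illustrative helper: True when the token validates or validation is skipped."""
    if settings.TURNSTILE_SKIP_VALIDATION:  # dev fallback when no secret is configured
        return True
    resp = requests.post(
        settings.TURNSTILE_VERIFY_URL,  # Cloudflare's siteverify endpoint
        data={"secret": settings.TURNSTILE_SECRET, "response": token, "remoteip": remote_ip or ""},
        timeout=10,
    )
    return resp.json().get("success", False)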
View File

@@ -160,19 +160,13 @@ def validate_email(value: str) -> bool:
def validate_type(value: Any, expected_type: type) -> bool:
"""Validate that a value is of the expected type."""
if expected_type == bool:
if expected_type is bool:
# Special handling for boolean strings
return isinstance(value, bool) or str(value).lower() in (
"true", "false", "1", "0", "yes", "no"
)
return isinstance(value, bool) or str(value).lower() in ("true", "false", "1", "0", "yes", "no")
return isinstance(value, expected_type)
def validate_range(
value: Any,
min_value: Any | None = None,
max_value: Any | None = None
) -> bool:
def validate_range(value: Any, min_value: Any | None = None, max_value: Any | None = None) -> bool:
"""Validate that a value is within a specified range."""
if min_value is not None and value < min_value:
return False
@@ -215,11 +209,11 @@ def validate_variable(name: str, rules: dict) -> list[str]:
var_type = rules.get("type", str)
default = rules.get("default")
if var_type == bool:
if var_type is bool:
value = config(name, default=default, cast=bool)
elif var_type == int:
elif var_type is int:
value = config(name, default=default, cast=int)
elif var_type == float:
elif var_type is float:
value = config(name, default=default, cast=float)
else:
value = config(name, default=default)
@@ -233,29 +227,21 @@ def validate_variable(name: str, rules: dict) -> list[str]:
# Type validation
if not validate_type(value, rules.get("type", str)):
errors.append(
f"{name}: Expected type {rules['type'].__name__}, "
f"got {type(value).__name__}"
)
errors.append(f"{name}: Expected type {rules['type'].__name__}, " f"got {type(value).__name__}")
# Length validation (for strings)
if isinstance(value, str):
min_length = rules.get("min_length", 0)
max_length = rules.get("max_length")
if not validate_length(value, min_length, max_length):
errors.append(
f"{name}: Length must be between {min_length} and "
f"{max_length or 'unlimited'}"
)
errors.append(f"{name}: Length must be between {min_length} and " f"{max_length or 'unlimited'}")
# Range validation (for numbers)
if isinstance(value, (int, float)):
if isinstance(value, int | float):
min_value = rules.get("min_value")
max_value = rules.get("max_value")
if not validate_range(value, min_value, max_value):
errors.append(
f"{name}: Value must be between {min_value} and {max_value}"
)
errors.append(f"{name}: Value must be between {min_value} and {max_value}")
# Custom validator
validator_name = rules.get("validator")
@@ -285,13 +271,9 @@ def validate_cross_rules() -> list[str]:
try:
value = config(var_name, default=None)
if value is not None and not check_fn(value):
errors.append(
f"{rule['name']}: {var_name} {message}"
)
errors.append(f"{rule['name']}: {var_name} {message}")
except Exception:
errors.append(
f"{rule['name']}: Could not validate {var_name}"
)
errors.append(f"{rule['name']}: Could not validate {var_name}")
except Exception as e:
errors.append(f"Cross-validation error for {rule['name']}: {e}")
@@ -343,9 +325,7 @@ def validate_all_settings(raise_on_error: bool = False) -> dict:
logger.error(f"Configuration error: {error}")
if raise_on_error:
raise ValueError(
f"Configuration validation failed: {result['errors']}"
)
raise ValueError(f"Configuration validation failed: {result['errors']}")
# Log warnings
for warning in result["warnings"]:
@@ -372,9 +352,7 @@ def run_startup_validation() -> None:
for error in result["errors"]:
warnings.warn(f"Configuration error: {error}", stacklevel=2)
else:
raise ValueError(
"Configuration validation failed. Check logs for details."
)
raise ValueError("Configuration validation failed. Check logs for details.")
# =============================================================================
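run_startup_validation() is the entry point for the checks above. A hedged sketch of where it might be invoked; the AppConfig hook, app name, and import path are assumptions, not part of this commit.

from django.apps import AppConfig

class CoreConfig(AppConfig):  # illustrative app
    name = "core"

    def ready(self) -> None:
        # Run once as the app registry finishes loading: warns under DEBUG,
        # raises ValueError on hard failures outside DEBUG.
        from thrillwiki.settings.validation import run_startup_validation  # path is an assumption
        run_startup_validation()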