Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git
Synced 2025-12-20 11:11:10 -05:00
Add migrations for ParkPhoto and RidePhoto models with associated events
- Created ParkPhoto and ParkPhotoEvent models in the parks app, including fields for image, caption, and alt text, plus a relationship to the Park model.
- Implemented triggers for insert and update operations on ParkPhoto to log changes in ParkPhotoEvent.
- Created RidePhoto and RidePhotoEvent models in the rides app, with structure and functionality mirroring ParkPhoto.
- Added a photo type field to RidePhoto and implemented corresponding triggers for logging changes.
- Established the necessary indexes and unique constraints on both models to ensure data integrity and optimize queries.
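For context, a minimal sketch of the kind of model these migrations would be generated from, assuming django-pghistory's event trackers (the middleware touched in this commit imports pghistory); the upload path, index, and related_name are illustrative, not the repo's actual source, and RidePhoto would mirror this with an additional photo type field:

```python
# Hypothetical sketch: a pghistory-tracked photo model that `makemigrations`
# would turn into a ParkPhoto table, a ParkPhotoEvent table, and the
# insert/update triggers recorded in these migrations.
import pghistory
from django.db import models


@pghistory.track(
    pghistory.InsertEvent(),  # log a ParkPhotoEvent row on every INSERT
    pghistory.UpdateEvent(),  # log a ParkPhotoEvent row on every UPDATE
)
class ParkPhoto(models.Model):
    park = models.ForeignKey(
        "parks.Park", on_delete=models.CASCADE, related_name="photos"
    )
    image = models.ImageField(upload_to="parks/photos/")
    caption = models.CharField(max_length=255, blank=True)
    alt_text = models.CharField(max_length=255, blank=True)

    class Meta:
        indexes = [models.Index(fields=["park"])]
```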
@@ -100,12 +100,9 @@ def cache_api_response(
                 )
             else:
                 logger.debug(
-                    f"Not caching response for view {
-                        view_func.__name__} (status: {
-                        getattr(
-                            response,
-                            'status_code',
-                            'unknown')})"
+                    f"Not caching response for view {view_func.__name__} (status: {
+                        getattr(response, 'status_code', 'unknown')
+                    })"
                 )

         return response
@@ -135,10 +132,7 @@ def cache_queryset_result(
                 cache_key = cache_key_template.format(*args, **kwargs)
             except (KeyError, IndexError):
                 # Fallback to simpler key generation
-                cache_key = f"{cache_key_template}:{
-                    hash(
-                        str(args) +
-                        str(kwargs))}"
+                cache_key = f"{cache_key_template}:{hash(str(args) + str(kwargs))}"

             cache_service = EnhancedCacheService()
             cached_result = getattr(cache_service, cache_backend + "_cache").get(
@@ -146,10 +140,7 @@ def cache_queryset_result(
             )

             if cached_result is not None:
-                logger.debug(
-                    f"Cache hit for queryset operation: {
-                        func.__name__}"
-                )
+                logger.debug(f"Cache hit for queryset operation: {func.__name__}")
                 return cached_result

             # Execute function and cache result
@@ -314,9 +305,9 @@ def smart_cache(
                 "kwargs": json.dumps(kwargs, sort_keys=True, default=str),
             }
             key_string = json.dumps(key_data, sort_keys=True)
-            cache_key = f"smart_cache:{
-                hashlib.md5(
-                    key_string.encode()).hexdigest()}"
+            cache_key = (
+                f"smart_cache:{hashlib.md5(key_string.encode()).hexdigest()}"
+            )

             # Try to get from cache
             cache_service = EnhancedCacheService()
@@ -57,13 +57,11 @@ class CacheHealthCheck(BaseHealthCheckBackend):
                 memory_usage_percent = (used_memory / max_memory) * 100
                 if memory_usage_percent > 90:
                     self.add_error(
-                        f"Redis memory usage critical: {
-                            memory_usage_percent:.1f}%"
+                        f"Redis memory usage critical: {memory_usage_percent:.1f}%"
                     )
                 elif memory_usage_percent > 80:
                     logger.warning(
-                        f"Redis memory usage high: {
-                            memory_usage_percent:.1f}%"
+                        f"Redis memory usage high: {memory_usage_percent:.1f}%"
                     )

         except ImportError:
@@ -190,10 +188,7 @@ class ApplicationHealthCheck(BaseHealthCheckBackend):
         import os

         if not os.path.exists(settings.MEDIA_ROOT):
-            self.add_error(
-                f"Media directory does not exist: {
-                    settings.MEDIA_ROOT}"
-            )
+            self.add_error(f"Media directory does not exist: {settings.MEDIA_ROOT}")

         if not os.path.exists(settings.STATIC_ROOT) and not settings.DEBUG:
             self.add_error(
@@ -305,8 +300,7 @@ class DiskSpaceHealthCheck(BaseHealthCheckBackend):
             )
         elif media_free_percent < 20:
             logger.warning(
-                f"Low disk space: {
-                    media_free_percent:.1f}% free in media directory"
+                f"Low disk space: {media_free_percent:.1f}% free in media directory"
             )

         if logs_free_percent < 10:
@@ -316,8 +310,7 @@ class DiskSpaceHealthCheck(BaseHealthCheckBackend):
             )
         elif logs_free_percent < 20:
             logger.warning(
-                f"Low disk space: {
-                    logs_free_percent:.1f}% free in logs directory"
+                f"Low disk space: {logs_free_percent:.1f}% free in logs directory"
             )

     except Exception as e:
@@ -5,8 +5,6 @@ This command automatically sets up the development environment and starts
 the server, replacing the need for the dev_server.sh script.
 """

-import subprocess
-import sys
 from django.core.management.base import BaseCommand
 from django.core.management import execute_from_command_line

@@ -62,7 +60,7 @@ class Command(BaseCommand):
         self.stdout.write("")
         self.stdout.write(
             self.style.SUCCESS(
-                f'🌟 Starting Django development server on http://{options["host"]}:{options["port"]}'
+                f"🌟 Starting Django development server on http://{options['host']}:{options['port']}"
             )
         )
         self.stdout.write("Press Ctrl+C to stop the server")
@@ -74,12 +72,12 @@ class Command(BaseCommand):
                     [
                         "manage.py",
                         "runserver_plus",
-                        f'{options["host"]}:{options["port"]}',
+                        f"{options['host']}:{options['port']}",
                     ]
                 )
             else:
                 execute_from_command_line(
-                    ["manage.py", "runserver", f'{options["host"]}:{options["port"]}']
+                    ["manage.py", "runserver", f"{options['host']}:{options['port']}"]
                 )
         except KeyboardInterrupt:
             self.stdout.write("")
@@ -5,13 +5,11 @@ This command performs all the setup tasks that the dev_server.sh script does,
 allowing the project to run without requiring the shell script.
 """

 import os
 import subprocess
-import sys
 from pathlib import Path

 from django.core.management.base import BaseCommand
 from django.core.management import execute_from_command_line
-from django.conf import settings


@@ -5,10 +5,6 @@ Analytics and tracking middleware for Django application.
 import pghistory
-from django.contrib.auth.models import AnonymousUser
-from django.core.handlers.wsgi import WSGIRequest
 from django.utils.deprecation import MiddlewareMixin
-from django.contrib.contenttypes.models import ContentType
-from django.views.generic.detail import DetailView
 from apps.core.analytics import PageView


 class RequestContextProvider(pghistory.context):
@@ -151,12 +151,10 @@ class PerformanceMiddleware(MiddlewareMixin):
         }

         performance_logger.error(
-            f"Request exception: {
-                request.method} {
-                request.path} - "
-            f"{
-                duration:.3f}s, {total_queries} queries, {
-                type(exception).__name__}: {exception}",
+            f"Request exception: {request.method} {request.path} - "
+            f"{duration:.3f}s, {total_queries} queries, {type(exception).__name__}: {
+                exception
+            }",
             extra=performance_data,
         )

@@ -216,10 +214,10 @@ class QueryCountMiddleware(MiddlewareMixin):

         if request_query_count > self.query_limit:
             logger.warning(
-                f"Excessive query count: {
-                    request.path} executed {request_query_count} queries "
-                f"(limit: {
-                    self.query_limit})",
+                f"Excessive query count: {request.path} executed {
+                    request_query_count
+                } queries "
+                f"(limit: {self.query_limit})",
                 extra={
                     "path": request.path,
                     "method": request.method,
@@ -308,9 +306,7 @@ class CachePerformanceMiddleware(MiddlewareMixin):
             )
         else:
             logger.debug(
-                f"Cache performance for {
-                    request.path}: {
-                    hit_rate:.1f}% hit rate",
+                f"Cache performance for {request.path}: {hit_rate:.1f}% hit rate",
                 extra=cache_data,
             )

@@ -8,14 +8,13 @@ analytics for the trending algorithm.

 import logging
 import re
-from datetime import datetime, timedelta
+from datetime import timedelta
 from typing import Optional, Union
 from django.http import HttpRequest, HttpResponse
 from django.utils import timezone
 from django.contrib.contenttypes.models import ContentType
 from django.core.cache import cache
 from django.conf import settings
 from django.db import models

 from apps.core.analytics import PageView
 from apps.parks.models import Park
@@ -68,7 +67,6 @@ class ViewTrackingMiddleware:
             and 200 <= response.status_code < 300
             and not self._should_skip_tracking(request)
         ):
-
             try:
                 self._track_view_if_applicable(request)
             except Exception as e:
@@ -5,7 +5,6 @@ from django.db import migrations, models


 class Migration(migrations.Migration):
-
     initial = True

     dependencies = [
@@ -6,7 +6,6 @@ from django.db import migrations, models


 class Migration(migrations.Migration):
-
     dependencies = [
         ("contenttypes", "0002_remove_content_type_name"),
         ("core", "0001_initial"),
@@ -7,7 +7,6 @@ from django.db import migrations, models


 class Migration(migrations.Migration):
-
     dependencies = [
         ("contenttypes", "0002_remove_content_type_name"),
         ("core", "0002_historicalslug_pageview"),
@@ -50,8 +50,7 @@ class EnhancedCacheService:

         # Log cache miss and function execution time
         logger.info(
-            f"Cache miss for key '{cache_key}', executed in {
-                duration:.3f}s",
+            f"Cache miss for key '{cache_key}', executed in {duration:.3f}s",
             extra={"cache_key": cache_key, "execution_time": duration},
         )

@@ -96,11 +95,9 @@ class EnhancedCacheService:
     ):
         """Cache geographic data with spatial keys"""
         # Generate spatial cache key based on bounds and zoom level
-        cache_key = f"geo:{
-            bounds.min_lat}:{
-            bounds.min_lng}:{
-            bounds.max_lat}:{
-            bounds.max_lng}:z{zoom_level}"
+        cache_key = f"geo:{bounds.min_lat}:{bounds.min_lng}:{bounds.max_lat}:{
+            bounds.max_lng
+        }:z{zoom_level}"
         self.default_cache.set(cache_key, data, timeout)
         logger.debug(f"Cached geographic data for bounds {bounds}")

@@ -108,11 +105,9 @@ class EnhancedCacheService:
         self, bounds: "GeoBounds", zoom_level: int
     ) -> Optional[Any]:
         """Retrieve cached geographic data"""
-        cache_key = f"geo:{
-            bounds.min_lat}:{
-            bounds.min_lng}:{
-            bounds.max_lat}:{
-            bounds.max_lng}:z{zoom_level}"
+        cache_key = f"geo:{bounds.min_lat}:{bounds.min_lng}:{bounds.max_lat}:{
+            bounds.max_lng
+        }:z{zoom_level}"
         return self.default_cache.get(cache_key)

 # Cache invalidation utilities
@@ -206,10 +201,7 @@ def cache_api_response(timeout=1800, vary_on=None, key_prefix=""):
             response = view_func(self, request, *args, **kwargs)
             if hasattr(response, "status_code") and response.status_code == 200:
                 cache_service.api_cache.set(cache_key, response, timeout)
-                logger.debug(
-                    f"Cached API response for view {
-                        view_func.__name__}"
-                )
+                logger.debug(f"Cached API response for view {view_func.__name__}")

             return response

@@ -273,10 +265,7 @@ class CacheWarmer:
             try:
                 self.cache_service.warm_cache(**operation)
             except Exception as e:
-                logger.error(
-                    f"Error warming cache for {
-                        operation['cache_key']}: {e}"
-                )
+                logger.error(f"Error warming cache for {operation['cache_key']}: {e}")


 # Cache statistics and monitoring
@@ -2,7 +2,6 @@
 Location adapters for converting between domain-specific models and UnifiedLocation.
 """

-from django.db import models
 from typing import List, Optional
 from django.db.models import QuerySet
 from django.urls import reverse
@@ -447,13 +447,10 @@ class LocationSearchService:
             suggestions.append(
                 {
                     "type": "city",
-                    "name": f"{
-                        city_data['city']}, {
-                        city_data['state']}",
-                    "address": f"{
-                        city_data['city']}, {
-                        city_data['state']}, {
-                        city_data['country']}",
+                    "name": f"{city_data['city']}, {city_data['state']}",
+                    "address": f"{city_data['city']}, {city_data['state']}, {
+                        city_data['country']
+                    }",
                     "coordinates": None,
                 }
             )
@@ -289,11 +289,7 @@ class MapCacheService:
         """Record query performance metrics for analysis."""
         try:
             # 5-minute buckets
-            stats_key = f"{
-                self.STATS_PREFIX}:performance:{
-                int(
-                    time.time() //
-                    300)}"
+            stats_key = f"{self.STATS_PREFIX}:performance:{int(time.time() // 300)}"

             current_stats = cache.get(
                 stats_key,
@@ -21,10 +21,7 @@ class MediaService:

     @staticmethod
     def generate_upload_path(
-        domain: str,
-        identifier: str,
-        filename: str,
-        subdirectory: Optional[str] = None
+        domain: str, identifier: str, filename: str, subdirectory: Optional[str] = None
     ) -> str:
         """
         Generate standardized upload path for media files.
@@ -86,16 +83,26 @@ class MediaService:
         """
         try:
             # Check file size
-            max_size = getattr(settings, 'MAX_PHOTO_SIZE',
-                               10 * 1024 * 1024)  # 10MB default
+            max_size = getattr(
+                settings, "MAX_PHOTO_SIZE", 10 * 1024 * 1024
+            )  # 10MB default
             if image_file.size > max_size:
-                return False, f"File size too large. Maximum size is {max_size // (1024 * 1024)}MB"
+                return (
+                    False,
+                    f"File size too large. Maximum size is {max_size // (1024 * 1024)}MB",
+                )

             # Check file type
-            allowed_types = getattr(settings, 'ALLOWED_PHOTO_TYPES', [
-                'image/jpeg', 'image/png', 'image/webp'])
+            allowed_types = getattr(
+                settings,
+                "ALLOWED_PHOTO_TYPES",
+                ["image/jpeg", "image/png", "image/webp"],
+            )
             if image_file.content_type not in allowed_types:
-                return False, f"File type not allowed. Allowed types: {', '.join(allowed_types)}"
+                return (
+                    False,
+                    f"File type not allowed. Allowed types: {', '.join(allowed_types)}",
+                )

             # Try to open with PIL to validate it's a real image
             with Image.open(image_file) as img:
|
||||
image_file: UploadedFile,
|
||||
max_width: int = 1920,
|
||||
max_height: int = 1080,
|
||||
quality: int = 85
|
||||
quality: int = 85,
|
||||
) -> UploadedFile:
|
||||
"""
|
||||
Process and optimize image file.
|
||||
@@ -128,8 +135,8 @@ class MediaService:
         try:
             with Image.open(image_file) as img:
                 # Convert to RGB if necessary
-                if img.mode in ('RGBA', 'LA', 'P'):
-                    img = img.convert('RGB')
+                if img.mode in ("RGBA", "LA", "P"):
+                    img = img.convert("RGB")

                 # Resize if necessary
                 if img.width > max_width or img.height > max_height:
|
||||
from django.core.files.uploadedfile import InMemoryUploadedFile
|
||||
|
||||
output = BytesIO()
|
||||
img.save(output, format='JPEG', quality=quality, optimize=True)
|
||||
img.save(output, format="JPEG", quality=quality, optimize=True)
|
||||
output.seek(0)
|
||||
|
||||
return InMemoryUploadedFile(
|
||||
output,
|
||||
'ImageField',
|
||||
"ImageField",
|
||||
f"{os.path.splitext(image_file.name)[0]}.jpg",
|
||||
'image/jpeg',
|
||||
"image/jpeg",
|
||||
output.getbuffer().nbytes,
|
||||
None
|
||||
None,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -168,6 +175,7 @@ class MediaService:
|
||||
Default caption string
|
||||
"""
|
||||
from django.utils import timezone
|
||||
|
||||
current_time = timezone.now()
|
||||
return f"Uploaded by {username} on {current_time.strftime('%B %d, %Y at %I:%M %p')}"
|
||||
|
||||
@@ -185,7 +193,7 @@ class MediaService:
                 "total_files": 0,
                 "total_size_bytes": 0,
                 "storage_backend": "default",
-                "available_space": "unknown"
+                "available_space": "unknown",
             }
         except Exception as e:
             logger.error(f"Failed to get storage stats: {str(e)}")
@@ -57,16 +57,16 @@ def monitor_performance(operation_name: str, **tags):
             )
             logger.log(
                 log_level,
-                f"Performance: {operation_name} completed in {
-                    duration:.3f}s with {total_queries} queries",
+                f"Performance: {operation_name} completed in {duration:.3f}s with {
+                    total_queries
+                } queries",
                 extra=performance_context,
             )

             # Log slow operations with additional detail
             if duration > 2.0:
                 logger.warning(
-                    f"Slow operation detected: {operation_name} took {
-                        duration:.3f}s",
+                    f"Slow operation detected: {operation_name} took {duration:.3f}s",
                     extra={
                         "slow_operation": True,
                         "threshold_exceeded": "duration",
@@ -246,9 +246,9 @@ class PerformanceProfiler:
         log_level = logging.WARNING if total_duration > 1.0 else logging.INFO
         logger.log(
             log_level,
-            f"Profiling complete: {
-                self.name} took {
-                total_duration:.3f}s with {total_queries} queries",
+            f"Profiling complete: {self.name} took {total_duration:.3f}s with {
+                total_queries
+            } queries",
             extra=report,
         )

@@ -395,7 +395,9 @@ class TrendingService:
         """Calculate popularity score based on total view count."""
         try:
             total_views = PageView.get_total_views_count(
-                content_type, object_id, hours=168  # Last 7 days
+                content_type,
+                object_id,
+                hours=168,  # Last 7 days
             )

             # Normalize views to 0-1 scale
@@ -323,10 +323,7 @@ class IndexAnalyzer:
         common_filter_fields = ["slug", "name", "created_at", "updated_at"]
         for field in opts.fields:
             if field.name in common_filter_fields and not field.db_index:
-                suggestions.append(
-                    f"Consider adding db_index=True to {
-                        field.name}"
-                )
+                suggestions.append(f"Consider adding db_index=True to {field.name}")

         return suggestions
@@ -419,9 +416,9 @@ def monitor_db_performance(operation_name: str):
             if duration > 1.0 or total_queries > 15 or slow_queries:
                 logger.warning(
                     f"Performance issue in {operation_name}: "
-                    f"{
-                        duration:.3f}s, {total_queries} queries, {
-                        len(slow_queries)} slow",
+                    f"{duration:.3f}s, {total_queries} queries, {
+                        len(slow_queries)
+                    } slow",
                     extra=performance_data,
                 )
             else:
@@ -41,11 +41,7 @@ class MapAPIView(View):
             response["Access-Control-Allow-Headers"] = "Content-Type, Authorization"

             # Add performance headers
-            response["X-Response-Time"] = (
-                f"{(time.time() -
-                    start_time) *
-                    1000:.2f}ms"
-            )
+            response["X-Response-Time"] = f"{(time.time() - start_time) * 1000:.2f}ms"

             # Add compression hint for large responses
             if hasattr(response, "content") and len(response.content) > 1024:
@@ -55,9 +51,7 @@ class MapAPIView(View):

         except Exception as e:
             logger.error(
-                f"API error in {
-                    request.path}: {
-                    str(e)}",
+                f"API error in {request.path}: {str(e)}",
                 exc_info=True,
             )
             return self._error_response("An internal server error occurred", status=500)
@@ -412,7 +406,8 @@ class MapLocationDetailView(MapAPIView):
         if location_type not in valid_types:
             return self._error_response(
                 f"Invalid location type: {location_type}. Valid types: {
-                    ', '.join(valid_types)}",
+                    ', '.join(valid_types)
+                }",
                 400,
                 error_code="INVALID_LOCATION_TYPE",
             )
@@ -450,8 +445,7 @@ class MapLocationDetailView(MapAPIView):
             return self._error_response(str(e), 400, error_code="INVALID_PARAMETER")
         except Exception as e:
             logger.error(
-                f"Error in MapLocationDetailView: {
-                    str(e)}",
+                f"Error in MapLocationDetailView: {str(e)}",
                 exc_info=True,
             )
             return self._error_response(
@@ -606,8 +600,7 @@ class MapBoundsView(MapAPIView):
             return self._error_response(str(e), 400)
         except Exception as e:
             return self._error_response(
-                f"Internal server error: {
-                    str(e)}",
+                f"Internal server error: {str(e)}",
                 500,
             )

@@ -628,8 +621,7 @@ class MapStatsView(MapAPIView):

         except Exception as e:
             return self._error_response(
-                f"Internal server error: {
-                    str(e)}",
+                f"Internal server error: {str(e)}",
                 500,
             )

@@ -657,8 +649,7 @@ class MapCacheView(MapAPIView):

         except Exception as e:
             return self._error_response(
-                f"Internal server error: {
-                    str(e)}",
+                f"Internal server error: {str(e)}",
                 500,
             )

@@ -693,7 +684,6 @@ class MapCacheView(MapAPIView):
             return self._error_response(f"Invalid request data: {str(e)}", 400)
         except Exception as e:
             return self._error_response(
-                f"Internal server error: {
-                    str(e)}",
+                f"Internal server error: {str(e)}",
                 500,
             )
@@ -143,8 +143,7 @@ class NearbyLocationsView(MapViewMixin, TemplateView):

         context.update(
             {
-                "page_title": f"Locations Near {
-                    center_lat:.4f}, {
+                "page_title": f"Locations Near {center_lat:.4f}, {
                     center_lng:.4f}",
                 "map_type": "nearby",
                 "center_coordinates": {