feat: Implement email change cancellation, location search, and admin anomaly detection endpoints.

This commit is contained in:
pacnpal
2026-01-05 14:31:04 -05:00
parent a801813dcf
commit 2b7bb4dfaa
13 changed files with 2074 additions and 22 deletions

View File

@@ -33,4 +33,5 @@ urlpatterns = [
# Location detection and enrichment
path("detect-location/", views.DetectLocationView.as_view(), name="detect_location"),
path("enrich-location/", views.EnrichLocationView.as_view(), name="enrich_location"),
path("search-location/", views.SearchLocationView.as_view(), name="search_location"),
]

View File

@@ -1241,3 +1241,388 @@ class EnrichLocationView(APIView):
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@extend_schema_view(
    post=extend_schema(
        summary="Search for a location by text",
        description="Forward geocoding - convert a text query (address, city name, etc.) to coordinates.",
        request={
            "application/json": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "Location search query (address, city, place name, etc.)",
                    },
                    "limit": {
                        "type": "integer",
                        "description": "Maximum number of results to return (default: 5)",
                    },
                    "country": {
                        "type": "string",
                        "description": "ISO 3166-1 alpha-2 country code to restrict search",
                    },
                },
                "required": ["query"],
            }
        },
        responses={
            200: {
                "type": "object",
                "properties": {
                    "results": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "latitude": {"type": "number"},
                                "longitude": {"type": "number"},
                                "formatted_address": {"type": "string"},
                                "city": {"type": "string"},
                                "state": {"type": "string"},
                                "country": {"type": "string"},
                                "importance": {"type": "number"},
                            },
                        },
                    },
                    "query": {"type": "string"},
                    "count": {"type": "integer"},
                },
            },
            400: {"description": "Missing or invalid query parameter"},
        },
        tags=["Maps"],
    ),
)
class SearchLocationView(APIView):
    """
    POST /maps/search-location/

    Forward geocoding - search for locations by text query.
    Full parity with Supabase Edge Function: search-location

    Features:
    - Query caching with SHA-256 hash (7-day expiration)
    - Rate limiting (30 requests per minute per IP)
    - Usage logging for monitoring
    - Cache headers (X-Cache: HIT/MISS)
    """

    permission_classes = [AllowAny]

    # Rate limit settings matching original
    RATE_LIMIT_REQUESTS = 30
    RATE_LIMIT_PERIOD = 60  # 1 minute
    CACHE_EXPIRATION = 7 * 24 * 60 * 60  # 7 days in seconds

    def _hash_query(self, query: str) -> str:
        """Hash query for cache lookup (matching original SHA-256).

        The query is normalized (stripped, lowercased) so equivalent
        queries share one cache entry.
        """
        import hashlib

        normalized = query.strip().lower()
        return hashlib.sha256(normalized.encode()).hexdigest()

    def _get_client_ip(self, request) -> str:
        """Get client IP from request headers.

        Prefers the left-most X-Forwarded-For hop, then X-Real-IP, then
        the socket address. NOTE(review): both headers are spoofable
        unless a trusted proxy strips/sets them - confirm the deployment
        sits behind one before relying on this for rate limiting.
        """
        x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
        if x_forwarded_for:
            return x_forwarded_for.split(',')[0].strip()
        return request.META.get('HTTP_X_REAL_IP') or request.META.get('REMOTE_ADDR') or 'unknown'

    def _check_rate_limit(self, client_ip: str) -> tuple[bool, int]:
        """
        Check if client is rate limited.

        Uses cache.add() + cache.incr() so the counter is created and
        incremented atomically on backends that support it: the previous
        get()-then-set() pattern both raced under concurrent requests
        and reset the TTL on every hit, turning the intended fixed
        60-second window into one that never expired for a steady
        client.

        Returns (is_allowed, current_count).
        """
        from django.core.cache import cache

        rate_limit_key = f"search_location:rate:{client_ip}"

        # add() succeeds only when the key does not exist yet; this also
        # starts the fixed window's TTL exactly once.
        if cache.add(rate_limit_key, 1, self.RATE_LIMIT_PERIOD):
            return True, 1
        try:
            current_count = cache.incr(rate_limit_key)
        except ValueError:
            # Key expired between add() and incr() - start a new window.
            cache.set(rate_limit_key, 1, self.RATE_LIMIT_PERIOD)
            return True, 1
        if current_count > self.RATE_LIMIT_REQUESTS:
            return False, current_count
        return True, current_count

    def _get_cached_result(self, query_hash: str):
        """Get cached result if available; returns None on a miss."""
        from django.core.cache import cache

        cache_key = f"search_location:query:{query_hash}"
        cached_data = cache.get(cache_key)
        if cached_data:
            # Track hits in a separate key so we never rewrite the
            # (larger) result payload just to bump a counter.
            access_key = f"search_location:access:{query_hash}"
            access_count = cache.get(access_key, 0)
            cache.set(access_key, access_count + 1, self.CACHE_EXPIRATION)
            return cached_data

    def _set_cached_result(self, query: str, query_hash: str, results: list):
        """Cache the results under the query's SHA-256 hash for 7 days."""
        from django.core.cache import cache

        cache_key = f"search_location:query:{query_hash}"
        cache_data = {
            "query": query,
            "results": results,
            "result_count": len(results),
        }
        cache.set(cache_key, cache_data, self.CACHE_EXPIRATION)
        # Initialize access count
        access_key = f"search_location:access:{query_hash}"
        cache.set(access_key, 1, self.CACHE_EXPIRATION)

    def _log_usage(self, query: str, cache_hit: bool, api_called: bool,
                   response_time_ms: int | None = None, result_count: int | None = None,
                   client_ip: str | None = None, user_id: str | None = None,
                   error: str | None = None, status_code: int | None = None):
        """Log API usage for monitoring.

        Emits a structured log record only (can be enhanced to write to
        DB). The query is truncated to 100 chars to bound log size.
        """
        logger.info(
            "OpenStreetMap API usage",
            extra={
                "query": query[:100],
                "cache_hit": cache_hit,
                "api_called": api_called,
                "response_time_ms": response_time_ms,
                "result_count": result_count,
                "client_ip": client_ip,
                "user_id": user_id,
                "error": error,
                "status_code": status_code,
            }
        )

    def post(self, request):
        """Handle a forward-geocoding search request.

        Pipeline: sanitize input -> validate -> rate-limit -> cache
        lookup -> Nominatim call -> cache store -> respond. Successful
        responses are the raw Nominatim result array (frontend handles
        both formats), with X-Cache: HIT/MISS headers.
        """
        import time
        import re

        start_time = time.time()
        client_ip = self._get_client_ip(request)

        user_id = None
        try:
            # Safely get user ID; anonymous requests are allowed.
            if request.user and request.user.is_authenticated:
                user_id = str(getattr(request.user, 'user_id', request.user.id))
        except Exception:
            pass

        try:
            # ================================================================
            # STEP 0: Sanitize and validate input
            # ================================================================
            raw_query = request.data.get("query", "")
            if not isinstance(raw_query, str):
                raw_query = str(raw_query) if raw_query else ""

            # Sanitize query: strip, limit length, remove control characters
            query = raw_query.strip()[:500]
            query = re.sub(r'[\x00-\x1f\x7f-\x9f]', '', query)

            # Validate limit: clamp to 1..10, default 5 on bad input.
            try:
                limit = min(int(request.data.get("limit", 5)), 10)
                limit = max(limit, 1)  # At least 1
            except (ValueError, TypeError):
                limit = 5

            # Sanitize country code (2-letter ISO code)
            raw_country = request.data.get("country", "")
            country_code = ""
            if raw_country and isinstance(raw_country, str):
                country_code = re.sub(r'[^a-zA-Z]', '', raw_country)[:2].lower()

            # ================================================================
            # STEP 1: Validate query (original: min 3 characters)
            # ================================================================
            if not query:
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query="",
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Query is required",
                    status_code=400
                )
                return Response(
                    {"error": "Query is required"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            if len(query) < 3:  # Match original: min 3 characters
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Query must be at least 3 characters",
                    status_code=400
                )
                return Response(
                    {"error": "Query must be at least 3 characters"},
                    status=status.HTTP_400_BAD_REQUEST,
                )

            # ================================================================
            # STEP 2: Check rate limit (30 req/min per IP)
            # ================================================================
            is_allowed, current_count = self._check_rate_limit(client_ip)
            if not is_allowed:
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=False,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error="Rate limit exceeded",
                    status_code=429
                )
                return Response(
                    {"error": "Rate limit exceeded. Please try again later."},
                    status=status.HTTP_429_TOO_MANY_REQUESTS,
                    headers={
                        "Retry-After": str(self.RATE_LIMIT_PERIOD),
                        "X-RateLimit-Limit": str(self.RATE_LIMIT_REQUESTS),
                        "X-RateLimit-Remaining": "0",
                    }
                )

            # ================================================================
            # STEP 3: Check cache
            # ================================================================
            query_hash = self._hash_query(query)
            cached = self._get_cached_result(query_hash)
            if cached:
                response_time = int((time.time() - start_time) * 1000)
                results = cached.get("results", [])
                self._log_usage(
                    query=query,
                    cache_hit=True,
                    api_called=False,
                    response_time_ms=response_time,
                    result_count=len(results),
                    client_ip=client_ip,
                    user_id=user_id,
                    status_code=200
                )
                # Return raw array like original (frontend handles both formats)
                response = Response(
                    results,
                    status=status.HTTP_200_OK,
                )
                response["X-Cache"] = "HIT"
                response["Cache-Control"] = "public, max-age=3600"
                return response

            # ================================================================
            # STEP 4: Cache miss - call Nominatim API
            # ================================================================
            import httpx
            try:
                params = {
                    "q": query,
                    "format": "json",
                    "addressdetails": 1,
                    "limit": limit,
                }
                if country_code:
                    # country_code was already lowercased during sanitization.
                    params["countrycodes"] = country_code

                # Nominatim's usage policy requires an identifying User-Agent.
                api_response = httpx.get(
                    "https://nominatim.openstreetmap.org/search",
                    params=params,
                    timeout=10.0,
                    headers={"User-Agent": "ThrillWiki/1.0 (https://thrillwiki.com)"},
                )

                if api_response.status_code != 200:
                    logger.warning(
                        f"Nominatim API error: {api_response.status_code}",
                        extra={"status": api_response.status_code}
                    )
                    # NOTE(review): upstream status is passed through to the
                    # client verbatim - confirm callers expect that rather
                    # than a 502.
                    return Response(
                        {"error": "Location search failed", "status": api_response.status_code},
                        status=api_response.status_code,
                    )

                data = api_response.json()
                response_time = int((time.time() - start_time) * 1000)

                # ================================================================
                # STEP 5: Cache the results (background-like, but sync in Django)
                # ================================================================
                try:
                    self._set_cached_result(query, query_hash, data)
                except Exception as cache_error:
                    # Caching is best-effort; a cache failure must not fail
                    # the request.
                    logger.warning(f"Failed to cache result: {cache_error}")

                # Log usage
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=True,
                    response_time_ms=response_time,
                    result_count=len(data) if isinstance(data, list) else 0,
                    client_ip=client_ip,
                    user_id=user_id,
                    status_code=200
                )

                # Return raw array like original Nominatim response
                response = Response(
                    data,
                    status=status.HTTP_200_OK,
                )
                response["X-Cache"] = "MISS"
                response["Cache-Control"] = "public, max-age=3600"
                return response

            except httpx.HTTPError as e:
                logger.warning(f"Forward geocoding failed: {e}")
                response_time = int((time.time() - start_time) * 1000)
                self._log_usage(
                    query=query,
                    cache_hit=False,
                    api_called=True,
                    response_time_ms=response_time,
                    client_ip=client_ip,
                    user_id=user_id,
                    error=str(e),
                    status_code=500
                )
                return Response(
                    {"error": "Failed to fetch location data"},
                    status=status.HTTP_500_INTERNAL_SERVER_ERROR,
                )

        except ValueError as e:
            return Response(
                {"error": f"Invalid parameter: {str(e)}"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        except Exception as e:
            # NOTE(review): str(e) is echoed to the client; consider a
            # generic message if internals must not leak.
            capture_and_log(e, "Search location", source="api")
            return Response(
                {"error": str(e)},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )