Mirror of https://github.com/pacnpal/thrillwiki_django_no_react.git, synced 2025-12-22 07:11:08 -05:00
Add OWASP compliance mapping and security test case templates, and document version control implementation phases
history_tracking/batch.py (Normal file, 195 lines)
@@ -0,0 +1,195 @@
from django.db import transaction
from django.contrib.contenttypes.models import ContentType
from django.utils import timezone
from typing import List, Dict, Any, Optional
from concurrent.futures import ThreadPoolExecutor
from functools import wraps
import logging

from .models import VersionBranch, ChangeSet
from .caching import VersionHistoryCache
from .signals import get_current_branch

logger = logging.getLogger('version_control')


class BatchOperation:
    """
    Handles batch operations for the version control system.
    Provides efficient handling of multiple changes and updates.
    """

    def __init__(self, max_workers: int = 4):
        self.max_workers = max_workers
        self.changes: List[Dict[str, Any]] = []
        self.error_handler = self.default_error_handler

    def default_error_handler(self, error: Exception, item: Dict[str, Any]) -> None:
        """Default error handling for batch operations"""
        logger.error(f"Batch operation error: {error}, item: {item}")
        raise error

    def set_error_handler(self, handler) -> None:
        """Set custom error handler for batch operations"""
        self.error_handler = handler

    def add_change(self, obj: Any, data: Dict[str, Any], branch: Optional[VersionBranch] = None) -> None:
        """Add a change to the batch"""
        content_type = ContentType.objects.get_for_model(obj)
        self.changes.append({
            'content_type': content_type,
            'object_id': obj.pk,
            'data': data,
            'branch': branch or get_current_branch()
        })

    @transaction.atomic
    def process_change(self, change: Dict[str, Any]) -> ChangeSet:
        """Process a single change in the batch"""
        try:
            changeset = ChangeSet.objects.create(
                branch=change['branch'],
                content_type=change['content_type'],
                object_id=change['object_id'],
                data=change['data'],
                status='pending'
            )

            # Apply the change
            changeset.apply()

            # Cache the result
            VersionHistoryCache.cache_change(changeset.to_dict())

            return changeset
        except Exception as e:
            self.error_handler(e, change)
            raise

    def process_parallel(self) -> List[ChangeSet]:
        """Process changes in parallel using thread pool"""
        # Each worker thread uses its own database connection, so the
        # per-change @transaction.atomic blocks commit independently here.
        results = []
        with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
            future_to_change = {
                executor.submit(self.process_change, change): change
                for change in self.changes
            }

            for future in future_to_change:
                try:
                    changeset = future.result()
                    results.append(changeset)
                except Exception as e:
                    change = future_to_change[future]
                    self.error_handler(e, change)

        return results

    @transaction.atomic
    def process_sequential(self) -> List[ChangeSet]:
        """Process changes sequentially in a single transaction"""
        results = []
        for change in self.changes:
            try:
                changeset = self.process_change(change)
                results.append(changeset)
            except Exception as e:
                self.error_handler(e, change)

        return results

    def commit(self, parallel: bool = False) -> List[ChangeSet]:
        """Commit all changes in the batch"""
        if not self.changes:
            return []

        start_time = timezone.now()
        logger.info(f"Starting batch operation with {len(self.changes)} changes")

        try:
            results = self.process_parallel() if parallel else self.process_sequential()

            duration = (timezone.now() - start_time).total_seconds()
            logger.info(
                f"Batch operation completed: {len(results)} changes processed in {duration:.2f}s"
            )

            return results
        finally:
            self.changes = []  # Clear the batch


class BulkVersionControl:
    """
    Handles bulk version control operations for collections of objects.
    """

    def __init__(self, model_class, branch: Optional[VersionBranch] = None):
        self.model_class = model_class
        self.branch = branch or get_current_branch()
        self.content_type = ContentType.objects.get_for_model(model_class)
        self.batch = BatchOperation()

    def prepare_bulk_update(self, objects: List[Any], data: Dict[str, Any]) -> None:
        """Prepare bulk update for multiple objects"""
        for obj in objects:
            self.batch.add_change(obj, data, self.branch)

    def prepare_bulk_delete(self, objects: List[Any]) -> None:
        """Prepare bulk delete for multiple objects"""
        for obj in objects:
            self.batch.add_change(obj, {'action': 'delete'}, self.branch)

    def prepare_bulk_create(self, data_list: List[Dict[str, Any]]) -> None:
        """Prepare bulk create for multiple objects"""
        for data in data_list:
            # Create temporary object for content type
            temp_obj = self.model_class()
            self.batch.add_change(temp_obj, {'action': 'create', **data}, self.branch)

    def commit(self, parallel: bool = True) -> List[ChangeSet]:
        """Commit all prepared bulk operations"""
        return self.batch.commit(parallel=parallel)


class VersionControlQueue:
    """
    Queue system for handling version control operations.
    Allows for delayed processing and batching of changes.
    """

    def __init__(self, batch_size: int = 100, auto_commit: bool = True):
        self.batch_size = batch_size
        self.auto_commit = auto_commit
        self.current_batch = BatchOperation()
        self._queued_count = 0

    def queue_change(self, obj: Any, data: Dict[str, Any], branch: Optional[VersionBranch] = None) -> None:
        """Queue a change for processing"""
        self.current_batch.add_change(obj, data, branch)
        self._queued_count += 1

        if self.auto_commit and self._queued_count >= self.batch_size:
            self.process_queue()

    def process_queue(self, parallel: bool = True) -> List[ChangeSet]:
        """Process all queued changes"""
        if not self._queued_count:
            return []

        results = self.current_batch.commit(parallel=parallel)
        self._queued_count = 0
        return results


def batch_version_control(func):
    """
    Decorator for batching version control operations within a function.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        batch = BatchOperation()
        try:
            with transaction.atomic():
                result = func(*args, batch=batch, **kwargs)
                if batch.changes:
                    batch.commit()
                return result
        except Exception as e:
            logger.error(f"Batch operation failed: {e}")
            raise
    return wrapper
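
For context, a minimal usage sketch of BatchOperation (illustrative, not part of the commit; it assumes a configured Django environment where the history_tracking and parks apps are importable):

from history_tracking.batch import BatchOperation
from parks.models import Park

batch = BatchOperation(max_workers=2)
for park in Park.objects.filter(status='OPERATING'):
    batch.add_change(park, {'status': 'SEASONAL'})  # field-update payload

# parallel=False runs every change inside a single transaction;
# parallel=True fans the work out across the thread pool instead.
changesets = batch.commit(parallel=False)
print(f"Applied {len(changesets)} changesets")
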
history_tracking/caching.py (Normal file, 223 lines)
@@ -0,0 +1,223 @@
from django.core.cache import cache
from django.conf import settings
from typing import Optional, List, Dict, Any
import hashlib
import json
import logging

logger = logging.getLogger('version_control')


class VersionHistoryCache:
    """
    Caching system for version control history data.
    Implements a multi-level caching strategy with memory and persistent storage.
    """

    # Cache key prefixes
    BRANCH_PREFIX = 'vc_branch_'
    CHANGE_PREFIX = 'vc_change_'
    HISTORY_PREFIX = 'vc_history_'

    # Cache durations (in seconds)
    BRANCH_CACHE_DURATION = 3600  # 1 hour
    CHANGE_CACHE_DURATION = 1800  # 30 minutes
    HISTORY_CACHE_DURATION = 3600 * 24  # 24 hours

    @classmethod
    def get_branch_key(cls, branch_id: int) -> str:
        """Generate cache key for branch data"""
        return f"{cls.BRANCH_PREFIX}{branch_id}"

    @classmethod
    def get_change_key(cls, change_id: int) -> str:
        """Generate cache key for change data"""
        return f"{cls.CHANGE_PREFIX}{change_id}"

    @classmethod
    def get_history_key(cls, content_type_id: int, object_id: int) -> str:
        """Generate cache key for object history"""
        return f"{cls.HISTORY_PREFIX}{content_type_id}_{object_id}"

    @classmethod
    def generate_cache_version(cls, data: Dict[str, Any]) -> str:
        """Generate version hash for cache invalidation"""
        data_str = json.dumps(data, sort_keys=True)
        return hashlib.md5(data_str.encode()).hexdigest()

    @classmethod
    def cache_branch(cls, branch_data: Dict[str, Any]) -> None:
        """Cache branch data with versioning"""
        key = cls.get_branch_key(branch_data['id'])
        version = cls.generate_cache_version(branch_data)

        cache_data = {
            'data': branch_data,
            'version': version,
            'timestamp': settings.VERSION_CONTROL_TIMESTAMP
        }

        try:
            cache.set(key, cache_data, cls.BRANCH_CACHE_DURATION)
            logger.debug(f"Cached branch data: {key}")
        except Exception as e:
            logger.error(f"Error caching branch data: {e}")

    @classmethod
    def get_cached_branch(cls, branch_id: int) -> Optional[Dict[str, Any]]:
        """Retrieve cached branch data if valid"""
        key = cls.get_branch_key(branch_id)
        cache_data = cache.get(key)

        if cache_data:
            # Validate cache version and timestamp
            if (
                cache_data.get('timestamp') == settings.VERSION_CONTROL_TIMESTAMP and
                cls.generate_cache_version(cache_data['data']) == cache_data['version']
            ):
                logger.debug(f"Cache hit for branch: {key}")
                return cache_data['data']

            # Invalid cache, delete it
            cache.delete(key)
            logger.debug(f"Invalidated branch cache: {key}")

        return None

    @classmethod
    def cache_change(cls, change_data: Dict[str, Any]) -> None:
        """Cache change data"""
        key = cls.get_change_key(change_data['id'])
        version = cls.generate_cache_version(change_data)

        cache_data = {
            'data': change_data,
            'version': version,
            'timestamp': settings.VERSION_CONTROL_TIMESTAMP
        }

        try:
            cache.set(key, cache_data, cls.CHANGE_CACHE_DURATION)
            logger.debug(f"Cached change data: {key}")
        except Exception as e:
            logger.error(f"Error caching change data: {e}")

    @classmethod
    def get_cached_change(cls, change_id: int) -> Optional[Dict[str, Any]]:
        """Retrieve cached change data if valid"""
        key = cls.get_change_key(change_id)
        cache_data = cache.get(key)

        if cache_data:
            if (
                cache_data.get('timestamp') == settings.VERSION_CONTROL_TIMESTAMP and
                cls.generate_cache_version(cache_data['data']) == cache_data['version']
            ):
                logger.debug(f"Cache hit for change: {key}")
                return cache_data['data']

            cache.delete(key)
            logger.debug(f"Invalidated change cache: {key}")

        return None

    @classmethod
    def cache_history(cls, content_type_id: int, object_id: int, history_data: List[Dict[str, Any]]) -> None:
        """Cache version history for an object"""
        key = cls.get_history_key(content_type_id, object_id)
        version = cls.generate_cache_version({'history': history_data})

        cache_data = {
            'data': history_data,
            'version': version,
            'timestamp': settings.VERSION_CONTROL_TIMESTAMP
        }

        try:
            cache.set(key, cache_data, cls.HISTORY_CACHE_DURATION)
            logger.debug(f"Cached history data: {key}")
        except Exception as e:
            logger.error(f"Error caching history data: {e}")

    @classmethod
    def get_cached_history(cls, content_type_id: int, object_id: int) -> Optional[List[Dict[str, Any]]]:
        """Retrieve cached history data if valid"""
        key = cls.get_history_key(content_type_id, object_id)
        cache_data = cache.get(key)

        if cache_data:
            if (
                cache_data.get('timestamp') == settings.VERSION_CONTROL_TIMESTAMP and
                cls.generate_cache_version({'history': cache_data['data']}) == cache_data['version']
            ):
                logger.debug(f"Cache hit for history: {key}")
                return cache_data['data']

            cache.delete(key)
            logger.debug(f"Invalidated history cache: {key}")

        return None

    @classmethod
    def invalidate_branch(cls, branch_id: int) -> None:
        """Invalidate branch cache"""
        key = cls.get_branch_key(branch_id)
        cache.delete(key)
        logger.debug(f"Manually invalidated branch cache: {key}")

    @classmethod
    def invalidate_change(cls, change_id: int) -> None:
        """Invalidate change cache"""
        key = cls.get_change_key(change_id)
        cache.delete(key)
        logger.debug(f"Manually invalidated change cache: {key}")

    @classmethod
    def invalidate_history(cls, content_type_id: int, object_id: int) -> None:
        """Invalidate history cache"""
        key = cls.get_history_key(content_type_id, object_id)
        cache.delete(key)
        logger.debug(f"Manually invalidated history cache: {key}")

    @classmethod
    def invalidate_all(cls) -> None:
        """Invalidate all version control caches"""
        try:
            # Get all keys with our prefixes.
            # NOTE: cache.keys() is not part of Django's core cache API; this
            # requires a backend that exposes it (e.g. django-redis).
            keys = []
            for prefix in [cls.BRANCH_PREFIX, cls.CHANGE_PREFIX, cls.HISTORY_PREFIX]:
                keys.extend(cache.keys(f"{prefix}*"))

            # Delete all matching keys
            cache.delete_many(keys)
            logger.info(f"Invalidated {len(keys)} version control cache entries")
        except Exception as e:
            logger.error(f"Error invalidating all caches: {e}")


class CacheableVersionMixin:
    """Mixin to add caching capabilities to version control models"""

    def cache_data(self) -> None:
        """Cache the object's data"""
        if hasattr(self, 'to_dict'):
            data = self.to_dict()

            if hasattr(self, 'branch_id'):
                VersionHistoryCache.cache_branch(data)
            elif hasattr(self, 'change_id'):
                VersionHistoryCache.cache_change(data)

    def invalidate_cache(self) -> None:
        """Invalidate the object's cache"""
        if hasattr(self, 'branch_id'):
            VersionHistoryCache.invalidate_branch(self.branch_id)
        elif hasattr(self, 'change_id'):
            VersionHistoryCache.invalidate_change(self.change_id)

    def invalidate_related_caches(self) -> None:
        """Invalidate related object caches"""
        if hasattr(self, 'content_type_id') and hasattr(self, 'object_id'):
            VersionHistoryCache.invalidate_history(
                self.content_type_id,
                self.object_id
            )
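
A read-through pattern showing how the cache and the ChangeSet model compose (a sketch, not part of the commit; get_object_history is a hypothetical helper, and to_dict() is the serialization that batch.py above already relies on):

from django.contrib.contenttypes.models import ContentType
from history_tracking.caching import VersionHistoryCache
from history_tracking.models import ChangeSet

def get_object_history(obj):
    """Return cached history for obj, rebuilding the cache on a miss."""
    ct = ContentType.objects.get_for_model(obj)
    cached = VersionHistoryCache.get_cached_history(ct.id, obj.pk)
    if cached is not None:
        return cached
    history = [
        change.to_dict()
        for change in ChangeSet.objects.filter(
            content_type=ct, object_id=obj.pk
        ).order_by('-created_at')
    ]
    VersionHistoryCache.cache_history(ct.id, obj.pk, history)
    return history
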
history_tracking/cleanup.py (Normal file, 248 lines)
@@ -0,0 +1,248 @@
from django.db import transaction
from django.utils import timezone
from django.conf import settings
from typing import Dict
from datetime import datetime, timedelta
import logging
import json
import os

from .models import VersionBranch, ChangeSet

logger = logging.getLogger('version_control')


class VersionCleanup:
    """
    Manages cleanup of old version control data through archival and deletion.
    """

    def __init__(self):
        self.archive_path = getattr(
            settings,
            'VERSION_CONTROL_ARCHIVE_PATH',
            'version_archives'
        )
        self.retention_days = getattr(
            settings,
            'VERSION_CONTROL_RETENTION_DAYS',
            90
        )
        self.merged_retention_days = getattr(
            settings,
            'VERSION_CONTROL_MERGED_RETENTION_DAYS',
            30
        )
        self.ensure_archive_directory()

    def ensure_archive_directory(self) -> None:
        """Ensure archive directory exists"""
        if not os.path.exists(self.archive_path):
            os.makedirs(self.archive_path)

    def get_archive_filename(self, date: datetime) -> str:
        """Generate archive filename for a given date"""
        return os.path.join(
            self.archive_path,
            f'version_archive_{date.strftime("%Y%m%d_%H%M%S")}.json'
        )

    @transaction.atomic
    def archive_old_changes(self, batch_size: int = 1000) -> int:
        """Archive and clean up old changes"""
        cutoff_date = timezone.now() - timedelta(days=self.retention_days)

        # Get changes to archive
        old_changes = ChangeSet.objects.filter(
            created_at__lt=cutoff_date,
            archived=False
        )[:batch_size]

        if not old_changes:
            return 0

        # Prepare archive data
        archive_data = {
            'timestamp': timezone.now().isoformat(),
            'changes': [
                {
                    'id': change.id,
                    'branch': change.branch_id,
                    'content_type': change.content_type_id,
                    'object_id': change.object_id,
                    'data': change.data,
                    'status': change.status,
                    'created_at': change.created_at.isoformat(),
                    'applied_at': change.applied_at.isoformat() if change.applied_at else None
                }
                for change in old_changes
            ]
        }

        # Write to archive file
        archive_file = self.get_archive_filename(timezone.now())
        with open(archive_file, 'w') as f:
            json.dump(archive_data, f, indent=2)

        # Mark changes as archived (re-filter by id because a sliced
        # queryset cannot be .update()d directly)
        change_ids = [change.id for change in old_changes]
        ChangeSet.objects.filter(id__in=change_ids).update(archived=True)

        logger.info(f"Archived {len(change_ids)} changes to {archive_file}")
        return len(change_ids)

    @transaction.atomic
    def cleanup_merged_branches(self) -> int:
        """Clean up old merged branches"""
        cutoff_date = timezone.now() - timedelta(days=self.merged_retention_days)

        # Find merged branches to clean up
        merged_branches = VersionBranch.objects.filter(
            is_merged=True,
            merged_at__lt=cutoff_date,
            is_protected=False
        )

        count = 0
        for branch in merged_branches:
            try:
                # Archive branch changes
                self.archive_branch_changes(branch)

                # Delete branch
                branch.delete()
                count += 1

                logger.info(f"Cleaned up merged branch: {branch.name}")
            except Exception as e:
                logger.error(f"Error cleaning up branch {branch.name}: {e}")

        return count

    def archive_branch_changes(self, branch: VersionBranch) -> None:
        """Archive all changes for a specific branch"""
        changes = ChangeSet.objects.filter(
            branch=branch,
            archived=False
        )

        if not changes:
            return

        archive_data = {
            'timestamp': timezone.now().isoformat(),
            'branch': {
                'id': branch.id,
                'name': branch.name,
                'metadata': branch.metadata,
                'created_at': branch.created_at.isoformat(),
                'merged_at': branch.merged_at.isoformat() if branch.merged_at else None
            },
            'changes': [
                {
                    'id': change.id,
                    'content_type': change.content_type_id,
                    'object_id': change.object_id,
                    'data': change.data,
                    'status': change.status,
                    'created_at': change.created_at.isoformat(),
                    'applied_at': change.applied_at.isoformat() if change.applied_at else None
                }
                for change in changes
            ]
        }

        # Write to archive file
        archive_file = self.get_archive_filename(timezone.now())
        with open(archive_file, 'w') as f:
            json.dump(archive_data, f, indent=2)

        # Mark changes as archived
        changes.update(archived=True)

    @transaction.atomic
    def cleanup_inactive_branches(self, days: int = 60) -> int:
        """Clean up inactive branches"""
        cutoff_date = timezone.now() - timedelta(days=days)

        # Find inactive branches
        inactive_branches = VersionBranch.objects.filter(
            is_active=True,
            is_protected=False,
            updated_at__lt=cutoff_date
        )

        count = 0
        for branch in inactive_branches:
            try:
                # Archive branch changes
                self.archive_branch_changes(branch)

                # Deactivate branch
                branch.is_active = False
                branch.save()
                count += 1

                logger.info(f"Deactivated inactive branch: {branch.name}")
            except Exception as e:
                logger.error(f"Error deactivating branch {branch.name}: {e}")

        return count

    def cleanup_orphaned_changes(self) -> int:
        """Clean up changes without valid content objects"""
        count = 0
        for change in ChangeSet.objects.filter(archived=False):
            try:
                # Try to get the related object
                obj = change.content_type.get_object_for_this_type(
                    pk=change.object_id)
                if obj is None:
                    self.archive_change(change)
                    count += 1
            except Exception:
                # If object doesn't exist, archive the change
                self.archive_change(change)
                count += 1

        logger.info(f"Cleaned up {count} orphaned changes")
        return count

    def archive_change(self, change: ChangeSet) -> None:
        """Archive a single change"""
        archive_data = {
            'timestamp': timezone.now().isoformat(),
            'changes': [{
                'id': change.id,
                'branch': change.branch_id,
                'content_type': change.content_type_id,
                'object_id': change.object_id,
                'data': change.data,
                'status': change.status,
                'created_at': change.created_at.isoformat(),
                'applied_at': change.applied_at.isoformat() if change.applied_at else None
            }]
        }

        # Write to archive file
        archive_file = self.get_archive_filename(timezone.now())
        with open(archive_file, 'w') as f:
            json.dump(archive_data, f, indent=2)

        # Mark change as archived
        change.archived = True
        change.save()

    def run_maintenance(self) -> Dict[str, int]:
        """Run all cleanup operations"""
        results = {
            'archived_changes': self.archive_old_changes(),
            'cleaned_branches': self.cleanup_merged_branches(),
            'deactivated_branches': self.cleanup_inactive_branches(),
            'cleaned_orphans': self.cleanup_orphaned_changes()
        }

        logger.info("Version control maintenance completed", extra=results)
        return results
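
run_maintenance() is a natural fit for a scheduled management command; a sketch (hypothetical file history_tracking/management/commands/vc_maintenance.py, not part of this commit) that cron could drive via python manage.py vc_maintenance:

from django.core.management.base import BaseCommand
from history_tracking.cleanup import VersionCleanup

class Command(BaseCommand):
    help = "Archive and prune old version control data"

    def handle(self, *args, **options):
        results = VersionCleanup().run_maintenance()
        for task, count in results.items():
            self.stdout.write(f"{task}: {count}")
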
history_tracking/monitoring.py (Normal file, 202 lines)
@@ -0,0 +1,202 @@
import logging
import logging.handlers
import time
from contextlib import contextmanager
from functools import wraps
from django.conf import settings
from django.db import connection

# Configure logger
logger = logging.getLogger('version_control')


def track_operation_timing(operation_name):
    """Decorator to track timing of version control operations"""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            start_time = time.time()
            try:
                result = func(*args, **kwargs)
                duration = time.time() - start_time

                # Log timing metrics
                logger.info(
                    'Version Control Operation Timing',
                    extra={
                        'operation': operation_name,
                        'duration': duration,
                        'success': True
                    }
                )
                return result
            except Exception as e:
                duration = time.time() - start_time
                logger.error(
                    'Version Control Operation Failed',
                    extra={
                        'operation': operation_name,
                        'duration': duration,
                        'error': str(e),
                        'success': False
                    }
                )
                raise
        return wrapper
    return decorator


def track_merge_result(source_branch, target_branch, success, conflict_count=0):
    """Track the results of merge operations"""
    logger.info(
        'Branch Merge Operation',
        extra={
            'source_branch': source_branch.name,
            'target_branch': target_branch.name,
            'success': success,
            'conflict_count': conflict_count
        }
    )


def track_branch_metrics(branch):
    """Track metrics for a specific branch"""
    from history_tracking.models import ChangeSet

    changes = ChangeSet.objects.filter(branch=branch)
    applied_changes = changes.filter(status='applied')
    pending_changes = changes.filter(status='pending')

    logger.info(
        'Branch Metrics',
        extra={
            'branch_name': branch.name,
            'total_changes': changes.count(),
            'applied_changes': applied_changes.count(),
            'pending_changes': pending_changes.count(),
            'is_active': branch.is_active
        }
    )


@contextmanager  # required so the bare yield actually produces a context manager
def track_database_metrics():
    """Track database metrics for version control operations"""
    with connection.execute_wrapper(StatementLogger()):
        yield


class StatementLogger:
    """Log database statements for monitoring"""

    def __call__(self, execute, sql, params, many, context):
        start = time.time()
        try:
            result = execute(sql, params, many, context)
            duration = time.time() - start

            # Log only version control related queries
            if 'version' in sql.lower() or 'changeset' in sql.lower():
                logger.info(
                    'Version Control DB Operation',
                    extra={
                        'sql': sql,
                        'duration': duration,
                        'success': True
                    }
                )
            return result
        except Exception as e:
            duration = time.time() - start
            logger.error(
                'Version Control DB Operation Failed',
                extra={
                    'sql': sql,
                    'duration': duration,
                    'error': str(e),
                    'success': False
                }
            )
            raise


class VersionControlMetrics:
    """Collect and report version control system metrics"""

    @staticmethod
    def collect_system_metrics():
        """Collect overall system metrics"""
        from history_tracking.models import VersionBranch, ChangeSet

        total_branches = VersionBranch.objects.count()
        active_branches = VersionBranch.objects.filter(is_active=True).count()
        total_changes = ChangeSet.objects.count()
        pending_changes = ChangeSet.objects.filter(status='pending').count()
        conflicted_merges = ChangeSet.objects.filter(
            status='conflict'
        ).count()

        logger.info(
            'Version Control System Metrics',
            extra={
                'total_branches': total_branches,
                'active_branches': active_branches,
                'total_changes': total_changes,
                'pending_changes': pending_changes,
                'conflicted_merges': conflicted_merges
            }
        )

    @staticmethod
    def collect_performance_metrics():
        """Collect performance-related metrics"""
        from django.db import connection
        from django.core.cache import cache

        # Database metrics (connection.queries is only populated when DEBUG is on)
        with connection.execute_wrapper(StatementLogger()):
            db_metrics = {
                'total_queries': len(connection.queries),
                'total_time': sum(
                    float(q['time']) for q in connection.queries
                )
            }

        # Cache metrics
        cache_metrics = {
            'hits': cache.get('version_control_cache_hits', 0),
            'misses': cache.get('version_control_cache_misses', 0)
        }

        logger.info(
            'Version Control Performance Metrics',
            extra={
                'database': db_metrics,
                'cache': cache_metrics
            }
        )

    @staticmethod
    def track_user_operations(user, operation, success):
        """Track user operations on version control"""
        logger.info(
            'Version Control User Operation',
            extra={
                'user_id': user.id,
                'username': user.username,
                'operation': operation,
                'success': success
            }
        )


def setup_monitoring():
    """Configure monitoring for version control system"""
    if not settings.DEBUG:
        # Configure logging handlers
        handler = logging.handlers.RotatingFileHandler(
            'logs/version_control.log',
            maxBytes=10485760,  # 10MB
            backupCount=5
        )
        handler.setFormatter(logging.Formatter(
            '%(asctime)s [%(levelname)s] %(message)s'
        ))
        logger.addHandler(handler)

        # Set up error reporting
        import sentry_sdk  # type: ignore
        sentry_sdk.init(
            dsn=settings.SENTRY_DSN,
            traces_sample_rate=0.1,
            profiles_sample_rate=0.1,
        )
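
A sketch of how the timing decorator and merge tracker compose (illustrative; merge_with_metrics is a hypothetical wrapper, while MergeStrategy.merge returning a conflict list matches the tests later in this commit):

from history_tracking.managers import MergeStrategy
from history_tracking.monitoring import track_operation_timing, track_merge_result

@track_operation_timing('branch_merge')
def merge_with_metrics(source, target):
    # merge() returns a list of conflicts; an empty list means success.
    conflicts = MergeStrategy().merge(source_branch=source, target_branch=target)
    track_merge_result(source, target, success=not conflicts,
                       conflict_count=len(conflicts))
    return conflicts
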
@@ -0,0 +1,172 @@
{% extends "base.html" %}
{% load static %}

{% block title %}Version Control Monitoring - ThrillWiki{% endblock %}

{% block extra_css %}
<link rel="stylesheet" href="{% static 'css/monitoring.css' %}">
{% endblock %}

{% block content %}
<div class="container mx-auto px-4 py-8">
    <h1 class="text-3xl font-bold text-gray-900 mb-8">Version Control Monitoring</h1>

    <!-- System Overview -->
    <div class="grid grid-cols-1 md:grid-cols-4 gap-6 mb-8">
        <div class="bg-white rounded-lg shadow-sm p-6">
            <h3 class="text-lg font-semibold mb-2">Total Branches</h3>
            <p class="text-3xl font-bold text-blue-600">{{ metrics.total_branches }}</p>
            <p class="text-sm text-gray-500 mt-2">{{ metrics.active_branches }} active</p>
        </div>

        <div class="bg-white rounded-lg shadow-sm p-6">
            <h3 class="text-lg font-semibold mb-2">Total Changes</h3>
            <p class="text-3xl font-bold text-green-600">{{ metrics.total_changes }}</p>
            <p class="text-sm text-gray-500 mt-2">{{ metrics.pending_changes }} pending</p>
        </div>

        <div class="bg-white rounded-lg shadow-sm p-6">
            <h3 class="text-lg font-semibold mb-2">Merge Success Rate</h3>
            <p class="text-3xl font-bold text-indigo-600">{{ metrics.merge_success_rate }}%</p>
            <p class="text-sm text-gray-500 mt-2">{{ metrics.conflicted_merges }} conflicts</p>
        </div>

        <div class="bg-white rounded-lg shadow-sm p-6">
            <h3 class="text-lg font-semibold mb-2">System Health</h3>
            <p class="text-3xl font-bold {% if metrics.system_health >= 90 %}text-green-600{% elif metrics.system_health >= 70 %}text-yellow-600{% else %}text-red-600{% endif %}">
                {{ metrics.system_health }}%
            </p>
            <p class="text-sm text-gray-500 mt-2">Based on {{ metrics.health_checks }} checks</p>
        </div>
    </div>

    <!-- Performance Metrics -->
    <div class="bg-white rounded-lg shadow-sm p-6 mb-8">
        <h2 class="text-xl font-bold mb-4">Performance Metrics</h2>
        <div class="grid grid-cols-1 md:grid-cols-3 gap-6">
            <!-- Operation Timing -->
            <div>
                <h3 class="text-lg font-semibold mb-3">Operation Timing (avg)</h3>
                <ul class="space-y-2">
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Branch Creation</span>
                        <span class="font-medium">{{ metrics.timing.branch_creation }}ms</span>
                    </li>
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Branch Switch</span>
                        <span class="font-medium">{{ metrics.timing.branch_switch }}ms</span>
                    </li>
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Merge Operation</span>
                        <span class="font-medium">{{ metrics.timing.merge }}ms</span>
                    </li>
                </ul>
            </div>

            <!-- Database Metrics -->
            <div>
                <h3 class="text-lg font-semibold mb-3">Database Performance</h3>
                <ul class="space-y-2">
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Query Count (avg)</span>
                        <span class="font-medium">{{ metrics.database.query_count }}</span>
                    </li>
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Query Time (avg)</span>
                        <span class="font-medium">{{ metrics.database.query_time }}ms</span>
                    </li>
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Connection Pool</span>
                        <span class="font-medium">{{ metrics.database.pool_size }}/{{ metrics.database.max_pool }}</span>
                    </li>
                </ul>
            </div>

            <!-- Cache Metrics -->
            <div>
                <h3 class="text-lg font-semibold mb-3">Cache Performance</h3>
                <ul class="space-y-2">
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Hit Rate</span>
                        <span class="font-medium">{{ metrics.cache.hit_rate }}%</span>
                    </li>
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Miss Rate</span>
                        <span class="font-medium">{{ metrics.cache.miss_rate }}%</span>
                    </li>
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">Memory Usage</span>
                        <span class="font-medium">{{ metrics.cache.memory_usage }}MB</span>
                    </li>
                </ul>
            </div>
        </div>
    </div>

    <!-- Error Tracking -->
    <div class="bg-white rounded-lg shadow-sm p-6 mb-8">
        <h2 class="text-xl font-bold mb-4">Error Tracking</h2>
        <div class="overflow-x-auto">
            <table class="min-w-full">
                <thead>
                    <tr>
                        <th class="px-6 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Time</th>
                        <th class="px-6 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Type</th>
                        <th class="px-6 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Operation</th>
                        <th class="px-6 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Message</th>
                        <th class="px-6 py-3 bg-gray-50 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Status</th>
                    </tr>
                </thead>
                <tbody class="bg-white divide-y divide-gray-200">
                    {% for error in metrics.errors %}
                    <tr>
                        <td class="px-6 py-4 whitespace-nowrap text-sm text-gray-500">{{ error.timestamp }}</td>
                        <td class="px-6 py-4 whitespace-nowrap text-sm text-gray-900">{{ error.type }}</td>
                        <td class="px-6 py-4 whitespace-nowrap text-sm text-gray-500">{{ error.operation }}</td>
                        <td class="px-6 py-4 text-sm text-gray-500">{{ error.message }}</td>
                        <td class="px-6 py-4 whitespace-nowrap">
                            <span class="px-2 inline-flex text-xs leading-5 font-semibold rounded-full {% if error.resolved %}bg-green-100 text-green-800{% else %}bg-red-100 text-red-800{% endif %}">
                                {{ error.resolved|yesno:"Resolved,Unresolved" }}
                            </span>
                        </td>
                    </tr>
                    {% endfor %}
                </tbody>
            </table>
        </div>
    </div>

    <!-- Active Users -->
    <div class="bg-white rounded-lg shadow-sm p-6">
        <h2 class="text-xl font-bold mb-4">Active Users</h2>
        <div class="grid grid-cols-1 md:grid-cols-2 gap-6">
            <div>
                <h3 class="text-lg font-semibold mb-3">Current Operations</h3>
                <ul class="space-y-2">
                    {% for operation in metrics.current_operations %}
                    <li class="flex justify-between items-center">
                        <span class="text-gray-600">{{ operation.user }}</span>
                        <span class="text-sm">{{ operation.action }}</span>
                    </li>
                    {% endfor %}
                </ul>
            </div>

            <div>
                <h3 class="text-lg font-semibold mb-3">Recent Activity</h3>
                <ul class="space-y-2">
                    {% for activity in metrics.recent_activity %}
                    <li class="text-sm text-gray-600">
                        {{ activity.user }} {{ activity.action }} {{ activity.timestamp|timesince }} ago
                    </li>
                    {% endfor %}
                </ul>
            </div>
        </div>
    </div>
</div>
{% endblock %}

{% block extra_js %}
<script src="{% static 'js/monitoring.js' %}"></script>
{% endblock %}
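
The template reads everything from a single `metrics` context entry; a sketch of the shape it expects (illustrative placeholders only; the commit's actual view lives in views_monitoring.py, truncated at the end of this diff):

from django.views.generic import TemplateView
from history_tracking.models import VersionBranch, ChangeSet

class MonitoringDashboardView(TemplateView):
    template_name = 'history_tracking/monitoring.html'  # assumed path

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['metrics'] = {
            'total_branches': VersionBranch.objects.count(),
            'active_branches': VersionBranch.objects.filter(is_active=True).count(),
            'total_changes': ChangeSet.objects.count(),
            'pending_changes': ChangeSet.objects.filter(status='pending').count(),
            # ...plus the merge_success_rate, system_health, timing, database,
            # cache, errors, current_operations and recent_activity keys
            # rendered above.
        }
        return context
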
history_tracking/tests/test_managers.py (Normal file, 268 lines)
@@ -0,0 +1,268 @@
from django.test import TestCase
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import transaction
from django.utils import timezone

from history_tracking.models import VersionBranch, ChangeSet
from history_tracking.managers import BranchManager, MergeStrategy
from parks.models import Park


class BranchManagerTests(TestCase):
    def setUp(self):
        self.park = Park.objects.create(
            name='Test Park',
            slug='test-park',
            status='OPERATING'
        )
        self.content_type = ContentType.objects.get_for_model(Park)
        self.manager = BranchManager()
        self.main_branch = VersionBranch.objects.create(
            name='main',
            metadata={'type': 'default_branch'}
        )

    def test_create_branch(self):
        """Test branch creation with metadata"""
        branch = self.manager.create_branch(
            name='feature/test',
            metadata={'type': 'feature', 'description': 'Test branch'}
        )
        self.assertEqual(branch.name, 'feature/test')
        self.assertEqual(branch.metadata['type'], 'feature')
        self.assertTrue(branch.is_active)

    def test_get_active_branches(self):
        """Test retrieving only active branches"""
        # Create some branches
        feature_branch = self.manager.create_branch(
            name='feature/active',
            metadata={'type': 'feature'}
        )
        inactive_branch = self.manager.create_branch(
            name='feature/inactive',
            metadata={'type': 'feature'}
        )
        inactive_branch.is_active = False
        inactive_branch.save()

        active_branches = self.manager.get_active_branches()
        self.assertIn(self.main_branch, active_branches)
        self.assertIn(feature_branch, active_branches)
        self.assertNotIn(inactive_branch, active_branches)

    def test_get_branch_changes(self):
        """Test retrieving changes for a specific branch"""
        # Create some changes in different branches
        main_change = ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Main Change'},
            status='applied'
        )
        feature_branch = self.manager.create_branch(name='feature/test')
        feature_change = ChangeSet.objects.create(
            branch=feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Feature Change'},
            status='applied'
        )

        main_changes = self.manager.get_branch_changes(self.main_branch)
        feature_changes = self.manager.get_branch_changes(feature_branch)

        self.assertIn(main_change, main_changes)
        self.assertNotIn(feature_change, main_changes)
        self.assertIn(feature_change, feature_changes)
        self.assertNotIn(main_change, feature_changes)

    def test_merge_branches(self):
        """Test merging changes between branches"""
        # Create feature branch with changes
        feature_branch = self.manager.create_branch(name='feature/test')
        change = ChangeSet.objects.create(
            branch=feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Updated Name'},
            status='applied'
        )

        # Merge feature branch into main
        self.manager.merge_branches(
            source_branch=feature_branch,
            target_branch=self.main_branch
        )

        # Verify changes were copied to main branch
        main_changes = self.manager.get_branch_changes(self.main_branch)
        self.assertEqual(main_changes.count(), 1)
        merged_change = main_changes.first()
        self.assertEqual(merged_change.data, change.data)

    def test_branch_deletion(self):
        """Test branch deletion with cleanup"""
        feature_branch = self.manager.create_branch(name='feature/delete')
        ChangeSet.objects.create(
            branch=feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Test Change'},
            status='applied'
        )

        # Delete the branch
        self.manager.delete_branch(feature_branch)

        # Verify branch and its changes are gone
        with self.assertRaises(VersionBranch.DoesNotExist):
            VersionBranch.objects.get(name='feature/delete')
        self.assertEqual(
            ChangeSet.objects.filter(branch=feature_branch).count(),
            0
        )


class MergeStrategyTests(TestCase):
    def setUp(self):
        self.park = Park.objects.create(
            name='Test Park',
            slug='test-park',
            status='OPERATING'
        )
        self.content_type = ContentType.objects.get_for_model(Park)
        self.main_branch = VersionBranch.objects.create(
            name='main',
            metadata={'type': 'default_branch'}
        )
        self.feature_branch = VersionBranch.objects.create(
            name='feature/test',
            metadata={'type': 'feature'}
        )
        self.merge_strategy = MergeStrategy()

    def test_simple_merge(self):
        """Test merging non-conflicting changes"""
        # Create changes in feature branch
        feature_changes = [
            ChangeSet.objects.create(
                branch=self.feature_branch,
                content_type=self.content_type,
                object_id=self.park.id,
                data={'name': 'New Name'},
                status='applied',
                applied_at=timezone.now()
            ),
            ChangeSet.objects.create(
                branch=self.feature_branch,
                content_type=self.content_type,
                object_id=self.park.id,
                data={'description': 'New Description'},
                status='applied',
                applied_at=timezone.now()
            )
        ]

        # Perform merge
        with transaction.atomic():
            conflicts = self.merge_strategy.merge(
                source_branch=self.feature_branch,
                target_branch=self.main_branch
            )

        self.assertEqual(conflicts, [])  # No conflicts expected
        main_changes = ChangeSet.objects.filter(branch=self.main_branch)
        self.assertEqual(main_changes.count(), len(feature_changes))

    def test_conflict_detection(self):
        """Test detection of conflicting changes"""
        # Create conflicting changes
        ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Main Name'},
            status='applied',
            applied_at=timezone.now()
        )
        ChangeSet.objects.create(
            branch=self.feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Feature Name'},
            status='applied',
            applied_at=timezone.now()
        )

        # Attempt merge
        with transaction.atomic():
            conflicts = self.merge_strategy.merge(
                source_branch=self.feature_branch,
                target_branch=self.main_branch
            )

        self.assertTrue(conflicts)  # Conflicts should be detected
        conflict = conflicts[0]
        self.assertEqual(conflict['field'], 'name')
        self.assertEqual(conflict['target_value'], 'Main Name')
        self.assertEqual(conflict['source_value'], 'Feature Name')

    def test_merge_ordering(self):
        """Test that changes are merged in the correct order"""
        # Create sequential changes
        ChangeSet.objects.create(
            branch=self.feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'First Change'},
            status='applied',
            applied_at=timezone.now()
        )
        ChangeSet.objects.create(
            branch=self.feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Second Change'},
            status='applied',
            applied_at=timezone.now()
        )

        # Perform merge
        with transaction.atomic():
            self.merge_strategy.merge(
                source_branch=self.feature_branch,
                target_branch=self.main_branch
            )

        # Verify changes were merged in order
        merged_changes = ChangeSet.objects.filter(
            branch=self.main_branch
        ).order_by('applied_at')
        self.assertEqual(
            merged_changes[0].data['name'],
            'First Change'
        )
        self.assertEqual(
            merged_changes[1].data['name'],
            'Second Change'
        )

    def test_merge_validation(self):
        """Test validation of merge operations"""
        # Test merging inactive branch
        self.feature_branch.is_active = False
        self.feature_branch.save()

        with self.assertRaises(ValidationError):
            self.merge_strategy.merge(
                source_branch=self.feature_branch,
                target_branch=self.main_branch
            )

        # Test merging branch into itself
        with self.assertRaises(ValidationError):
            self.merge_strategy.merge(
                source_branch=self.main_branch,
                target_branch=self.main_branch
            )
history_tracking/tests/test_models.py (Normal file, 173 lines)
@@ -0,0 +1,173 @@
from django.test import TestCase
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.utils import timezone

from history_tracking.models import VersionBranch, ChangeSet
from parks.models import Park


class VersionBranchTests(TestCase):
    def setUp(self):
        self.main_branch = VersionBranch.objects.create(
            name='main',
            metadata={'type': 'default_branch'}
        )
        self.feature_branch = VersionBranch.objects.create(
            name='feature/new-layout',
            metadata={'type': 'feature'}
        )

    def test_branch_creation(self):
        """Test that branch creation works with valid data"""
        branch = VersionBranch.objects.create(
            name='test-branch',
            metadata={'type': 'test'}
        )
        self.assertEqual(branch.name, 'test-branch')
        self.assertEqual(branch.metadata['type'], 'test')
        self.assertTrue(branch.is_active)
        self.assertIsNotNone(branch.created_at)

    def test_invalid_branch_name(self):
        """Test that branch names are properly validated"""
        with self.assertRaises(ValidationError):
            VersionBranch.objects.create(name='', metadata={})

        # Test overly long name
        with self.assertRaises(ValidationError):
            VersionBranch.objects.create(
                name='a' * 256,
                metadata={}
            )

    def test_branch_deactivation(self):
        """Test that branches can be deactivated"""
        self.feature_branch.is_active = False
        self.feature_branch.save()

        branch = VersionBranch.objects.get(name='feature/new-layout')
        self.assertFalse(branch.is_active)

    def test_branch_metadata(self):
        """Test that branch metadata can be updated"""
        metadata = {
            'type': 'feature',
            'description': 'New layout implementation',
            'owner': 'test-user'
        }
        self.feature_branch.metadata = metadata
        self.feature_branch.save()

        branch = VersionBranch.objects.get(name='feature/new-layout')
        self.assertEqual(branch.metadata, metadata)


class ChangeSetTests(TestCase):
    def setUp(self):
        self.main_branch = VersionBranch.objects.create(
            name='main',
            metadata={'type': 'default_branch'}
        )
        self.park = Park.objects.create(
            name='Test Park',
            slug='test-park',
            status='OPERATING'
        )
        self.content_type = ContentType.objects.get_for_model(Park)

    def test_changeset_creation(self):
        """Test that changeset creation works with valid data"""
        changeset = ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Updated Park Name'},
            status='pending',
            description='Update park name'
        )
        self.assertEqual(changeset.branch, self.main_branch)
        self.assertEqual(changeset.content_type, self.content_type)
        self.assertEqual(changeset.object_id, self.park.id)
        self.assertEqual(changeset.status, 'pending')

    def test_changeset_status_flow(self):
        """Test that changeset status transitions work correctly"""
        changeset = ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Updated Park Name'},
            status='pending'
        )

        # Test status transition: pending -> applied
        changeset.status = 'applied'
        changeset.applied_at = timezone.now()
        changeset.save()

        updated_changeset = ChangeSet.objects.get(pk=changeset.pk)
        self.assertEqual(updated_changeset.status, 'applied')
        self.assertIsNotNone(updated_changeset.applied_at)

    def test_invalid_changeset_status(self):
        """Test that invalid changeset statuses are rejected"""
        with self.assertRaises(ValidationError):
            ChangeSet.objects.create(
                branch=self.main_branch,
                content_type=self.content_type,
                object_id=self.park.id,
                data={'name': 'Updated Park Name'},
                status='invalid_status'
            )

    def test_changeset_validation(self):
        """Test that changesets require valid branch and content object"""
        # Test missing branch
        with self.assertRaises(ValidationError):
            ChangeSet.objects.create(
                content_type=self.content_type,
                object_id=self.park.id,
                data={'name': 'Updated Park Name'},
                status='pending'
            )

        # Test invalid content object
        with self.assertRaises(ValidationError):
            ChangeSet.objects.create(
                branch=self.main_branch,
                content_type=self.content_type,
                object_id=99999,  # Non-existent object
                data={'name': 'Updated Park Name'},
                status='pending'
            )

    def test_changeset_relationship_cascade(self):
        """Test that changesets are deleted when branch is deleted"""
        changeset = ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Updated Park Name'},
            status='pending'
        )

        # Delete the branch
        self.main_branch.delete()

        # Verify changeset was deleted
        with self.assertRaises(ChangeSet.DoesNotExist):
            ChangeSet.objects.get(pk=changeset.pk)

    def test_changeset_data_validation(self):
        """Test that changeset data must be valid JSON"""
        changeset = ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'valid': 'json_data'},
            status='pending'
        )

        # Test invalid JSON data
        with self.assertRaises(ValidationError):
            changeset.data = "invalid_json"
            changeset.save()
history_tracking/tests/test_views.py (Normal file, 223 lines)
@@ -0,0 +1,223 @@
|
||||
from django.test import TestCase, Client
|
||||
from django.urls import reverse
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.test import override_settings
|
||||
|
||||
from history_tracking.models import VersionBranch, ChangeSet
|
||||
from parks.models import Park
|
||||
|
||||
User = get_user_model()
|
||||
|
||||
@override_settings(HTMX_ENABLED=True)
|
||||
class VersionControlViewsTests(TestCase):
|
||||
def setUp(self):
|
||||
self.client = Client()
|
||||
self.user = User.objects.create_superuser(
|
||||
username='admin',
|
||||
email='admin@example.com',
|
||||
password='testpass123'
|
||||
)
|
||||
self.client.login(username='admin', password='testpass123')
|
||||
|
||||
self.park = Park.objects.create(
|
||||
name='Test Park',
|
||||
slug='test-park',
|
||||
status='OPERATING'
|
||||
)
|
||||
self.content_type = ContentType.objects.get_for_model(Park)
|
||||
|
||||
self.main_branch = VersionBranch.objects.create(
|
||||
name='main',
|
||||
metadata={'type': 'default_branch'}
|
||||
)
|
||||
self.feature_branch = VersionBranch.objects.create(
|
||||
name='feature/test',
|
||||
metadata={'type': 'feature'}
|
||||
)
|
||||
|
||||
def test_version_control_panel(self):
|
||||
"""Test rendering of version control panel"""
|
||||
response = self.client.get(
|
||||
reverse('version_control_panel'),
|
||||
HTTP_HX_REQUEST='true',
|
||||
HTTP_HX_TARGET='version-control-panel'
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTemplateUsed('history_tracking/includes/version_control_ui.html')
|
||||
self.assertContains(response, 'main') # Should show main branch
|
||||
self.assertContains(response, 'feature/test') # Should show feature branch
|
||||
|
||||
def test_create_branch(self):
|
||||
"""Test branch creation through view"""
|
||||
response = self.client.post(
|
||||
reverse('create_branch'),
|
||||
{
|
||||
'name': 'feature/new',
|
||||
'metadata': '{"type": "feature", "description": "New feature"}'
|
||||
},
|
||||
HTTP_HX_REQUEST='true'
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTrue(
|
||||
VersionBranch.objects.filter(name='feature/new').exists()
|
||||
)
|
||||
self.assertContains(response, 'Branch created successfully')
|
||||
|
||||
def test_switch_branch(self):
|
||||
"""Test switching between branches"""
|
||||
response = self.client.post(
|
||||
reverse('switch_branch'),
|
||||
{'branch_id': self.feature_branch.id},
|
||||
HTTP_HX_REQUEST='true'
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, 'Switched to branch')
|
||||
self.assertContains(response, 'feature/test')
|
||||
|
||||
def test_merge_branch(self):
|
||||
"""Test branch merging through view"""
|
||||
# Create a change in feature branch
|
||||
ChangeSet.objects.create(
|
||||
branch=self.feature_branch,
|
||||
content_type=self.content_type,
|
||||
object_id=self.park.id,
|
||||
data={'name': 'Updated Name'},
|
||||
status='applied'
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
reverse('merge_branch'),
|
||||
{
|
||||
'source_branch_id': self.feature_branch.id,
|
||||
'target_branch_id': self.main_branch.id
|
||||
},
|
||||
HTTP_HX_REQUEST='true'
|
||||
)
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertContains(response, 'Branch merged successfully')
|
||||
|
||||
# Verify changes were merged
|
||||
main_changes = ChangeSet.objects.filter(branch=self.main_branch)
|
||||
self.assertEqual(main_changes.count(), 1)
|
||||
|
||||
    def test_merge_conflict_handling(self):
        """Test handling of merge conflicts"""
        # Create conflicting changes to the same object on both branches
        ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Main Name'},
            status='applied'
        )
        ChangeSet.objects.create(
            branch=self.feature_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Feature Name'},
            status='applied'
        )

        response = self.client.post(
            reverse('merge_branch'),
            {
                'source_branch_id': self.feature_branch.id,
                'target_branch_id': self.main_branch.id
            },
            HTTP_HX_REQUEST='true'
        )
        self.assertEqual(response.status_code, 409)  # Conflict status
        # assertContains expects 200 by default, so the 409 must be passed explicitly
        self.assertContains(response, 'Merge conflicts detected', status_code=409)

    def test_view_history(self):
        """Test viewing version history"""
        # Create a change whose details should appear in the history
        change = ChangeSet.objects.create(
            branch=self.main_branch,
            content_type=self.content_type,
            object_id=self.park.id,
            data={'name': 'Updated Name'},
            status='applied'
        )

        response = self.client.get(
            reverse('version_history', kwargs={'pk': self.park.pk}),
            HTTP_HX_REQUEST='true'
        )
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Updated Name')
        self.assertContains(response, str(change.created_at))

    def test_branch_deletion(self):
        """Test branch deletion through the view"""
        response = self.client.post(
            reverse('delete_branch'),
            {'branch_id': self.feature_branch.id},
            HTTP_HX_REQUEST='true'
        )
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'Branch deleted successfully')
        self.assertFalse(
            VersionBranch.objects.filter(id=self.feature_branch.id).exists()
        )

    def test_unauthorized_access(self):
        """Test that unauthenticated users cannot access version control"""
        self.client.logout()
        response = self.client.get(
            reverse('version_control_panel'),
            HTTP_HX_REQUEST='true'
        )
        self.assertEqual(response.status_code, 302)  # Redirect to login

    def test_htmx_requirements(self):
        """Test that views require HTMX headers"""
        # A request without the HX-Request header should be rejected
        response = self.client.get(reverse('version_control_panel'))
        self.assertEqual(response.status_code, 400)
        self.assertContains(
            response,
            'This endpoint requires HTMX',
            status_code=400
        )

    def test_branch_validation(self):
        """Test branch name validation in the create view"""
        response = self.client.post(
            reverse('create_branch'),
            {
                'name': 'invalid/branch/name/with/too/many/segments',
                'metadata': '{}'
            },
            HTTP_HX_REQUEST='true'
        )
        self.assertEqual(response.status_code, 400)
        self.assertContains(
            response,
            'Invalid branch name',
            status_code=400
        )

    def test_branch_list_update(self):
        """Test that the branch list reflects newly created branches"""
        response = self.client.get(
            reverse('branch_list'),
            HTTP_HX_REQUEST='true'
        )
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'main')
        self.assertContains(response, 'feature/test')

        # Create a new branch
        VersionBranch.objects.create(
            name='feature/new',
            metadata={'type': 'feature'}
        )

        # The list should include it on the next request
        response = self.client.get(
            reverse('branch_list'),
            HTTP_HX_REQUEST='true'
        )
        self.assertContains(response, 'feature/new')
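The tests above exercise view endpoints that are not part of this commit. A minimal sketch of two pieces they assume — an HTMX guard that returns 400 without the HX-Request header, and a create_branch view that validates the branch name — might look like the following. The URL names and response messages come from the tests; the decorator, the validation rule, and the template path are assumptions, not the project's actual implementation:

import json
from functools import wraps

from django.contrib.auth.decorators import login_required
from django.http import HttpResponseBadRequest
from django.shortcuts import render

from .models import VersionBranch


def htmx_required(view_func):
    """Reject requests that lack the HX-Request header (sketch)."""
    @wraps(view_func)
    def wrapper(request, *args, **kwargs):
        if request.headers.get('HX-Request') != 'true':
            return HttpResponseBadRequest('This endpoint requires HTMX')
        return view_func(request, *args, **kwargs)
    return wrapper


@login_required
@htmx_required
def create_branch(request):
    """Create a branch from POSTed form data (sketch)."""
    name = request.POST.get('name', '')
    # Hypothetical rule consistent with test_branch_validation:
    # allow at most one category segment, e.g. 'feature/new'
    if not name or name.count('/') > 1:
        return HttpResponseBadRequest('Invalid branch name')
    metadata = json.loads(request.POST.get('metadata', '{}'))
    VersionBranch.objects.create(name=name, metadata=metadata)
    # 'branch_created.html' is a placeholder template name
    return render(request, 'history_tracking/includes/branch_created.html',
                  {'message': 'Branch created successfully'})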
320
history_tracking/views_monitoring.py
Normal file
@@ -0,0 +1,320 @@
from django.views.generic import TemplateView
from django.contrib.admin.views.decorators import staff_member_required
from django.utils.decorators import method_decorator
from django.utils import timezone
from datetime import timedelta

from .models import VersionBranch, ChangeSet
from .monitoring import VersionControlMetrics


@method_decorator(staff_member_required, name='dispatch')
class MonitoringDashboardView(TemplateView):
    template_name = 'history_tracking/monitoring_dashboard.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['metrics'] = self._collect_metrics()
        return context

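    # Usage sketch (an assumption, not part of this commit): wiring the
    # dashboard into a URLconf. staff_member_required is already applied
    # via the method_decorator above, so no extra guard is needed.
    #
    #   from django.urls import path
    #   from .views_monitoring import MonitoringDashboardView
    #
    #   urlpatterns = [
    #       path('monitoring/', MonitoringDashboardView.as_view(),
    #            name='monitoring_dashboard'),
    #   ]
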
    def _collect_metrics(self):
        """Collect all monitoring metrics"""
        # Basic statistics
        total_branches = VersionBranch.objects.count()
        active_branches = VersionBranch.objects.filter(is_active=True).count()
        total_changes = ChangeSet.objects.count()
        pending_changes = ChangeSet.objects.filter(status='pending').count()

        # Merge success rate over the last week; the same computation backs
        # the health score, so reuse the helper instead of duplicating it
        merge_success_rate = self._get_merge_success_health()

        # Performance metrics
        VersionControlMetrics.collect_performance_metrics()
        perf_metrics = self._get_performance_metrics()

        # Error tracking data
        errors = self._get_error_tracking()

        # User activity
        user_activity = self._get_user_activity()

        return {
            # System overview
            'total_branches': total_branches,
            'active_branches': active_branches,
            'total_changes': total_changes,
            'pending_changes': pending_changes,
            'merge_success_rate': merge_success_rate,
            'conflicted_merges': ChangeSet.objects.filter(
                status='conflict'
            ).count(),
            'system_health': self._calculate_system_health(),
            'health_checks': 5,  # Number of health checks performed

            # Performance metrics
            'timing': perf_metrics['timing'],
            'database': perf_metrics['database'],
            'cache': perf_metrics['cache'],

            # Error tracking
            'errors': errors,

            # User activity
            'current_operations': user_activity['current'],
            'recent_activity': user_activity['recent']
        }

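    # Worked example for the merge success rate: 8 applied + 2 conflicted
    # merges in the last week gives round(8 / 10 * 100) = 80; with no merges
    # at all the rate defaults to 100 so an idle system does not look broken.
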
    def _get_performance_metrics(self):
        """Get detailed performance metrics"""
        from django.db import connection
        from django.core.cache import cache

        # Average operation timings (milliseconds) from the timing log
        operation_times = {
            'branch_creation': [],
            'branch_switch': [],
            'merge': []
        }
        for log in self._get_operation_logs():
            if log['operation'] in operation_times:
                operation_times[log['operation']].append(log['duration'])

        timing = {
            op: round(sum(times) / len(times), 2) if times else 0
            for op, times in operation_times.items()
        }

        # Read the counters once and share the denominator, guarding against
        # division by zero when neither counter has been set yet
        cache_hits = cache.get('version_control_cache_hits', 0)
        cache_misses = cache.get('version_control_cache_misses', 0)
        cache_total = cache_hits + cache_misses

        return {
            'timing': timing,
            'database': {
                'query_count': len(connection.queries),
                'query_time': round(
                    sum(float(q['time']) for q in connection.queries),
                    3
                ),
                'pool_size': getattr(connection, 'pool_size', 'N/A'),
                'max_pool': getattr(connection, 'max_pool', 'N/A')
            },
            'cache': {
                'hit_rate': round(cache_hits / cache_total * 100, 1) if cache_total else 0.0,
                'miss_rate': round(cache_misses / cache_total * 100, 1) if cache_total else 0.0,
                'memory_usage': round(
                    cache.get('version_control_memory_usage', 0) / 1024 / 1024,
                    2
                )  # bytes -> MiB
            }
        }

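    # The hit/miss counters read above are assumed to be incremented by the
    # caching layer, e.g. around each lookup (sketch, not in this commit):
    #
    #   if cache.get(key) is not None:
    #       try:
    #           cache.incr('version_control_cache_hits')
    #       except ValueError:  # counter not initialised yet
    #           cache.set('version_control_cache_hits', 1, None)
    #   else:
    #       try:
    #           cache.incr('version_control_cache_misses')
    #       except ValueError:
    #           cache.set('version_control_cache_misses', 1, None)
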
    def _get_error_tracking(self):
        """Get recent error tracking data"""
        import logging

        logger = logging.getLogger('version_control')
        errors = []

        # Scan the last 10 lines of any attached file handler for errors
        for handler in logger.handlers:
            if isinstance(handler, logging.FileHandler):
                try:
                    with open(handler.baseFilename, 'r') as f:
                        for line in f.readlines()[-10:]:
                            if '[ERROR]' in line:
                                parsed = self._parse_error_log(line)
                                if parsed is not None:  # skip unparseable lines
                                    errors.append(parsed)
                except FileNotFoundError:
                    pass

        return errors

    def _parse_error_log(self, log_line):
        """Parse an error log line into structured data, or None on no match"""
        import re
        from datetime import datetime
        from django.conf import settings

        pattern = r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}) \[ERROR\] (.*)'
        match = re.match(pattern, log_line)
        if not match:
            return None

        timestamp_str, message = match.groups()
        timestamp = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S,%f')
        if settings.USE_TZ:
            # strptime yields a naive datetime; make it comparable with the
            # aware values derived from timezone.now() elsewhere in this view
            timestamp = timezone.make_aware(timestamp)
        return {
            'timestamp': timestamp,
            'type': 'Error',
            'operation': self._extract_operation(message),
            'message': message.strip(),
            'resolved': False
        }

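    # Illustrative example, assuming a '%(asctime)s [%(levelname)s] %(message)s'
    # log formatter: the line
    #   '2025-01-15 10:30:00,123 [ERROR] Merge failed for feature/test'
    # parses to {'timestamp': <2025-01-15 10:30:00.123>, 'type': 'Error',
    # 'operation': 'Merge Operation',
    # 'message': 'Merge failed for feature/test', 'resolved': False}.
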
    def _extract_operation(self, message):
        """Extract the operation type from an error message"""
        message = message.lower()
        if 'branch' in message:
            return 'Branch Operation'
        elif 'merge' in message:
            return 'Merge Operation'
        elif 'changeset' in message:
            return 'Change Operation'
        return 'Unknown Operation'

    def _get_user_activity(self):
        """Get current and recent user activity"""
        from django.contrib.auth import get_user_model
        from django.contrib.sessions.models import Session

        User = get_user_model()

        # Users with unexpired sessions are treated as currently active
        current_sessions = Session.objects.filter(
            expire_date__gte=timezone.now()
        )

        current_operations = []
        for session in current_sessions:
            try:
                uid = session.get_decoded().get('_auth_user_id')
                if uid:
                    user = User.objects.get(pk=uid)
                    current_operations.append({
                        'user': user.username,
                        'action': self._get_user_current_action(user)
                    })
            except (User.DoesNotExist, KeyError):
                continue

        # The ten most recent changes, newest first
        recent = ChangeSet.objects.select_related('user').order_by(
            '-created_at'
        )[:10]
        recent_activity = [
            {
                'user': change.user.username if change.user else 'System',
                'action': self._get_change_action(change),
                'timestamp': change.created_at
            }
            for change in recent
        ]

        return {
            'current': current_operations,
            'recent': recent_activity
        }

    def _get_user_current_action(self, user):
        """Get the user's current action based on recent activity"""
        last_change = ChangeSet.objects.filter(
            user=user
        ).order_by('-created_at').first()

        if last_change:
            # total_seconds() rather than .seconds, which ignores whole days
            if (timezone.now() - last_change.created_at).total_seconds() < 300:  # 5 minutes
                return self._get_change_action(last_change)
        return 'Viewing'

    def _get_change_action(self, change):
        """Get a human-readable action description from a change"""
        if change.status == 'applied':
            return f'Applied changes to {change.content_object}'
        elif change.status == 'pending':
            return f'Started editing {change.content_object}'
        elif change.status == 'conflict':
            return f'Resolving conflicts on {change.content_object}'
        return 'Unknown action'

    def _calculate_system_health(self):
        """Calculate the overall system health percentage"""
        factors = {
            'merge_success': self._get_merge_success_health(),
            'performance': self._get_performance_health(),
            'error_rate': self._get_error_rate_health()
        }
        return round(sum(factors.values()) / len(factors))

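    # Worked example: factor scores of 80 (merge success), 83 (performance)
    # and 95 (error rate) give round((80 + 83 + 95) / 3) = 86% overall health.
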
    def _get_merge_success_health(self):
        """Calculate health from the merge success rate over the last week"""
        last_week = timezone.now() - timedelta(days=7)
        total_merges = ChangeSet.objects.filter(
            created_at__gte=last_week,
            status__in=['applied', 'conflict']
        ).count()
        if total_merges == 0:
            return 100  # no merge activity counts as healthy

        successful_merges = ChangeSet.objects.filter(
            created_at__gte=last_week,
            status='applied'
        ).count()
        return round((successful_merges / total_merges) * 100)

    def _get_performance_health(self):
        """Calculate health based on performance metrics"""
        metrics = self._get_performance_metrics()

        factors = [
            100 if metrics['timing']['merge'] < 1000 else 50,       # merges under 1s are healthy
            100 if metrics['cache']['hit_rate'] > 80 else 50,       # >80% cache hit rate is healthy
            100 if metrics['database']['query_time'] < 0.5 else 50  # under 0.5s total query time is healthy
        ]
        return round(sum(factors) / len(factors))

    def _get_error_rate_health(self):
        """Calculate health based on the error rate over the last day"""
        last_day = timezone.now() - timedelta(days=1)
        total_operations = ChangeSet.objects.filter(
            created_at__gte=last_day
        ).count()
        error_count = len([
            e for e in self._get_error_tracking()
            if e['timestamp'] >= last_day
        ])

        if total_operations == 0:
            return 100
        error_rate = (error_count / total_operations) * 100
        return max(0, round(100 - error_rate))  # clamp so health never goes negative

    def _get_operation_logs(self):
        """Read operation timing logs (one JSON object per line)"""
        import json
        from pathlib import Path

        log_file = Path('logs/version_control_timing.log')
        if not log_file.exists():
            return []

        logs = []
        try:
            with open(log_file, 'r') as f:
                for line in f:
                    try:
                        logs.append(json.loads(line))
                    except json.JSONDecodeError:
                        continue  # skip malformed lines
        except OSError:
            return []

        return logs
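_get_operation_logs expects logs/version_control_timing.log to contain one JSON object per line with at least 'operation' and 'duration' keys, and the health checks compare 'duration' against 1000, which implies milliseconds. A sketch of a producer that would write such entries — the decorator name, log path handling, and millisecond convention are assumptions, not part of this commit:

import json
import time
from functools import wraps
from pathlib import Path

TIMING_LOG = Path('logs/version_control_timing.log')


def timed_operation(name):
    """Append a {'operation': ..., 'duration': ms} JSON line per call (sketch)."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            start = time.monotonic()
            try:
                return func(*args, **kwargs)
            finally:
                duration_ms = (time.monotonic() - start) * 1000
                TIMING_LOG.parent.mkdir(parents=True, exist_ok=True)
                with open(TIMING_LOG, 'a') as f:
                    f.write(json.dumps({'operation': name,
                                        'duration': duration_ms}) + '\n')
        return wrapper
    return decorator


# Usage: decorating the merge entry point with @timed_operation('merge')
# would feed the 'merge' timing bucket consumed by _get_performance_metrics.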