Converted all 'from videoarchiver.' imports to relative imports

commit 62c97f0b01 (parent c830be2841)
Author: pacnpal
Date: 2024-11-17 16:05:12 +00:00
69 changed files with 417 additions and 359 deletions
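The pattern repeated across all 69 files: imports that hard-code the top-level package name become imports relative to the importing module's own package, so the package no longer breaks when it is installed or loaded under a different top-level path. A minimal before/after sketch (module names taken from the diffs below):

    # Before: absolute import; needs "videoarchiver" importable by name on sys.path
    from videoarchiver.queue.models import QueueItem

    # After: relative import; resolved against the importing module's own package
    from .models import QueueItem   # sibling module in the same package
    from ..models import QueueItem  # same module, reached from one package level deeper

One leading dot refers to the current package and each extra dot climbs one level; relative imports only resolve inside a package, so every directory touched here is assumed to contain an __init__.py.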

View File

@@ -1,10 +1,10 @@
"""Queue management package for video processing"""
from videoarchiver.queue.models import QueueItem, QueueMetrics
from videoarchiver.queue.manager import EnhancedVideoQueueManager
from videoarchiver.queue.persistence import QueuePersistenceManager, QueueError
from videoarchiver.queue.monitoring import QueueMonitor, MonitoringError
from videoarchiver.queue.cleanup import QueueCleaner, CleanupError
from .models import QueueItem, QueueMetrics
from .manager import EnhancedVideoQueueManager
from .persistence import QueuePersistenceManager, QueueError
from .monitoring import QueueMonitor, MonitoringError
from .cleanup import QueueCleaner, CleanupError
__all__ = [
'QueueItem',

View File

@@ -1,8 +1,8 @@
"""Queue cleaning functionality"""
from videoarchiver.queue.cleaners.guild_cleaner import GuildCleaner
from videoarchiver.queue.cleaners.history_cleaner import HistoryCleaner
from videoarchiver.queue.cleaners.tracking_cleaner import TrackingCleaner
from .guild_cleaner import GuildCleaner
from .history_cleaner import HistoryCleaner
from .tracking_cleaner import TrackingCleaner
__all__ = [
'GuildCleaner',

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass, field
 from typing import Dict, List, Set, Tuple, Any, Optional
 from datetime import datetime
-from videoarchiver.queue.models import QueueItem
+from ..models import QueueItem
 logger = logging.getLogger("GuildCleaner")

View File

@@ -6,7 +6,7 @@ from dataclasses import dataclass, field
 from typing import Dict, Optional, List, Any, Set
 from datetime import datetime, timedelta
-from videoarchiver.queue.models import QueueItem
+from ..models import QueueItem
 logger = logging.getLogger("HistoryCleaner")

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass, field
 from typing import Dict, List, Set, Tuple, Any, Optional
 from datetime import datetime
-from videoarchiver.queue.models import QueueItem
+from ..models import QueueItem
 logger = logging.getLogger("TrackingCleaner")

View File

@@ -7,16 +7,16 @@ from dataclasses import dataclass, field
 from typing import Dict, List, Set, Optional, Any, Tuple
 from datetime import datetime, timedelta
-from videoarchiver.queue.models import QueueItem, QueueMetrics
-from videoarchiver.queue.cleaners.history_cleaner import (
+from .models import QueueItem, QueueMetrics
+from .cleaners.history_cleaner import (
     HistoryCleaner,
     CleanupStrategy as HistoryStrategy
 )
-from videoarchiver.queue.cleaners.guild_cleaner import (
+from .cleaners.guild_cleaner import (
     GuildCleaner,
     GuildCleanupStrategy
 )
-from videoarchiver.queue.cleaners.tracking_cleaner import (
+from .cleaners.tracking_cleaner import (
     TrackingCleaner,
     TrackingCleanupStrategy
 )
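Note the aliases kept intact here: history_cleaner exports a plain CleanupStrategy, so renaming it on import (as HistoryStrategy) presumably keeps it from colliding with, or being mistaken for, the GuildCleanupStrategy and TrackingCleanupStrategy names imported alongside it.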

View File

@@ -7,13 +7,13 @@ from dataclasses import dataclass, field
 from typing import Optional, Tuple, Dict, Any, List, Set, Callable
 from datetime import datetime, timedelta
-from videoarchiver.queue.state_manager import QueueStateManager
-from videoarchiver.queue.processor import QueueProcessor
-from videoarchiver.queue.metrics_manager import QueueMetricsManager
-from videoarchiver.queue.persistence import QueuePersistenceManager
-from videoarchiver.queue.monitoring import QueueMonitor, MonitoringLevel
-from videoarchiver.queue.cleanup import QueueCleaner
-from videoarchiver.queue.models import QueueItem, QueueError, CleanupError
+from .state_manager import QueueStateManager
+from .processor import QueueProcessor
+from .metrics_manager import QueueMetricsManager
+from .persistence import QueuePersistenceManager
+from .monitoring import QueueMonitor, MonitoringLevel
+from .cleanup import QueueCleaner
+from .models import QueueItem, QueueError, CleanupError
 logger = logging.getLogger("QueueManager")

View File

@@ -8,8 +8,8 @@ from dataclasses import dataclass, field
 from typing import Optional, Dict, Any, List, Set
 from datetime import datetime, timedelta
-from videoarchiver.queue.health_checker import HealthChecker, HealthStatus, HealthCategory
-from videoarchiver.queue.recovery_manager import RecoveryManager, RecoveryStrategy
+from .health_checker import HealthChecker, HealthStatus, HealthCategory
+from .recovery_manager import RecoveryManager, RecoveryStrategy
 logger = logging.getLogger("QueueMonitoring")

View File

@@ -8,7 +8,8 @@ import fcntl
 import asyncio
 from datetime import datetime, timedelta
 from typing import Dict, Any, Optional
-from videoarchiver.queue.models import QueueItem, QueueMetrics
+from .models import QueueItem, QueueMetrics
 # Configure logging
 logging.basicConfig(
@@ -16,6 +17,7 @@ logging.basicConfig(
 )
 logger = logging.getLogger("QueuePersistence")
 class QueuePersistenceManager:
     """Manages persistence of queue state to disk"""
@@ -25,10 +27,10 @@ class QueuePersistenceManager:
         max_retries: int = 3,
         retry_delay: int = 1,
         backup_interval: int = 3600,  # 1 hour
-        max_backups: int = 24  # Keep last 24 backups
+        max_backups: int = 24,  # Keep last 24 backups
     ):
         """Initialize the persistence manager
         Args:
             persistence_path: Path to the persistence file
             max_retries: Maximum number of retries for file operations
@@ -50,17 +52,17 @@ class QueuePersistenceManager:
         processing: Dict[str, QueueItem],
         completed: Dict[str, QueueItem],
         failed: Dict[str, QueueItem],
-        metrics: QueueMetrics
+        metrics: QueueMetrics,
     ) -> None:
         """Persist queue state to disk with improved error handling
         Args:
             queue: List of pending queue items
             processing: Dict of items currently being processed
             completed: Dict of completed items
             failed: Dict of failed items
             metrics: Queue metrics object
         Raises:
             QueueError: If persistence fails
         """
@@ -87,14 +89,14 @@ class QueuePersistenceManager:
"compression_failures": metrics.compression_failures,
"hardware_accel_failures": metrics.hardware_accel_failures,
},
"timestamp": datetime.utcnow().isoformat()
"timestamp": datetime.utcnow().isoformat(),
}
# Ensure directory exists
os.makedirs(os.path.dirname(self.persistence_path), exist_ok=True)
# Acquire file lock
lock_fd = open(self._lock_file, 'w')
lock_fd = open(self._lock_file, "w")
fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX)
# Write with retries
@@ -120,7 +122,9 @@ class QueuePersistenceManager:
                 except Exception as e:
                     if attempt == self.max_retries - 1:
                         raise
-                    logger.warning(f"Retry {attempt + 1}/{self.max_retries} failed: {e}")
+                    logger.warning(
+                        f"Retry {attempt + 1}/{self.max_retries} failed: {e}"
+                    )
                     await asyncio.sleep(self.retry_delay)
         except Exception as e:
@@ -140,18 +144,25 @@ class QueuePersistenceManager:
             # Create backup
             timestamp = datetime.utcnow().strftime("%Y%m%d_%H%M%S")
             backup_path = f"{self.persistence_path}.bak.{timestamp}"
-            with open(self.persistence_path, "rb") as src, open(backup_path, "wb") as dst:
+            with open(self.persistence_path, "rb") as src, open(
+                backup_path, "wb"
+            ) as dst:
                 dst.write(src.read())
                 dst.flush()
                 os.fsync(dst.fileno())
             # Clean old backups
-            backup_files = sorted([
-                f for f in os.listdir(os.path.dirname(self.persistence_path))
-                if f.startswith(os.path.basename(self.persistence_path) + ".bak.")
-            ])
+            backup_files = sorted(
+                [
+                    f
+                    for f in os.listdir(os.path.dirname(self.persistence_path))
+                    if f.startswith(os.path.basename(self.persistence_path) + ".bak.")
+                ]
+            )
             while len(backup_files) > self.max_backups:
-                old_backup = os.path.join(os.path.dirname(self.persistence_path), backup_files.pop(0))
+                old_backup = os.path.join(
+                    os.path.dirname(self.persistence_path), backup_files.pop(0)
+                )
                 try:
                     os.remove(old_backup)
                 except Exception as e:
@@ -162,10 +173,10 @@ class QueuePersistenceManager:
     def load_queue_state(self) -> Optional[Dict[str, Any]]:
         """Load persisted queue state from disk with retries
         Returns:
             Dict containing queue state if successful, None if file doesn't exist
         Raises:
             QueueError: If loading fails
         """
@@ -175,7 +186,7 @@ class QueuePersistenceManager:
         lock_fd = None
         try:
             # Acquire file lock
-            lock_fd = open(self._lock_file, 'w')
+            lock_fd = open(self._lock_file, "w")
             fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX)
             # Try loading main file
@@ -188,18 +199,28 @@ class QueuePersistenceManager:
                     break
                 except Exception as e:
                     last_error = e
-                    logger.warning(f"Retry {attempt + 1}/{self.max_retries} failed: {e}")
+                    logger.warning(
+                        f"Retry {attempt + 1}/{self.max_retries} failed: {e}"
+                    )
                     time.sleep(self.retry_delay)
             # If main file failed, try loading latest backup
             if state is None:
-                backup_files = sorted([
-                    f for f in os.listdir(os.path.dirname(self.persistence_path))
-                    if f.startswith(os.path.basename(self.persistence_path) + ".bak.")
-                ], reverse=True)
+                backup_files = sorted(
+                    [
+                        f
+                        for f in os.listdir(os.path.dirname(self.persistence_path))
+                        if f.startswith(
+                            os.path.basename(self.persistence_path) + ".bak."
+                        )
+                    ],
+                    reverse=True,
+                )
                 if backup_files:
-                    latest_backup = os.path.join(os.path.dirname(self.persistence_path), backup_files[0])
+                    latest_backup = os.path.join(
+                        os.path.dirname(self.persistence_path), backup_files[0]
+                    )
                     try:
                         with open(latest_backup, "r") as f:
                             state = json.load(f)
@@ -207,7 +228,9 @@ class QueuePersistenceManager:
                     except Exception as e:
                         logger.error(f"Failed to load backup: {e}")
                         if last_error:
-                            raise QueueError(f"Failed to load queue state: {last_error}")
+                            raise QueueError(
+                                f"Failed to load queue state: {last_error}"
+                            )
                         raise
             if state is None:
@@ -218,22 +241,34 @@ class QueuePersistenceManager:
             try:
                 if isinstance(item_data, dict):
                     # Ensure datetime fields are properly formatted
-                    for field in ['added_at', 'last_retry', 'last_error_time']:
+                    for field in ["added_at", "last_retry", "last_error_time"]:
                         if field in item_data and item_data[field]:
                             if isinstance(item_data[field], str):
                                 try:
-                                    item_data[field] = datetime.fromisoformat(item_data[field])
+                                    item_data[field] = datetime.fromisoformat(
+                                        item_data[field]
+                                    )
                                 except ValueError:
-                                    item_data[field] = datetime.utcnow() if field == 'added_at' else None
+                                    item_data[field] = (
+                                        datetime.utcnow()
+                                        if field == "added_at"
+                                        else None
+                                    )
                             elif not isinstance(item_data[field], datetime):
-                                item_data[field] = datetime.utcnow() if field == 'added_at' else None
+                                item_data[field] = (
+                                    datetime.utcnow()
+                                    if field == "added_at"
+                                    else None
+                                )
                     # Ensure processing_time is a float
-                    if 'processing_time' in item_data:
+                    if "processing_time" in item_data:
                         try:
-                            item_data['processing_time'] = float(item_data['processing_time'])
+                            item_data["processing_time"] = float(
+                                item_data["processing_time"]
+                            )
                         except (ValueError, TypeError):
-                            item_data['processing_time'] = 0.0
+                            item_data["processing_time"] = 0.0
                     return QueueItem(**item_data)
                 return None
@@ -283,15 +318,21 @@ class QueuePersistenceManager:
backup_path = f"{self.persistence_path}.corrupted.{int(time.time())}"
try:
os.rename(self.persistence_path, backup_path)
logger.info(f"Created backup of corrupted state file: {backup_path}")
logger.info(
f"Created backup of corrupted state file: {backup_path}"
)
except Exception as be:
logger.error(f"Failed to create backup of corrupted state file: {str(be)}")
logger.error(
f"Failed to create backup of corrupted state file: {str(be)}"
)
raise QueueError(f"Failed to load queue state: {str(e)}")
finally:
if lock_fd:
fcntl.flock(lock_fd.fileno(), fcntl.LOCK_UN)
lock_fd.close()
class QueueError(Exception):
"""Base exception for queue-related errors"""
pass
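Most of the churn in this file is mechanical Black-style cleanup riding along with the one-line import change: double quotes, trailing commas, and long calls rewrapped to an 88-column limit. For orientation, the backup-rotation logic being rewrapped above reduces to the following standalone sketch; prune_backups is a hypothetical extraction for illustration, not a function in the codebase:

    import os

    def prune_backups(persistence_path: str, max_backups: int = 24) -> None:
        """Delete the oldest timestamped .bak files once max_backups is exceeded."""
        directory = os.path.dirname(persistence_path) or "."
        prefix = os.path.basename(persistence_path) + ".bak."
        # %Y%m%d_%H%M%S timestamps are zero-padded, so lexical order is chronological
        backup_files = sorted(f for f in os.listdir(directory) if f.startswith(prefix))
        while len(backup_files) > max_backups:
            os.remove(os.path.join(directory, backup_files.pop(0)))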

View File

@@ -8,9 +8,9 @@ from dataclasses import dataclass
 from typing import Callable, Optional, Tuple, List, Set, Dict, Any
 from datetime import datetime, timedelta
-from videoarchiver.queue.models import QueueItem
-from videoarchiver.queue.state_manager import QueueStateManager, ItemState
-from videoarchiver.queue.monitoring import QueueMonitor
+from .models import QueueItem
+from .state_manager import QueueStateManager, ItemState
+from .monitoring import QueueMonitor
 logger = logging.getLogger("QueueProcessor")

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass, field
 from typing import List, Tuple, Dict, Optional, Any, Set
 from datetime import datetime, timedelta
-from videoarchiver.queue.models import QueueItem
+from .models import QueueItem
 logger = logging.getLogger("QueueRecoveryManager")

View File

@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from typing import Dict, Set, List, Optional, Any
 from datetime import datetime
-from videoarchiver.queue.models import QueueItem, QueueMetrics
+from .models import QueueItem, QueueMetrics
 logger = logging.getLogger("QueueStateManager")