mirror of
https://github.com/pacnpal/Pac-cogs.git
synced 2025-12-20 02:41:06 -05:00
Updating queue_processor.py to use the shared types. Updating manager.py to use the correct imports and shared types. The cyclic dependency has been resolved by: moving shared types to a separate module; having queue_processor.py only import from shared modules; and having manager.py use the QueueProcessor from queue/processor.py.
82 lines
2.5 KiB
Python
82 lines
2.5 KiB
Python
"""Queue processing functionality for video processing"""
|
|
|
|
import logging
|
|
import asyncio
|
|
from typing import List, Optional, Dict, Any, Set, ClassVar
|
|
from datetime import datetime
|
|
|
|
from ..queue.types import QueuePriority, QueueMetrics, ProcessingMetrics
|
|
from ..queue.models import QueueItem
|
|
|
|
logger = logging.getLogger("VideoArchiver")
|
|
|
|
class QueueProcessor:
    """Handles processing of video queue items.

    Duplicate suppression is shared across ALL instances via class-level
    state: ``_active_items`` holds the ids of items currently in flight,
    and ``_processing_lock`` serializes the check-and-register step so
    two concurrent ``process_item`` calls cannot both claim the same id.
    """

    # Ids of items currently being processed (shared by all instances).
    _active_items: ClassVar[Set[int]] = set()
    # Guards the membership check + add in process_item.  It was
    # previously declared but never acquired, leaving a window between
    # the check and the add.
    _processing_lock: ClassVar[asyncio.Lock] = asyncio.Lock()

    def __init__(self) -> None:
        # Per-instance running counters (success/failure counts, timings).
        self._metrics = ProcessingMetrics()

    async def process_item(self, item: QueueItem) -> bool:
        """
        Process a single queue item

        Args:
            item: Queue item to process

        Returns:
            bool: Success status (False if the item is already being
            processed elsewhere, or if processing raised)
        """
        # Atomically check-and-register the item id under the class
        # lock so concurrent calls cannot both start the same item.
        async with self._processing_lock:
            if item.id in self._active_items:
                logger.warning(f"Item {item.id} is already being processed")
                return False
            self._active_items.add(item.id)

        try:
            start_time = datetime.now()

            # Process item logic here
            # Placeholder for actual video processing
            await asyncio.sleep(1)

            processing_time = (datetime.now() - start_time).total_seconds()
            self._update_metrics(processing_time, True, item.size)
            return True

        except Exception as e:
            logger.error(f"Error processing item {item.id}: {str(e)}")
            # No meaningful duration/size on failure; record zeros.
            self._update_metrics(0, False, 0)
            return False

        finally:
            # discard(), not remove(): a missing id here must not raise
            # KeyError and mask the method's real result/exception.
            self._active_items.discard(item.id)

    def _update_metrics(self, processing_time: float, success: bool, size: int) -> None:
        """Record one processing outcome in the running metrics.

        Args:
            processing_time: Wall-clock seconds spent on the item.
            success: Whether processing completed without error.
            size: Item size; currently not forwarded to the metrics
                backend — kept for interface compatibility.
        """
        if success:
            self._metrics.record_success(processing_time)
        else:
            self._metrics.record_failure("Processing error")

    def get_metrics(self) -> QueueMetrics:
        """Get current processing metrics.

        Returns:
            QueueMetrics snapshot; all fields are zero when nothing has
            been processed yet (guards the divisions below).
        """
        total = self._metrics.total_processed
        if total == 0:
            return QueueMetrics(
                total_items=0,
                processing_time=0,
                success_rate=0,
                error_rate=0,
                average_size=0,
            )

        return QueueMetrics(
            total_items=total,
            processing_time=self._metrics.avg_processing_time,
            success_rate=self._metrics.successful / total,
            error_rate=self._metrics.failed / total,
            average_size=0,  # This would need to be tracked separately if needed
        )
|