Mirror of https://github.com/pacnpal/Pac-cogs.git (synced 2025-12-23 04:11:05 -05:00)
loads of import fixes
@@ -7,11 +7,11 @@ import subprocess
 from datetime import datetime
 from typing import Dict, Optional, Callable, Set, Tuple

-from .ffmpeg.ffmpeg_manager import FFmpegManager
-from .ffmpeg.exceptions import CompressionError
-from .utils.exceptions import VideoVerificationError
-from .utils.file_operations import FileOperations
-from .utils.progress_handler import ProgressHandler
+from ..ffmpeg.ffmpeg_manager import FFmpegManager
+from ..ffmpeg.exceptions import CompressionError
+from ..utils.exceptions import VideoVerificationError
+from ..utils.file_operations import FileOperations
+from ..utils.progress_handler import ProgressHandler

 logger = logging.getLogger("VideoArchiver")

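The change repeated throughout this commit is a one-level shift in relative imports: the modules doing the importing were moved into a subpackage, so ".ffmpeg" and ".utils" no longer resolve from their new location and must become "..ffmpeg" and "..utils". A minimal sketch of the layout this implies (the directory and file names below are assumptions inferred from the import paths, not taken from the repository):

# Hypothetical layout inferred from the import paths in this commit:
#
#   videoarchiver/
#       ffmpeg/
#           ffmpeg_manager.py      # FFmpegManager
#           exceptions.py          # CompressionError
#       utils/
#           exceptions.py          # VideoVerificationError
#           file_operations.py     # FileOperations
#       some_subpackage/           # assumed new home of the edited module
#           edited_module.py       # the file patched in this hunk

# Inside videoarchiver/some_subpackage/edited_module.py, a single leading dot
# only reaches videoarchiver.some_subpackage, so sibling subpackages need two dots:
from ..ffmpeg.ffmpeg_manager import FFmpegManager   # videoarchiver.ffmpeg
from ..utils.file_operations import FileOperations  # videoarchiver.utils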
@@ -7,14 +7,15 @@ import subprocess
 from datetime import datetime
 from typing import Dict, Any, Optional, Callable, List, Set, Tuple

-from .processor import _compression_progress
-from .utils.compression_handler import CompressionHandler
-from .utils.progress_handler import ProgressHandler
-from .utils.file_operations import FileOperations
-from .utils.exceptions import CompressionError, VideoVerificationError
+from ..processor import _compression_progress
+from ..utils.compression_handler import CompressionHandler
+from ..utils.progress_handler import ProgressHandler
+from ..utils.file_operations import FileOperations
+from ..utils.exceptions import CompressionError, VideoVerificationError

 logger = logging.getLogger("VideoArchiver")


 class CompressionManager:
     """Manages video compression operations"""

@@ -6,11 +6,12 @@ import asyncio
 from pathlib import Path
 from typing import List, Optional, Tuple

-from .utils.exceptions import FileCleanupError
-from .utils.file_deletion import SecureFileDeleter
+from ..utils.exceptions import FileCleanupError
+from ..utils.file_deletion import SecureFileDeleter

 logger = logging.getLogger("DirectoryManager")


 class DirectoryManager:
     """Handles directory operations and cleanup"""

@@ -18,21 +19,18 @@ class DirectoryManager:
         self.file_deleter = SecureFileDeleter()

     async def cleanup_directory(
-        self,
-        directory_path: str,
-        recursive: bool = True,
-        delete_empty: bool = True
+        self, directory_path: str, recursive: bool = True, delete_empty: bool = True
     ) -> Tuple[int, List[str]]:
         """Clean up a directory by removing files and optionally empty subdirectories

         Args:
             directory_path: Path to the directory to clean
             recursive: Whether to clean subdirectories
             delete_empty: Whether to delete empty directories

         Returns:
             Tuple[int, List[str]]: (Number of files deleted, List of errors)

         Raises:
             FileCleanupError: If cleanup fails critically
         """
@@ -45,9 +43,7 @@ class DirectoryManager:
         try:
             # Process files and directories
             deleted, errs = await self._process_directory_contents(
-                directory_path,
-                recursive,
-                delete_empty
+                directory_path, recursive, delete_empty
             )
             deleted_count += deleted
             errors.extend(errs)
@@ -69,10 +65,7 @@ class DirectoryManager:
             raise FileCleanupError(f"Directory cleanup failed: {str(e)}")

     async def _process_directory_contents(
-        self,
-        directory_path: str,
-        recursive: bool,
-        delete_empty: bool
+        self, directory_path: str, recursive: bool, delete_empty: bool
     ) -> Tuple[int, List[str]]:
         """Process contents of a directory"""
         deleted_count = 0
@@ -90,9 +83,7 @@ class DirectoryManager:
            elif entry.is_dir() and recursive:
                # Process subdirectory
                subdir_deleted, subdir_errors = await self.cleanup_directory(
-                   entry.path,
-                   recursive=True,
-                   delete_empty=delete_empty
+                   entry.path, recursive=True, delete_empty=delete_empty
                )
                deleted_count += subdir_deleted
                errors.extend(subdir_errors)
@@ -133,31 +124,31 @@ class DirectoryManager:

     async def ensure_directory(self, directory_path: str) -> None:
         """Ensure a directory exists and is accessible

         Args:
             directory_path: Path to the directory to ensure

         Raises:
             FileCleanupError: If directory cannot be created or accessed
         """
         try:
             path = Path(directory_path)
             path.mkdir(parents=True, exist_ok=True)

             # Verify directory is writable
             if not os.access(directory_path, os.W_OK):
                 raise FileCleanupError(f"Directory {directory_path} is not writable")

         except Exception as e:
             logger.error(f"Error ensuring directory {directory_path}: {e}")
             raise FileCleanupError(f"Failed to ensure directory: {str(e)}")

     async def get_directory_size(self, directory_path: str) -> int:
         """Get total size of a directory in bytes

         Args:
             directory_path: Path to the directory

         Returns:
             int: Total size in bytes
         """
@@ -173,5 +164,5 @@ class DirectoryManager:
                 logger.warning(f"Error getting size for {entry.path}: {e}")
         except Exception as e:
             logger.error(f"Error calculating directory size: {e}")

         return total_size

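A get_directory_size along these lines typically walks the tree with os.scandir, sums st_size, and logs rather than fails on entries it cannot stat. The hunk only shows the tail of the method, so the following is a hedged sketch of the overall shape, not the repository's exact implementation:

import os
import logging

logger = logging.getLogger("DirectoryManager")


def get_directory_size(directory_path: str) -> int:
    """Sum the sizes of all regular files under directory_path, in bytes."""
    total_size = 0
    try:
        with os.scandir(directory_path) as entries:
            for entry in entries:
                try:
                    if entry.is_file(follow_symlinks=False):
                        total_size += entry.stat(follow_symlinks=False).st_size
                    elif entry.is_dir(follow_symlinks=False):
                        total_size += get_directory_size(entry.path)
                except OSError as e:
                    logger.warning(f"Error getting size for {entry.path}: {e}")
    except Exception as e:
        logger.error(f"Error calculating directory size: {e}")

    return total_size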
@@ -3,16 +3,16 @@
 import os
 import asyncio
 import logging
-import yt_dlp
+import yt_dlp  # type: ignore
 from typing import Dict, Optional, Callable, Tuple
 from pathlib import Path

-from .utils.url_validator import check_url_support
-from .utils.progress_handler import ProgressHandler, CancellableYTDLLogger
-from .utils.file_operations import FileOperations
-from .utils.compression_handler import CompressionHandler
-from .utils.process_manager import ProcessManager
-from .ffmpeg.ffmpeg_manager import FFmpegManager
+from ..utils.url_validator import check_url_support
+from ..utils.progress_handler import ProgressHandler, CancellableYTDLLogger
+from ..utils.file_operations import FileOperations
+from ..utils.compression_handler import CompressionHandler
+from ..utils.process_manager import ProcessManager
+from ..ffmpeg.ffmpeg_manager import FFmpegManager

 logger = logging.getLogger("VideoArchiver")

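yt-dlp ships without inline type stubs, so a strict checker flags the bare import; the commit silences that diagnostic per import with a trailing comment:

# yt-dlp publishes no type stubs, so mypy/pyright would otherwise report a
# "missing library stubs" error on the bare import; the trailing comment
# suppresses just that diagnostic and has no effect at runtime.
import yt_dlp  # type: ignore

Project-wide, the same effect can be had with a type-checker override (for example mypy's ignore_missing_imports for the yt_dlp package), which avoids repeating the comment across modules; that is an alternative, not what this commit does.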
@@ -3,21 +3,22 @@
 import os
 import logging
 import asyncio
-import yt_dlp
+import yt_dlp  # type: ignore
 from datetime import datetime
 from concurrent.futures import ThreadPoolExecutor
 from typing import Dict, List, Optional, Tuple, Callable, Any
 from pathlib import Path

-from .utils.verification_manager import VideoVerificationManager
-from .utils.compression_manager import CompressionManager
-from .utils import progress_tracker
+from ..ffmpeg.verification_manager import VerificationManager
+from ..utils.compression_manager import CompressionManager
+from ..utils import progress_tracker

 logger = logging.getLogger("DownloadManager")


 class CancellableYTDLLogger:
     """Custom yt-dlp logger that can be cancelled"""

     def __init__(self):
         self.cancelled = False
@@ -36,6 +37,7 @@ class CancellableYTDLLogger:
             raise Exception("Download cancelled")
         logger.error(msg)


 class DownloadManager:
     """Manages video downloads and processing"""
@@ -53,40 +55,33 @@ class DownloadManager:
         max_file_size: int,
         enabled_sites: Optional[List[str]] = None,
         concurrent_downloads: int = 2,
-        ffmpeg_mgr = None
+        ffmpeg_mgr=None,
     ):
         self.download_path = Path(download_path)
         self.download_path.mkdir(parents=True, exist_ok=True)
         os.chmod(str(self.download_path), 0o755)

         # Initialize components
-        self.verification_manager = VideoVerificationManager(ffmpeg_mgr)
+        self.verification_manager = VerificationManager(ffmpeg_mgr)
         self.compression_manager = CompressionManager(ffmpeg_mgr, max_file_size)

         # Create thread pool
         self.download_pool = ThreadPoolExecutor(
             max_workers=max(1, min(3, concurrent_downloads)),
-            thread_name_prefix="videoarchiver_download"
+            thread_name_prefix="videoarchiver_download",
         )

         # Initialize state
         self._shutting_down = False
         self.ytdl_logger = CancellableYTDLLogger()

         # Configure yt-dlp options
         self.ydl_opts = self._configure_ydl_opts(
-            video_format,
-            max_quality,
-            max_file_size,
-            ffmpeg_mgr
+            video_format, max_quality, max_file_size, ffmpeg_mgr
         )

     def _configure_ydl_opts(
-        self,
-        video_format: str,
-        max_quality: int,
-        max_file_size: int,
-        ffmpeg_mgr
+        self, video_format: str, max_quality: int, max_file_size: int, ffmpeg_mgr
     ) -> Dict[str, Any]:
         """Configure yt-dlp options"""
         return {
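The constructor above bounds the download pool to between one and three workers regardless of the configured concurrency. A hedged, self-contained sketch of that clamping pattern (the helper name is illustrative, not the cog's actual code):

from concurrent.futures import ThreadPoolExecutor


def make_download_pool(concurrent_downloads: int) -> ThreadPoolExecutor:
    """Clamp the requested concurrency into [1, 3] before creating the pool."""
    workers = max(1, min(3, concurrent_downloads))  # 0 or negative -> 1, anything above 3 -> 3
    return ThreadPoolExecutor(
        max_workers=workers,
        thread_name_prefix="videoarchiver_download",
    )


# Example: a user-configured value of 10 still yields only 3 worker threads,
# while 0 still yields a usable pool with a single worker.
pool = make_download_pool(10)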
@@ -124,7 +119,9 @@ class DownloadManager:
         try:
             size = os.path.getsize(info["filepath"])
             if size > self.compression_manager.max_file_size:
-                logger.info(f"File exceeds size limit, will compress: {info['filepath']}")
+                logger.info(
+                    f"File exceeds size limit, will compress: {info['filepath']}"
+                )
         except OSError as e:
             logger.error(f"Error checking file size: {str(e)}")
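The size check above runs on the path yt-dlp reports once a download finishes, flagging files that will need compression. One hedged sketch of how such a check is commonly wired up (progress_hooks and the "finished" status dict are real yt-dlp behaviour; the constant and hook name are assumptions, not the cog's code):

import os
import logging

import yt_dlp  # type: ignore

logger = logging.getLogger("DownloadManager")
MAX_FILE_SIZE = 25 * 1024 * 1024  # illustrative limit; the cog reads its limit from config


def _finished_hook(status: dict) -> None:
    # yt-dlp calls progress hooks with a status dict; "finished" carries the final filename.
    if status.get("status") != "finished":
        return
    try:
        size = os.path.getsize(status["filename"])
        if size > MAX_FILE_SIZE:
            logger.info(f"File exceeds size limit, will compress: {status['filename']}")
    except OSError as e:
        logger.error(f"Error checking file size: {str(e)}")


ydl_opts = {"progress_hooks": [_finished_hook], "outtmpl": "downloads/%(title)s.%(ext)s"}
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://example.com/video"])  # placeholder URL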
@@ -155,30 +152,24 @@ class DownloadManager:
         progress_tracker.clear_progress()

     async def download_video(
-        self,
-        url: str,
-        progress_callback: Optional[Callable[[float], None]] = None
+        self, url: str, progress_callback: Optional[Callable[[float], None]] = None
     ) -> Tuple[bool, str, str]:
         """Download and process a video"""
         if self._shutting_down:
             return False, "", "Downloader is shutting down"

         progress_tracker.start_download(url)

         try:
             # Download video
             success, file_path, error = await self._safe_download(
-                url,
-                progress_callback
+                url, progress_callback
             )
             if not success:
                 return False, "", error

             # Verify and compress if needed
-            return await self._process_downloaded_file(
-                file_path,
-                progress_callback
-            )
+            return await self._process_downloaded_file(file_path, progress_callback)

         except Exception as e:
             logger.error(f"Download error: {str(e)}")
@@ -188,18 +179,14 @@ class DownloadManager:
             progress_tracker.end_download(url)

     async def _safe_download(
-        self,
-        url: str,
-        progress_callback: Optional[Callable[[float], None]]
+        self, url: str, progress_callback: Optional[Callable[[float], None]]
     ) -> Tuple[bool, str, str]:
         """Safely download video with retries"""
         # Implementation moved to separate method for clarity
         pass  # Implementation would be similar to original but using new components

     async def _process_downloaded_file(
-        self,
-        file_path: str,
-        progress_callback: Optional[Callable[[float], None]]
+        self, file_path: str, progress_callback: Optional[Callable[[float], None]]
     ) -> Tuple[bool, str, str]:
         """Process a downloaded file (verify and compress if needed)"""
         # Implementation moved to separate method for clarity
@@ -7,16 +7,17 @@ import logging
 from pathlib import Path
 from typing import Optional

-from .utils.exceptions import FileCleanupError
+from ..utils.exceptions import FileCleanupError

 logger = logging.getLogger("FileDeleter")


 class SecureFileDeleter:
     """Handles secure file deletion operations"""

     def __init__(self, max_size: int = 100 * 1024 * 1024):
         """Initialize the file deleter

         Args:
             max_size: Maximum file size in bytes for secure deletion (default: 100MB)
         """
@@ -24,13 +25,13 @@ class SecureFileDeleter:

     async def delete_file(self, file_path: str) -> bool:
         """Delete a file securely

         Args:
             file_path: Path to the file to delete

         Returns:
             bool: True if file was successfully deleted

         Raises:
             FileCleanupError: If file deletion fails after all attempts
         """
@@ -65,7 +66,9 @@
     async def _delete_large_file(self, file_path: str) -> bool:
         """Delete a large file directly"""
         try:
-            logger.debug(f"File {file_path} exceeds max size for secure deletion, performing direct removal")
+            logger.debug(
+                f"File {file_path} exceeds max size for secure deletion, performing direct removal"
+            )
             os.remove(file_path)
             return True
         except OSError as e:
@@ -84,11 +87,13 @@
     async def _zero_file_content(self, file_path: str, file_size: int) -> None:
         """Zero out file content in chunks"""
         try:
-            chunk_size = min(1024 * 1024, file_size)  # 1MB chunks or file size if smaller
+            chunk_size = min(
+                1024 * 1024, file_size
+            )  # 1MB chunks or file size if smaller
             with open(file_path, "wb") as f:
                 for offset in range(0, file_size, chunk_size):
                     write_size = min(chunk_size, file_size - offset)
-                    f.write(b'\0' * write_size)
+                    f.write(b"\0" * write_size)
                     await asyncio.sleep(0)  # Allow other tasks to run
                 f.flush()
                 os.fsync(f.fileno())
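Putting the pieces of this class together, a secure delete along these lines zeroes the file in place and only then unlinks it, falling back to a plain os.remove for files above max_size. The following is a hedged sketch of that flow under those assumptions, not the cog's verbatim code:

import os
import asyncio
import logging

logger = logging.getLogger("FileDeleter")


class SecureFileDeleter:
    """Zero out small files before unlinking; files above max_size are removed directly."""

    def __init__(self, max_size: int = 100 * 1024 * 1024):
        self.max_size = max_size

    async def delete_file(self, file_path: str) -> bool:
        if not os.path.exists(file_path):
            return True
        file_size = os.path.getsize(file_path)
        if file_size > self.max_size:
            logger.debug(f"File {file_path} exceeds max size for secure deletion, performing direct removal")
            os.remove(file_path)
            return True
        if file_size > 0:
            await self._zero_file_content(file_path, file_size)
        os.remove(file_path)
        return not os.path.exists(file_path)

    async def _zero_file_content(self, file_path: str, file_size: int) -> None:
        chunk_size = min(1024 * 1024, file_size)  # 1MB chunks or file size if smaller
        with open(file_path, "wb") as f:
            for offset in range(0, file_size, chunk_size):
                f.write(b"\0" * min(chunk_size, file_size - offset))
                await asyncio.sleep(0)  # yield to the event loop between chunks
            f.flush()
            os.fsync(f.fileno())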
@@ -9,14 +9,15 @@ import subprocess
 from typing import Tuple
 from pathlib import Path

-from .utils.exceptions import VideoVerificationError
-from .utils.file_deletion import secure_delete_file
+from ..utils.exceptions import VideoVerificationError
+from ..utils.file_deletion import SecureFileDeleter

 logger = logging.getLogger("VideoArchiver")


 class FileOperations:
     """Handles safe file operations with retries"""

     def __init__(self, max_retries: int = 3, retry_delay: int = 1):
         self.max_retries = max_retries
         self.retry_delay = retry_delay
@@ -26,7 +27,7 @@ class FileOperations:
         for attempt in range(self.max_retries):
             try:
                 if os.path.exists(file_path):
-                    await secure_delete_file(file_path)
+                    await SecureFileDeleter(file_path)
                 return True
             except Exception as e:
                 logger.error(f"Delete attempt {attempt + 1} failed: {str(e)}")
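As committed, "await SecureFileDeleter(file_path)" awaits the class constructor rather than a coroutine, which would raise a TypeError at runtime. Given the SecureFileDeleter.delete_file coroutine shown earlier in this commit, the intended call is presumably closer to the following sketch (an assumption, not the committed code):

# Presumed intent: instantiate the deleter, then await its delete_file coroutine
# instead of awaiting the constructor call itself.
deleter = SecureFileDeleter()
if os.path.exists(file_path):
    await deleter.delete_file(file_path)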
@@ -122,7 +123,7 @@ class FileOperations:
             result = subprocess.run(cmd, capture_output=True, text=True)
             if result.returncode != 0:
                 raise Exception(f"FFprobe failed: {result.stderr}")

             data = json.loads(result.stdout)
             return float(data["format"]["duration"])
         except Exception as e:
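The duration probe above parses ffprobe's JSON output for format.duration; the hunk does not show how cmd is built, so the following is a sketch of the usual invocation that produces that field, not necessarily the cog's exact command:

import json
import subprocess


def get_duration(file_path: str) -> float:
    """Return the container duration in seconds, as reported by ffprobe."""
    cmd = [
        "ffprobe",
        "-v", "error",
        "-print_format", "json",   # emit JSON so the output can be parsed reliably
        "-show_format",            # includes the format.duration field
        file_path,
    ]
    result = subprocess.run(cmd, capture_output=True, text=True)
    if result.returncode != 0:
        raise Exception(f"FFprobe failed: {result.stderr}")
    data = json.loads(result.stdout)
    return float(data["format"]["duration"])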
@@ -4,10 +4,10 @@ import logging
 from pathlib import Path
 from typing import List, Tuple, Optional

-from .utils.exceptions import FileCleanupError
-from .utils.file_deletion import SecureFileDeleter
-from .utils.directory_manager import DirectoryManager
-from .utils.permission_manager import PermissionManager
+from ..utils.exceptions import FileCleanupError
+from ..utils.file_deletion import SecureFileDeleter
+from ..utils.directory_manager import DirectoryManager
+from ..utils.permission_manager import PermissionManager

 logger = logging.getLogger("VideoArchiver")

@@ -1,5 +1,6 @@
 """Path management utilities"""

+import asyncio
 import os
 import tempfile
 import shutil
@@ -7,11 +8,11 @@ import stat
 import logging
 import contextlib
 import time
-from typing import Generator, List, Optional
+from typing import List, Optional, AsyncGenerator
 from pathlib import Path

-from .utils.exceptions import FileCleanupError
-from .utils.permission_manager import PermissionManager
+from ..utils.exceptions import FileCleanupError
+from ..utils.permission_manager import PermissionManager

 logger = logging.getLogger("PathManager")
@@ -162,7 +163,7 @@ class PathManager:
     async def temp_path_context(
         self,
         prefix: str = "videoarchiver_"
-    ) -> Generator[str, None, None]:
+    ) -> AsyncGenerator[str, None]:
         """Async context manager for temporary path creation and cleanup

         Args:
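An async generator function decorated with contextlib.asynccontextmanager is the usual way to arrive at this AsyncGenerator[str, None] return annotation. A hedged sketch of what temp_path_context plausibly does (the creation and cleanup details are assumptions; the real method lives on PathManager):

import shutil
import tempfile
import contextlib
from typing import AsyncGenerator


@contextlib.asynccontextmanager
async def temp_path_context(prefix: str = "videoarchiver_") -> AsyncGenerator[str, None]:
    """Create a temporary directory, yield its path, and always clean it up."""
    temp_dir = tempfile.mkdtemp(prefix=prefix)
    try:
        yield temp_dir
    finally:
        shutil.rmtree(temp_dir, ignore_errors=True)


# Usage:
#   async with temp_path_context() as work_dir:
#       ...  # download or transcode into work_dir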
@@ -6,7 +6,7 @@ import logging
 from pathlib import Path
 from typing import Optional, Union, List

-from .utils.exceptions import FileCleanupError
+from ..utils.exceptions import FileCleanupError

 logger = logging.getLogger("PermissionManager")

@@ -1,8 +1,7 @@
 """Progress tracking and logging utilities for video downloads"""

 import logging
 from datetime import datetime
 from typing import Dict, Any, Optional, Callable
 import os

 logger = logging.getLogger("VideoArchiver")
@@ -13,17 +12,17 @@ class CancellableYTDLLogger:

     def debug(self, msg):
         if self.cancelled:
-            raise yt_dlp.utils.DownloadError("Download cancelled")
+            raise yt_dlp.utils.DownloadError("Download cancelled")  # type: ignore
         logger.debug(msg)

     def warning(self, msg):
         if self.cancelled:
-            raise yt_dlp.utils.DownloadError("Download cancelled")
+            raise yt_dlp.utils.DownloadError("Download cancelled")  # type: ignore
         logger.warning(msg)

     def error(self, msg):
         if self.cancelled:
-            raise yt_dlp.utils.DownloadError("Download cancelled")
+            raise yt_dlp.utils.DownloadError("Download cancelled")  # type: ignore
         logger.error(msg)

 class ProgressHandler:
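yt-dlp lets callers pass their own logger object via the "logger" option, which is how a cancellable logger like this one gets its debug/warning/error methods invoked from inside a download. A hedged sketch of the wiring ("logger" and "outtmpl" are real yt-dlp options; the surrounding flow and URL are illustrative):

import logging

import yt_dlp  # type: ignore

logger = logging.getLogger("VideoArchiver")


class CancellableYTDLLogger:
    """Raise inside yt-dlp's logging calls once cancelled, aborting the download."""

    def __init__(self):
        self.cancelled = False

    def debug(self, msg):
        if self.cancelled:
            raise yt_dlp.utils.DownloadError("Download cancelled")  # type: ignore
        logger.debug(msg)

    def warning(self, msg):
        if self.cancelled:
            raise yt_dlp.utils.DownloadError("Download cancelled")  # type: ignore
        logger.warning(msg)

    def error(self, msg):
        if self.cancelled:
            raise yt_dlp.utils.DownloadError("Download cancelled")  # type: ignore
        logger.error(msg)


ytdl_logger = CancellableYTDLLogger()
ydl_opts = {"logger": ytdl_logger, "outtmpl": "downloads/%(title)s.%(ext)s"}

# Setting ytdl_logger.cancelled = True makes the next logged message raise
# DownloadError, which aborts the in-flight download from inside yt-dlp.
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://example.com/video"])  # placeholder URL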
@@ -123,4 +122,4 @@ ProgressHandler
                 progress_callback(progress)

         except Exception as e:
-            logger.error(f"Error upda
+            logger.error(f"Error updating compression progress: {str(e)}")
@@ -2,7 +2,7 @@

 import re
 import logging
-import yt_dlp
+import yt_dlp  # type: ignore
 from typing import List, Optional

 logger = logging.getLogger("VideoArchiver")
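This last file is the URL validator that imports yt_dlp; a check_url_support helper typically asks yt-dlp's extractors whether any of them, other than the generic fallback, claims the URL. A hedged sketch of that approach, which may differ from the cog's actual logic (gen_extractors, suitable, and IE_NAME are real yt-dlp APIs; the allow-list handling is an assumption):

import logging
from typing import List, Optional

from yt_dlp.extractor import gen_extractors  # type: ignore

logger = logging.getLogger("VideoArchiver")


def check_url_support(url: str, enabled_sites: Optional[List[str]] = None) -> bool:
    """Return True if a non-generic yt-dlp extractor recognises the URL and,
    when a site allow-list is given, the extractor name matches it."""
    for extractor in gen_extractors():
        if extractor.IE_NAME == "generic":
            continue  # the generic extractor matches almost anything
        if not extractor.suitable(url):
            continue
        if enabled_sites and not any(
            site.lower() in extractor.IE_NAME.lower() for site in enabled_sites
        ):
            logger.debug(f"URL matched {extractor.IE_NAME}, but that site is not enabled")
            return False
        return True
    return False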