mirror of
https://github.com/pacnpal/Pac-cogs.git
synced 2025-12-21 03:11:07 -05:00
Made secure_delete_file and cleanup_downloads async in file_ops.py
Added chunked file processing and size limits to prevent blocking Updated video_downloader.py to properly await secure_delete_file Updated video_archiver.py to properly await cleanup_downloads Simplified the secure deletion process while maintaining security Added proper error handling and logging throughout
This commit is contained in:
@@ -2,10 +2,8 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
import stat
|
import stat
|
||||||
import time
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import shutil
|
|
||||||
from datetime import datetime
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
@@ -13,13 +11,12 @@ from .exceptions import FileCleanupError
|
|||||||
|
|
||||||
logger = logging.getLogger("VideoArchiver")
|
logger = logging.getLogger("VideoArchiver")
|
||||||
|
|
||||||
def secure_delete_file(file_path: str, passes: int = 3, timeout: int = 30) -> bool:
|
async def secure_delete_file(file_path: str, max_size: int = 100 * 1024 * 1024) -> bool:
|
||||||
"""Securely delete a file by overwriting it multiple times before removal
|
"""Delete a file securely
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
file_path: Path to the file to delete
|
file_path: Path to the file to delete
|
||||||
passes: Number of overwrite passes (default: 3)
|
max_size: Maximum file size in bytes to attempt secure deletion (default: 100MB)
|
||||||
timeout: Maximum time in seconds to attempt deletion (default: 30)
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
bool: True if file was successfully deleted, False otherwise
|
bool: True if file was successfully deleted, False otherwise
|
||||||
@@ -30,14 +27,23 @@ def secure_delete_file(file_path: str, passes: int = 3, timeout: int = 30) -> bo
|
|||||||
if not os.path.exists(file_path):
|
if not os.path.exists(file_path):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
start_time = datetime.now()
|
|
||||||
try:
|
try:
|
||||||
# Get file size before starting
|
# Get file size
|
||||||
try:
|
try:
|
||||||
file_size = os.path.getsize(file_path)
|
file_size = os.path.getsize(file_path)
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
file_size = 0
|
|
||||||
logger.warning(f"Could not get size of {file_path}: {e}")
|
logger.warning(f"Could not get size of {file_path}: {e}")
|
||||||
|
file_size = 0
|
||||||
|
|
||||||
|
# For large files, skip secure deletion and just remove
|
||||||
|
if file_size > max_size:
|
||||||
|
logger.debug(f"File {file_path} exceeds max size for secure deletion, performing direct removal")
|
||||||
|
try:
|
||||||
|
os.remove(file_path)
|
||||||
|
return True
|
||||||
|
except OSError as e:
|
||||||
|
logger.error(f"Failed to remove large file {file_path}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
# Ensure file is writable
|
# Ensure file is writable
|
||||||
try:
|
try:
|
||||||
@@ -47,52 +53,31 @@ def secure_delete_file(file_path: str, passes: int = 3, timeout: int = 30) -> bo
|
|||||||
logger.warning(f"Could not modify permissions of {file_path}: {e}")
|
logger.warning(f"Could not modify permissions of {file_path}: {e}")
|
||||||
raise FileCleanupError(f"Permission error: {str(e)}")
|
raise FileCleanupError(f"Permission error: {str(e)}")
|
||||||
|
|
||||||
# Overwrite file content
|
# Zero out file content in chunks to avoid memory issues
|
||||||
if file_size > 0:
|
if file_size > 0:
|
||||||
for pass_num in range(passes):
|
|
||||||
try:
|
|
||||||
with open(file_path, "wb") as f:
|
|
||||||
# Write random data
|
|
||||||
f.write(os.urandom(file_size))
|
|
||||||
# Ensure data is written to disk
|
|
||||||
f.flush()
|
|
||||||
os.fsync(f.fileno())
|
|
||||||
except OSError as e:
|
|
||||||
logger.warning(f"Error during pass {pass_num + 1} of overwriting {file_path}: {e}")
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Try multiple deletion methods
|
|
||||||
deletion_methods = [
|
|
||||||
lambda p: os.remove(p),
|
|
||||||
lambda p: os.unlink(p),
|
|
||||||
lambda p: Path(p).unlink(missing_ok=True),
|
|
||||||
lambda p: shutil.rmtree(p, ignore_errors=True) if os.path.isdir(p) else os.remove(p)
|
|
||||||
]
|
|
||||||
|
|
||||||
for method in deletion_methods:
|
|
||||||
try:
|
try:
|
||||||
if os.path.exists(file_path):
|
chunk_size = min(1024 * 1024, file_size) # 1MB chunks or file size if smaller
|
||||||
method(file_path)
|
with open(file_path, "wb") as f:
|
||||||
if not os.path.exists(file_path):
|
for offset in range(0, file_size, chunk_size):
|
||||||
logger.debug(f"Successfully deleted {file_path}")
|
write_size = min(chunk_size, file_size - offset)
|
||||||
return True
|
f.write(b'\0' * write_size)
|
||||||
|
# Allow other tasks to run
|
||||||
|
await asyncio.sleep(0)
|
||||||
|
f.flush()
|
||||||
|
os.fsync(f.fileno())
|
||||||
except OSError as e:
|
except OSError as e:
|
||||||
logger.debug(f"Deletion method failed for {file_path}: {e}")
|
logger.warning(f"Error zeroing file {file_path}: {e}")
|
||||||
continue
|
|
||||||
|
|
||||||
# If file still exists, check timeout
|
# Delete the file
|
||||||
while os.path.exists(file_path):
|
try:
|
||||||
if (datetime.now() - start_time).total_seconds() > timeout:
|
Path(file_path).unlink(missing_ok=True)
|
||||||
logger.error(f"Timeout while trying to delete {file_path}")
|
return True
|
||||||
raise FileCleanupError(f"Deletion timeout for {file_path}")
|
except OSError as e:
|
||||||
time.sleep(0.1)
|
logger.error(f"Failed to delete file {file_path}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
except FileCleanupError:
|
|
||||||
raise
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error during secure deletion of {file_path}: {e}")
|
logger.error(f"Error during deletion of {file_path}: {e}")
|
||||||
# Last resort: try force delete
|
# Last resort: try force delete
|
||||||
try:
|
try:
|
||||||
if os.path.exists(file_path):
|
if os.path.exists(file_path):
|
||||||
@@ -103,7 +88,7 @@ def secure_delete_file(file_path: str, passes: int = 3, timeout: int = 30) -> bo
|
|||||||
raise FileCleanupError(f"Force delete failed: {str(e2)}")
|
raise FileCleanupError(f"Force delete failed: {str(e2)}")
|
||||||
return not os.path.exists(file_path)
|
return not os.path.exists(file_path)
|
||||||
|
|
||||||
def cleanup_downloads(download_path: str) -> None:
|
async def cleanup_downloads(download_path: str) -> None:
|
||||||
"""Clean up the downloads directory
|
"""Clean up the downloads directory
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -122,10 +107,10 @@ def cleanup_downloads(download_path: str) -> None:
|
|||||||
try:
|
try:
|
||||||
path = entry.path
|
path = entry.path
|
||||||
if entry.is_file():
|
if entry.is_file():
|
||||||
if not secure_delete_file(path):
|
if not await secure_delete_file(path):
|
||||||
errors.append(f"Failed to delete file: {path}")
|
errors.append(f"Failed to delete file: {path}")
|
||||||
elif entry.is_dir():
|
elif entry.is_dir():
|
||||||
shutil.rmtree(path, ignore_errors=True)
|
await asyncio.to_thread(lambda: os.rmdir(path) if not os.listdir(path) else None)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
errors.append(f"Error processing {entry.path}: {str(e)}")
|
errors.append(f"Error processing {entry.path}: {str(e)}")
|
||||||
continue
|
continue
|
||||||
@@ -136,7 +121,7 @@ def cleanup_downloads(download_path: str) -> None:
|
|||||||
try:
|
try:
|
||||||
dir_path = os.path.join(root, name)
|
dir_path = os.path.join(root, name)
|
||||||
if not os.listdir(dir_path): # Check if directory is empty
|
if not os.listdir(dir_path): # Check if directory is empty
|
||||||
os.rmdir(dir_path)
|
await asyncio.to_thread(os.rmdir, dir_path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
errors.append(f"Error removing directory {name}: {str(e)}")
|
errors.append(f"Error removing directory {name}: {str(e)}")
|
||||||
|
|
||||||
|
|||||||
@@ -447,7 +447,7 @@ class VideoDownloader:
|
|||||||
"""Safely delete a file with retries"""
|
"""Safely delete a file with retries"""
|
||||||
for attempt in range(self.FILE_OP_RETRIES):
|
for attempt in range(self.FILE_OP_RETRIES):
|
||||||
try:
|
try:
|
||||||
if secure_delete_file(file_path):
|
if await secure_delete_file(file_path):
|
||||||
return True
|
return True
|
||||||
await asyncio.sleep(self.FILE_OP_RETRY_DELAY * (attempt + 1))
|
await asyncio.sleep(self.FILE_OP_RETRY_DELAY * (attempt + 1))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
|||||||
@@ -58,7 +58,7 @@ class VideoArchiver(commands.Cog):
|
|||||||
self.download_path.mkdir(parents=True, exist_ok=True)
|
self.download_path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
# Clean existing downloads
|
# Clean existing downloads
|
||||||
cleanup_downloads(str(self.download_path))
|
await cleanup_downloads(str(self.download_path))
|
||||||
|
|
||||||
# Initialize shared FFmpeg manager
|
# Initialize shared FFmpeg manager
|
||||||
self.ffmpeg_mgr = FFmpegManager()
|
self.ffmpeg_mgr = FFmpegManager()
|
||||||
@@ -184,7 +184,7 @@ class VideoArchiver(commands.Cog):
|
|||||||
# Clean up download directory
|
# Clean up download directory
|
||||||
if hasattr(self, "download_path") and self.download_path.exists():
|
if hasattr(self, "download_path") and self.download_path.exists():
|
||||||
try:
|
try:
|
||||||
cleanup_downloads(str(self.download_path))
|
await cleanup_downloads(str(self.download_path))
|
||||||
self.download_path.rmdir()
|
self.download_path.rmdir()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger.error(f"Error cleaning up download directory: {str(e)}")
|
logger.error(f"Error cleaning up download directory: {str(e)}")
|
||||||
@@ -202,7 +202,7 @@ class VideoArchiver(commands.Cog):
|
|||||||
|
|
||||||
# Ensure download directory exists and is clean
|
# Ensure download directory exists and is clean
|
||||||
self.download_path.mkdir(parents=True, exist_ok=True)
|
self.download_path.mkdir(parents=True, exist_ok=True)
|
||||||
cleanup_downloads(str(self.download_path))
|
await cleanup_downloads(str(self.download_path))
|
||||||
|
|
||||||
# Clean up old components if they exist
|
# Clean up old components if they exist
|
||||||
if guild_id in self.components:
|
if guild_id in self.components:
|
||||||
|
|||||||
Reference in New Issue
Block a user