Mirror of https://github.com/pacnpal/Pac-cogs.git (synced 2025-12-20 10:51:05 -05:00)
Fixed the "cannot import name '_download_progress'" error by:
- adding a shared progress_tracker instance in processor/__init__.py,
- exposing public functions that wrap the ProgressTracker methods, and
- removing direct access to the private _download_progress variable.

Fixed the "'QueueItem' object has no attribute 'retry_count'" error by:
- updating the QueueItem class in queue/models.py to use retry_count instead of retries, matching the field name used in the queue manager's implementation.
videoarchiver/database/video_archive_db.py (new file)
@@ -0,0 +1,79 @@
"""Database management for archived videos"""

import sqlite3
import logging
from pathlib import Path
from typing import Optional, Tuple

logger = logging.getLogger("VideoArchiverDB")


class VideoArchiveDB:
    """Manages the SQLite database for archived videos"""

    def __init__(self, data_path: Path):
        """Initialize the database connection"""
        self.db_path = data_path / "archived_videos.db"
        self.db_path.parent.mkdir(parents=True, exist_ok=True)
        self._init_db()

    def _init_db(self):
        """Initialize the database schema"""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    CREATE TABLE IF NOT EXISTS archived_videos (
                        original_url TEXT PRIMARY KEY,
                        discord_url TEXT NOT NULL,
                        message_id INTEGER NOT NULL,
                        channel_id INTEGER NOT NULL,
                        guild_id INTEGER NOT NULL,
                        archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                    )
                """)
                conn.commit()
        except sqlite3.Error as e:
            logger.error(f"Database initialization error: {e}")
            raise

    def add_archived_video(self, original_url: str, discord_url: str, message_id: int, channel_id: int, guild_id: int) -> bool:
        """Add a newly archived video to the database"""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    INSERT OR REPLACE INTO archived_videos
                    (original_url, discord_url, message_id, channel_id, guild_id)
                    VALUES (?, ?, ?, ?, ?)
                """, (original_url, discord_url, message_id, channel_id, guild_id))
                conn.commit()
                return True
        except sqlite3.Error as e:
            logger.error(f"Error adding archived video: {e}")
            return False

    def get_archived_video(self, url: str) -> Optional[Tuple[str, int, int, int]]:
        """Get archived video information by original URL"""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("""
                    SELECT discord_url, message_id, channel_id, guild_id
                    FROM archived_videos
                    WHERE original_url = ?
                """, (url,))
                result = cursor.fetchone()
                return result if result else None
        except sqlite3.Error as e:
            logger.error(f"Error retrieving archived video: {e}")
            return None

    def is_url_archived(self, url: str) -> bool:
        """Check if a URL has already been archived"""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT 1 FROM archived_videos WHERE original_url = ?", (url,))
                return cursor.fetchone() is not None
        except sqlite3.Error as e:
            logger.error(f"Error checking archived status: {e}")
            return False
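A minimal usage sketch of the new VideoArchiveDB class, for orientation; the data directory and all URL/ID values below are illustrative and not taken from the cog's configuration:

    from pathlib import Path
    from videoarchiver.database.video_archive_db import VideoArchiveDB

    # Illustrative data directory; the cog normally supplies its own data_path.
    db = VideoArchiveDB(Path("/tmp/videoarchiver_data"))
    db.add_archived_video(
        "https://example.com/video",                             # original URL (example)
        "https://cdn.discordapp.com/attachments/1/2/video.mp4",  # Discord attachment URL (example)
        123456789012345678,                                      # message ID (example)
        234567890123456789,                                      # channel ID (example)
        345678901234567890,                                      # guild ID (example)
    )
    print(db.is_url_archived("https://example.com/video"))   # True if the insert succeeded
    print(db.get_archived_video("https://example.com/video"))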
videoarchiver/processor/__init__.py
@@ -6,6 +6,7 @@ from .progress_tracker import ProgressTracker
 from .message_handler import MessageHandler
 from .queue_handler import QueueHandler

+# Export public classes and constants
 __all__ = [
     'VideoProcessor',
     'REACTIONS',
@@ -13,3 +14,27 @@ __all__ = [
     'MessageHandler',
     'QueueHandler'
 ]
+
+# Create a shared progress tracker instance for module-level access
+progress_tracker = ProgressTracker()
+
+# Export progress tracking functions that wrap the instance methods
+def update_download_progress(url, progress_data):
+    """Update download progress for a specific URL"""
+    progress_tracker.update_download_progress(url, progress_data)
+
+def complete_download(url):
+    """Mark a download as complete"""
+    progress_tracker.complete_download(url)
+
+def increment_download_retries(url):
+    """Increment retry count for a download"""
+    progress_tracker.increment_download_retries(url)
+
+def get_download_progress(url=None):
+    """Get download progress for a specific URL or all downloads"""
+    return progress_tracker.get_download_progress(url)
+
+def get_active_operations():
+    """Get all active operations"""
+    return progress_tracker.get_active_operations()
videoarchiver/processor/queue_handler.py
@@ -15,10 +15,11 @@ logger = logging.getLogger("VideoArchiver")
 class QueueHandler:
     """Handles queue processing and video operations"""

-    def __init__(self, bot, config_manager, components):
+    def __init__(self, bot, config_manager, components, db=None):
         self.bot = bot
         self.config = config_manager
         self.components = components
+        self.db = db
         self._unloading = False
         self._active_downloads: Dict[str, asyncio.Task] = {}
         self._active_downloads_lock = asyncio.Lock()
@@ -34,6 +35,16 @@ class QueueHandler:
         download_task = None

         try:
+            # Check if video is already archived
+            if self.db and self.db.is_url_archived(item.url):
+                logger.info(f"Video already archived: {item.url}")
+                if original_message := await self._get_original_message(item):
+                    await original_message.add_reaction(REACTIONS["success"])
+                    archived_info = self.db.get_archived_video(item.url)
+                    if archived_info:
+                        await original_message.reply(f"This video was already archived. You can find it here: {archived_info[0]}")
+                return True, None
+
             guild_id = item.guild_id
             if guild_id not in self.components:
                 return False, f"No components found for guild {guild_id}"
@@ -85,6 +96,65 @@ class QueueHandler:
             except Exception as e:
                 logger.error(f"Failed to clean up file {file_path}: {e}")

+    async def _archive_video(self, guild_id: int, original_message: Optional[discord.Message],
+                             message_manager, url: str, file_path: str) -> Tuple[bool, Optional[str]]:
+        """Archive downloaded video"""
+        try:
+            # Get archive channel
+            guild = self.bot.get_guild(guild_id)
+            if not guild:
+                return False, f"Guild {guild_id} not found"
+
+            archive_channel = await self.config.get_channel(guild, "archive")
+            if not archive_channel:
+                return False, "Archive channel not configured"
+
+            # Format message
+            try:
+                author = original_message.author if original_message else None
+                channel = original_message.channel if original_message else None
+                message = await message_manager.format_message(
+                    author=author, channel=channel, url=url
+                )
+            except Exception as e:
+                return False, f"Failed to format message: {str(e)}"
+
+            # Upload to archive channel
+            if not os.path.exists(file_path):
+                return False, "Processed file not found"
+
+            archive_message = await archive_channel.send(content=message, file=discord.File(file_path))
+
+            # Store in database if available
+            if self.db and archive_message.attachments:
+                discord_url = archive_message.attachments[0].url
+                self.db.add_archived_video(
+                    url,
+                    discord_url,
+                    archive_message.id,
+                    archive_channel.id,
+                    guild_id
+                )
+                logger.info(f"Added video to archive database: {url} -> {discord_url}")
+
+            if original_message:
+                await original_message.remove_reaction(REACTIONS["processing"], self.bot.user)
+                await original_message.add_reaction(REACTIONS["success"])
+                logger.info(f"Successfully processed message {original_message.id}")
+
+            return True, None
+
+        except discord.HTTPException as e:
+            if original_message:
+                await original_message.add_reaction(REACTIONS["error"])
+            logger.error(f"Failed to upload to Discord: {str(e)}")
+            return False, f"Failed to upload to Discord: {str(e)}"
+        except Exception as e:
+            if original_message:
+                await original_message.add_reaction(REACTIONS["error"])
+            logger.error(f"Failed to archive video: {str(e)}")
+            return False, f"Failed to archive video: {str(e)}"
+
     async def _get_original_message(self, item) -> Optional[discord.Message]:
         """Retrieve the original message"""
         try:
@@ -149,53 +219,6 @@ class QueueHandler:
         async with self._active_downloads_lock:
             self._active_downloads.pop(url, None)

-    async def _archive_video(self, guild_id: int, original_message: Optional[discord.Message],
-                             message_manager, url: str, file_path: str) -> Tuple[bool, Optional[str]]:
-        """Archive downloaded video"""
-        try:
-            # Get archive channel
-            guild = self.bot.get_guild(guild_id)
-            if not guild:
-                return False, f"Guild {guild_id} not found"
-
-            archive_channel = await self.config.get_channel(guild, "archive")
-            if not archive_channel:
-                return False, "Archive channel not configured"
-
-            # Format message
-            try:
-                author = original_message.author if original_message else None
-                channel = original_message.channel if original_message else None
-                message = await message_manager.format_message(
-                    author=author, channel=channel, url=url
-                )
-            except Exception as e:
-                return False, f"Failed to format message: {str(e)}"
-
-            # Upload to archive channel
-            if not os.path.exists(file_path):
-                return False, "Processed file not found"
-
-            await archive_channel.send(content=message, file=discord.File(file_path))
-
-            if original_message:
-                await original_message.remove_reaction(REACTIONS["processing"], self.bot.user)
-                await original_message.add_reaction(REACTIONS["success"])
-                logger.info(f"Successfully processed message {original_message.id}")
-
-            return True, None
-
-        except discord.HTTPException as e:
-            if original_message:
-                await original_message.add_reaction(REACTIONS["error"])
-            logger.error(f"Failed to upload to Discord: {str(e)}")
-            return False, f"Failed to upload to Discord: {str(e)}"
-        except Exception as e:
-            if original_message:
-                await original_message.add_reaction(REACTIONS["error"])
-            logger.error(f"Failed to archive video: {str(e)}")
-            return False, f"Failed to archive video: {str(e)}"
-
     async def cleanup(self):
         """Clean up resources and stop processing"""
         try:
videoarchiver/processor/reactions.py
@@ -12,11 +12,37 @@ REACTIONS = {
     'processing': '⚙️',
     'success': '✅',
     'error': '❌',
+    'archived': '🔄',  # New reaction for already archived videos
     'numbers': ['1️⃣', '2️⃣', '3️⃣', '4️⃣', '5️⃣'],
     'progress': ['⬛', '🟨', '🟩'],
     'download': ['0️⃣', '2️⃣', '4️⃣', '6️⃣', '8️⃣', '🔟']
 }

+
+async def handle_archived_reaction(message: discord.Message, user: discord.User, db) -> None:
+    """Handle reaction to archived video message"""
+    try:
+        # Check if the reaction is from a user (not the bot) and is the archived reaction
+        if user.bot or str(message.reactions[0].emoji) != REACTIONS['archived']:
+            return
+
+        # Extract URLs from the message
+        urls = []
+        if message.content:
+            for word in message.content.split():
+                if any(s in word.lower() for s in ['http://', 'https://']):
+                    urls.append(word)
+
+        # Check each URL in the database
+        for url in urls:
+            result = db.get_archived_video(url)
+            if result:
+                discord_url = result[0]
+                await message.reply(f"This video was already archived. You can find it here: {discord_url}")
+                return
+
+    except Exception as e:
+        logger.error(f"Error handling archived reaction: {e}")
+
 async def update_queue_position_reaction(message: discord.Message, position: int, bot_user) -> None:
     """Update queue position reaction"""
     try:
videoarchiver/queue/models.py
@@ -22,7 +22,7 @@ class QueueItem:
     guild_id: int  # Discord ID
     added_at: datetime = field(default_factory=datetime.utcnow)
     status: str = "pending"
-    retries: int = 0
+    retry_count: int = 0  # Changed from retries to retry_count
     priority: int = 0  # Added priority field with default value 0
     last_retry: Optional[datetime] = None
     last_error: Optional[str] = None
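For orientation, a sketch of how the renamed field is expected to be used by the queue manager's retry handling; the url/message_id/channel_id fields, the constructor call, and the retry limit of 3 are assumptions, since this hunk shows only part of the QueueItem dataclass:

    from datetime import datetime
    from videoarchiver.queue.models import QueueItem

    # Hypothetical construction; only the fields visible in this hunk are certain.
    item = QueueItem(url="https://example.com/video", message_id=1, channel_id=2, guild_id=3)

    # On a failed download attempt (illustrative):
    item.retry_count += 1              # this attribute access failed while the field was still named `retries`
    item.last_retry = datetime.utcnow()
    item.last_error = "download timed out"
    if item.retry_count >= 3:          # assumed retry limit
        item.status = "failed"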
Main VideoArchiver cog module
@@ -2,7 +2,7 @@
 from __future__ import annotations

 import discord
-from redbot.core import commands, Config, data_manager
+from redbot.core import commands, Config, data_manager, checks
 from pathlib import Path
 import logging
 import asyncio
@@ -17,8 +17,10 @@ from videoarchiver.processor import VideoProcessor
 from videoarchiver.utils.video_downloader import VideoDownloader
 from videoarchiver.utils.message_manager import MessageManager
 from videoarchiver.utils.file_ops import cleanup_downloads
-from videoarchiver.queue import EnhancedVideoQueueManager  # Updated import
+from videoarchiver.queue import EnhancedVideoQueueManager
 from videoarchiver.ffmpeg.ffmpeg_manager import FFmpegManager
+from videoarchiver.database.video_archive_db import VideoArchiveDB
+from videoarchiver.processor.reactions import REACTIONS, handle_archived_reaction
 from videoarchiver.utils.exceptions import (
     VideoArchiverError as ProcessingError,
     ConfigurationError as ConfigError,
@@ -36,6 +38,21 @@ CLEANUP_TIMEOUT = 15  # seconds
 class VideoArchiver(commands.Cog):
     """Archive videos from Discord channels"""

+    default_guild_settings = {
+        "enabled": False,
+        "archive_channel": None,
+        "log_channel": None,
+        "enabled_channels": [],
+        "video_format": "mp4",
+        "video_quality": "high",
+        "max_file_size": 25,  # MB
+        "message_duration": 30,  # seconds
+        "message_template": "{author} archived a video from {channel}",
+        "concurrent_downloads": 2,
+        "enabled_sites": None,  # None means all sites
+        "use_database": False,  # Database tracking is off by default
+    }
+
     def __init__(self, bot: commands.Bot) -> None:
         """Initialize the cog with proper error handling"""
         self.bot = bot
@@ -43,16 +60,28 @@ class VideoArchiver(commands.Cog):
         self._init_task: Optional[asyncio.Task] = None
         self._cleanup_task: Optional[asyncio.Task] = None
         self._unloading = False
+        self.db = None

         # Start initialization
         self._init_task = asyncio.create_task(self._initialize())
         self._init_task.add_done_callback(self._init_callback)

+    def _init_callback(self, task: asyncio.Task) -> None:
+        """Handle initialization task completion"""
+        try:
+            task.result()
+        except asyncio.CancelledError:
+            pass
+        except Exception as e:
+            logger.error(f"Initialization failed: {str(e)}")
+            asyncio.create_task(self._cleanup())
+
     async def _initialize(self) -> None:
         """Initialize all components with proper error handling"""
         try:
             # Initialize config first as other components depend on it
             config = Config.get_conf(self, identifier=855847, force_registration=True)
+            config.register_guild(**self.default_guild_settings)
             self.config_manager = ConfigManager(config)

             # Set up paths
@@ -100,6 +129,7 @@ class VideoArchiver(commands.Cog):
                 self.components,
                 queue_manager=self.queue_manager,
                 ffmpeg_mgr=self.ffmpeg_mgr,
+                db=self.db  # Pass database to processor (None by default)
             )

             # Start update checker
@@ -115,15 +145,94 @@ class VideoArchiver(commands.Cog):
             await self._cleanup()
             raise

-    def _init_callback(self, task: asyncio.Task) -> None:
-        """Handle initialization task completion"""
-        try:
-            task.result()
-        except asyncio.CancelledError:
-            pass
-        except Exception as e:
-            logger.error(f"Initialization failed: {str(e)}")
-            asyncio.create_task(self._cleanup())
+    @commands.group(name="videoarchiver")
+    @commands.guild_only()
+    async def videoarchiver(self, ctx: commands.Context):
+        """Video archiver commands"""
+        pass
+
+    @videoarchiver.command(name="toggledb")
+    @checks.admin_or_permissions(administrator=True)
+    async def toggle_database(self, ctx: commands.Context):
+        """Toggle the video archive database on/off."""
+        try:
+            current_setting = await self.config_manager.get_guild_setting(ctx.guild, "use_database")
+            new_setting = not current_setting
+
+            if new_setting and self.db is None:
+                # Initialize database if it's being enabled
+                self.db = VideoArchiveDB(self.data_path)
+                # Update processor with database
+                self.processor.db = self.db
+                self.processor.queue_handler.db = self.db
+            elif not new_setting:
+                # Remove database if it's being disabled
+                self.db = None
+                self.processor.db = None
+                self.processor.queue_handler.db = None
+
+            await self.config_manager.set_guild_setting(ctx.guild, "use_database", new_setting)
+            status = "enabled" if new_setting else "disabled"
+            await ctx.send(f"Video archive database has been {status}.")
+
+        except Exception as e:
+            logger.error(f"Error toggling database: {e}")
+            await ctx.send("An error occurred while toggling the database setting.")
+
+    @commands.hybrid_command()
+    async def checkarchived(self, ctx: commands.Context, url: str):
+        """Check if a video URL has been archived and get its Discord link if it exists."""
+        try:
+            if not self.db:
+                await ctx.send("The archive database is not enabled. Ask an admin to enable it with `/videoarchiver toggledb`")
+                return
+
+            result = self.db.get_archived_video(url)
+            if result:
+                discord_url, message_id, channel_id, guild_id = result
+                embed = discord.Embed(
+                    title="Video Found in Archive",
+                    description=f"This video has been archived!\n\nOriginal URL: {url}",
+                    color=discord.Color.green()
+                )
+                embed.add_field(name="Archived Link", value=discord_url)
+                await ctx.send(embed=embed)
+            else:
+                embed = discord.Embed(
+                    title="Video Not Found",
+                    description="This video has not been archived yet.",
+                    color=discord.Color.red()
+                )
+                await ctx.send(embed=embed)
+        except Exception as e:
+            logger.error(f"Error checking archived video: {e}")
+            await ctx.send("An error occurred while checking the archive.")
+
+    @commands.Cog.listener()
+    async def on_raw_reaction_add(self, payload: discord.RawReactionActionEvent):
+        """Handle reactions to messages"""
+        if payload.user_id == self.bot.user.id:
+            return
+
+        try:
+            # Get the channel and message
+            channel = self.bot.get_channel(payload.channel_id)
+            if not channel:
+                return
+            message = await channel.fetch_message(payload.message_id)
+            if not message:
+                return
+
+            # Check if it's the archived reaction
+            if str(payload.emoji) == REACTIONS['archived']:
+                # Only process if database is enabled
+                if self.db:
+                    user = self.bot.get_user(payload.user_id)
+                    await handle_archived_reaction(message, user, self.db)
+
+        except Exception as e:
+            logger.error(f"Error handling reaction: {e}")
+
     async def cog_load(self) -> None:
         """Handle cog loading"""
@@ -196,21 +305,27 @@ class VideoArchiver(commands.Cog):
             # Stop update checker
             if hasattr(self, "update_checker"):
                 try:
-                    await asyncio.wait_for(self.update_checker.stop(), timeout=CLEANUP_TIMEOUT)
+                    await asyncio.wait_for(
+                        self.update_checker.stop(), timeout=CLEANUP_TIMEOUT
+                    )
                 except asyncio.TimeoutError:
                     pass

             # Clean up processor
             if hasattr(self, "processor"):
                 try:
-                    await asyncio.wait_for(self.processor.cleanup(), timeout=CLEANUP_TIMEOUT)
+                    await asyncio.wait_for(
+                        self.processor.cleanup(), timeout=CLEANUP_TIMEOUT
+                    )
                 except asyncio.TimeoutError:
                     await self.processor.force_cleanup()

             # Clean up queue manager
             if hasattr(self, "queue_manager"):
                 try:
-                    await asyncio.wait_for(self.queue_manager.cleanup(), timeout=CLEANUP_TIMEOUT)
+                    await asyncio.wait_for(
+                        self.queue_manager.cleanup(), timeout=CLEANUP_TIMEOUT
+                    )
                 except asyncio.TimeoutError:
                     self.queue_manager.force_stop()