Mirror of https://github.com/pacnpal/Pac-cogs.git, synced 2025-12-19 18:31:05 -05:00.

Commit: loads of import fixes
@@ -1,8 +1,8 @@
 """Birthday cog for Red-DiscordBot"""
-from redbot.core.bot import Red
+from redbot.core.bot import Red # type: ignore
 import logging
-import discord
-from discord.app_commands.errors import CommandAlreadyRegistered
+import discord # type: ignore
+from discord.app_commands.errors import CommandAlreadyRegistered # type: ignore
 from .birthday import Birthday, birthday_context_menu

 logger = logging.getLogger("Birthday")

@@ -1,7 +1,7 @@
-import discord
-from redbot.core import commands, checks, app_commands
-from redbot.core.bot import Red
-from redbot.core.config import Config
+import discord # type: ignore
+from redbot.core import commands, checks, app_commands # type: ignore
+from redbot.core.bot import Red # type: ignore
+from redbot.core.config import Config # type: ignore
 from datetime import datetime, time, timedelta
 from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
 import random

@@ -1,5 +1,5 @@
 """Overseerr cog for Red-DiscordBot"""
-from redbot.core.bot import Red
+from redbot.core.bot import Red # type: ignore
 import logging
 from .overseerr import Overseerr

@@ -1,10 +1,10 @@
 import aiohttp
-from redbot.core import commands, Config, app_commands
-from redbot.core.bot import Red
+from redbot.core import commands, Config, app_commands # type: ignore
+from redbot.core.bot import Red # type: ignore
 import asyncio
 import json
 import urllib.parse
-import discord
+import discord # type: ignore

 class Overseerr(commands.Cog):
     def __init__(self, bot: Red):

@@ -5,7 +5,7 @@ import asyncio
 import logging
 import importlib
 from typing import Optional
-from redbot.core.bot import Red
+from redbot.core.bot import Red # type: ignore

 # Force reload of all modules
 modules_to_reload = [

@@ -2,7 +2,7 @@

 import logging
 from typing import Dict, List, Optional, Tuple
-import discord
+import discord # type: ignore

 from .exceptions import (
     ConfigurationError as ConfigError,

@@ -2,7 +2,7 @@

 import logging
 from typing import Dict, List, Set, Tuple, Optional, Any
-import discord
+import discord # type: ignore

 from .exceptions import ConfigurationError as ConfigError

@@ -3,9 +3,9 @@
 import logging
 from typing import Dict, Any, List
 from datetime import datetime
-import discord
+import discord # type: ignore

-from .config.exceptions import ConfigurationError as ConfigError
+from ..config.exceptions import ConfigurationError as ConfigError

 logger = logging.getLogger("SettingsFormatter")

@@ -3,8 +3,8 @@
 import logging
 import asyncio
 from typing import Dict, Any, Optional, List, Union
-import discord
-from redbot.core import Config
+import discord # type: ignore
+from redbot.core import Config # type: ignore

 from .config.validation_manager import ValidationManager
 from .config.settings_formatter import SettingsFormatter

@@ -1,5 +1,5 @@
 """Core module for VideoArchiver cog"""

-from .core.base import VideoArchiver
+from ..core.base import VideoArchiver

 __all__ = ["VideoArchiver"]

@@ -8,9 +8,9 @@ from typing import Dict, Any, Optional, TypedDict, ClassVar, List, Set, Union
 from datetime import datetime
 from pathlib import Path

-import discord
-from redbot.core.bot import Red
-from redbot.core.commands import GroupCog, Context
+import discord # type: ignore
+from redbot.core.bot import Red # type: ignore
+from redbot.core.commands import GroupCog, Context # type: ignore

 from .settings import Settings
 from .lifecycle import LifecycleManager, LifecycleState

@@ -22,12 +22,12 @@ from .commands.database_commands import setup_database_commands
 from .commands.settings_commands import setup_settings_commands
 from .events import setup_events, EventManager

-from .processor.core import Processor
-from .queue.manager import QueueManager
-from .ffmpeg.ffmpeg_manager import FFmpegManager
-from .database.video_archive_db import VideoArchiveDB
-from .config_manager import ConfigManager
-from .utils.exceptions import (
+from ..processor.core import Processor
+from ..queue.manager import QueueManager
+from ..ffmpeg.ffmpeg_manager import FFmpegManager
+from ..database.video_archive_db import VideoArchiveDB
+from ..config_manager import ConfigManager
+from ..utils.exceptions import (
     CogError,
     ErrorContext,
     ErrorSeverity

@@ -8,8 +8,8 @@ from enum import Enum, auto
 from pathlib import Path
 from typing import TYPE_CHECKING, Dict, Any, Optional, TypedDict, ClassVar

-from .utils.file_ops import cleanup_downloads
-from .utils.exceptions import (
+from ..utils.file_ops import cleanup_downloads
+from ..utils.exceptions import (
     CleanupError,
     ErrorContext,
     ErrorSeverity

@@ -1,11 +1,11 @@
 """Command handlers for VideoArchiver"""

-from .core.commands.archiver_commands import setup_archiver_commands
-from .core.commands.database_commands import setup_database_commands
-from .core.commands.settings_commands import setup_settings_commands
+from .archiver_commands import setup_archiver_commands
+from .database_commands import setup_database_commands
+from .settings_commands import setup_settings_commands

 __all__ = [
-    'setup_archiver_commands',
-    'setup_database_commands',
-    'setup_settings_commands'
+    "setup_archiver_commands",
+    "setup_database_commands",
+    "setup_settings_commands",
 ]
@@ -4,47 +4,44 @@ import logging
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, Any, Dict, TypedDict, Callable, Awaitable
|
||||
|
||||
import discord
|
||||
from discord import app_commands
|
||||
from redbot.core import commands
|
||||
from redbot.core.commands import Context, hybrid_group, guild_only, admin_or_permissions
|
||||
import discord # type: ignore
|
||||
from discord import app_commands # type: ignore
|
||||
from redbot.core import commands # type: ignore
|
||||
from redbot.core.commands import Context, hybrid_group, guild_only, admin_or_permissions # type: ignore
|
||||
|
||||
from .core.response_handler import handle_response, ResponseType
|
||||
from .utils.exceptions import (
|
||||
CommandError,
|
||||
ErrorContext,
|
||||
ErrorSeverity
|
||||
)
|
||||
from core.response_handler import handle_response, ResponseType
|
||||
from utils.exceptions import CommandError, ErrorContext, ErrorSeverity
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
class CommandCategory(Enum):
|
||||
"""Command categories"""
|
||||
|
||||
MANAGEMENT = auto()
|
||||
STATUS = auto()
|
||||
UTILITY = auto()
|
||||
|
||||
|
||||
class CommandResult(TypedDict):
|
||||
"""Type definition for command result"""
|
||||
|
||||
success: bool
|
||||
message: str
|
||||
details: Optional[Dict[str, Any]]
|
||||
error: Optional[str]
|
||||
|
||||
|
||||
class CommandContext:
|
||||
"""Context manager for command execution"""
|
||||
def __init__(
|
||||
self,
|
||||
ctx: Context,
|
||||
category: CommandCategory,
|
||||
operation: str
|
||||
) -> None:
|
||||
|
||||
def __init__(self, ctx: Context, category: CommandCategory, operation: str) -> None:
|
||||
self.ctx = ctx
|
||||
self.category = category
|
||||
self.operation = operation
|
||||
self.start_time = None
|
||||
|
||||
async def __aenter__(self) -> 'CommandContext':
|
||||
async def __aenter__(self) -> "CommandContext":
|
||||
"""Set up command context"""
|
||||
self.start_time = self.ctx.message.created_at
|
||||
logger.debug(
|
||||
@@ -62,11 +59,12 @@ class CommandContext:
|
||||
await handle_response(
|
||||
self.ctx,
|
||||
f"An error occurred: {str(exc_val)}",
|
||||
response_type=ResponseType.ERROR
|
||||
response_type=ResponseType.ERROR,
|
||||
)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"""
|
||||
Set up archiver commands for the cog.
|
||||
@@ -86,7 +84,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Use `/help archiver` for a list of commands.",
|
||||
response_type=ResponseType.INFO
|
||||
response_type=ResponseType.INFO,
|
||||
)
|
||||
|
||||
@archiver.command(name="enable")
|
||||
@@ -104,8 +102,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"enable_archiver",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Check current setting
|
||||
@@ -116,7 +114,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Video archiving is already enabled.",
|
||||
response_type=ResponseType.WARNING
|
||||
response_type=ResponseType.WARNING,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -125,7 +123,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Video archiving has been enabled.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -137,8 +135,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"enable_archiver",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@archiver.command(name="disable")
|
||||
@@ -156,8 +154,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"disable_archiver",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Check current setting
|
||||
@@ -168,7 +166,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Video archiving is already disabled.",
|
||||
response_type=ResponseType.WARNING
|
||||
response_type=ResponseType.WARNING,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -177,7 +175,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Video archiving has been disabled.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -189,8 +187,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"disable_archiver",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@archiver.command(name="queue")
|
||||
@@ -207,8 +205,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"show_queue",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
await cog.processor.show_queue_details(ctx)
|
||||
@@ -222,8 +220,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"show_queue",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
@archiver.command(name="status")
|
||||
@@ -235,22 +233,32 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
try:
|
||||
# Get comprehensive status
|
||||
status = {
|
||||
"enabled": await cog.config_manager.get_setting(ctx.guild.id, "enabled"),
|
||||
"queue": cog.queue_manager.get_queue_status(ctx.guild.id) if cog.queue_manager else None,
|
||||
"enabled": await cog.config_manager.get_setting(
|
||||
ctx.guild.id, "enabled"
|
||||
),
|
||||
"queue": (
|
||||
cog.queue_manager.get_queue_status(ctx.guild.id)
|
||||
if cog.queue_manager
|
||||
else None
|
||||
),
|
||||
"processor": cog.processor.get_status() if cog.processor else None,
|
||||
"components": cog.component_manager.get_component_status(),
|
||||
"health": cog.status_tracker.get_status()
|
||||
"health": cog.status_tracker.get_status(),
|
||||
}
|
||||
|
||||
# Create status embed
|
||||
embed = discord.Embed(
|
||||
title="VideoArchiver Status",
|
||||
color=discord.Color.blue() if status["enabled"] else discord.Color.red()
|
||||
color=(
|
||||
discord.Color.blue()
|
||||
if status["enabled"]
|
||||
else discord.Color.red()
|
||||
),
|
||||
)
|
||||
embed.add_field(
|
||||
name="Status",
|
||||
value="Enabled" if status["enabled"] else "Disabled",
|
||||
inline=False
|
||||
inline=False,
|
||||
)
|
||||
|
||||
if status["queue"]:
|
||||
@@ -261,7 +269,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
f"Processing: {status['queue']['processing']}\n"
|
||||
f"Completed: {status['queue']['completed']}"
|
||||
),
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
|
||||
if status["processor"]:
|
||||
@@ -271,7 +279,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
f"Active: {status['processor']['active']}\n"
|
||||
f"Health: {status['processor']['health']}"
|
||||
),
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
|
||||
embed.add_field(
|
||||
@@ -280,7 +288,7 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
f"State: {status['health']['state']}\n"
|
||||
f"Errors: {status['health']['error_count']}"
|
||||
),
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
|
||||
await ctx.send(embed=embed)
|
||||
@@ -294,8 +302,8 @@ def setup_archiver_commands(cog: Any) -> Callable:
|
||||
"ArchiverCommands",
|
||||
"show_status",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
# Store commands in cog for access
|
||||
|
||||
@@ -5,31 +5,30 @@ from datetime import datetime
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, Any, Dict, TypedDict
|
||||
|
||||
import discord
|
||||
from discord import app_commands
|
||||
from redbot.core import commands
|
||||
from redbot.core.commands import Context, hybrid_group, guild_only, admin_or_permissions
|
||||
import discord # type: ignore
|
||||
from discord import app_commands # type: ignore
|
||||
from redbot.core import commands # type: ignore
|
||||
from redbot.core.commands import Context, hybrid_group, guild_only, admin_or_permissions # type: ignore
|
||||
|
||||
from .core.response_handler import handle_response, ResponseType
|
||||
from .utils.exceptions import (
|
||||
CommandError,
|
||||
ErrorContext,
|
||||
ErrorSeverity,
|
||||
DatabaseError
|
||||
)
|
||||
from .database.video_archive_db import VideoArchiveDB
|
||||
from core.response_handler import handle_response, ResponseType
|
||||
from utils.exceptions import CommandError, ErrorContext, ErrorSeverity, DatabaseError
|
||||
from database.video_archive_db import VideoArchiveDB
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
class DatabaseOperation(Enum):
|
||||
"""Database operation types"""
|
||||
|
||||
ENABLE = auto()
|
||||
DISABLE = auto()
|
||||
QUERY = auto()
|
||||
MAINTENANCE = auto()
|
||||
|
||||
|
||||
class DatabaseStatus(TypedDict):
|
||||
"""Type definition for database status"""
|
||||
|
||||
enabled: bool
|
||||
connected: bool
|
||||
initialized: bool
|
||||
@@ -37,8 +36,10 @@ class DatabaseStatus(TypedDict):
|
||||
last_operation: Optional[str]
|
||||
operation_time: Optional[str]
|
||||
|
||||
|
||||
class ArchivedVideo(TypedDict):
|
||||
"""Type definition for archived video data"""
|
||||
|
||||
url: str
|
||||
discord_url: str
|
||||
message_id: int
|
||||
@@ -46,6 +47,7 @@ class ArchivedVideo(TypedDict):
|
||||
guild_id: int
|
||||
archived_at: str
|
||||
|
||||
|
||||
async def check_database_status(cog: Any) -> DatabaseStatus:
|
||||
"""
|
||||
Check database status.
|
||||
@@ -57,9 +59,11 @@ async def check_database_status(cog: Any) -> DatabaseStatus:
|
||||
Database status information
|
||||
"""
|
||||
try:
|
||||
enabled = await cog.config_manager.get_setting(
|
||||
None, "use_database"
|
||||
) if cog.config_manager else False
|
||||
enabled = (
|
||||
await cog.config_manager.get_setting(None, "use_database")
|
||||
if cog.config_manager
|
||||
else False
|
||||
)
|
||||
|
||||
return DatabaseStatus(
|
||||
enabled=enabled,
|
||||
@@ -67,7 +71,7 @@ async def check_database_status(cog: Any) -> DatabaseStatus:
|
||||
initialized=cog.db is not None,
|
||||
error=None,
|
||||
last_operation=None,
|
||||
operation_time=datetime.utcnow().isoformat()
|
||||
operation_time=datetime.utcnow().isoformat(),
|
||||
)
|
||||
except Exception as e:
|
||||
return DatabaseStatus(
|
||||
@@ -76,9 +80,10 @@ async def check_database_status(cog: Any) -> DatabaseStatus:
|
||||
initialized=False,
|
||||
error=str(e),
|
||||
last_operation=None,
|
||||
operation_time=datetime.utcnow().isoformat()
|
||||
operation_time=datetime.utcnow().isoformat(),
|
||||
)
|
||||
|
||||
|
||||
def setup_database_commands(cog: Any) -> Any:
|
||||
"""
|
||||
Set up database commands for the cog.
|
||||
@@ -102,35 +107,35 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
# Create status embed
|
||||
embed = discord.Embed(
|
||||
title="Video Archive Database Status",
|
||||
color=discord.Color.blue() if status["enabled"] else discord.Color.red()
|
||||
color=(
|
||||
discord.Color.blue()
|
||||
if status["enabled"]
|
||||
else discord.Color.red()
|
||||
),
|
||||
)
|
||||
embed.add_field(
|
||||
name="Status",
|
||||
value="Enabled" if status["enabled"] else "Disabled",
|
||||
inline=False
|
||||
inline=False,
|
||||
)
|
||||
embed.add_field(
|
||||
name="Connection",
|
||||
value="Connected" if status["connected"] else "Disconnected",
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
embed.add_field(
|
||||
name="Initialization",
|
||||
value="Initialized" if status["initialized"] else "Not Initialized",
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
if status["error"]:
|
||||
embed.add_field(
|
||||
name="Error",
|
||||
value=status["error"],
|
||||
inline=False
|
||||
)
|
||||
embed.add_field(name="Error", value=status["error"], inline=False)
|
||||
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Use `/help archivedb` for a list of commands.",
|
||||
embed=embed,
|
||||
response_type=ResponseType.INFO
|
||||
response_type=ResponseType.INFO,
|
||||
)
|
||||
except Exception as e:
|
||||
error = f"Failed to get database status: {str(e)}"
|
||||
@@ -141,8 +146,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"show_status",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
@archivedb.command(name="enable")
|
||||
@@ -159,8 +164,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"enable_database",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -175,7 +180,7 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"The video archive database is already enabled.",
|
||||
response_type=ResponseType.WARNING
|
||||
response_type=ResponseType.WARNING,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -190,8 +195,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"enable_database",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Update processor with database
|
||||
@@ -201,17 +206,13 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
cog.processor.queue_handler.db = cog.db
|
||||
|
||||
# Update setting
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"use_database",
|
||||
True
|
||||
)
|
||||
await cog.config_manager.update_setting(ctx.guild.id, "use_database", True)
|
||||
|
||||
# Send success message
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Video archive database has been enabled.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -223,8 +224,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"enable_database",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@archivedb.command(name="disable")
|
||||
@@ -241,8 +242,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"disable_database",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -250,14 +251,13 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
await ctx.defer()
|
||||
|
||||
current_setting = await cog.config_manager.get_setting(
|
||||
ctx.guild.id,
|
||||
"use_database"
|
||||
ctx.guild.id, "use_database"
|
||||
)
|
||||
if not current_setting:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"The video archive database is already disabled.",
|
||||
response_type=ResponseType.WARNING
|
||||
response_type=ResponseType.WARNING,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -275,15 +275,11 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
if cog.processor.queue_handler:
|
||||
cog.processor.queue_handler.db = None
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"use_database",
|
||||
False
|
||||
)
|
||||
await cog.config_manager.update_setting(ctx.guild.id, "use_database", False)
|
||||
await handle_response(
|
||||
ctx,
|
||||
"Video archive database has been disabled.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -295,8 +291,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"disable_database",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@archivedb.command(name="check")
|
||||
@@ -310,7 +306,7 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
await handle_response(
|
||||
ctx,
|
||||
"The archive database is not enabled. Ask an admin to enable it with `/archivedb enable`",
|
||||
response_type=ResponseType.ERROR
|
||||
response_type=ResponseType.ERROR,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -327,8 +323,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"checkarchived",
|
||||
{"guild_id": ctx.guild.id, "url": url},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
if result:
|
||||
@@ -338,30 +334,12 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
description=f"This video has been archived!\n\nOriginal URL: {url}",
|
||||
color=discord.Color.green(),
|
||||
)
|
||||
embed.add_field(
|
||||
name="Archived Link",
|
||||
value=discord_url,
|
||||
inline=False
|
||||
)
|
||||
embed.add_field(
|
||||
name="Message ID",
|
||||
value=str(message_id),
|
||||
inline=True
|
||||
)
|
||||
embed.add_field(
|
||||
name="Channel ID",
|
||||
value=str(channel_id),
|
||||
inline=True
|
||||
)
|
||||
embed.add_field(
|
||||
name="Guild ID",
|
||||
value=str(guild_id),
|
||||
inline=True
|
||||
)
|
||||
embed.add_field(name="Archived Link", value=discord_url, inline=False)
|
||||
embed.add_field(name="Message ID", value=str(message_id), inline=True)
|
||||
embed.add_field(name="Channel ID", value=str(channel_id), inline=True)
|
||||
embed.add_field(name="Guild ID", value=str(guild_id), inline=True)
|
||||
await handle_response(
|
||||
ctx,
|
||||
embed=embed,
|
||||
response_type=ResponseType.SUCCESS
|
||||
ctx, embed=embed, response_type=ResponseType.SUCCESS
|
||||
)
|
||||
else:
|
||||
embed = discord.Embed(
|
||||
@@ -370,9 +348,7 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
color=discord.Color.red(),
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
embed=embed,
|
||||
response_type=ResponseType.WARNING
|
||||
ctx, embed=embed, response_type=ResponseType.WARNING
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -384,8 +360,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"checkarchived",
|
||||
{"guild_id": ctx.guild.id, "url": url},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
@archivedb.command(name="status")
|
||||
@@ -410,53 +386,49 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
|
||||
embed = discord.Embed(
|
||||
title="Database Status",
|
||||
color=discord.Color.green() if status["connected"] else discord.Color.red()
|
||||
color=(
|
||||
discord.Color.green()
|
||||
if status["connected"]
|
||||
else discord.Color.red()
|
||||
),
|
||||
)
|
||||
embed.add_field(
|
||||
name="Status",
|
||||
value="Enabled" if status["enabled"] else "Disabled",
|
||||
inline=False
|
||||
inline=False,
|
||||
)
|
||||
embed.add_field(
|
||||
name="Connection",
|
||||
value="Connected" if status["connected"] else "Disconnected",
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
embed.add_field(
|
||||
name="Initialization",
|
||||
value="Initialized" if status["initialized"] else "Not Initialized",
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
|
||||
if stats:
|
||||
embed.add_field(
|
||||
name="Total Videos",
|
||||
value=str(stats.get("total_videos", 0)),
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
embed.add_field(
|
||||
name="Total Size",
|
||||
value=f"{stats.get('total_size', 0)} MB",
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
embed.add_field(
|
||||
name="Last Update",
|
||||
value=stats.get("last_update", "Never"),
|
||||
inline=True
|
||||
inline=True,
|
||||
)
|
||||
|
||||
if status["error"]:
|
||||
embed.add_field(
|
||||
name="Error",
|
||||
value=status["error"],
|
||||
inline=False
|
||||
)
|
||||
embed.add_field(name="Error", value=status["error"], inline=False)
|
||||
|
||||
await handle_response(
|
||||
ctx,
|
||||
embed=embed,
|
||||
response_type=ResponseType.INFO
|
||||
)
|
||||
await handle_response(ctx, embed=embed, response_type=ResponseType.INFO)
|
||||
|
||||
except Exception as e:
|
||||
error = f"Failed to get database status: {str(e)}"
|
||||
@@ -467,8 +439,8 @@ def setup_database_commands(cog: Any) -> Any:
|
||||
"DatabaseCommands",
|
||||
"database_status",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
# Store commands in cog for access
|
||||
|
||||
@@ -4,45 +4,46 @@ import logging
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, Any, Dict, TypedDict
|
||||
|
||||
import discord
|
||||
from discord import app_commands
|
||||
from redbot.core import commands
|
||||
from redbot.core.commands import Context, hybrid_group, guild_only, admin_or_permissions
|
||||
import discord # type: ignore
|
||||
from discord import app_commands # type: ignore
|
||||
from redbot.core import commands # type: ignore
|
||||
from redbot.core.commands import Context, hybrid_group, guild_only, admin_or_permissions # type: ignore
|
||||
|
||||
from .core.settings import VideoFormat, VideoQuality
|
||||
from .core.response_handler import handle_response, ResponseType
|
||||
from .utils.exceptions import (
|
||||
CommandError,
|
||||
ErrorContext,
|
||||
ErrorSeverity
|
||||
)
|
||||
from core.settings import VideoFormat, VideoQuality
|
||||
from core.response_handler import handle_response, ResponseType
|
||||
from utils.exceptions import CommandError, ErrorContext, ErrorSeverity
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
class SettingCategory(Enum):
|
||||
"""Setting categories"""
|
||||
|
||||
CHANNELS = auto()
|
||||
VIDEO = auto()
|
||||
MESSAGES = auto()
|
||||
PERFORMANCE = auto()
|
||||
|
||||
|
||||
class SettingValidation(TypedDict):
|
||||
"""Type definition for setting validation"""
|
||||
|
||||
valid: bool
|
||||
error: Optional[str]
|
||||
details: Dict[str, Any]
|
||||
|
||||
|
||||
class SettingUpdate(TypedDict):
|
||||
"""Type definition for setting update"""
|
||||
|
||||
setting: str
|
||||
old_value: Any
|
||||
new_value: Any
|
||||
category: SettingCategory
|
||||
|
||||
|
||||
async def validate_setting(
|
||||
category: SettingCategory,
|
||||
setting: str,
|
||||
value: Any
|
||||
category: SettingCategory, setting: str, value: Any
|
||||
) -> SettingValidation:
|
||||
"""
|
||||
Validate a setting value.
|
||||
@@ -58,61 +59,68 @@ async def validate_setting(
|
||||
validation = SettingValidation(
|
||||
valid=True,
|
||||
error=None,
|
||||
details={"category": category.name, "setting": setting, "value": value}
|
||||
details={"category": category.name, "setting": setting, "value": value},
|
||||
)
|
||||
|
||||
try:
|
||||
if category == SettingCategory.VIDEO:
|
||||
if setting == "format":
|
||||
if value not in [f.value for f in VideoFormat]:
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": f"Invalid format. Must be one of: {', '.join(f.value for f in VideoFormat)}"
|
||||
})
|
||||
validation.update(
|
||||
{
|
||||
"valid": False,
|
||||
"error": f"Invalid format. Must be one of: {', '.join(f.value for f in VideoFormat)}",
|
||||
}
|
||||
)
|
||||
elif setting == "quality":
|
||||
if not 144 <= value <= 4320:
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": "Quality must be between 144 and 4320"
|
||||
})
|
||||
validation.update(
|
||||
{
|
||||
"valid": False,
|
||||
"error": "Quality must be between 144 and 4320",
|
||||
}
|
||||
)
|
||||
elif setting == "max_file_size":
|
||||
if not 1 <= value <= 100:
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": "Size must be between 1 and 100 MB"
|
||||
})
|
||||
validation.update(
|
||||
{"valid": False, "error": "Size must be between 1 and 100 MB"}
|
||||
)
|
||||
|
||||
elif category == SettingCategory.MESSAGES:
|
||||
if setting == "duration":
|
||||
if not 0 <= value <= 168:
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": "Duration must be between 0 and 168 hours (1 week)"
|
||||
})
|
||||
validation.update(
|
||||
{
|
||||
"valid": False,
|
||||
"error": "Duration must be between 0 and 168 hours (1 week)",
|
||||
}
|
||||
)
|
||||
elif setting == "template":
|
||||
placeholders = ["{author}", "{channel}", "{original_message}"]
|
||||
if not any(ph in value for ph in placeholders):
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": f"Template must include at least one placeholder: {', '.join(placeholders)}"
|
||||
})
|
||||
validation.update(
|
||||
{
|
||||
"valid": False,
|
||||
"error": f"Template must include at least one placeholder: {', '.join(placeholders)}",
|
||||
}
|
||||
)
|
||||
|
||||
elif category == SettingCategory.PERFORMANCE:
|
||||
if setting == "concurrent_downloads":
|
||||
if not 1 <= value <= 5:
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": "Concurrent downloads must be between 1 and 5"
|
||||
})
|
||||
validation.update(
|
||||
{
|
||||
"valid": False,
|
||||
"error": "Concurrent downloads must be between 1 and 5",
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
validation.update({
|
||||
"valid": False,
|
||||
"error": f"Validation error: {str(e)}"
|
||||
})
|
||||
validation.update({"valid": False, "error": f"Validation error: {str(e)}"})
|
||||
|
||||
return validation
|
||||
|
||||
|
||||
def setup_settings_commands(cog: Any) -> Any:
|
||||
"""
|
||||
Set up settings commands for the cog.
|
||||
@@ -137,8 +145,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"show_settings",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -146,11 +154,7 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
await ctx.defer()
|
||||
|
||||
embed = await cog.config_manager.format_settings_embed(ctx.guild)
|
||||
await handle_response(
|
||||
ctx,
|
||||
embed=embed,
|
||||
response_type=ResponseType.INFO
|
||||
)
|
||||
await handle_response(ctx, embed=embed, response_type=ResponseType.INFO)
|
||||
|
||||
except Exception as e:
|
||||
error = f"Failed to show settings: {str(e)}"
|
||||
@@ -161,8 +165,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"show_settings",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setchannel")
|
||||
@@ -180,8 +184,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_archive_channel",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -194,7 +198,7 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
send_messages=True,
|
||||
embed_links=True,
|
||||
attach_files=True,
|
||||
read_message_history=True
|
||||
read_message_history=True,
|
||||
)
|
||||
channel_perms = channel.permissions_for(bot_member)
|
||||
if not all(getattr(channel_perms, perm) for perm in required_perms):
|
||||
@@ -207,23 +211,22 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"guild_id": ctx.guild.id,
|
||||
"channel_id": channel.id,
|
||||
"missing_perms": [
|
||||
perm for perm in required_perms
|
||||
perm
|
||||
for perm in required_perms
|
||||
if not getattr(channel_perms, perm)
|
||||
]
|
||||
],
|
||||
},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"archive_channel",
|
||||
channel.id
|
||||
ctx.guild.id, "archive_channel", channel.id
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Archive channel has been set to {channel.mention}.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -235,8 +238,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_archive_channel",
|
||||
{"guild_id": ctx.guild.id, "channel_id": channel.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setlog")
|
||||
@@ -254,8 +257,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_log_channel",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -265,9 +268,7 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
# Check channel permissions
|
||||
bot_member = ctx.guild.me
|
||||
required_perms = discord.Permissions(
|
||||
send_messages=True,
|
||||
embed_links=True,
|
||||
read_message_history=True
|
||||
send_messages=True, embed_links=True, read_message_history=True
|
||||
)
|
||||
channel_perms = channel.permissions_for(bot_member)
|
||||
if not all(getattr(channel_perms, perm) for perm in required_perms):
|
||||
@@ -280,23 +281,22 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"guild_id": ctx.guild.id,
|
||||
"channel_id": channel.id,
|
||||
"missing_perms": [
|
||||
perm for perm in required_perms
|
||||
perm
|
||||
for perm in required_perms
|
||||
if not getattr(channel_perms, perm)
|
||||
]
|
||||
],
|
||||
},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"log_channel",
|
||||
channel.id
|
||||
ctx.guild.id, "log_channel", channel.id
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Log channel has been set to {channel.mention}.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -308,8 +308,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_log_channel",
|
||||
{"guild_id": ctx.guild.id, "channel_id": channel.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="addchannel")
|
||||
@@ -327,8 +327,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"add_enabled_channel",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -338,8 +338,7 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
# Check channel permissions
|
||||
bot_member = ctx.guild.me
|
||||
required_perms = discord.Permissions(
|
||||
read_messages=True,
|
||||
read_message_history=True
|
||||
read_messages=True, read_message_history=True
|
||||
)
|
||||
channel_perms = channel.permissions_for(bot_member)
|
||||
if not all(getattr(channel_perms, perm) for perm in required_perms):
|
||||
@@ -352,36 +351,34 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"guild_id": ctx.guild.id,
|
||||
"channel_id": channel.id,
|
||||
"missing_perms": [
|
||||
perm for perm in required_perms
|
||||
perm
|
||||
for perm in required_perms
|
||||
if not getattr(channel_perms, perm)
|
||||
]
|
||||
],
|
||||
},
|
||||
ErrorSeverity.MEDIUM
|
||||
)
|
||||
ErrorSeverity.MEDIUM,
|
||||
),
|
||||
)
|
||||
|
||||
enabled_channels = await cog.config_manager.get_setting(
|
||||
ctx.guild.id,
|
||||
"enabled_channels"
|
||||
ctx.guild.id, "enabled_channels"
|
||||
)
|
||||
if channel.id in enabled_channels:
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"{channel.mention} is already being monitored.",
|
||||
response_type=ResponseType.WARNING
|
||||
response_type=ResponseType.WARNING,
|
||||
)
|
||||
return
|
||||
|
||||
enabled_channels.append(channel.id)
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"enabled_channels",
|
||||
enabled_channels
|
||||
ctx.guild.id, "enabled_channels", enabled_channels
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Now monitoring {channel.mention} for videos.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -393,15 +390,17 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"add_enabled_channel",
|
||||
{"guild_id": ctx.guild.id, "channel_id": channel.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="removechannel")
|
||||
@guild_only()
|
||||
@admin_or_permissions(administrator=True)
|
||||
@app_commands.describe(channel="The channel to stop monitoring")
|
||||
async def remove_enabled_channel(ctx: Context, channel: discord.TextChannel) -> None:
|
||||
async def remove_enabled_channel(
|
||||
ctx: Context, channel: discord.TextChannel
|
||||
) -> None:
|
||||
"""Remove a channel from video monitoring."""
|
||||
try:
|
||||
# Check if config manager is ready
|
||||
@@ -412,8 +411,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"remove_enabled_channel",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -421,27 +420,24 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
await ctx.defer()
|
||||
|
||||
enabled_channels = await cog.config_manager.get_setting(
|
||||
ctx.guild.id,
|
||||
"enabled_channels"
|
||||
ctx.guild.id, "enabled_channels"
|
||||
)
|
||||
if channel.id not in enabled_channels:
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"{channel.mention} is not being monitored.",
|
||||
response_type=ResponseType.WARNING
|
||||
response_type=ResponseType.WARNING,
|
||||
)
|
||||
return
|
||||
|
||||
enabled_channels.remove(channel.id)
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"enabled_channels",
|
||||
enabled_channels
|
||||
ctx.guild.id, "enabled_channels", enabled_channels
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Stopped monitoring {channel.mention} for videos.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -453,8 +449,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"remove_enabled_channel",
|
||||
{"guild_id": ctx.guild.id, "channel_id": channel.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setformat")
|
||||
@@ -472,8 +468,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_video_format",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -482,28 +478,20 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
|
||||
# Validate format
|
||||
format = format.lower()
|
||||
validation = await validate_setting(
|
||||
SettingCategory.VIDEO,
|
||||
"format",
|
||||
format
|
||||
)
|
||||
validation = await validate_setting(SettingCategory.VIDEO, "format", format)
|
||||
if not validation["valid"]:
|
||||
await handle_response(
|
||||
ctx,
|
||||
validation["error"],
|
||||
response_type=ResponseType.ERROR
|
||||
ctx, validation["error"], response_type=ResponseType.ERROR
|
||||
)
|
||||
return
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"video_format",
|
||||
format
|
||||
ctx.guild.id, "video_format", format
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Video format has been set to {format}.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -515,8 +503,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_video_format",
|
||||
{"guild_id": ctx.guild.id, "format": format},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setquality")
|
||||
@@ -534,8 +522,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_video_quality",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -544,27 +532,21 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
|
||||
# Validate quality
|
||||
validation = await validate_setting(
|
||||
SettingCategory.VIDEO,
|
||||
"quality",
|
||||
quality
|
||||
SettingCategory.VIDEO, "quality", quality
|
||||
)
|
||||
if not validation["valid"]:
|
||||
await handle_response(
|
||||
ctx,
|
||||
validation["error"],
|
||||
response_type=ResponseType.ERROR
|
||||
ctx, validation["error"], response_type=ResponseType.ERROR
|
||||
)
|
||||
return
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"video_quality",
|
||||
quality
|
||||
ctx.guild.id, "video_quality", quality
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Video quality has been set to {quality}p.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -576,8 +558,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_video_quality",
|
||||
{"guild_id": ctx.guild.id, "quality": quality},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setmaxsize")
|
||||
@@ -595,8 +577,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_max_file_size",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -605,27 +587,19 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
|
||||
# Validate size
|
||||
validation = await validate_setting(
|
||||
SettingCategory.VIDEO,
|
||||
"max_file_size",
|
||||
size
|
||||
SettingCategory.VIDEO, "max_file_size", size
|
||||
)
|
||||
if not validation["valid"]:
|
||||
await handle_response(
|
||||
ctx,
|
||||
validation["error"],
|
||||
response_type=ResponseType.ERROR
|
||||
ctx, validation["error"], response_type=ResponseType.ERROR
|
||||
)
|
||||
return
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"max_file_size",
|
||||
size
|
||||
)
|
||||
await cog.config_manager.update_setting(ctx.guild.id, "max_file_size", size)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Maximum file size has been set to {size}MB.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -637,8 +611,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_max_file_size",
|
||||
{"guild_id": ctx.guild.id, "size": size},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setmessageduration")
|
||||
@@ -656,8 +630,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_message_duration",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -666,27 +640,21 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
|
||||
# Validate duration
|
||||
validation = await validate_setting(
|
||||
SettingCategory.MESSAGES,
|
||||
"duration",
|
||||
hours
|
||||
SettingCategory.MESSAGES, "duration", hours
|
||||
)
|
||||
if not validation["valid"]:
|
||||
await handle_response(
|
||||
ctx,
|
||||
validation["error"],
|
||||
response_type=ResponseType.ERROR
|
||||
ctx, validation["error"], response_type=ResponseType.ERROR
|
||||
)
|
||||
return
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"message_duration",
|
||||
hours
|
||||
ctx.guild.id, "message_duration", hours
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Message duration has been set to {hours} hours.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -698,8 +666,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_message_duration",
|
||||
{"guild_id": ctx.guild.id, "hours": hours},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="settemplate")
|
||||
@@ -719,8 +687,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_message_template",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -729,27 +697,21 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
|
||||
# Validate template
|
||||
validation = await validate_setting(
|
||||
SettingCategory.MESSAGES,
|
||||
"template",
|
||||
template
|
||||
SettingCategory.MESSAGES, "template", template
|
||||
)
|
||||
if not validation["valid"]:
|
||||
await handle_response(
|
||||
ctx,
|
||||
validation["error"],
|
||||
response_type=ResponseType.ERROR
|
||||
ctx, validation["error"], response_type=ResponseType.ERROR
|
||||
)
|
||||
return
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"message_template",
|
||||
template
|
||||
ctx.guild.id, "message_template", template
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Message template has been set to: {template}",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -761,8 +723,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_message_template",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
@settings.command(name="setconcurrent")
|
||||
@@ -780,8 +742,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_concurrent_downloads",
|
||||
{"guild_id": ctx.guild.id},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Defer the response immediately for slash commands
|
||||
@@ -790,27 +752,21 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
|
||||
# Validate count
|
||||
validation = await validate_setting(
|
||||
SettingCategory.PERFORMANCE,
|
||||
"concurrent_downloads",
|
||||
count
|
||||
SettingCategory.PERFORMANCE, "concurrent_downloads", count
|
||||
)
|
||||
if not validation["valid"]:
|
||||
await handle_response(
|
||||
ctx,
|
||||
validation["error"],
|
||||
response_type=ResponseType.ERROR
|
||||
ctx, validation["error"], response_type=ResponseType.ERROR
|
||||
)
|
||||
return
|
||||
|
||||
await cog.config_manager.update_setting(
|
||||
ctx.guild.id,
|
||||
"concurrent_downloads",
|
||||
count
|
||||
ctx.guild.id, "concurrent_downloads", count
|
||||
)
|
||||
await handle_response(
|
||||
ctx,
|
||||
f"Concurrent downloads has been set to {count}.",
|
||||
response_type=ResponseType.SUCCESS
|
||||
response_type=ResponseType.SUCCESS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -822,8 +778,8 @@ def setup_settings_commands(cog: Any) -> Any:
|
||||
"SettingsCommands",
|
||||
"set_concurrent_downloads",
|
||||
{"guild_id": ctx.guild.id, "count": count},
|
||||
ErrorSeverity.HIGH
|
||||
)
|
||||
ErrorSeverity.HIGH,
|
||||
),
|
||||
)
|
||||
|
||||
# Store commands in cog for access
|
||||
|
||||
@@ -8,6 +8,7 @@ from typing import (
     Optional,
     Set,
     List,
+    Tuple,
     TypedDict,
     ClassVar,
     Type,

@@ -19,12 +20,12 @@ from datetime import datetime
 from pathlib import Path
 import importlib

-from .utils.exceptions import ComponentError, ErrorContext, ErrorSeverity
-from .utils.path_manager import ensure_directory
-from .config_manager import ConfigManager
-from .processor.core import Processor
-from .queue.manager import EnhancedVideoQueueManager
-from .ffmpeg.ffmpeg_manager import FFmpegManager
+from ..utils.exceptions import ComponentError, ErrorContext, ErrorSeverity
+from ..utils.path_manager import ensure_directory
+from ..config_manager import ConfigManager
+from ..processor.core import Processor
+from ..queue.manager import EnhancedVideoQueueManager
+from ..ffmpeg.ffmpeg_manager import FFmpegManager

 logger = logging.getLogger("VideoArchiver")

@@ -4,8 +4,8 @@ import logging
 import traceback
 from typing import Dict, Optional, Tuple, Type, TypedDict, ClassVar
 from enum import Enum, auto
-import discord
-from redbot.core.commands import (
+import discord # type: ignore
+from redbot.core.commands import ( # type: ignore
     Context,
     MissingPermissions,
     BotMissingPermissions,

@@ -14,7 +14,7 @@ from redbot.core.commands import (
     CommandError
 )

-from .utils.exceptions import (
+from ..utils.exceptions import (
     VideoArchiverError,
     ErrorSeverity,
     ErrorContext,

@@ -33,7 +33,7 @@ from .utils.exceptions import (
     ResourceExhaustedError,
     ConfigurationError
 )
-from .core.response_handler import response_manager
+from ..core.response_handler import response_manager

 logger = logging.getLogger("VideoArchiver")

@@ -7,23 +7,24 @@ from datetime import datetime
 from enum import Enum, auto
 from typing import TYPE_CHECKING, Dict, Any, Optional, TypedDict, ClassVar, List

-import discord
+import discord # type: ignore

-from .processor.constants import REACTIONS
-from .processor.reactions import handle_archived_reaction
-from .core.guild import initialize_guild_components, cleanup_guild_components
-from .core.error_handler import error_manager
-from .core.response_handler import response_manager
-from .utils.exceptions import EventError, ErrorContext, ErrorSeverity
+from ..processor.constants import REACTIONS
+from ..processor.reactions import handle_archived_reaction
+from ..core.guild import initialize_guild_components, cleanup_guild_components
+from ..core.error_handler import error_manager
+from ..core.response_handler import response_manager
+from ..utils.exceptions import EventError, ErrorContext, ErrorSeverity

 if TYPE_CHECKING:
-    from .core.base import VideoArchiver
+    from ..core.base import VideoArchiver

 logger = logging.getLogger("VideoArchiver")


 class EventType(Enum):
     """Types of Discord events"""
+
     GUILD_JOIN = auto()
     GUILD_REMOVE = auto()
     MESSAGE = auto()

@@ -34,6 +35,7 @@

 class EventStats(TypedDict):
     """Type definition for event statistics"""
+
     counts: Dict[str, int]
     last_events: Dict[str, str]
     errors: Dict[str, int]

@@ -43,6 +45,7 @@

 class EventHistory(TypedDict):
     """Type definition for event history entry"""
+
     event_type: str
     timestamp: str
     guild_id: Optional[int]

@@ -94,7 +97,7 @@ class EventTracker:

         # Cleanup old history
         if len(self.history) > self.MAX_HISTORY:
-            self.history = self.history[-self.MAX_HISTORY:]
+            self.history = self.history[-self.MAX_HISTORY :]

     def record_error(
         self, event_type: EventType, error: str, duration: float = 0.0

@@ -4,16 +4,17 @@ import logging
 from pathlib import Path
 from typing import TYPE_CHECKING, Dict, Any, Optional

-from .utils.download_core import DownloadCore
-from .utils.message_manager import MessageManager
-from .utils.file_ops import cleanup_downloads
-from .utils.exceptions import VideoArchiverError as ProcessingError
+from ..utils.download_core import DownloadCore
+from ..utils.message_manager import MessageManager
+from ..utils.file_ops import cleanup_downloads
+from ..utils.exceptions import VideoArchiverError as ProcessingError

 if TYPE_CHECKING:
-    from .core.base import VideoArchiver
+    from ..core.base import VideoArchiver

 logger = logging.getLogger("VideoArchiver")

+
 async def initialize_guild_components(cog: "VideoArchiver", guild_id: int) -> None:
     """Initialize or update components for a guild with error handling"""
     try:

@@ -53,6 +54,7 @@ async def initialize_guild_components(cog: "VideoArchiver", guild_id: int) -> No
         logger.error(f"Failed to initialize guild {guild_id}: {str(e)}")
         raise ProcessingError(f"Guild initialization failed: {str(e)}")

+
 async def cleanup_guild_components(cog: "VideoArchiver", guild_id: int) -> None:
     """Clean up components for a specific guild"""
     try:

@@ -4,15 +4,15 @@ from typing import TYPE_CHECKING, Optional, Dict, Any
 import asyncio
 import logging

-from .utils.exceptions import (
+from ..utils.exceptions import (
     ComponentError,
     ErrorContext,
     ErrorSeverity
 )
-from .core.lifecycle import LifecycleState
+from ..core.lifecycle import LifecycleState

 if TYPE_CHECKING:
-    from .core.base import VideoArchiver
+    from ..core.base import VideoArchiver

 logger = logging.getLogger("VideoArchiver")
@@ -7,54 +7,65 @@ from typing import Optional, Dict, Any, Set, List, Callable, TypedDict, ClassVar
from enum import Enum, auto
from datetime import datetime

from .core.cleanup import cleanup_resources, force_cleanup_resources
from .utils.exceptions import (
from ..core.cleanup import cleanup_resources, force_cleanup_resources
from ..utils.exceptions import (
VideoArchiverError,
ErrorContext,
ErrorSeverity,
ComponentError,
CleanupError
CleanupError,
)

logger = logging.getLogger("VideoArchiver")


class LifecycleState(Enum):
"""Possible states in the cog lifecycle"""

UNINITIALIZED = auto()
INITIALIZING = auto()
READY = auto()
UNLOADING = auto()
ERROR = auto()


class TaskStatus(Enum):
"""Task execution status"""

RUNNING = auto()
COMPLETED = auto()
CANCELLED = auto()
FAILED = auto()


class TaskHistory(TypedDict):
"""Type definition for task history entry"""

start_time: str
end_time: Optional[str]
status: str
error: Optional[str]
duration: float


class StateHistory(TypedDict):
"""Type definition for state history entry"""

state: str
timestamp: str
duration: float
details: Optional[Dict[str, Any]]


class LifecycleStatus(TypedDict):
"""Type definition for lifecycle status"""

state: str
state_history: List[StateHistory]
tasks: Dict[str, Any]
health: bool


class TaskManager:
"""Manages asyncio tasks"""

@@ -69,7 +80,7 @@ class TaskManager:
name: str,
coro: Callable[..., Any],
callback: Optional[Callable[[asyncio.Task], None]] = None,
timeout: Optional[float] = None
timeout: Optional[float] = None,
) -> asyncio.Task:
"""
Create and track a task.
@@ -94,14 +105,16 @@ class TaskManager:
end_time=None,
status=TaskStatus.RUNNING.name,
error=None,
duration=0.0
duration=0.0,
)

if timeout:
asyncio.create_task(self._handle_timeout(name, task, timeout))

if callback:
task.add_done_callback(lambda t: self._handle_completion(name, t, callback))
task.add_done_callback(
lambda t: self._handle_completion(name, t, callback)
)
else:
task.add_done_callback(lambda t: self._handle_completion(name, t))

@@ -116,15 +129,12 @@ class TaskManager:
"TaskManager",
"create_task",
{"task_name": name},
ErrorSeverity.HIGH
)
ErrorSeverity.HIGH,
),
)

async def _handle_timeout(
self,
name: str,
task: asyncio.Task,
timeout: float
self, name: str, task: asyncio.Task, timeout: float
) -> None:
"""Handle task timeout"""
try:
@@ -134,16 +144,14 @@ class TaskManager:
logger.warning(f"Task {name} timed out after {timeout}s")
task.cancel()
self._update_task_history(
name,
TaskStatus.FAILED,
f"Task timed out after {timeout}s"
name, TaskStatus.FAILED, f"Task timed out after {timeout}s"
)

def _handle_completion(
self,
name: str,
task: asyncio.Task,
callback: Optional[Callable[[asyncio.Task], None]] = None
callback: Optional[Callable[[asyncio.Task], None]] = None,
) -> None:
"""Handle task completion"""
try:
@@ -169,21 +177,20 @@ class TaskManager:
self._tasks.pop(name, None)

def _update_task_history(
self,
name: str,
status: TaskStatus,
error: Optional[str] = None
self, name: str, status: TaskStatus, error: Optional[str] = None
) -> None:
"""Update task history entry"""
if name in self._task_history:
end_time = datetime.utcnow()
start_time = datetime.fromisoformat(self._task_history[name]["start_time"])
self._task_history[name].update({
"end_time": end_time.isoformat(),
"status": status.name,
"error": error,
"duration": (end_time - start_time).total_seconds()
})
self._task_history[name].update(
{
"end_time": end_time.isoformat(),
"status": status.name,
"error": error,
"duration": (end_time - start_time).total_seconds(),
}
)

async def cancel_task(self, name: str) -> None:
"""
@@ -216,9 +223,10 @@ class TaskManager:
"""
return {
"active_tasks": list(self._tasks.keys()),
"history": self._task_history.copy()
"history": self._task_history.copy(),
}


class StateTracker:
"""Tracks lifecycle state and transitions"""

@@ -228,9 +236,7 @@ class StateTracker:
self._record_state()

def set_state(
self,
state: LifecycleState,
details: Optional[Dict[str, Any]] = None
self, state: LifecycleState, details: Optional[Dict[str, Any]] = None
) -> None:
"""
Set current state.
@@ -242,10 +248,7 @@ class StateTracker:
self.state = state
self._record_state(details)

def _record_state(
self,
details: Optional[Dict[str, Any]] = None
) -> None:
def _record_state(self, details: Optional[Dict[str, Any]] = None) -> None:
"""Record state transition"""
now = datetime.utcnow()
duration = 0.0
@@ -253,17 +256,20 @@ class StateTracker:
last_state = datetime.fromisoformat(self.state_history[-1]["timestamp"])
duration = (now - last_state).total_seconds()

self.state_history.append(StateHistory(
state=self.state.name,
timestamp=now.isoformat(),
duration=duration,
details=details
))
self.state_history.append(
StateHistory(
state=self.state.name,
timestamp=now.isoformat(),
duration=duration,
details=details,
)
)

def get_state_history(self) -> List[StateHistory]:
"""Get state transition history"""
return self.state_history.copy()


class LifecycleManager:
"""Manages the lifecycle of the VideoArchiver cog"""

@@ -278,8 +284,7 @@ class LifecycleManager:
self._cleanup_handlers: Set[Callable] = set()

def register_cleanup_handler(
self,
handler: Union[Callable[[], None], Callable[[], Any]]
self, handler: Union[Callable[[], None], Callable[[], Any]]
) -> None:
"""
Register a cleanup handler.
@@ -311,11 +316,8 @@ class LifecycleManager:
raise ComponentError(
error,
context=ErrorContext(
"LifecycleManager",
"initialize_cog",
None,
ErrorSeverity.HIGH
)
"LifecycleManager", "initialize_cog", None, ErrorSeverity.HIGH
),
)

def init_callback(self, task: asyncio.Task) -> None:
@@ -326,17 +328,11 @@ class LifecycleManager:
self.state_tracker.set_state(LifecycleState.READY)
except asyncio.CancelledError:
logger.warning("Initialization was cancelled")
self.state_tracker.set_state(
LifecycleState.ERROR,
{"reason": "cancelled"}
)
self.state_tracker.set_state(LifecycleState.ERROR, {"reason": "cancelled"})
asyncio.create_task(cleanup_resources(self.cog))
except Exception as e:
logger.error(f"Initialization failed: {str(e)}", exc_info=True)
self.state_tracker.set_state(
LifecycleState.ERROR,
{"error": str(e)}
)
self.state_tracker.set_state(LifecycleState.ERROR, {"error": str(e)})
asyncio.create_task(cleanup_resources(self.cog))

async def handle_load(self) -> None:
@@ -354,7 +350,7 @@ class LifecycleManager:
"initialization",
self.initialize_cog(),
self.init_callback,
timeout=self.INIT_TIMEOUT
timeout=self.INIT_TIMEOUT,
)
logger.info("Initialization started in background")

@@ -363,19 +359,15 @@ class LifecycleManager:
# Ensure cleanup on any error
try:
await asyncio.wait_for(
force_cleanup_resources(self.cog),
timeout=self.CLEANUP_TIMEOUT
force_cleanup_resources(self.cog), timeout=self.CLEANUP_TIMEOUT
)
except asyncio.TimeoutError:
logger.error("Force cleanup during load error timed out")
raise VideoArchiverError(
f"Error during cog load: {str(e)}",
context=ErrorContext(
"LifecycleManager",
"handle_load",
None,
ErrorSeverity.HIGH
)
"LifecycleManager", "handle_load", None, ErrorSeverity.HIGH
),
)

async def handle_unload(self) -> None:
@@ -397,9 +389,7 @@ class LifecycleManager:
# Try normal cleanup
try:
cleanup_task = await self.task_manager.create_task(
"cleanup",
cleanup_resources(self.cog),
timeout=self.UNLOAD_TIMEOUT
"cleanup", cleanup_resources(self.cog), timeout=self.UNLOAD_TIMEOUT
)
await cleanup_task
logger.info("Normal cleanup completed")
@@ -413,8 +403,7 @@ class LifecycleManager:
# Force cleanup
try:
await asyncio.wait_for(
force_cleanup_resources(self.cog),
timeout=self.CLEANUP_TIMEOUT
force_cleanup_resources(self.cog), timeout=self.CLEANUP_TIMEOUT
)
logger.info("Force cleanup completed")
except asyncio.TimeoutError:
@@ -426,8 +415,8 @@ class LifecycleManager:
"LifecycleManager",
"handle_unload",
None,
ErrorSeverity.CRITICAL
)
ErrorSeverity.CRITICAL,
),
)
except Exception as e:
error = f"Error during force cleanup: {str(e)}"
@@ -438,25 +427,19 @@ class LifecycleManager:
"LifecycleManager",
"handle_unload",
None,
ErrorSeverity.CRITICAL
)
ErrorSeverity.CRITICAL,
),
)

except Exception as e:
error = f"Error during cog unload: {str(e)}"
logger.error(error, exc_info=True)
self.state_tracker.set_state(
LifecycleState.ERROR,
{"error": str(e)}
)
self.state_tracker.set_state(LifecycleState.ERROR, {"error": str(e)})
raise CleanupError(
error,
context=ErrorContext(
"LifecycleManager",
"handle_unload",
None,
ErrorSeverity.CRITICAL
)
"LifecycleManager", "handle_unload", None, ErrorSeverity.CRITICAL
),
)
finally:
# Clear all references
@@ -495,5 +478,5 @@ class LifecycleManager:
state=self.state_tracker.state.name,
state_history=self.state_tracker.get_state_history(),
tasks=self.task_manager.get_task_status(),
health=self.state_tracker.state == LifecycleState.READY
health=self.state_tracker.state == LifecycleState.READY,
)

@@ -4,10 +4,10 @@ import logging
from enum import Enum, auto
from typing import Optional, Union, Dict, Any, TypedDict, ClassVar
from datetime import datetime
import discord
from redbot.core.commands import Context
import discord # type: ignore
from redbot.core.commands import Context # type: ignore

from .utils.exceptions import ErrorSeverity
from ..utils.exceptions import ErrorSeverity

logger = logging.getLogger("VideoArchiver")


@@ -4,7 +4,7 @@ from typing import Dict, Any, List, Optional, Union, TypedDict, ClassVar
from dataclasses import dataclass, field
from enum import Enum, auto

from .utils.exceptions import (
from ..utils.exceptions import (
ConfigurationError,
ErrorContext,
ErrorSeverity

@@ -1,7 +1,7 @@
"""Module for managing FFmpeg processes"""

import logging
import psutil
import psutil # type: ignore
import subprocess
import time
from typing import Set, Optional

@@ -1,27 +1,27 @@
|
||||
"""Video processing module for VideoArchiver"""
|
||||
|
||||
from typing import Dict, Any, Optional, Union, List, Tuple
|
||||
import discord
|
||||
import discord # type: ignore
|
||||
|
||||
from .processor.core import VideoProcessor
|
||||
from .processor.constants import (
|
||||
from ..processor.core import VideoProcessor
|
||||
from ..processor.constants import (
|
||||
REACTIONS,
|
||||
ReactionType,
|
||||
ReactionEmojis,
|
||||
ProgressEmojis,
|
||||
get_reaction,
|
||||
get_progress_emoji
|
||||
get_progress_emoji,
|
||||
)
|
||||
from .processor.url_extractor import (
|
||||
from ..processor.url_extractor import (
|
||||
URLExtractor,
|
||||
URLMetadata,
|
||||
URLPattern,
|
||||
URLType,
|
||||
URLPatternManager,
|
||||
URLValidator,
|
||||
URLMetadataExtractor
|
||||
URLMetadataExtractor,
|
||||
)
|
||||
from .processor.message_validator import (
|
||||
from ..processor.message_validator import (
|
||||
MessageValidator,
|
||||
ValidationContext,
|
||||
ValidationRule,
|
||||
@@ -30,17 +30,17 @@ from .processor.message_validator import (
|
||||
ValidationCache,
|
||||
ValidationStats,
|
||||
ValidationCacheEntry,
|
||||
ValidationError
|
||||
ValidationError,
|
||||
)
|
||||
from .processor.message_handler import MessageHandler
|
||||
from .processor.queue_handler import QueueHandler
|
||||
from .processor.reactions import (
|
||||
from ..processor.message_handler import MessageHandler
|
||||
from ..processor.queue_handler import QueueHandler
|
||||
from ..processor.reactions import (
|
||||
handle_archived_reaction,
|
||||
update_queue_position_reaction,
|
||||
update_progress_reaction,
|
||||
update_download_progress_reaction
|
||||
update_download_progress_reaction,
|
||||
)
|
||||
from .utils import progress_tracker
|
||||
from ..utils import progress_tracker
|
||||
|
||||
# Export public classes and constants
|
||||
__all__ = [
|
||||
@@ -48,7 +48,6 @@ __all__ = [
|
||||
"VideoProcessor",
|
||||
"MessageHandler",
|
||||
"QueueHandler",
|
||||
|
||||
# URL Extraction
|
||||
"URLExtractor",
|
||||
"URLMetadata",
|
||||
@@ -57,7 +56,6 @@ __all__ = [
|
||||
"URLPatternManager",
|
||||
"URLValidator",
|
||||
"URLMetadataExtractor",
|
||||
|
||||
# Message Validation
|
||||
"MessageValidator",
|
||||
"ValidationContext",
|
||||
@@ -68,13 +66,11 @@ __all__ = [
|
||||
"ValidationStats",
|
||||
"ValidationCacheEntry",
|
||||
"ValidationError",
|
||||
|
||||
# Constants and enums
|
||||
"REACTIONS",
|
||||
"ReactionType",
|
||||
"ReactionEmojis",
|
||||
"ProgressEmojis",
|
||||
|
||||
# Helper functions
|
||||
"get_reaction",
|
||||
"get_progress_emoji",
|
||||
@@ -87,7 +83,6 @@ __all__ = [
|
||||
"get_active_operations",
|
||||
"get_validation_stats",
|
||||
"clear_caches",
|
||||
|
||||
# Reaction handlers
|
||||
"handle_archived_reaction",
|
||||
"update_queue_position_reaction",
|
||||
@@ -104,10 +99,10 @@ __description__ = "Video processing module for archiving Discord videos"
|
||||
url_extractor = URLExtractor()
|
||||
message_validator = MessageValidator()
|
||||
|
||||
|
||||
# URL extraction helper functions
|
||||
async def extract_urls(
|
||||
message: discord.Message,
|
||||
enabled_sites: Optional[List[str]] = None
|
||||
message: discord.Message, enabled_sites: Optional[List[str]] = None
|
||||
) -> List[URLMetadata]:
|
||||
"""
|
||||
Extract video URLs from a Discord message.
|
||||
@@ -121,9 +116,9 @@ async def extract_urls(
|
||||
"""
|
||||
return await url_extractor.extract_urls(message, enabled_sites)
|
||||
|
||||
|
||||
async def validate_message(
|
||||
message: discord.Message,
|
||||
settings: Dict[str, Any]
|
||||
message: discord.Message, settings: Dict[str, Any]
|
||||
) -> Tuple[bool, Optional[str]]:
|
||||
"""
|
||||
Validate a Discord message.
|
||||
@@ -140,6 +135,7 @@ async def validate_message(
|
||||
"""
|
||||
return await message_validator.validate_message(message, settings)
|
||||
|
||||
|
||||
# Progress tracking helper functions
|
||||
def update_download_progress(url: str, progress_data: Dict[str, Any]) -> None:
|
||||
"""
|
||||
@@ -151,6 +147,7 @@ def update_download_progress(url: str, progress_data: Dict[str, Any]) -> None:
|
||||
"""
|
||||
progress_tracker.update_download_progress(url, progress_data)
|
||||
|
||||
|
||||
def complete_download(url: str) -> None:
|
||||
"""
|
||||
Mark a download as complete.
|
||||
@@ -160,6 +157,7 @@ def complete_download(url: str) -> None:
|
||||
"""
|
||||
progress_tracker.complete_download(url)
|
||||
|
||||
|
||||
def increment_download_retries(url: str) -> None:
|
||||
"""
|
||||
Increment retry count for a download.
|
||||
@@ -169,7 +167,10 @@ def increment_download_retries(url: str) -> None:
|
||||
"""
|
||||
progress_tracker.increment_download_retries(url)
|
||||
|
||||
def get_download_progress(url: Optional[str] = None) -> Union[Dict[str, Any], Dict[str, Dict[str, Any]]]:
|
||||
|
||||
def get_download_progress(
|
||||
url: Optional[str] = None,
|
||||
) -> Union[Dict[str, Any], Dict[str, Dict[str, Any]]]:
|
||||
"""
|
||||
Get download progress for a specific URL or all downloads.
|
||||
|
||||
@@ -181,6 +182,7 @@ def get_download_progress(url: Optional[str] = None) -> Union[Dict[str, Any], Di
|
||||
"""
|
||||
return progress_tracker.get_download_progress(url)
|
||||
|
||||
|
||||
def get_active_operations() -> Dict[str, Dict[str, Any]]:
|
||||
"""
|
||||
Get all active operations.
|
||||
@@ -190,6 +192,7 @@ def get_active_operations() -> Dict[str, Dict[str, Any]]:
|
||||
"""
|
||||
return progress_tracker.get_active_operations()
|
||||
|
||||
|
||||
def get_validation_stats() -> ValidationStats:
|
||||
"""
|
||||
Get message validation statistics.
|
||||
@@ -199,6 +202,7 @@ def get_validation_stats() -> ValidationStats:
|
||||
"""
|
||||
return message_validator.get_stats()
|
||||
|
||||
|
||||
def clear_caches(message_id: Optional[int] = None) -> None:
|
||||
"""
|
||||
Clear URL and validation caches.
|
||||
|
||||
@@ -18,9 +18,9 @@ from typing import (
)
from datetime import datetime, timedelta

from .processor.queue_handler import QueueHandler
from .ffmpeg.ffmpeg_manager import FFmpegManager
from .utils.exceptions import CleanupError
from ..processor.queue_handler import QueueHandler
from ..ffmpeg.ffmpeg_manager import FFmpegManager
from ..utils.exceptions import CleanupError

logger = logging.getLogger("VideoArchiver")


@@ -1,29 +1,33 @@
"""Core VideoProcessor class that manages video processing operations"""

import logging
import asyncio
from enum import Enum, auto
from typing import Optional, Tuple, Dict, Any, List, TypedDict, ClassVar
import logging
from datetime import datetime, timedelta
import discord
from discord.ext import commands
from enum import auto, Enum
from typing import Any, ClassVar, Dict, List, Optional, Tuple, TypedDict

from .processor.message_handler import MessageHandler
from .processor.queue_handler import QueueHandler
from .utils import progress_tracker
from .processor.status_display import StatusDisplay
from .processor.cleanup_manager import CleanupManager, CleanupStrategy
from .processor.constants import REACTIONS
from .queue.manager import EnhancedVideoQueueManager
from .ffmpeg.ffmpeg_manager import FFmpegManager
from .database.video_archive_db import VideoArchiveDB
from .config_manager import ConfigManager
from .utils.exceptions import ProcessorError
import discord # type: ignore
from discord.ext import commands # type: ignore

from ..config_manager import ConfigManager
from ..database.video_archive_db import VideoArchiveDB
from ..ffmpeg.ffmpeg_manager import FFmpegManager
from ..processor.cleanup_manager import CleanupManager, CleanupStrategy
from ..processor.constants import REACTIONS

from ..processor.message_handler import MessageHandler
from ..processor.queue_handler import QueueHandler
from ..processor.status_display import StatusDisplay
from ..queue.manager import EnhancedVideoQueueManager
from ..utils import progress_tracker
from ..utils.exceptions import ProcessorError

logger = logging.getLogger("VideoArchiver")


class ProcessorState(Enum):
|
||||
"""Possible states of the video processor"""
|
||||
|
||||
INITIALIZING = auto()
|
||||
READY = auto()
|
||||
PROCESSING = auto()
|
||||
@@ -31,15 +35,19 @@ class ProcessorState(Enum):
|
||||
ERROR = auto()
|
||||
SHUTDOWN = auto()
|
||||
|
||||
|
||||
class OperationType(Enum):
|
||||
"""Types of processor operations"""
|
||||
|
||||
MESSAGE_PROCESSING = auto()
|
||||
VIDEO_PROCESSING = auto()
|
||||
QUEUE_MANAGEMENT = auto()
|
||||
CLEANUP = auto()
|
||||
|
||||
|
||||
class OperationDetails(TypedDict):
|
||||
"""Type definition for operation details"""
|
||||
|
||||
type: str
|
||||
start_time: datetime
|
||||
end_time: Optional[datetime]
|
||||
@@ -47,16 +55,20 @@ class OperationDetails(TypedDict):
|
||||
details: Dict[str, Any]
|
||||
error: Optional[str]
|
||||
|
||||
|
||||
class OperationStats(TypedDict):
|
||||
"""Type definition for operation statistics"""
|
||||
|
||||
total_operations: int
|
||||
active_operations: int
|
||||
success_count: int
|
||||
error_count: int
|
||||
success_rate: float
|
||||
|
||||
|
||||
class ProcessorStatus(TypedDict):
|
||||
"""Type definition for processor status"""
|
||||
|
||||
state: str
|
||||
health: bool
|
||||
operations: OperationStats
|
||||
@@ -64,6 +76,7 @@ class ProcessorStatus(TypedDict):
|
||||
last_health_check: Optional[str]
|
||||
health_status: Dict[str, bool]
|
||||
|
||||
|
||||
class OperationTracker:
|
||||
"""Tracks processor operations"""
|
||||
|
||||
@@ -75,11 +88,7 @@ class OperationTracker:
|
||||
self.error_count = 0
|
||||
self.success_count = 0
|
||||
|
||||
def start_operation(
|
||||
self,
|
||||
op_type: OperationType,
|
||||
details: Dict[str, Any]
|
||||
) -> str:
|
||||
def start_operation(self, op_type: OperationType, details: Dict[str, Any]) -> str:
|
||||
"""
|
||||
Start tracking an operation.
|
||||
|
||||
@@ -97,15 +106,12 @@ class OperationTracker:
|
||||
end_time=None,
|
||||
status="running",
|
||||
details=details,
|
||||
error=None
|
||||
error=None,
|
||||
)
|
||||
return op_id
|
||||
|
||||
def end_operation(
|
||||
self,
|
||||
op_id: str,
|
||||
success: bool,
|
||||
error: Optional[str] = None
|
||||
self, op_id: str, success: bool, error: Optional[str] = None
|
||||
) -> None:
|
||||
"""
|
||||
End tracking an operation.
|
||||
@@ -116,11 +122,13 @@ class OperationTracker:
|
||||
error: Optional error message
|
||||
"""
|
||||
if op_id in self.operations:
|
||||
self.operations[op_id].update({
|
||||
"end_time": datetime.utcnow(),
|
||||
"status": "success" if success else "error",
|
||||
"error": error
|
||||
})
|
||||
self.operations[op_id].update(
|
||||
{
|
||||
"end_time": datetime.utcnow(),
|
||||
"status": "success" if success else "error",
|
||||
"error": error,
|
||||
}
|
||||
)
|
||||
# Move to history
|
||||
self.operation_history.append(self.operations.pop(op_id))
|
||||
# Update counts
|
||||
@@ -131,7 +139,7 @@ class OperationTracker:
|
||||
|
||||
# Cleanup old history if needed
|
||||
if len(self.operation_history) > self.MAX_HISTORY:
|
||||
self.operation_history = self.operation_history[-self.MAX_HISTORY:]
|
||||
self.operation_history = self.operation_history[-self.MAX_HISTORY :]
|
||||
|
||||
def get_active_operations(self) -> Dict[str, OperationDetails]:
|
||||
"""
|
||||
@@ -155,9 +163,10 @@ class OperationTracker:
|
||||
active_operations=len(self.operations),
|
||||
success_count=self.success_count,
|
||||
error_count=self.error_count,
|
||||
success_rate=self.success_count / total if total > 0 else 0.0
|
||||
success_rate=self.success_count / total if total > 0 else 0.0,
|
||||
)
|
||||
|
||||
|
||||
class HealthMonitor:
|
||||
"""Monitors processor health"""
|
||||
|
||||
@@ -165,7 +174,7 @@ class HealthMonitor:
|
||||
ERROR_CHECK_INTERVAL: ClassVar[int] = 30 # Seconds between checks after error
|
||||
SUCCESS_RATE_THRESHOLD: ClassVar[float] = 0.9 # 90% success rate threshold
|
||||
|
||||
def __init__(self, processor: 'VideoProcessor') -> None:
|
||||
def __init__(self, processor: "VideoProcessor") -> None:
|
||||
self.processor = processor
|
||||
self.last_check: Optional[datetime] = None
|
||||
self.health_status: Dict[str, bool] = {}
|
||||
@@ -193,11 +202,13 @@ class HealthMonitor:
|
||||
self.last_check = datetime.utcnow()
|
||||
|
||||
# Check component health
|
||||
self.health_status.update({
|
||||
"queue_handler": self.processor.queue_handler.is_healthy(),
|
||||
"message_handler": self.processor.message_handler.is_healthy(),
|
||||
"progress_tracker": progress_tracker.is_healthy()
|
||||
})
|
||||
self.health_status.update(
|
||||
{
|
||||
"queue_handler": self.processor.queue_handler.is_healthy(),
|
||||
"message_handler": self.processor.message_handler.is_healthy(),
|
||||
"progress_tracker": progress_tracker.is_healthy(),
|
||||
}
|
||||
)
|
||||
|
||||
# Check operation health
|
||||
op_stats = self.processor.operation_tracker.get_operation_stats()
|
||||
@@ -220,6 +231,7 @@ class HealthMonitor:
|
||||
"""
|
||||
return all(self.health_status.values())
|
||||
|
||||
|
||||
class VideoProcessor:
|
||||
"""Handles video processing operations"""
|
||||
|
||||
@@ -230,7 +242,7 @@ class VideoProcessor:
|
||||
components: Dict[int, Dict[str, Any]],
|
||||
queue_manager: Optional[EnhancedVideoQueueManager] = None,
|
||||
ffmpeg_mgr: Optional[FFmpegManager] = None,
|
||||
db: Optional[VideoArchiveDB] = None
|
||||
db: Optional[VideoArchiveDB] = None,
|
||||
) -> None:
|
||||
self.bot = bot
|
||||
self.config = config_manager
|
||||
@@ -249,9 +261,7 @@ class VideoProcessor:
|
||||
self.queue_handler = QueueHandler(bot, config_manager, components)
|
||||
self.message_handler = MessageHandler(bot, config_manager, queue_manager)
|
||||
self.cleanup_manager = CleanupManager(
|
||||
self.queue_handler,
|
||||
ffmpeg_mgr,
|
||||
CleanupStrategy.NORMAL
|
||||
self.queue_handler, ffmpeg_mgr, CleanupStrategy.NORMAL
|
||||
)
|
||||
|
||||
# Pass db to queue handler if it exists
|
||||
@@ -299,8 +309,7 @@ class VideoProcessor:
|
||||
ProcessorError: If processing fails
|
||||
"""
|
||||
op_id = self.operation_tracker.start_operation(
|
||||
OperationType.VIDEO_PROCESSING,
|
||||
{"item": str(item)}
|
||||
OperationType.VIDEO_PROCESSING, {"item": str(item)}
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -329,8 +338,7 @@ class VideoProcessor:
|
||||
ProcessorError: If processing fails
|
||||
"""
|
||||
op_id = self.operation_tracker.start_operation(
|
||||
OperationType.MESSAGE_PROCESSING,
|
||||
{"message_id": message.id}
|
||||
OperationType.MESSAGE_PROCESSING, {"message_id": message.id}
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -350,8 +358,7 @@ class VideoProcessor:
|
||||
ProcessorError: If cleanup fails
|
||||
"""
|
||||
op_id = self.operation_tracker.start_operation(
|
||||
OperationType.CLEANUP,
|
||||
{"type": "normal"}
|
||||
OperationType.CLEANUP, {"type": "normal"}
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -373,8 +380,7 @@ class VideoProcessor:
|
||||
ProcessorError: If force cleanup fails
|
||||
"""
|
||||
op_id = self.operation_tracker.start_operation(
|
||||
OperationType.CLEANUP,
|
||||
{"type": "force"}
|
||||
OperationType.CLEANUP, {"type": "force"}
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -408,8 +414,7 @@ class VideoProcessor:
|
||||
|
||||
# Create and send status embed
|
||||
embed = await StatusDisplay.create_queue_status_embed(
|
||||
queue_status,
|
||||
active_ops
|
||||
queue_status, active_ops
|
||||
)
|
||||
await ctx.send(embed=embed)
|
||||
|
||||
@@ -445,5 +450,5 @@ class VideoProcessor:
|
||||
if self.health_monitor.last_check
|
||||
else None
|
||||
),
|
||||
health_status=self.health_monitor.health_status
|
||||
health_status=self.health_monitor.health_status,
|
||||
)
|
||||
|
||||
@@ -1,25 +1,29 @@
|
||||
"""Message processing and URL extraction for VideoProcessor"""
|
||||
|
||||
import logging
|
||||
import asyncio
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, Dict, Any, List, Tuple, Set, TypedDict, ClassVar
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
import discord
|
||||
from discord.ext import commands
|
||||
from enum import auto, Enum
|
||||
from typing import Any, ClassVar, Dict, List, Optional, Set, Tuple, TypedDict
|
||||
|
||||
from .processor.url_extractor import URLExtractor, URLMetadata
|
||||
from .processor.message_validator import MessageValidator, ValidationError
|
||||
from .processor.queue_processor import QueueProcessor, QueuePriority
|
||||
from .processor.constants import REACTIONS
|
||||
from .queue.manager import EnhancedVideoQueueManager
|
||||
from .config_manager import ConfigManager
|
||||
from .utils.exceptions import MessageHandlerError
|
||||
import discord # type: ignore
|
||||
from discord.ext import commands # type: ignore
|
||||
|
||||
from ..config_manager import ConfigManager
|
||||
from ..processor.constants import REACTIONS
|
||||
from ..processor.message_validator import MessageValidator, ValidationError
|
||||
from ..processor.queue_processor import QueuePriority, QueueProcessor
|
||||
|
||||
from ..processor.url_extractor import URLExtractor, URLMetadata
|
||||
from ..queue.manager import EnhancedVideoQueueManager
|
||||
from ..utils.exceptions import MessageHandlerError
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
class MessageState(Enum):
|
||||
"""Possible states of message processing"""
|
||||
|
||||
RECEIVED = auto()
|
||||
VALIDATING = auto()
|
||||
EXTRACTING = auto()
|
||||
@@ -28,21 +32,27 @@ class MessageState(Enum):
|
||||
FAILED = auto()
|
||||
IGNORED = auto()
|
||||
|
||||
|
||||
class ProcessingStage(Enum):
|
||||
"""Message processing stages"""
|
||||
|
||||
VALIDATION = auto()
|
||||
EXTRACTION = auto()
|
||||
QUEUEING = auto()
|
||||
COMPLETION = auto()
|
||||
|
||||
|
||||
class MessageCacheEntry(TypedDict):
|
||||
"""Type definition for message cache entry"""
|
||||
|
||||
valid: bool
|
||||
reason: Optional[str]
|
||||
timestamp: str
|
||||
|
||||
|
||||
class MessageStatus(TypedDict):
|
||||
"""Type definition for message status"""
|
||||
|
||||
state: Optional[MessageState]
|
||||
stage: Optional[ProcessingStage]
|
||||
error: Optional[str]
|
||||
@@ -50,6 +60,7 @@ class MessageStatus(TypedDict):
|
||||
end_time: Optional[datetime]
|
||||
duration: Optional[float]
|
||||
|
||||
|
||||
class MessageCache:
|
||||
"""Caches message validation results"""
|
||||
|
||||
@@ -94,6 +105,7 @@ class MessageCache:
|
||||
del self._cache[oldest]
|
||||
del self._access_times[oldest]
|
||||
|
||||
|
||||
class ProcessingTracker:
|
||||
"""Tracks message processing state and progress"""
|
||||
|
||||
@@ -121,7 +133,7 @@ class ProcessingTracker:
|
||||
message_id: int,
|
||||
state: MessageState,
|
||||
stage: Optional[ProcessingStage] = None,
|
||||
error: Optional[str] = None
|
||||
error: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Update message state.
|
||||
@@ -163,7 +175,7 @@ class ProcessingTracker:
|
||||
(end_time - start_time).total_seconds()
|
||||
if end_time and start_time
|
||||
else None
|
||||
)
|
||||
),
|
||||
)
|
||||
|
||||
def is_message_stuck(self, message_id: int) -> bool:
|
||||
@@ -183,9 +195,12 @@ class ProcessingTracker:
|
||||
if state in (MessageState.COMPLETED, MessageState.FAILED, MessageState.IGNORED):
|
||||
return False
|
||||
|
||||
processing_time = (datetime.utcnow() - self.start_times[message_id]).total_seconds()
|
||||
processing_time = (
|
||||
datetime.utcnow() - self.start_times[message_id]
|
||||
).total_seconds()
|
||||
return processing_time > self.MAX_PROCESSING_TIME
|
||||
|
||||
|
||||
class MessageHandler:
|
||||
"""Handles processing of messages for video content"""
|
||||
|
||||
@@ -193,7 +208,7 @@ class MessageHandler:
|
||||
self,
|
||||
bot: discord.Client,
|
||||
config_manager: ConfigManager,
|
||||
queue_manager: EnhancedVideoQueueManager
|
||||
queue_manager: EnhancedVideoQueueManager,
|
||||
) -> None:
|
||||
self.bot = bot
|
||||
self.config_manager = config_manager
|
||||
@@ -224,11 +239,7 @@ class MessageHandler:
|
||||
await self._process_message_internal(message)
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing message: {str(e)}", exc_info=True)
|
||||
self.tracker.update_state(
|
||||
message.id,
|
||||
MessageState.FAILED,
|
||||
error=str(e)
|
||||
)
|
||||
self.tracker.update_state(message.id, MessageState.FAILED, error=str(e))
|
||||
try:
|
||||
await message.add_reaction(REACTIONS["error"])
|
||||
except Exception as react_error:
|
||||
@@ -260,43 +271,38 @@ class MessageHandler:
|
||||
else:
|
||||
# Validate message
|
||||
self.tracker.update_state(
|
||||
message.id,
|
||||
MessageState.VALIDATING,
|
||||
ProcessingStage.VALIDATION
|
||||
message.id, MessageState.VALIDATING, ProcessingStage.VALIDATION
|
||||
)
|
||||
try:
|
||||
is_valid, reason = await self.message_validator.validate_message(
|
||||
message,
|
||||
settings
|
||||
message, settings
|
||||
)
|
||||
# Cache result
|
||||
self.validation_cache.add(message.id, MessageCacheEntry(
|
||||
valid=is_valid,
|
||||
reason=reason,
|
||||
timestamp=datetime.utcnow().isoformat()
|
||||
))
|
||||
self.validation_cache.add(
|
||||
message.id,
|
||||
MessageCacheEntry(
|
||||
valid=is_valid,
|
||||
reason=reason,
|
||||
timestamp=datetime.utcnow().isoformat(),
|
||||
),
|
||||
)
|
||||
except ValidationError as e:
|
||||
raise MessageHandlerError(f"Validation failed: {str(e)}")
|
||||
|
||||
if not is_valid:
|
||||
logger.debug(f"Message validation failed: {reason}")
|
||||
self.tracker.update_state(
|
||||
message.id,
|
||||
MessageState.IGNORED,
|
||||
error=reason
|
||||
message.id, MessageState.IGNORED, error=reason
|
||||
)
|
||||
return
|
||||
|
||||
# Extract URLs
|
||||
self.tracker.update_state(
|
||||
message.id,
|
||||
MessageState.EXTRACTING,
|
||||
ProcessingStage.EXTRACTION
|
||||
message.id, MessageState.EXTRACTING, ProcessingStage.EXTRACTION
|
||||
)
|
||||
try:
|
||||
urls: List[URLMetadata] = await self.url_extractor.extract_urls(
|
||||
message,
|
||||
enabled_sites=settings.get("enabled_sites")
|
||||
message, enabled_sites=settings.get("enabled_sites")
|
||||
)
|
||||
if not urls:
|
||||
logger.debug("No valid URLs found in message")
|
||||
@@ -307,24 +313,18 @@ class MessageHandler:
|
||||
|
||||
# Process URLs
|
||||
self.tracker.update_state(
|
||||
message.id,
|
||||
MessageState.PROCESSING,
|
||||
ProcessingStage.QUEUEING
|
||||
message.id, MessageState.PROCESSING, ProcessingStage.QUEUEING
|
||||
)
|
||||
try:
|
||||
await self.queue_processor.process_urls(
|
||||
message,
|
||||
urls,
|
||||
priority=QueuePriority.NORMAL
|
||||
message, urls, priority=QueuePriority.NORMAL
|
||||
)
|
||||
except Exception as e:
|
||||
raise MessageHandlerError(f"Queue processing failed: {str(e)}")
|
||||
|
||||
# Mark completion
|
||||
self.tracker.update_state(
|
||||
message.id,
|
||||
MessageState.COMPLETED,
|
||||
ProcessingStage.COMPLETION
|
||||
message.id, MessageState.COMPLETED, ProcessingStage.COMPLETION
|
||||
)
|
||||
|
||||
except MessageHandlerError:
|
||||
@@ -333,10 +333,7 @@ class MessageHandler:
|
||||
raise MessageHandlerError(f"Unexpected error: {str(e)}")
|
||||
|
||||
async def format_archive_message(
|
||||
self,
|
||||
author: Optional[discord.Member],
|
||||
channel: discord.TextChannel,
|
||||
url: str
|
||||
self, author: Optional[discord.Member], channel: discord.TextChannel, url: str
|
||||
) -> str:
|
||||
"""
|
||||
Format message for archive channel.
|
||||
@@ -349,11 +346,7 @@ class MessageHandler:
|
||||
Returns:
|
||||
Formatted message string
|
||||
"""
|
||||
return await self.queue_processor.format_archive_message(
|
||||
author,
|
||||
channel,
|
||||
url
|
||||
)
|
||||
return await self.queue_processor.format_archive_message(author, channel, url)
|
||||
|
||||
def get_message_status(self, message_id: int) -> MessageStatus:
|
||||
"""
|
||||
@@ -378,7 +371,9 @@ class MessageHandler:
|
||||
# Check for any stuck messages
|
||||
for message_id in self.tracker.states:
|
||||
if self.tracker.is_message_stuck(message_id):
|
||||
logger.warning(f"Message {message_id} appears to be stuck in processing")
|
||||
logger.warning(
|
||||
f"Message {message_id} appears to be stuck in processing"
|
||||
)
|
||||
return False
|
||||
return True
|
||||
except Exception as e:
|
||||
|
||||
@@ -5,9 +5,9 @@ from enum import Enum, auto
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, Optional, Tuple, List, Any, Callable, Set, TypedDict, ClassVar
|
||||
from datetime import datetime
|
||||
import discord
|
||||
import discord # type: ignore
|
||||
|
||||
from .utils.exceptions import ValidationError
|
||||
from ..utils.exceptions import ValidationError
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
@@ -6,29 +6,33 @@ import os
|
||||
from enum import Enum, auto
|
||||
from typing import Optional, Dict, Any, List, Tuple, Set, TypedDict, ClassVar, Callable
|
||||
from datetime import datetime
|
||||
import discord
|
||||
import discord # type: ignore
|
||||
|
||||
from .utils import progress_tracker
|
||||
from .database.video_archive_db import VideoArchiveDB
|
||||
from .utils.download_manager import DownloadManager
|
||||
from .utils.message_manager import MessageManager
|
||||
from .utils.exceptions import QueueHandlerError
|
||||
from .queue.models import QueueItem
|
||||
from .config_manager import ConfigManager
|
||||
from .processor.constants import REACTIONS
|
||||
from ..utils import progress_tracker
|
||||
from ..database.video_archive_db import VideoArchiveDB
|
||||
from ..utils.download_manager import DownloadManager
|
||||
from ..utils.message_manager import MessageManager
|
||||
from ..utils.exceptions import QueueHandlerError
|
||||
from ..queue.models import QueueItem
|
||||
from ..config_manager import ConfigManager
|
||||
from ..processor.constants import REACTIONS
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
class QueueItemStatus(Enum):
|
||||
"""Status of a queue item"""
|
||||
|
||||
PENDING = auto()
|
||||
PROCESSING = auto()
|
||||
COMPLETED = auto()
|
||||
FAILED = auto()
|
||||
CANCELLED = auto()
|
||||
|
||||
|
||||
class QueueStats(TypedDict):
|
||||
"""Type definition for queue statistics"""
|
||||
|
||||
active_downloads: int
|
||||
processing_items: int
|
||||
completed_items: int
|
||||
@@ -37,6 +41,7 @@ class QueueStats(TypedDict):
|
||||
last_processed: Optional[str]
|
||||
is_healthy: bool
|
||||
|
||||
|
||||
class QueueHandler:
|
||||
"""Handles queue processing and video operations"""
|
||||
|
||||
@@ -48,7 +53,7 @@ class QueueHandler:
|
||||
bot: discord.Client,
|
||||
config_manager: ConfigManager,
|
||||
components: Dict[int, Dict[str, Any]],
|
||||
db: Optional[VideoArchiveDB] = None
|
||||
db: Optional[VideoArchiveDB] = None,
|
||||
) -> None:
|
||||
self.bot = bot
|
||||
self.config_manager = config_manager
|
||||
@@ -64,7 +69,7 @@ class QueueHandler:
|
||||
"failed_items": 0,
|
||||
"average_processing_time": 0.0,
|
||||
"last_processed": None,
|
||||
"is_healthy": True
|
||||
"is_healthy": True,
|
||||
}
|
||||
|
||||
async def process_video(self, item: QueueItem) -> Tuple[bool, Optional[str]]:
|
||||
|
||||
@@ -5,30 +5,35 @@ import asyncio
|
||||
from enum import Enum, auto
|
||||
from typing import List, Optional, Dict, Any, Set, Union, TypedDict, ClassVar
|
||||
from datetime import datetime
|
||||
import discord
|
||||
import discord # type: ignore
|
||||
|
||||
from .queue.models import QueueItem
|
||||
from .queue.manager import EnhancedVideoQueueManager
|
||||
from .processor.constants import REACTIONS
|
||||
from .processor.url_extractor import URLMetadata
|
||||
from .utils.exceptions import QueueProcessingError
|
||||
from ..queue.models import QueueItem
|
||||
from ..queue.manager import EnhancedVideoQueueManager
|
||||
from ..processor.constants import REACTIONS
|
||||
from ..processor.url_extractor import URLMetadata
|
||||
from ..utils.exceptions import QueueProcessingError
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
class QueuePriority(Enum):
|
||||
"""Priority levels for queue processing"""
|
||||
|
||||
HIGH = auto()
|
||||
NORMAL = auto()
|
||||
LOW = auto()
|
||||
|
||||
|
||||
class QueueMetrics(TypedDict):
|
||||
"""Type definition for queue metrics"""
|
||||
|
||||
total_items: int
|
||||
processing_time: float
|
||||
success_rate: float
|
||||
error_rate: float
|
||||
average_size: float
|
||||
|
||||
|
||||
class QueueProcessor:
|
||||
"""Handles processing of video queue items"""
|
||||
|
||||
@@ -38,10 +43,10 @@ class QueueProcessor:
|
||||
def __init__(self, queue_manager: EnhancedVideoQueueManager):
|
||||
self.queue_manager = queue_manager
|
||||
self._metrics: Dict[str, Any] = {
|
||||
'processed_count': 0,
|
||||
'error_count': 0,
|
||||
'total_size': 0,
|
||||
'total_time': 0
|
||||
"processed_count": 0,
|
||||
"error_count": 0,
|
||||
"total_size": 0,
|
||||
"total_time": 0,
|
||||
}
|
||||
|
||||
async def process_item(self, item: QueueItem) -> bool:
|
||||
@@ -80,31 +85,31 @@ class QueueProcessor:
|
||||
|
||||
def _update_metrics(self, processing_time: float, success: bool, size: int) -> None:
|
||||
"""Update processing metrics"""
|
||||
self._metrics['processed_count'] += 1
|
||||
self._metrics['total_time'] += processing_time
|
||||
self._metrics["processed_count"] += 1
|
||||
self._metrics["total_time"] += processing_time
|
||||
|
||||
if not success:
|
||||
self._metrics['error_count'] += 1
|
||||
self._metrics["error_count"] += 1
|
||||
|
||||
if size > 0:
|
||||
self._metrics['total_size'] += size
|
||||
self._metrics["total_size"] += size
|
||||
|
||||
def get_metrics(self) -> QueueMetrics:
|
||||
"""Get current processing metrics"""
|
||||
total = self._metrics['processed_count']
|
||||
total = self._metrics["processed_count"]
|
||||
if total == 0:
|
||||
return QueueMetrics(
|
||||
total_items=0,
|
||||
processing_time=0,
|
||||
success_rate=0,
|
||||
error_rate=0,
|
||||
average_size=0
|
||||
average_size=0,
|
||||
)
|
||||
|
||||
return QueueMetrics(
|
||||
total_items=total,
|
||||
processing_time=self._metrics['total_time'],
|
||||
success_rate=(total - self._metrics['error_count']) / total,
|
||||
error_rate=self._metrics['error_count'] / total,
|
||||
average_size=self._metrics['total_size'] / total
|
||||
processing_time=self._metrics["total_time"],
|
||||
success_rate=(total - self._metrics["error_count"]) / total,
|
||||
error_rate=self._metrics["error_count"] / total,
|
||||
average_size=self._metrics["total_size"] / total,
|
||||
)
|
||||
|
||||
@@ -4,18 +4,22 @@ import logging
|
||||
import asyncio
|
||||
import re
|
||||
from typing import List, Optional
|
||||
import discord
|
||||
import discord # type: ignore
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from .processor.constants import REACTIONS, ReactionType, get_reaction, get_progress_emoji
|
||||
from .database.video_archive_db import VideoArchiveDB
|
||||
from ..processor.constants import (
|
||||
REACTIONS,
|
||||
ReactionType,
|
||||
get_reaction,
|
||||
get_progress_emoji,
|
||||
)
|
||||
from ..database.video_archive_db import VideoArchiveDB
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
|
||||
async def handle_archived_reaction(
|
||||
message: discord.Message,
|
||||
user: discord.User,
|
||||
db: VideoArchiveDB
|
||||
message: discord.Message, user: discord.User, db: VideoArchiveDB
|
||||
) -> None:
|
||||
"""
|
||||
Handle reaction to archived video message.
|
||||
@@ -27,7 +31,9 @@ async def handle_archived_reaction(
|
||||
"""
|
||||
try:
|
||||
# Check if the reaction is from a user (not the bot) and is the archived reaction
|
||||
if user.bot or str(message.reactions[0].emoji) != get_reaction(ReactionType.ARCHIVED):
|
||||
if user.bot or str(message.reactions[0].emoji) != get_reaction(
|
||||
ReactionType.ARCHIVED
|
||||
):
|
||||
return
|
||||
|
||||
# Extract URLs from the message using regex
|
||||
@@ -37,8 +43,8 @@ async def handle_archived_reaction(
|
||||
# Check each URL in the database
|
||||
for url in urls:
|
||||
# Ensure URL has proper scheme
|
||||
if url.startswith('www.'):
|
||||
url = 'http://' + url
|
||||
if url.startswith("www."):
|
||||
url = "http://" + url
|
||||
|
||||
# Validate URL
|
||||
try:
|
||||
@@ -59,10 +65,9 @@ async def handle_archived_reaction(
|
||||
except Exception as e:
|
||||
logger.error(f"Error handling archived reaction: {e}", exc_info=True)
|
||||
|
||||
|
||||
async def update_queue_position_reaction(
|
||||
message: discord.Message,
|
||||
position: int,
|
||||
bot_user: discord.ClientUser
|
||||
message: discord.Message, position: int, bot_user: discord.ClientUser
|
||||
) -> None:
|
||||
"""
|
||||
Update queue position reaction.
|
||||
@@ -100,10 +105,9 @@ async def update_queue_position_reaction(
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to update queue position reaction: {e}", exc_info=True)
|
||||
|
||||
|
||||
async def update_progress_reaction(
|
||||
message: discord.Message,
|
||||
progress: float,
|
||||
bot_user: discord.ClientUser
|
||||
message: discord.Message, progress: float, bot_user: discord.ClientUser
|
||||
) -> None:
|
||||
"""
|
||||
Update progress reaction based on FFmpeg progress.
|
||||
@@ -142,10 +146,9 @@ async def update_progress_reaction(
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to update progress reaction: {e}", exc_info=True)
|
||||
|
||||
|
||||
async def update_download_progress_reaction(
|
||||
message: discord.Message,
|
||||
progress: float,
|
||||
bot_user: discord.ClientUser
|
||||
message: discord.Message, progress: float, bot_user: discord.ClientUser
|
||||
) -> None:
|
||||
"""
|
||||
Update download progress reaction.
|
||||
|
||||
@@ -4,40 +4,59 @@ import logging
|
||||
from enum import Enum, auto
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Dict, Any, List, Optional, Callable, TypeVar, Union, TypedDict, ClassVar, Tuple
|
||||
import discord
|
||||
from typing import (
|
||||
Dict,
|
||||
Any,
|
||||
List,
|
||||
Optional,
|
||||
Callable,
|
||||
TypeVar,
|
||||
Union,
|
||||
TypedDict,
|
||||
ClassVar,
|
||||
Tuple,
|
||||
)
|
||||
import discord # type: ignore
|
||||
|
||||
from .utils.exceptions import DisplayError
|
||||
from ..utils.exceptions import DisplayError
|
||||
|
||||
logger = logging.getLogger("VideoArchiver")
|
||||
|
||||
T = TypeVar('T')
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class DisplayTheme(TypedDict):
|
||||
"""Type definition for display theme"""
|
||||
|
||||
title_color: discord.Color
|
||||
success_color: discord.Color
|
||||
warning_color: discord.Color
|
||||
error_color: discord.Color
|
||||
info_color: discord.Color
|
||||
|
||||
|
||||
class DisplaySection(Enum):
|
||||
"""Available display sections"""
|
||||
|
||||
QUEUE_STATS = auto()
|
||||
DOWNLOADS = auto()
|
||||
COMPRESSIONS = auto()
|
||||
ERRORS = auto()
|
||||
HARDWARE = auto()
|
||||
|
||||
|
||||
class DisplayCondition(Enum):
|
||||
"""Display conditions for sections"""
|
||||
|
||||
HAS_ERRORS = "has_errors"
|
||||
HAS_DOWNLOADS = "has_downloads"
|
||||
HAS_COMPRESSIONS = "has_compressions"
|
||||
|
||||
|
||||
@dataclass
|
||||
class DisplayTemplate:
|
||||
"""Template for status display sections"""
|
||||
|
||||
name: str
|
||||
format_string: str
|
||||
inline: bool = False
|
||||
@@ -46,14 +65,15 @@ class DisplayTemplate:
|
||||
formatter: Optional[Callable[[Dict[str, Any]], str]] = None
|
||||
max_items: int = field(default=5) # Maximum items to display in lists
|
||||
|
||||
|
||||
class StatusFormatter:
|
||||
"""Formats status information for display"""
|
||||
|
||||
BYTE_UNITS: ClassVar[List[str]] = ['B', 'KB', 'MB', 'GB', 'TB']
|
||||
BYTE_UNITS: ClassVar[List[str]] = ["B", "KB", "MB", "GB", "TB"]
|
||||
TIME_THRESHOLDS: ClassVar[List[Tuple[float, str]]] = [
|
||||
(60, 's'),
|
||||
(3600, 'm'),
|
||||
(float('inf'), 'h')
|
||||
(60, "s"),
|
||||
(3600, "m"),
|
||||
(float("inf"), "h"),
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
@@ -140,6 +160,7 @@ class StatusFormatter:
|
||||
raise ValueError("max_length must be at least 4")
|
||||
return f"{url[:max_length]}..." if len(url) > max_length else url
|
||||
|
||||
|
||||
class DisplayManager:
|
||||
"""Manages status display configuration"""
|
||||
|
||||
@@ -148,7 +169,7 @@ class DisplayManager:
|
||||
success_color=discord.Color.green(),
|
||||
warning_color=discord.Color.gold(),
|
||||
error_color=discord.Color.red(),
|
||||
info_color=discord.Color.blurple()
|
||||
info_color=discord.Color.blurple(),
|
||||
)
|
||||
|
||||
def __init__(self) -> None:
|
||||
@@ -165,7 +186,7 @@ class DisplayManager:
|
||||
"Avg Processing Time: {avg_processing_time}\n"
|
||||
"```"
|
||||
),
|
||||
order=1
|
||||
order=1,
|
||||
),
|
||||
DisplaySection.DOWNLOADS: DisplayTemplate(
|
||||
name="Active Downloads",
|
||||
@@ -181,7 +202,7 @@ class DisplayManager:
|
||||
"```"
|
||||
),
|
||||
order=2,
|
||||
condition=DisplayCondition.HAS_DOWNLOADS
|
||||
condition=DisplayCondition.HAS_DOWNLOADS,
|
||||
),
|
||||
DisplaySection.COMPRESSIONS: DisplayTemplate(
|
||||
name="Active Compressions",
|
||||
@@ -198,13 +219,13 @@ class DisplayManager:
|
||||
"```"
|
||||
),
|
||||
order=3,
|
||||
condition=DisplayCondition.HAS_COMPRESSIONS
|
||||
condition=DisplayCondition.HAS_COMPRESSIONS,
|
||||
),
|
||||
DisplaySection.ERRORS: DisplayTemplate(
|
||||
name="Error Statistics",
|
||||
format_string="```\n{error_stats}```",
|
||||
condition=DisplayCondition.HAS_ERRORS,
|
||||
order=4
|
||||
order=4,
|
||||
),
|
||||
DisplaySection.HARDWARE: DisplayTemplate(
|
||||
name="Hardware Statistics",
|
||||
@@ -215,11 +236,12 @@ class DisplayManager:
|
||||
"Peak Memory Usage: {memory_usage}\n"
|
||||
"```"
|
||||
),
|
||||
order=5
|
||||
)
|
||||
order=5,
|
||||
),
|
||||
}
|
||||
self.theme = self.DEFAULT_THEME.copy()
|
||||
|
||||
|
||||
class StatusDisplay:
|
||||
"""Handles formatting and display of queue status information"""
|
||||
|
||||
@@ -229,9 +251,7 @@ class StatusDisplay:
|
||||
|
||||
@classmethod
|
||||
async def create_queue_status_embed(
|
||||
cls,
|
||||
queue_status: Dict[str, Any],
|
||||
active_ops: Dict[str, Any]
|
||||
cls, queue_status: Dict[str, Any], active_ops: Dict[str, Any]
|
||||
) -> discord.Embed:
|
||||
"""
|
||||
Create an embed displaying queue status and active operations.
|
||||
@@ -251,13 +271,12 @@ class StatusDisplay:
|
||||
embed = discord.Embed(
|
||||
title="Queue Status Details",
|
||||
color=display.display_manager.theme["title_color"],
|
||||
timestamp=datetime.utcnow()
|
||||
timestamp=datetime.utcnow(),
|
||||
)
|
||||
|
||||
# Add sections in order
|
||||
sections = sorted(
|
||||
display.display_manager.templates.items(),
|
||||
key=lambda x: x[1].order
|
||||
display.display_manager.templates.items(), key=lambda x: x[1].order
|
||||
)
|
||||
|
||||
for section, template in sections:
|
||||
@@ -265,9 +284,7 @@ class StatusDisplay:
|
||||
# Check condition if exists
|
||||
if template.condition:
|
||||
if not display._check_condition(
|
||||
template.condition,
|
||||
queue_status,
|
||||
active_ops
|
||||
template.condition, queue_status, active_ops
|
||||
):
|
||||
continue
|
||||
|
||||
@@ -275,9 +292,13 @@ class StatusDisplay:
|
||||
if section == DisplaySection.QUEUE_STATS:
|
||||
display._add_queue_statistics(embed, queue_status, template)
|
||||
elif section == DisplaySection.DOWNLOADS:
|
||||
display._add_active_downloads(embed, active_ops.get('downloads', {}), template)
|
||||
display._add_active_downloads(
|
||||
embed, active_ops.get("downloads", {}), template
|
||||
)
|
||||
elif section == DisplaySection.COMPRESSIONS:
|
||||
display._add_active_compressions(embed, active_ops.get('compressions', {}), template)
|
||||
display._add_active_compressions(
|
||||
embed, active_ops.get("compressions", {}), template
|
||||
)
|
||||
elif section == DisplaySection.ERRORS:
|
||||
display._add_error_statistics(embed, queue_status, template)
elif section == DisplaySection.HARDWARE:
@@ -297,7 +318,7 @@ class StatusDisplay:
self,
condition: DisplayCondition,
queue_status: Dict[str, Any],
active_ops: Dict[str, Any]
active_ops: Dict[str, Any],
) -> bool:
"""Check if condition for displaying section is met"""
try:
@@ -316,185 +337,214 @@ class StatusDisplay:
self,
embed: discord.Embed,
queue_status: Dict[str, Any],
template: DisplayTemplate
template: DisplayTemplate,
) -> None:
"""Add queue statistics to the embed"""
try:
metrics = queue_status.get('metrics', {})
metrics = queue_status.get("metrics", {})
embed.add_field(
name=template.name,
value=template.format_string.format(
pending=queue_status.get('pending', 0),
processing=queue_status.get('processing', 0),
completed=queue_status.get('completed', 0),
failed=queue_status.get('failed', 0),
pending=queue_status.get("pending", 0),
processing=queue_status.get("processing", 0),
completed=queue_status.get("completed", 0),
failed=queue_status.get("failed", 0),
success_rate=self.formatter.format_percentage(
metrics.get('success_rate', 0) * 100
metrics.get("success_rate", 0) * 100
),
avg_processing_time=self.formatter.format_time(
metrics.get('avg_processing_time', 0)
)
metrics.get("avg_processing_time", 0)
),
),
inline=template.inline
inline=template.inline,
)
except Exception as e:
logger.error(f"Error adding queue statistics: {e}")
embed.add_field(
name=template.name,
value="```\nError displaying queue statistics```",
inline=template.inline
inline=template.inline,
)

def _add_active_downloads(
self,
embed: discord.Embed,
downloads: Dict[str, Any],
template: DisplayTemplate
self, embed: discord.Embed, downloads: Dict[str, Any], template: DisplayTemplate
) -> None:
"""Add active downloads information to the embed"""
try:
if downloads:
content = []
for url, progress in list(downloads.items())[:template.max_items]:
for url, progress in list(downloads.items())[: template.max_items]:
try:
content.append(template.format_string.format(
url=self.formatter.truncate_url(url),
percent=self.formatter.format_percentage(progress.get('percent', 0)),
speed=progress.get('speed', 'N/A'),
eta=progress.get('eta', 'N/A'),
size=f"{self.formatter.format_bytes(progress.get('downloaded_bytes', 0))}/"
f"{self.formatter.format_bytes(progress.get('total_bytes', 0))}",
start_time=progress.get('start_time', 'N/A'),
retries=progress.get('retries', 0)
))
content.append(
template.format_string.format(
url=self.formatter.truncate_url(url),
percent=self.formatter.format_percentage(
progress.get("percent", 0)
),
speed=progress.get("speed", "N/A"),
eta=progress.get("eta", "N/A"),
size=f"{self.formatter.format_bytes(progress.get('downloaded_bytes', 0))}/"
f"{self.formatter.format_bytes(progress.get('total_bytes', 0))}",
start_time=progress.get("start_time", "N/A"),
retries=progress.get("retries", 0),
)
)
except Exception as e:
logger.error(f"Error formatting download {url}: {e}")
continue

if len(downloads) > template.max_items:
content.append(f"\n... and {len(downloads) - template.max_items} more")
content.append(
f"\n... and {len(downloads) - template.max_items} more"
)

embed.add_field(
name=template.name,
value="".join(content) if content else "```\nNo active downloads```",
inline=template.inline
value=(
"".join(content) if content else "```\nNo active downloads```"
),
inline=template.inline,
)
else:
embed.add_field(
name=template.name,
value="```\nNo active downloads```",
inline=template.inline
inline=template.inline,
)
except Exception as e:
logger.error(f"Error adding active downloads: {e}")
embed.add_field(
name=template.name,
value="```\nError displaying downloads```",
inline=template.inline
inline=template.inline,
)

def _add_active_compressions(
self,
embed: discord.Embed,
compressions: Dict[str, Any],
template: DisplayTemplate
template: DisplayTemplate,
) -> None:
"""Add active compressions information to the embed"""
try:
if compressions:
content = []
for file_id, progress in list(compressions.items())[:template.max_items]:
for file_id, progress in list(compressions.items())[
: template.max_items
]:
try:
content.append(template.format_string.format(
filename=progress.get('filename', 'Unknown'),
percent=self.formatter.format_percentage(progress.get('percent', 0)),
elapsed_time=progress.get('elapsed_time', 'N/A'),
input_size=self.formatter.format_bytes(progress.get('input_size', 0)),
current_size=self.formatter.format_bytes(progress.get('current_size', 0)),
target_size=self.formatter.format_bytes(progress.get('target_size', 0)),
codec=progress.get('codec', 'Unknown'),
hardware_accel=progress.get('hardware_accel', False)
))
content.append(
template.format_string.format(
filename=progress.get("filename", "Unknown"),
percent=self.formatter.format_percentage(
progress.get("percent", 0)
),
elapsed_time=progress.get("elapsed_time", "N/A"),
input_size=self.formatter.format_bytes(
progress.get("input_size", 0)
),
current_size=self.formatter.format_bytes(
progress.get("current_size", 0)
),
target_size=self.formatter.format_bytes(
progress.get("target_size", 0)
),
codec=progress.get("codec", "Unknown"),
hardware_accel=progress.get("hardware_accel", False),
)
)
except Exception as e:
logger.error(f"Error formatting compression {file_id}: {e}")
continue

if len(compressions) > template.max_items:
content.append(f"\n... and {len(compressions) - template.max_items} more")
content.append(
f"\n... and {len(compressions) - template.max_items} more"
)

embed.add_field(
name=template.name,
value="".join(content) if content else "```\nNo active compressions```",
inline=template.inline
value=(
"".join(content)
if content
else "```\nNo active compressions```"
),
inline=template.inline,
)
else:
embed.add_field(
name=template.name,
value="```\nNo active compressions```",
inline=template.inline
inline=template.inline,
)
except Exception as e:
logger.error(f"Error adding active compressions: {e}")
embed.add_field(
name=template.name,
value="```\nError displaying compressions```",
inline=template.inline
inline=template.inline,
)

def _add_error_statistics(
self,
embed: discord.Embed,
queue_status: Dict[str, Any],
template: DisplayTemplate
template: DisplayTemplate,
) -> None:
"""Add error statistics to the embed"""
try:
metrics = queue_status.get('metrics', {})
errors_by_type = metrics.get('errors_by_type', {})
metrics = queue_status.get("metrics", {})
errors_by_type = metrics.get("errors_by_type", {})
if errors_by_type:
error_stats = "\n".join(
f"{error_type}: {count}"
for error_type, count in list(errors_by_type.items())[:template.max_items]
for error_type, count in list(errors_by_type.items())[
: template.max_items
]
)
if len(errors_by_type) > template.max_items:
error_stats += f"\n... and {len(errors_by_type) - template.max_items} more"
error_stats += (
f"\n... and {len(errors_by_type) - template.max_items} more"
)
embed.add_field(
name=template.name,
value=template.format_string.format(error_stats=error_stats),
inline=template.inline
inline=template.inline,
)
except Exception as e:
logger.error(f"Error adding error statistics: {e}")
embed.add_field(
name=template.name,
value="```\nError displaying error statistics```",
inline=template.inline
inline=template.inline,
)

def _add_hardware_statistics(
self,
embed: discord.Embed,
queue_status: Dict[str, Any],
template: DisplayTemplate
template: DisplayTemplate,
) -> None:
"""Add hardware statistics to the embed"""
try:
metrics = queue_status.get('metrics', {})
metrics = queue_status.get("metrics", {})
embed.add_field(
name=template.name,
value=template.format_string.format(
hw_failures=metrics.get('hardware_accel_failures', 0),
comp_failures=metrics.get('compression_failures', 0),
hw_failures=metrics.get("hardware_accel_failures", 0),
comp_failures=metrics.get("compression_failures", 0),
memory_usage=self.formatter.format_bytes(
metrics.get('peak_memory_usage', 0) * 1024 * 1024 # Convert MB to bytes
)
metrics.get("peak_memory_usage", 0)
* 1024
* 1024 # Convert MB to bytes
),
),
inline=template.inline
inline=template.inline,
)
except Exception as e:
logger.error(f"Error adding hardware statistics: {e}")
embed.add_field(
name=template.name,
value="```\nError displaying hardware statistics```",
inline=template.inline
inline=template.inline,
)

@@ -6,7 +6,7 @@ from enum import Enum
from dataclasses import dataclass, field
from typing import List, Dict, Optional, Set, Pattern, ClassVar
from datetime import datetime
import discord
import discord # type: ignore
from urllib.parse import urlparse, parse_qs, ParseResult

logger = logging.getLogger("VideoArchiver")

@@ -1,7 +1,7 @@
"""Module for queue health checks"""

import logging
import psutil
import psutil # type: ignore
import time
from enum import Enum
from dataclasses import dataclass, field

@@ -1,10 +1,11 @@
"""Update checker for yt-dlp"""

import logging
from importlib.metadata import version as get_package_version
from datetime import datetime, timedelta
import aiohttp
from packaging import version
import discord
import discord # type: ignore
from typing import Optional, Tuple, Dict, Any
import asyncio
import sys
@@ -15,14 +16,15 @@ import tempfile
import os
import shutil

from .exceptions import UpdateError
from .utils.exceptions import UpdateError

logger = logging.getLogger("VideoArchiver")

logger = logging.getLogger('VideoArchiver')

class UpdateChecker:
"""Handles checking for yt-dlp updates"""

GITHUB_API_URL = 'https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest'
GITHUB_API_URL = "https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest"
UPDATE_CHECK_INTERVAL = 21600 # 6 hours in seconds
MAX_RETRIES = 3
RETRY_DELAY = 5
@@ -44,10 +46,10 @@ class UpdateChecker:
if self._session is None or self._session.closed:
self._session = aiohttp.ClientSession(
headers={
'Accept': 'application/vnd.github.v3+json',
'User-Agent': 'VideoArchiver-Bot'
"Accept": "application/vnd.github.v3+json",
"User-Agent": "VideoArchiver-Bot",
},
timeout=aiohttp.ClientTimeout(total=self.REQUEST_TIMEOUT)
timeout=aiohttp.ClientTimeout(total=self.REQUEST_TIMEOUT),
)

async def start(self) -> None:
@@ -82,15 +84,21 @@ class UpdateChecker:
try:
for guild in self.bot.guilds:
try:
settings = await self.config_manager.get_guild_settings(guild.id)
if settings.get('disable_update_check', False):
settings = await self.config_manager.get_guild_settings(
guild.id
)
if settings.get("disable_update_check", False):
continue

current_time = datetime.utcnow()

# Check if we've checked recently
last_check = self._last_version_check.get(guild.id)
if last_check and (current_time - last_check).total_seconds() < self.UPDATE_CHECK_INTERVAL:
if (
last_check
and (current_time - last_check).total_seconds()
< self.UPDATE_CHECK_INTERVAL
):
continue

# Check rate limits
@@ -105,7 +113,9 @@ class UpdateChecker:
self._last_version_check[guild.id] = current_time

except Exception as e:
logger.error(f"Error checking updates for guild {guild.id}: {str(e)}")
logger.error(
f"Error checking updates for guild {guild.id}: {str(e)}"
)
continue

except asyncio.CancelledError:
@@ -124,7 +134,7 @@ class UpdateChecker:
await self._log_error(
guild,
UpdateError("Could not determine current yt-dlp version"),
"checking current version"
"checking current version",
)
return

@@ -134,14 +144,14 @@ class UpdateChecker:

# Update last check time
await self.config_manager.update_setting(
guild.id,
"last_update_check",
datetime.utcnow().isoformat()
guild.id, "last_update_check", datetime.utcnow().isoformat()
)

# Compare versions
if version.parse(current_version) < version.parse(latest_version):
await self._notify_update(guild, current_version, latest_version, settings)
await self._notify_update(
guild, current_version, latest_version, settings
)

except Exception as e:
await self._log_error(guild, e, "checking for updates")
@@ -149,7 +159,7 @@ class UpdateChecker:
async def _get_current_version(self) -> Optional[str]:
"""Get current yt-dlp version with error handling"""
try:
return get_package_version('yt-dlp')
return get_package_version("yt-dlp")
except Exception as e:
logger.error(f"Error getting current version: {str(e)}")
return None
@@ -162,27 +172,40 @@ class UpdateChecker:
try:
async with self._session.get(self.GITHUB_API_URL) as response:
# Update rate limit info
self._remaining_requests = int(response.headers.get('X-RateLimit-Remaining', 0))
self._rate_limit_reset = int(response.headers.get('X-RateLimit-Reset', 0))
self._remaining_requests = int(
response.headers.get("X-RateLimit-Remaining", 0)
)
self._rate_limit_reset = int(
response.headers.get("X-RateLimit-Reset", 0)
)

if response.status == 200:
data = await response.json()
return data['tag_name'].lstrip('v')
elif response.status == 403 and 'X-RateLimit-Remaining' in response.headers:
return data["tag_name"].lstrip("v")
elif (
response.status == 403
and "X-RateLimit-Remaining" in response.headers
):
logger.warning("GitHub API rate limit reached")
return None
elif response.status == 404:
raise UpdateError("GitHub API endpoint not found")
else:
raise UpdateError(f"GitHub API returned status {response.status}")
raise UpdateError(
f"GitHub API returned status {response.status}"
)

except asyncio.TimeoutError:
logger.error(f"Timeout getting latest version (attempt {attempt + 1}/{self.MAX_RETRIES})")
logger.error(
f"Timeout getting latest version (attempt {attempt + 1}/{self.MAX_RETRIES})"
)
if attempt == self.MAX_RETRIES - 1:
return None

except Exception as e:
logger.error(f"Error getting latest version (attempt {attempt + 1}/{self.MAX_RETRIES}): {str(e)}")
logger.error(
f"Error getting latest version (attempt {attempt + 1}/{self.MAX_RETRIES}): {str(e)}"
)
if attempt == self.MAX_RETRIES - 1:
return None

@@ -195,7 +218,7 @@ class UpdateChecker:
guild: discord.Guild,
current_version: str,
latest_version: str,
settings: dict
settings: dict,
) -> None:
"""Notify about available updates with retry mechanism"""
owner = self.bot.get_user(self.bot.owner_id)
@@ -203,7 +226,7 @@ class UpdateChecker:
await self._log_error(
guild,
UpdateError("Could not find bot owner"),
"sending update notification"
"sending update notification",
)
return

@@ -223,12 +246,14 @@ class UpdateChecker:
await self._log_error(
guild,
UpdateError(f"Failed to send update notification: {str(e)}"),
"sending update notification"
"sending update notification",
)
else:
await asyncio.sleep(settings.get("discord_retry_delay", 5))

async def _log_error(self, guild: discord.Guild, error: Exception, context: str) -> None:
async def _log_error(
self, guild: discord.Guild, error: Exception, context: str
) -> None:
"""Log an error to the guild's log channel with enhanced formatting"""
timestamp = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")
error_message = f"[{timestamp}] Error {context}: {str(error)}"
@@ -247,32 +272,29 @@ class UpdateChecker:
temp_dir = None
try:
# Create temporary directory for pip output
temp_dir = tempfile.mkdtemp(prefix='ytdlp_update_')
log_file = Path(temp_dir) / 'pip_log.txt'
temp_dir = tempfile.mkdtemp(prefix="ytdlp_update_")
log_file = Path(temp_dir) / "pip_log.txt"

# Prepare pip command
cmd = [
sys.executable,
'-m',
'pip',
'install',
'--upgrade',
'yt-dlp',
'--log',
str(log_file)
"-m",
"pip",
"install",
"--upgrade",
"yt-dlp",
"--log",
str(log_file),
]

# Run pip in subprocess with timeout
process = await asyncio.create_subprocess_exec(
*cmd,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
*cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)

try:
stdout, stderr = await asyncio.wait_for(
process.communicate(),
timeout=self.SUBPROCESS_TIMEOUT
process.communicate(), timeout=self.SUBPROCESS_TIMEOUT
)
except asyncio.TimeoutError:
process.kill()
@@ -288,7 +310,7 @@ class UpdateChecker:
error_details = "Unknown error"
if log_file.exists():
try:
error_details = log_file.read_text(errors='ignore')
error_details = log_file.read_text(errors="ignore")
except Exception:
pass
return False, f"Failed to update: {error_details}"

@@ -7,11 +7,11 @@ import subprocess
from datetime import datetime
from typing import Dict, Optional, Callable, Set, Tuple

from .ffmpeg.ffmpeg_manager import FFmpegManager
from .ffmpeg.exceptions import CompressionError
from .utils.exceptions import VideoVerificationError
from .utils.file_operations import FileOperations
from .utils.progress_handler import ProgressHandler
from ..ffmpeg.ffmpeg_manager import FFmpegManager
from ..ffmpeg.exceptions import CompressionError
from ..utils.exceptions import VideoVerificationError
from ..utils.file_operations import FileOperations
from ..utils.progress_handler import ProgressHandler

logger = logging.getLogger("VideoArchiver")


@@ -7,14 +7,15 @@ import subprocess
from datetime import datetime
from typing import Dict, Any, Optional, Callable, List, Set, Tuple

from .processor import _compression_progress
from .utils.compression_handler import CompressionHandler
from .utils.progress_handler import ProgressHandler
from .utils.file_operations import FileOperations
from .utils.exceptions import CompressionError, VideoVerificationError
from ..processor import _compression_progress
from ..utils.compression_handler import CompressionHandler
from ..utils.progress_handler import ProgressHandler
from ..utils.file_operations import FileOperations
from ..utils.exceptions import CompressionError, VideoVerificationError

logger = logging.getLogger("VideoArchiver")


class CompressionManager:
"""Manages video compression operations"""


@@ -6,11 +6,12 @@ import asyncio
from pathlib import Path
from typing import List, Optional, Tuple

from .utils.exceptions import FileCleanupError
from .utils.file_deletion import SecureFileDeleter
from ..utils.exceptions import FileCleanupError
from ..utils.file_deletion import SecureFileDeleter

logger = logging.getLogger("DirectoryManager")


class DirectoryManager:
"""Handles directory operations and cleanup"""

@@ -18,10 +19,7 @@ class DirectoryManager:
self.file_deleter = SecureFileDeleter()

async def cleanup_directory(
self,
directory_path: str,
recursive: bool = True,
delete_empty: bool = True
self, directory_path: str, recursive: bool = True, delete_empty: bool = True
) -> Tuple[int, List[str]]:
"""Clean up a directory by removing files and optionally empty subdirectories

@@ -45,9 +43,7 @@ class DirectoryManager:
try:
# Process files and directories
deleted, errs = await self._process_directory_contents(
directory_path,
recursive,
delete_empty
directory_path, recursive, delete_empty
)
deleted_count += deleted
errors.extend(errs)
@@ -69,10 +65,7 @@ class DirectoryManager:
raise FileCleanupError(f"Directory cleanup failed: {str(e)}")

async def _process_directory_contents(
self,
directory_path: str,
recursive: bool,
delete_empty: bool
self, directory_path: str, recursive: bool, delete_empty: bool
) -> Tuple[int, List[str]]:
"""Process contents of a directory"""
deleted_count = 0
@@ -90,9 +83,7 @@ class DirectoryManager:
elif entry.is_dir() and recursive:
# Process subdirectory
subdir_deleted, subdir_errors = await self.cleanup_directory(
entry.path,
recursive=True,
delete_empty=delete_empty
entry.path, recursive=True, delete_empty=delete_empty
)
deleted_count += subdir_deleted
errors.extend(subdir_errors)

@@ -3,16 +3,16 @@
import os
import asyncio
import logging
import yt_dlp
import yt_dlp # type: ignore
from typing import Dict, Optional, Callable, Tuple
from pathlib import Path

from .utils.url_validator import check_url_support
from .utils.progress_handler import ProgressHandler, CancellableYTDLLogger
from .utils.file_operations import FileOperations
from .utils.compression_handler import CompressionHandler
from .utils.process_manager import ProcessManager
from .ffmpeg.ffmpeg_manager import FFmpegManager
from ..utils.url_validator import check_url_support
from ..utils.progress_handler import ProgressHandler, CancellableYTDLLogger
from ..utils.file_operations import FileOperations
from ..utils.compression_handler import CompressionHandler
from ..utils.process_manager import ProcessManager
from ..ffmpeg.ffmpeg_manager import FFmpegManager

logger = logging.getLogger("VideoArchiver")


@@ -3,18 +3,19 @@
import os
import logging
import asyncio
import yt_dlp
import yt_dlp # type: ignore
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor
from typing import Dict, List, Optional, Tuple, Callable, Any
from pathlib import Path

from .utils.verification_manager import VideoVerificationManager
from .utils.compression_manager import CompressionManager
from .utils import progress_tracker
from ..ffmpeg.verification_manager import VerificationManager
from ..utils.compression_manager import CompressionManager
from ..utils import progress_tracker

logger = logging.getLogger("DownloadManager")


class CancellableYTDLLogger:
"""Custom yt-dlp logger that can be cancelled"""

@@ -36,6 +37,7 @@ class CancellableYTDLLogger:
raise Exception("Download cancelled")
logger.error(msg)


class DownloadManager:
"""Manages video downloads and processing"""

@@ -53,20 +55,20 @@ class DownloadManager:
max_file_size: int,
enabled_sites: Optional[List[str]] = None,
concurrent_downloads: int = 2,
ffmpeg_mgr = None
ffmpeg_mgr=None,
):
self.download_path = Path(download_path)
self.download_path.mkdir(parents=True, exist_ok=True)
os.chmod(str(self.download_path), 0o755)

# Initialize components
self.verification_manager = VideoVerificationManager(ffmpeg_mgr)
self.verification_manager = VerificationManager(ffmpeg_mgr)
self.compression_manager = CompressionManager(ffmpeg_mgr, max_file_size)

# Create thread pool
self.download_pool = ThreadPoolExecutor(
max_workers=max(1, min(3, concurrent_downloads)),
thread_name_prefix="videoarchiver_download"
thread_name_prefix="videoarchiver_download",
)

# Initialize state
@@ -75,18 +77,11 @@ class DownloadManager:

# Configure yt-dlp options
self.ydl_opts = self._configure_ydl_opts(
video_format,
max_quality,
max_file_size,
ffmpeg_mgr
video_format, max_quality, max_file_size, ffmpeg_mgr
)

def _configure_ydl_opts(
self,
video_format: str,
max_quality: int,
max_file_size: int,
ffmpeg_mgr
self, video_format: str, max_quality: int, max_file_size: int, ffmpeg_mgr
) -> Dict[str, Any]:
"""Configure yt-dlp options"""
return {
@@ -124,7 +119,9 @@ class DownloadManager:
try:
size = os.path.getsize(info["filepath"])
if size > self.compression_manager.max_file_size:
logger.info(f"File exceeds size limit, will compress: {info['filepath']}")
logger.info(
f"File exceeds size limit, will compress: {info['filepath']}"
)
except OSError as e:
logger.error(f"Error checking file size: {str(e)}")

@@ -155,9 +152,7 @@ class DownloadManager:
progress_tracker.clear_progress()

async def download_video(
self,
url: str,
progress_callback: Optional[Callable[[float], None]] = None
self, url: str, progress_callback: Optional[Callable[[float], None]] = None
) -> Tuple[bool, str, str]:
"""Download and process a video"""
if self._shutting_down:
@@ -168,17 +163,13 @@ class DownloadManager:
try:
# Download video
success, file_path, error = await self._safe_download(
url,
progress_callback
url, progress_callback
)
if not success:
return False, "", error

# Verify and compress if needed
return await self._process_downloaded_file(
file_path,
progress_callback
)
return await self._process_downloaded_file(file_path, progress_callback)

except Exception as e:
logger.error(f"Download error: {str(e)}")
@@ -188,18 +179,14 @@ class DownloadManager:
progress_tracker.end_download(url)

async def _safe_download(
self,
url: str,
progress_callback: Optional[Callable[[float], None]]
self, url: str, progress_callback: Optional[Callable[[float], None]]
) -> Tuple[bool, str, str]:
"""Safely download video with retries"""
# Implementation moved to separate method for clarity
pass # Implementation would be similar to original but using new components

async def _process_downloaded_file(
self,
file_path: str,
progress_callback: Optional[Callable[[float], None]]
self, file_path: str, progress_callback: Optional[Callable[[float], None]]
) -> Tuple[bool, str, str]:
"""Process a downloaded file (verify and compress if needed)"""
# Implementation moved to separate method for clarity

@@ -7,10 +7,11 @@ import logging
from pathlib import Path
from typing import Optional

from .utils.exceptions import FileCleanupError
from ..utils.exceptions import FileCleanupError

logger = logging.getLogger("FileDeleter")


class SecureFileDeleter:
"""Handles secure file deletion operations"""

@@ -65,7 +66,9 @@ class SecureFileDeleter:
async def _delete_large_file(self, file_path: str) -> bool:
"""Delete a large file directly"""
try:
logger.debug(f"File {file_path} exceeds max size for secure deletion, performing direct removal")
logger.debug(
f"File {file_path} exceeds max size for secure deletion, performing direct removal"
)
os.remove(file_path)
return True
except OSError as e:
@@ -84,11 +87,13 @@ class SecureFileDeleter:
async def _zero_file_content(self, file_path: str, file_size: int) -> None:
"""Zero out file content in chunks"""
try:
chunk_size = min(1024 * 1024, file_size) # 1MB chunks or file size if smaller
chunk_size = min(
1024 * 1024, file_size
) # 1MB chunks or file size if smaller
with open(file_path, "wb") as f:
for offset in range(0, file_size, chunk_size):
write_size = min(chunk_size, file_size - offset)
f.write(b'\0' * write_size)
f.write(b"\0" * write_size)
await asyncio.sleep(0) # Allow other tasks to run
f.flush()
os.fsync(f.fileno())

@@ -9,11 +9,12 @@ import subprocess
from typing import Tuple
from pathlib import Path

from .utils.exceptions import VideoVerificationError
from .utils.file_deletion import secure_delete_file
from ..utils.exceptions import VideoVerificationError
from ..utils.file_deletion import SecureFileDeleter

logger = logging.getLogger("VideoArchiver")


class FileOperations:
"""Handles safe file operations with retries"""

@@ -26,7 +27,7 @@ class FileOperations:
for attempt in range(self.max_retries):
try:
if os.path.exists(file_path):
await secure_delete_file(file_path)
await SecureFileDeleter(file_path)
return True
except Exception as e:
logger.error(f"Delete attempt {attempt + 1} failed: {str(e)}")

@@ -4,10 +4,10 @@ import logging
from pathlib import Path
from typing import List, Tuple, Optional

from .utils.exceptions import FileCleanupError
from .utils.file_deletion import SecureFileDeleter
from .utils.directory_manager import DirectoryManager
from .utils.permission_manager import PermissionManager
from ..utils.exceptions import FileCleanupError
from ..utils.file_deletion import SecureFileDeleter
from ..utils.directory_manager import DirectoryManager
from ..utils.permission_manager import PermissionManager

logger = logging.getLogger("VideoArchiver")


@@ -1,5 +1,6 @@
"""Path management utilities"""

import asyncio
import os
import tempfile
import shutil
@@ -7,11 +8,11 @@ import stat
import logging
import contextlib
import time
from typing import Generator, List, Optional
from typing import List, Optional, AsyncGenerator
from pathlib import Path

from .utils.exceptions import FileCleanupError
from .utils.permission_manager import PermissionManager
from ..utils.exceptions import FileCleanupError
from ..utils.permission_manager import PermissionManager

logger = logging.getLogger("PathManager")

@@ -162,7 +163,7 @@ class PathManager:
async def temp_path_context(
self,
prefix: str = "videoarchiver_"
) -> Generator[str, None, None]:
) -> AsyncGenerator[str, None]:
"""Async context manager for temporary path creation and cleanup

Args:

@@ -6,7 +6,7 @@ import logging
from pathlib import Path
from typing import Optional, Union, List

from .utils.exceptions import FileCleanupError
from ..utils.exceptions import FileCleanupError

logger = logging.getLogger("PermissionManager")


@@ -1,8 +1,7 @@
"""Progress tracking and logging utilities for video downloads"""

import logging
from datetime import datetime
from typing import Dict, Any, Optional, Callable
import os

logger = logging.getLogger("VideoArchiver")

@@ -13,17 +12,17 @@ class CancellableYTDLLogger:

def debug(self, msg):
if self.cancelled:
raise yt_dlp.utils.DownloadError("Download cancelled")
raise yt_dlp.utils.DownloadError("Download cancelled") # type: ignore
logger.debug(msg)

def warning(self, msg):
if self.cancelled:
raise yt_dlp.utils.DownloadError("Download cancelled")
raise yt_dlp.utils.DownloadError("Download cancelled") # type: ignore
logger.warning(msg)

def error(self, msg):
if self.cancelled:
raise yt_dlp.utils.DownloadError("Download cancelled")
raise yt_dlp.utils.DownloadError("Download cancelled") # type: ignore
logger.error(msg)

class ProgressHandler:
@@ -123,4 +122,4 @@ class ProgressHandler:
progress_callback(progress)

except Exception as e:
logger.error(f"Error upda
logger.error(f"Error updating compression progress: {str(e)}")

@@ -2,7 +2,7 @@

import re
import logging
import yt_dlp
import yt_dlp # type: ignore
from typing import List, Optional

logger = logging.getLogger("VideoArchiver")