Mirror of https://github.com/pacnpal/Pac-cogs.git (synced 2025-12-20 10:51:05 -05:00)
Fix URL detection and validation
- Add proper URL validation (must start with http/https or contain a dot)
- Remove duplicate URL check that caused double processing
- Add detailed debug logging for the URL detection process
- Fix enabled_sites filtering logic

This fixes issues with URL detection and improves validation to prevent processing non-URL text.
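For reference, here is a minimal standalone sketch of the validation flow this commit introduces, lifted from the new side of the diff below. The extract_urls function name and the usage call at the bottom are illustrative only, not the cog's actual API.

    import logging

    logger = logging.getLogger(__name__)

    def extract_urls(content, enabled_sites):
        """Collect words that look like URLs, optionally filtered by enabled sites."""
        urls = []
        for word in content.split():
            logger.debug(f"Checking word: {word}")
            # Basic URL validation - must start with http/https or contain a dot
            if word.startswith(("http://", "https://")) or "." in word:
                # If no sites are enabled, accept all URLs;
                # otherwise, accept only URLs that mention an enabled site
                if not enabled_sites or any(site in word.lower() for site in enabled_sites):
                    logger.debug(f"Found matching URL: {word}")
                    urls.append(word)
                else:
                    logger.debug(f"URL {word} doesn't match any enabled sites")
            else:
                logger.debug(f"Word {word} is not a valid URL")
        return urls

    # Hypothetical usage: with only YouTube hosts enabled, plain words and other hosts are skipped
    print(extract_urls("watch this https://youtu.be/abc thanks", ["youtube.com", "youtu.be"]))
    # -> ['https://youtu.be/abc']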
@@ -211,11 +211,17 @@ class VideoProcessor:
         for word in message.content.split():
             # Log each word being checked
             logger.debug(f"Checking word: {word}")
-            # If no sites are enabled, accept all URLs
-            # Otherwise, check if URL contains any enabled site
-            if not enabled_sites or any(site in word.lower() for site in enabled_sites):
-                logger.debug(f"Found matching URL: {word}")
-                urls.append(word)
+            # Basic URL validation - must start with http/https or contain a dot
+            if word.startswith(('http://', 'https://')) or '.' in word:
+                # If no sites are enabled, accept all URLs
+                # Otherwise, check if URL contains any enabled site
+                if not enabled_sites or any(site in word.lower() for site in enabled_sites):
+                    logger.debug(f"Found matching URL: {word}")
+                    urls.append(word)
+                else:
+                    logger.debug(f"URL {word} doesn't match any enabled sites")
+            else:
+                logger.debug(f"Word {word} is not a valid URL")

         # Add attachment URLs
         for attachment in message.attachments:
@@ -250,7 +256,7 @@ class VideoProcessor:
         except Exception as e:
             logger.error(f"Error processing message: {traceback.format_exc()}")
             try:
-                await message.add_reaction(REACTIONS['error'])
+                await message.add_reaction(REACTIONS["error"])
             except:
                 pass

@@ -281,8 +287,10 @@ class VideoProcessor:
                 return False, f"Channel {item.channel_id} not found"
             original_message = await channel.fetch_message(item.message_id)

-            await original_message.remove_reaction(REACTIONS['queued'], self.bot.user)
-            await original_message.add_reaction(REACTIONS['processing'])
+            await original_message.remove_reaction(
+                REACTIONS["queued"], self.bot.user
+            )
+            await original_message.add_reaction(REACTIONS["processing"])
             logger.info(f"Started processing message {item.message_id}")
         except discord.NotFound:
             original_message = None
@@ -303,23 +311,24 @@ class VideoProcessor:
                     loop = self.bot.loop

                     if not loop.is_running():
-                        logger.warning("Event loop is not running, skipping progress update")
+                        logger.warning(
+                            "Event loop is not running, skipping progress update"
+                        )
                         return

                     # Create a task to update the reaction
                     asyncio.run_coroutine_threadsafe(
-                        self.update_download_progress_reaction(original_message, progress),
-                        loop
+                        self.update_download_progress_reaction(
+                            original_message, progress
+                        ),
+                        loop,
                     )
                 except Exception as e:
                     logger.error(f"Error in progress callback: {e}")

             # Create and track download task
             download_task = asyncio.create_task(
-                downloader.download_video(
-                    item.url,
-                    progress_callback=progress_callback
-                )
+                downloader.download_video(item.url, progress_callback=progress_callback)
             )

             async with self._active_downloads_lock:
@@ -329,16 +338,20 @@ class VideoProcessor:
                 success, file_path, error = await download_task
                 if not success:
                     if original_message:
-                        await original_message.add_reaction(REACTIONS['error'])
-                    logger.error(f"Download failed for message {item.message_id}: {error}")
+                        await original_message.add_reaction(REACTIONS["error"])
+                    logger.error(
+                        f"Download failed for message {item.message_id}: {error}"
+                    )
                     return False, f"Failed to download video: {error}"
             except asyncio.CancelledError:
                 logger.info(f"Download cancelled for {item.url}")
                 return False, "Download cancelled"
             except Exception as e:
                 if original_message:
-                    await original_message.add_reaction(REACTIONS['error'])
-                logger.error(f"Download error for message {item.message_id}: {str(e)}")
+                    await original_message.add_reaction(REACTIONS["error"])
+                logger.error(
+                    f"Download error for message {item.message_id}: {str(e)}"
+                )
                 return False, f"Download error: {str(e)}"
             finally:
                 async with self._active_downloads_lock:
@@ -357,9 +370,7 @@ class VideoProcessor:
             try:
                 author = original_message.author if original_message else None
                 message = await message_manager.format_message(
-                    author=author,
-                    channel=channel,
-                    url=item.url
+                    author=author, channel=channel, url=item.url
                 )
             except Exception as e:
                 return False, f"Failed to format message: {str(e)}"
@@ -370,26 +381,31 @@ class VideoProcessor:
                    return False, "Processed file not found"

                await archive_channel.send(
-                    content=message,
-                    file=discord.File(file_path)
+                    content=message, file=discord.File(file_path)
                )

                if original_message:
-                    await original_message.remove_reaction(REACTIONS['processing'], self.bot.user)
-                    await original_message.add_reaction(REACTIONS['success'])
+                    await original_message.remove_reaction(
+                        REACTIONS["processing"], self.bot.user
+                    )
+                    await original_message.add_reaction(REACTIONS["success"])
                    logger.info(f"Successfully processed message {item.message_id}")

                return True, None

            except discord.HTTPException as e:
                if original_message:
-                    await original_message.add_reaction(REACTIONS['error'])
-                logger.error(f"Failed to upload to Discord for message {item.message_id}: {str(e)}")
+                    await original_message.add_reaction(REACTIONS["error"])
+                logger.error(
+                    f"Failed to upload to Discord for message {item.message_id}: {str(e)}"
+                )
                return False, f"Failed to upload to Discord: {str(e)}"
            except Exception as e:
                if original_message:
-                    await original_message.add_reaction(REACTIONS['error'])
-                logger.error(f"Failed to archive video for message {item.message_id}: {str(e)}")
+                    await original_message.add_reaction(REACTIONS["error"])
+                logger.error(
+                    f"Failed to archive video for message {item.message_id}: {str(e)}"
+                )
                return False, f"Failed to archive video: {str(e)}"

        except Exception as e:
@@ -406,15 +422,17 @@ class VideoProcessor:
     async def update_queue_position_reaction(self, message, position):
         """Update queue position reaction"""
         try:
-            for reaction in REACTIONS['numbers']:
+            for reaction in REACTIONS["numbers"]:
                 try:
                     await message.remove_reaction(reaction, self.bot.user)
                 except:
                     pass

-            if 0 <= position < len(REACTIONS['numbers']):
-                await message.add_reaction(REACTIONS['numbers'][position])
-                logger.info(f"Updated queue position reaction to {position + 1} for message {message.id}")
+            if 0 <= position < len(REACTIONS["numbers"]):
+                await message.add_reaction(REACTIONS["numbers"][position])
+                logger.info(
+                    f"Updated queue position reaction to {position + 1} for message {message.id}"
+                )
         except Exception as e:
             logger.error(f"Failed to update queue position reaction: {e}")

@@ -428,7 +446,7 @@ class VideoProcessor:
             loop = asyncio.get_running_loop()

             # Remove old reactions in the event loop
-            for reaction in REACTIONS['progress']:
+            for reaction in REACTIONS["progress"]:
                 try:
                     await message.remove_reaction(reaction, self.bot.user)
                 except Exception as e:
@@ -438,11 +456,11 @@ class VideoProcessor:
             # Add new reaction based on progress
             try:
                 if progress < 33:
-                    await message.add_reaction(REACTIONS['progress'][0])
+                    await message.add_reaction(REACTIONS["progress"][0])
                 elif progress < 66:
-                    await message.add_reaction(REACTIONS['progress'][1])
+                    await message.add_reaction(REACTIONS["progress"][1])
                 else:
-                    await message.add_reaction(REACTIONS['progress'][2])
+                    await message.add_reaction(REACTIONS["progress"][2])
             except Exception as e:
                 logger.error(f"Failed to add progress reaction: {e}")

@@ -456,7 +474,7 @@ class VideoProcessor:

         try:
             # Remove old reactions in the event loop
-            for reaction in REACTIONS['download']:
+            for reaction in REACTIONS["download"]:
                 try:
                     await message.remove_reaction(reaction, self.bot.user)
                 except Exception as e:
@@ -466,17 +484,17 @@ class VideoProcessor:
         # Add new reaction based on progress
         try:
             if progress <= 20:
-                await message.add_reaction(REACTIONS['download'][0])
+                await message.add_reaction(REACTIONS["download"][0])
             elif progress <= 40:
-                await message.add_reaction(REACTIONS['download'][1])
+                await message.add_reaction(REACTIONS["download"][1])
             elif progress <= 60:
-                await message.add_reaction(REACTIONS['download'][2])
+                await message.add_reaction(REACTIONS["download"][2])
             elif progress <= 80:
-                await message.add_reaction(REACTIONS['download'][3])
+                await message.add_reaction(REACTIONS["download"][3])
             elif progress < 100:
-                await message.add_reaction(REACTIONS['download'][4])
+                await message.add_reaction(REACTIONS["download"][4])
             else:
-                await message.add_reaction(REACTIONS['download'][5])
+                await message.add_reaction(REACTIONS["download"][5])
         except Exception as e:
             logger.error(f"Failed to add download reaction: {e}")

@@ -493,27 +511,27 @@ class VideoProcessor:
         embed = discord.Embed(
             title="Queue Status Details",
             color=discord.Color.blue(),
-            timestamp=datetime.utcnow()
+            timestamp=datetime.utcnow(),
         )

         # Queue statistics
         embed.add_field(
             name="Queue Statistics",
             value=f"```\n"
             f"Pending: {queue_status['pending']}\n"
             f"Processing: {queue_status['processing']}\n"
             f"Completed: {queue_status['completed']}\n"
             f"Failed: {queue_status['failed']}\n"
             f"Success Rate: {queue_status['metrics']['success_rate']:.1%}\n"
             f"Avg Processing Time: {queue_status['metrics']['avg_processing_time']:.1f}s\n"
             f"```",
-            inline=False
+            inline=False,
         )

         # Active downloads
         active_downloads = ""
         for url, progress in _download_progress.items():
-            if progress.get('active', False):
+            if progress.get("active", False):
                 active_downloads += (
                     f"URL: {url[:50]}...\n"
                     f"Progress: {progress.get('percent', 0):.1f}%\n"
@@ -529,19 +547,19 @@ class VideoProcessor:
             embed.add_field(
                 name="Active Downloads",
                 value=f"```\n{active_downloads}```",
-                inline=False
+                inline=False,
             )
         else:
             embed.add_field(
                 name="Active Downloads",
                 value="```\nNo active downloads```",
-                inline=False
+                inline=False,
             )

         # Active compressions
         active_compressions = ""
         for url, progress in _compression_progress.items():
-            if progress.get('active', False):
+            if progress.get("active", False):
                 active_compressions += (
                     f"File: {progress.get('filename', 'Unknown')}\n"
                     f"Progress: {progress.get('percent', 0):.1f}%\n"
@@ -558,36 +576,38 @@ class VideoProcessor:
             embed.add_field(
                 name="Active Compressions",
                 value=f"```\n{active_compressions}```",
-                inline=False
+                inline=False,
             )
         else:
             embed.add_field(
                 name="Active Compressions",
                 value="```\nNo active compressions```",
-                inline=False
+                inline=False,
             )

         # Error statistics
-        if queue_status['metrics']['errors_by_type']:
+        if queue_status["metrics"]["errors_by_type"]:
             error_stats = "\n".join(
                 f"{error_type}: {count}"
-                for error_type, count in queue_status['metrics']['errors_by_type'].items()
+                for error_type, count in queue_status["metrics"][
+                    "errors_by_type"
+                ].items()
             )
             embed.add_field(
                 name="Error Statistics",
                 value=f"```\n{error_stats}```",
-                inline=False
+                inline=False,
             )

         # Hardware acceleration statistics
         embed.add_field(
             name="Hardware Statistics",
             value=f"```\n"
             f"Hardware Accel Failures: {queue_status['metrics']['hardware_accel_failures']}\n"
             f"Compression Failures: {queue_status['metrics']['compression_failures']}\n"
             f"Peak Memory Usage: {queue_status['metrics']['peak_memory_usage']:.1f}MB\n"
             f"```",
-            inline=False
+            inline=False,
         )

         await ctx.send(embed=embed)