Add logger for ytdlp
@@ -14,6 +14,34 @@ import json
 logger = logging.getLogger(__name__)
 
 
+class YtDlpErrorFilter:
+    """Filter to suppress non-critical yt-dlp errors from stderr"""
+
+    def __init__(self, original_stderr):
+        self.original_stderr = original_stderr
+        self.buffer = []
+
+    def write(self, text):
+        """Filter stderr output from yt-dlp"""
+        # Suppress "Unable to extract title" errors - they're not critical
+        if "Unable to extract title" in text:
+            # Log as debug instead of error
+            logger.debug(f"yt-dlp: {text.strip()}")
+            return
+
+        # Suppress other non-critical extraction errors
+        if "Unable to extract" in text and ("title" in text.lower() or "metadata" in text.lower()):
+            logger.debug(f"yt-dlp: {text.strip()}")
+            return
+
+        # Write everything else to original stderr
+        self.original_stderr.write(text)
+        self.original_stderr.flush()
+
+    def flush(self):
+        self.original_stderr.flush()
+
+
 async def fix_video_aspect_ratio(video_path: str) -> Optional[str]:
     """
     Fix video aspect ratio metadata for mobile compatibility
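
For context, a minimal sketch of how this filter is meant to be used (assuming only the class above, with the yt-dlp call elided): swap sys.stderr for the duration of the download and always restore it in a finally block, which is exactly what the download_media hunks further down do.

    import sys

    original_stderr = sys.stderr
    error_filter = YtDlpErrorFilter(original_stderr)
    sys.stderr = error_filter
    try:
        ...  # run the yt-dlp download here; "Unable to extract title" noise goes to logger.debug
    finally:
        sys.stderr = original_stderr  # always restore, even if the download raises
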
@@ -311,7 +339,7 @@ async def download_media(
         # Additional options for better quality
         'writesubtitles': False,
         'writeautomaticsub': False,
-        'ignoreerrors': False,
+        'ignoreerrors': True,  # Continue on extraction errors (e.g., missing title)
         # Network settings for better reliability
         'socket_timeout': 60,  # Increase socket timeout to 60 seconds
         'retries': 3,  # Retry failed downloads up to 3 times
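
A minimal, self-contained sketch of the changed option (placeholder URL and output template, not the project's full ydl_opts): with 'ignoreerrors': True, yt-dlp logs per-item extraction errors and keeps going instead of aborting the whole run.

    import yt_dlp

    ydl_opts = {
        'ignoreerrors': True,         # continue past extraction errors (e.g., missing title)
        'socket_timeout': 60,
        'retries': 3,
        'outtmpl': '%(id)s.%(ext)s',  # placeholder output template for this sketch
    }

    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        ydl.download(['https://example.com/watch?v=placeholder'])  # placeholder URL
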
@@ -396,7 +424,15 @@ async def download_media(
             # This function runs in a separate thread (ThreadPoolExecutor)
             # progress hook will be called from this thread and use
             # run_coroutine_threadsafe for safe call in main event loop
+            import sys
+            original_stderr = sys.stderr
+            error_filter = None
+
             try:
+                # Redirect stderr to filter non-critical errors
+                error_filter = YtDlpErrorFilter(original_stderr)
+                sys.stderr = error_filter
+
                 with yt_dlp.YoutubeDL(ydl_opts) as ydl:
                     # Check for cancellation before start
                     if cancel_event and cancel_event.is_set():
@@ -418,8 +454,16 @@ async def download_media(
                     error_msg = str(download_error)
                     error_lower = error_msg.lower()
+
+                    # Check if it's a non-critical extraction error (e.g., missing title)
+                    # These errors don't prevent download, just metadata extraction
+                    if "Unable to extract" in error_msg and ("title" in error_lower or "metadata" in error_lower):
+                        logger.debug(
+                            f"Non-critical extraction error (metadata may be missing): {error_msg}. "
+                            f"Video file should still be available. Will check file existence."
+                        )
+                        # Don't raise - video is likely already downloaded
                     # Check if it's just a postprocessing error (video is already downloaded)
-                    if "Postprocessing" in error_msg or "aspect ratio" in error_lower:
+                    elif "Postprocessing" in error_msg or "aspect ratio" in error_lower:
                         logger.warning(
                             f"Postprocessing error (non-critical): {error_msg}. "
                             f"Video file should still be available. Will check file existence."
@@ -466,6 +510,10 @@ async def download_media(
                 # Interrupt download on cancellation
                 logger.info("Download interrupted")
                 raise
+            finally:
+                # Restore original stderr
+                if error_filter:
+                    sys.stderr = original_stderr
 
         # Execute in executor for non-blocking download
         # None uses ThreadPoolExecutor by default
@@ -806,4 +854,3 @@ async def get_videos_list(url: str, cookies_file: Optional[str] = None) -> Optio
     except Exception as e:
         logger.error(f"Error getting videos list: {e}", exc_info=True)
         return None
-