Add downloading videos by link

This commit is contained in:
2025-12-05 21:57:25 +03:00
parent 97c85572e9
commit 5c280a4d3a
3 changed files with 301 additions and 0 deletions

View File

@@ -648,3 +648,121 @@ async def get_media_info(url: str, cookies_file: Optional[str] = None) -> Option
logger.error(f"Error getting media info: {e}", exc_info=True)
return None
async def get_videos_list(url: str, cookies_file: Optional[str] = None) -> Optional[Dict]:
"""
Get list of videos from webpage
Args:
url: Webpage URL
cookies_file: Path to cookies file (optional)
Returns:
Dictionary with:
- 'type': 'playlist' or 'video'
- 'videos': List of video dictionaries with 'id', 'url', 'title', 'duration', 'thumbnail'
- 'playlist_title': Title of playlist/page (if playlist)
or None if error
"""
try:
loop = asyncio.get_running_loop()
ydl_opts = {
'quiet': True,
'no_warnings': True,
'extract_flat': 'in_playlist', # Extract flat for playlist entries, full for single videos
}
# Add cookies if specified
if cookies_file:
cookies_path = None
original_path = Path(cookies_file)
search_paths = []
if original_path.is_absolute():
search_paths.append(original_path)
else:
project_root = Path(__file__).parent.parent.parent.parent
search_paths.append(project_root / cookies_file)
import os
cwd = Path(os.getcwd())
search_paths.append(cwd / cookies_file)
search_paths.append(Path(cookies_file).resolve())
for path in search_paths:
if path.exists() and path.is_file():
cookies_path = path
break
if cookies_path and cookies_path.exists():
ydl_opts['cookiefile'] = str(cookies_path)
def extract_info_sync():
"""Synchronous function for extracting information"""
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
return ydl.extract_info(url, download=False)
# Extract info without downloading
info = await loop.run_in_executor(None, extract_info_sync)
if not info:
return None
# Check if it's a playlist or single video
_type = info.get('_type', 'video')
entries = info.get('entries', [])
if _type == 'playlist' and entries:
# It's a playlist - extract entries
videos = []
for entry in entries[:20]: # Limit to 20 videos to avoid timeout
if entry:
entry_url = entry.get('url') or entry.get('webpage_url')
if not entry_url:
continue
videos.append({
'id': entry.get('id'),
'url': entry_url,
'title': entry.get('title', 'Unknown'),
'duration': entry.get('duration'),
'thumbnail': entry.get('thumbnail'),
})
if videos:
return {
'type': 'playlist',
'videos': videos,
'playlist_title': info.get('title', 'Playlist'),
}
else:
# No valid entries found, treat as single video
return {
'type': 'video',
'videos': [{
'id': info.get('id'),
'url': url,
'title': info.get('title', 'Video'),
'duration': info.get('duration'),
'thumbnail': info.get('thumbnail'),
}],
'playlist_title': None,
}
else:
# Single video
return {
'type': 'video',
'videos': [{
'id': info.get('id'),
'url': url,
'title': info.get('title', 'Video'),
'duration': info.get('duration'),
'thumbnail': info.get('thumbnail'),
}],
'playlist_title': None,
}
except Exception as e:
logger.error(f"Error getting videos list: {e}", exc_info=True)
return None