feat(cli-service-download): basic implementation

Benexl
2025-07-28 21:22:11 +03:00
parent 40065478cc
commit 93c0f2ab83
10 changed files with 604 additions and 19 deletions

View File

@@ -0,0 +1,249 @@
from typing import TYPE_CHECKING, Dict, List
import click
from fastanime.cli.utils.completion import anime_titles_shell_complete
from fastanime.core.config import AppConfig
from fastanime.core.exceptions import FastAnimeError
from fastanime.libs.media_api.types import (
MediaFormat,
MediaGenre,
MediaItem,
MediaSeason,
MediaSort,
MediaStatus,
MediaTag,
MediaType,
MediaYear,
)
from .. import examples
if TYPE_CHECKING:
    from typing import TypedDict

    from typing_extensions import Unpack

    class DownloadOptions(TypedDict, total=False):
        title: str | None
        episode_range: str | None
        page: int
        per_page: int | None
        season: str | None
        status: tuple[str, ...]
        status_not: tuple[str, ...]
        sort: str | None
        genres: tuple[str, ...]
        genres_not: tuple[str, ...]
        tags: tuple[str, ...]
        tags_not: tuple[str, ...]
        media_format: tuple[str, ...]
        media_type: str | None
        year: str | None
        popularity_greater: int | None
        popularity_lesser: int | None
        score_greater: int | None
        score_lesser: int | None
        start_date_greater: int | None
        start_date_lesser: int | None
        end_date_greater: int | None
        end_date_lesser: int | None
        on_list: bool | None
        yes: bool
@click.command(
    help="Search for anime on AniList and download episodes.",
    short_help="Search and download anime.",
    epilog=examples.download,
)
# --- Re-using all search options ---
@click.option("--title", "-t", shell_complete=anime_titles_shell_complete)
@click.option("--page", "-p", type=click.IntRange(min=1), default=1)
@click.option("--per-page", type=click.IntRange(min=1, max=50))
@click.option("--season", type=click.Choice([s.value for s in MediaSeason]))
@click.option(
    "--status", "-S", multiple=True, type=click.Choice([s.value for s in MediaStatus])
)
@click.option(
    "--status-not", multiple=True, type=click.Choice([s.value for s in MediaStatus])
)
@click.option("--sort", "-s", type=click.Choice([s.value for s in MediaSort]))
@click.option(
    "--genres", "-g", multiple=True, type=click.Choice([g.value for g in MediaGenre])
)
@click.option(
    "--genres-not", multiple=True, type=click.Choice([g.value for g in MediaGenre])
)
@click.option(
    "--tags", "-T", multiple=True, type=click.Choice([t.value for t in MediaTag])
)
@click.option(
    "--tags-not", multiple=True, type=click.Choice([t.value for t in MediaTag])
)
@click.option(
    "--media-format",
    "-f",
    multiple=True,
    type=click.Choice([f.value for f in MediaFormat]),
)
@click.option("--media-type", type=click.Choice([t.value for t in MediaType]))
@click.option("--year", "-y", type=click.Choice([y.value for y in MediaYear]))
@click.option("--popularity-greater", type=click.IntRange(min=0))
@click.option("--popularity-lesser", type=click.IntRange(min=0))
@click.option("--score-greater", type=click.IntRange(min=0, max=100))
@click.option("--score-lesser", type=click.IntRange(min=0, max=100))
@click.option("--start-date-greater", type=int)
@click.option("--start-date-lesser", type=int)
@click.option("--end-date-greater", type=int)
@click.option("--end-date-lesser", type=int)
@click.option("--on-list/--not-on-list", "-L/-no-L", type=bool, default=None)
# --- Download specific options ---
@click.option(
    "--episode-range",
    "-r",
    help="Range of episodes to download (e.g., '1-10', '5', '8:12'). Required.",
    required=True,
)
@click.option(
    "--yes",
    # No "-y" short flag here: "-y" is already taken by --year above, and click
    # would silently let the later registration shadow the earlier one.
    is_flag=True,
    help="Automatically download from all found anime without prompting for selection.",
)
@click.pass_obj
def download(config: AppConfig, **options: "Unpack[DownloadOptions]"):
    from fastanime.cli.service.download.service import DownloadService
    from fastanime.cli.service.feedback import FeedbackService
    from fastanime.cli.service.registry import MediaRegistryService
    from fastanime.cli.service.watch_history import WatchHistoryService
    from fastanime.cli.utils.parser import parse_episode_range
    from fastanime.libs.media_api.api import create_api_client
    from fastanime.libs.media_api.params import MediaSearchParams
    from fastanime.libs.provider.anime.provider import create_provider
    from fastanime.libs.selectors import create_selector
    from rich.progress import Progress

    feedback = FeedbackService(config.general.icons)
    selector = create_selector(config)
    media_api = create_api_client(config.general.media_api, config)
    provider = create_provider(config.general.provider)
    registry = MediaRegistryService(config.general.media_api, config.media_registry)
    watch_history = WatchHistoryService(config, registry, media_api)
    download_service = DownloadService(config, registry, media_api, provider)

    try:
        sort_val = options.get("sort")
        status_val = options.get("status")
        status_not_val = options.get("status_not")
        genres_val = options.get("genres")
        genres_not_val = options.get("genres_not")
        tags_val = options.get("tags")
        tags_not_val = options.get("tags_not")
        media_format_val = options.get("media_format")
        media_type_val = options.get("media_type")
        season_val = options.get("season")
        year_val = options.get("year")

        search_params = MediaSearchParams(
            query=options.get("title"),
            page=options.get("page", 1),
            per_page=options.get("per_page"),
            sort=MediaSort(sort_val) if sort_val else None,
            status_in=[MediaStatus(s) for s in status_val] if status_val else None,
            status_not_in=[MediaStatus(s) for s in status_not_val]
            if status_not_val
            else None,
            genre_in=[MediaGenre(g) for g in genres_val] if genres_val else None,
            genre_not_in=[MediaGenre(g) for g in genres_not_val]
            if genres_not_val
            else None,
            tag_in=[MediaTag(t) for t in tags_val] if tags_val else None,
            tag_not_in=[MediaTag(t) for t in tags_not_val] if tags_not_val else None,
            format_in=[MediaFormat(f) for f in media_format_val]
            if media_format_val
            else None,
            type=MediaType(media_type_val) if media_type_val else None,
            season=MediaSeason(season_val) if season_val else None,
            seasonYear=int(year_val) if year_val else None,
            popularity_greater=options.get("popularity_greater"),
            popularity_lesser=options.get("popularity_lesser"),
            averageScore_greater=options.get("score_greater"),
            averageScore_lesser=options.get("score_lesser"),
            startDate_greater=options.get("start_date_greater"),
            startDate_lesser=options.get("start_date_lesser"),
            endDate_greater=options.get("end_date_greater"),
            endDate_lesser=options.get("end_date_lesser"),
            on_list=options.get("on_list"),
        )

        with Progress() as progress:
            progress.add_task("Searching AniList...", total=None)
            search_result = media_api.search_media(search_params)

        if not search_result or not search_result.media:
            raise FastAnimeError("No anime found matching your search criteria.")

        anime_to_download: List[MediaItem]
        if options.get("yes"):
            anime_to_download = search_result.media
        else:
            choice_map: Dict[str, MediaItem] = {
                (item.title.english or item.title.romaji or f"ID: {item.id}"): item
                for item in search_result.media
            }
            selected_titles = selector.choose_multiple(
                "Select anime to download (use TAB to select, ENTER to confirm)",
                list(choice_map.keys()),
            )
            if not selected_titles:
                feedback.warning("No anime selected. Aborting download.")
                return
            anime_to_download = [choice_map[title] for title in selected_titles]

        total_downloaded = 0
        episode_range_str = options.get("episode_range")
        if not episode_range_str:
            raise FastAnimeError("--episode-range is required.")

        for media_item in anime_to_download:
            watch_history.add_media_to_list_if_not_present(media_item)
            available_episodes = [str(i + 1) for i in range(media_item.episodes or 0)]
            if not available_episodes:
                feedback.warning(
                    f"No episode information for '{media_item.title.english}', skipping."
                )
                continue
            try:
                episodes_to_download = list(
                    parse_episode_range(episode_range_str, available_episodes)
                )
                if not episodes_to_download:
                    feedback.warning(
                        f"Episode range '{episode_range_str}' resulted in no episodes for '{media_item.title.english}'."
                    )
                    continue
                feedback.info(
                    f"Preparing to download {len(episodes_to_download)} episodes for '{media_item.title.english}'."
                )
                download_service.download_episodes_sync(
                    media_item, episodes_to_download
                )
                total_downloaded += len(episodes_to_download)
            except (ValueError, IndexError) as e:
                feedback.error(
                    f"Invalid episode range for '{media_item.title.english}': {e}"
                )
                continue

        feedback.success(
            f"Finished. Successfully downloaded a total of {total_downloaded} episodes."
        )
    except FastAnimeError as e:
        feedback.error("Download command failed", str(e))
    except Exception as e:
        feedback.error("An unexpected error occurred", str(e))

View File

@@ -0,0 +1,244 @@
import logging
import time
from datetime import datetime
from typing import TYPE_CHECKING, List, Optional
from ....core.config.model import AppConfig
from ....core.downloader import DownloadParams, DownloadResult, create_downloader
from ....core.utils.concurrency import ManagedBackgroundWorker, thread_manager
from ....core.utils.fuzzy import fuzz
from ....core.utils.normalizer import normalize_title
from ....libs.media_api.types import MediaItem
from ....libs.provider.anime.params import (
    AnimeParams,
    EpisodeStreamsParams,
    SearchParams,
)
from ..registry.models import DownloadStatus

if TYPE_CHECKING:
    from ....libs.media_api.api import BaseApiClient
    from ....libs.provider.anime.provider import BaseAnimeProvider

    from ..registry.service import MediaRegistryService
logger = logging.getLogger(__name__)

class DownloadService:
    def __init__(
        self,
        config: AppConfig,
        registry_service: "MediaRegistryService",
        media_api_service: "BaseApiClient",
        provider_service: "BaseAnimeProvider",
    ):
        self.config = config
        self.registry = registry_service
        self.media_api = media_api_service
        self.provider = provider_service
        self.downloader = create_downloader(config.downloads)

        # Worker is kept for potential future background commands
        self._worker = ManagedBackgroundWorker(
            max_workers=config.downloads.max_concurrent_downloads,
            name="DownloadWorker",
        )
        thread_manager.register_worker("download_worker", self._worker)

    def start(self):
        """Starts the download worker for background tasks."""
        if not self._worker.is_running():
            self._worker.start()
        # We can still resume background tasks on startup if any exist
        self.resume_unfinished_downloads()

    def stop(self):
        """Stops the download worker."""
        self._worker.shutdown(wait=False)

    def add_to_queue(self, media_item: MediaItem, episode_number: str) -> bool:
        """Adds a download job to the ASYNCHRONOUS queue."""
        logger.info(
            f"Queueing background download for '{media_item.title.english}' Episode {episode_number}"
        )
        self.registry.get_or_create_record(media_item)
        updated = self.registry.update_episode_download_status(
            media_id=media_item.id,
            episode_number=episode_number,
            status=DownloadStatus.QUEUED,
        )
        if not updated:
            return False
        self._worker.submit_function(
            self._execute_download_job, media_item, episode_number
        )
        return True

    def download_episodes_sync(self, media_item: MediaItem, episodes: List[str]):
        """
        Performs downloads SYNCHRONOUSLY and blocks until complete.
        This is for the direct `download` command.
        """
        for episode_number in episodes:
            title = (
                media_item.title.english
                or media_item.title.romaji
                or f"ID: {media_item.id}"
            )
            logger.info(
                f"Starting synchronous download for '{title}' Episode {episode_number}"
            )
            self._execute_download_job(media_item, episode_number)

    def resume_unfinished_downloads(self):
        """Finds and re-queues any downloads that were left in an unfinished state."""
        logger.info("Checking for unfinished downloads to resume...")
        queued_jobs = self.registry.get_episodes_by_download_status(
            DownloadStatus.QUEUED
        )
        downloading_jobs = self.registry.get_episodes_by_download_status(
            DownloadStatus.DOWNLOADING
        )
        unfinished_jobs = queued_jobs + downloading_jobs
        if not unfinished_jobs:
            logger.info("No unfinished downloads found.")
            return
        logger.info(
            f"Found {len(unfinished_jobs)} unfinished downloads. Re-queueing..."
        )
        for media_id, episode_number in unfinished_jobs:
            record = self.registry.get_media_record(media_id)
            if record and record.media_item:
                self.add_to_queue(record.media_item, episode_number)
            else:
                logger.error(
                    f"Could not find metadata for media ID {media_id}. Cannot resume. Please run 'fastanime registry sync'."
                )

    def _execute_download_job(self, media_item: MediaItem, episode_number: str):
        """The core download logic, can be called by worker or synchronously."""
        self.registry.get_or_create_record(media_item)
        try:
            self.registry.update_episode_download_status(
                media_id=media_item.id,
                episode_number=episode_number,
                status=DownloadStatus.DOWNLOADING,
            )
            media_title = (
                media_item.title.english or media_item.title.romaji or "Unknown"
            )

            # --- START OF FIX: REPLICATE WORKING LOGIC ---
            # 1. Search the provider to get the provider-specific ID
            provider_search_title = normalize_title(
                media_title,
                self.config.general.provider.value,
                use_provider_mapping=True,
            )
            provider_search_results = self.provider.search(
                SearchParams(query=provider_search_title)
            )
            if not provider_search_results or not provider_search_results.results:
                raise ValueError(
                    f"Could not find '{media_title}' on provider '{self.config.general.provider.value}'"
                )

            # 2. Find the best match using fuzzy logic (like auto-select)
            provider_results_map = {
                result.title: result for result in provider_search_results.results
            }
            best_match_title = max(
                provider_results_map.keys(),
                key=lambda p_title: fuzz.ratio(
                    normalize_title(
                        p_title, self.config.general.provider.value
                    ).lower(),
                    media_title.lower(),
                ),
            )
            provider_anime_ref = provider_results_map[best_match_title]

            # 3. Get full provider anime details (contains the correct episode list)
            provider_anime = self.provider.get(
                AnimeParams(id=provider_anime_ref.id, query=media_title)
            )
            if not provider_anime:
                raise ValueError(
                    f"Failed to get full details for '{best_match_title}' from provider."
                )
            # --- END OF FIX ---

            # 4. Get stream links using the now-validated provider_anime ID
            streams_iterator = self.provider.episode_streams(
                EpisodeStreamsParams(
                    anime_id=provider_anime.id,  # Use the ID from the provider, not AniList
                    query=media_title,
                    episode=episode_number,
                    translation_type=self.config.stream.translation_type,
                )
            )
            if not streams_iterator:
                raise ValueError("Provider returned no stream iterator.")
            server = next(streams_iterator, None)
            if not server or not server.links:
                raise ValueError(f"No stream links found for Episode {episode_number}")
            stream_link = server.links[0]

            # 5. Perform the download
            download_params = DownloadParams(
                url=stream_link.link,
                anime_title=media_title,
                episode_title=f"{media_title} - Episode {episode_number}",
                silent=False,
                headers=server.headers,
                subtitles=[sub.url for sub in server.subtitles],
                merge=self.config.downloads.merge_subtitles,
                clean=self.config.downloads.cleanup_after_merge,
            )
            result = self.downloader.download(download_params)

            # 6. Update registry based on result
            if result.success and result.video_path:
                file_size = (
                    result.video_path.stat().st_size
                    if result.video_path.exists()
                    else None
                )
                self.registry.update_episode_download_status(
                    media_id=media_item.id,
                    episode_number=episode_number,
                    status=DownloadStatus.COMPLETED,
                    file_path=result.merged_path or result.video_path,
                    file_size=file_size,
                    quality=stream_link.quality,
                    provider_name=self.config.general.provider.value,
                    server_name=server.name,
                    subtitle_paths=result.subtitle_paths,
                    download_date=datetime.now(),
                )
                logger.info(
                    f"Successfully downloaded Episode {episode_number} of '{media_title}'"
                )
            else:
                raise ValueError(result.error_message or "Unknown download error")
        except Exception as e:
            logger.error(
                f"Download failed for '{media_item.title.english}' Ep {episode_number}: {e}",
                exc_info=True,
            )
            self.registry.update_episode_download_status(
                media_id=media_item.id,
                episode_number=episode_number,
                status=DownloadStatus.FAILED,
                error_message=str(e),
            )
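
The pivotal step in _execute_download_job is matching the AniList title to a provider search result before requesting streams. Stripped of the service plumbing, the technique is a max() over fuzzy similarity scores. Below is a minimal standalone sketch, substituting thefuzz (already a 'standard' extra of this project) for the project's fuzz wrapper, and a trivial lowercaser for normalize_title; both substitutions are assumptions made only to keep the example self-contained.

# Standalone sketch of the fuzzy best-match selection in step 2.
from thefuzz import fuzz


def normalize(title: str) -> str:
    # Trivial stand-in for fastanime's normalize_title.
    return title.lower().strip()


def best_match(media_title: str, provider_titles: list[str]) -> str:
    # Pick the provider title whose normalized form scores highest
    # against the media database title.
    return max(
        provider_titles,
        key=lambda t: fuzz.ratio(normalize(t), media_title.lower()),
    )


results = ["Sousou no Frieren", "Frieren: Beyond Journey's End", "Frieren OVA"]
print(best_match("Frieren: Beyond Journey's End", results))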

View File

@@ -532,6 +532,7 @@ class MediaRegistryService:
        server_name: Optional[str] = None,
        subtitle_paths: Optional[list[Path]] = None,
        error_message: Optional[str] = None,
        download_date: Optional[datetime] = None,
    ) -> bool:
        """Update the download status and metadata for a specific episode."""
        try:

View File

@@ -116,3 +116,15 @@ class WatchHistoryService:
                progress=progress,
            )
        )

    def add_media_to_list_if_not_present(self, media_item: MediaItem):
        """Adds a media item to the user's PLANNING list if it's not already on any list."""
        if not self.media_api or not self.media_api.is_authenticated():
            return
        # If user_status is None, it means the item is not on the user's list.
        if media_item.user_status is None:
            logger.info(
                f"'{media_item.title.english}' not on list. Adding to 'Planning'."
            )
            self.update(media_item, status=UserMediaListStatus.PLANNING)

View File

@@ -69,6 +69,12 @@ ANILIST_PREFERRED_LANGUAGE = "english"
DOWNLOADS_DOWNLOADER = "auto"
DOWNLOADS_DOWNLOADS_DIR = USER_VIDEOS_DIR
DOWNLOADS_ENABLE_TRACKING = True
DOWNLOADS_MAX_CONCURRENT = 3
DOWNLOADS_RETRY_ATTEMPTS = 2
DOWNLOADS_RETRY_DELAY = 60
DOWNLOADS_MERGE_SUBTITLES = True
DOWNLOADS_CLEANUP_AFTER_MERGE = True
# RegistryConfig
MEDIA_REGISTRY_DIR = USER_VIDEOS_DIR / ".registry"

View File

@@ -1,3 +1,6 @@
# GeneralConfig
from .defaults import SESSIONS_DIR
GENERAL_PYGMENT_STYLE = "The pygment style to use"
GENERAL_API_CLIENT = "The media database API to use (e.g., 'anilist', 'jikan')."
GENERAL_PREFERRED_TRACKER = (
@@ -99,22 +102,16 @@ ANILIST_PREFERRED_LANGUAGE = "Preferred language for anime titles from AniList."
DOWNLOADS_DOWNLOADER = "The downloader to use"
DOWNLOADS_DOWNLOADS_DIR = "The default directory to save downloaded anime."
DOWNLOADS_ENABLE_TRACKING = "Enable download tracking and management"
DOWNLOADS_AUTO_ORGANIZE = "Automatically organize downloads by anime title"
DOWNLOADS_MAX_CONCURRENT = "Maximum concurrent downloads"
DOWNLOADS_AUTO_CLEANUP_FAILED = "Automatically cleanup failed downloads"
DOWNLOADS_RETENTION_DAYS = "Days to keep failed downloads before cleanup"
DOWNLOADS_SYNC_WITH_WATCH_HISTORY = "Sync download status with watch history"
DOWNLOADS_AUTO_MARK_OFFLINE = (
    "Automatically mark downloaded episodes as available offline"
)
DOWNLOADS_NAMING_TEMPLATE = "File naming template for downloaded episodes"
DOWNLOADS_PREFERRED_QUALITY = "Preferred download quality"
DOWNLOADS_DOWNLOAD_SUBTITLES = "Download subtitles when available"
DOWNLOADS_SUBTITLE_LANGUAGES = "Preferred subtitle languages"
DOWNLOADS_QUEUE_MAX_SIZE = "Maximum number of items in download queue"
DOWNLOADS_AUTO_START_DOWNLOADS = "Automatically start downloads when items are queued"
DOWNLOADS_MAX_CONCURRENT = "Maximum number of concurrent downloads"
DOWNLOADS_RETRY_ATTEMPTS = "Number of retry attempts for failed downloads"
DOWNLOADS_RETRY_DELAY = "Delay between retry attempts in seconds"
DOWNLOADS_MERGE_SUBTITLES = (
    "Automatically merge subtitles into the video file after download."
)
DOWNLOADS_CLEANUP_AFTER_MERGE = (
    "Delete the original video and subtitle files after a successful merge."
)
# RegistryConfig
MEDIA_REGISTRY_DIR = "The default directory to save media registry"

View File

@@ -284,17 +284,37 @@ class DownloadsConfig(OtherConfig):
    downloader: Literal["auto", "default", "yt-dlp"] = Field(
        default=defaults.DOWNLOADS_DOWNLOADER, description=desc.DOWNLOADS_DOWNLOADER
    )
    downloads_dir: Path = Field(
        default_factory=lambda: defaults.DOWNLOADS_DOWNLOADS_DIR,
        description=desc.DOWNLOADS_DOWNLOADS_DIR,
    )

    # Download tracking configuration
    enable_tracking: bool = Field(
        default=defaults.DOWNLOADS_ENABLE_TRACKING,
        description=desc.DOWNLOADS_ENABLE_TRACKING,
    )
    max_concurrent_downloads: int = Field(
        default=defaults.DOWNLOADS_MAX_CONCURRENT,
        ge=1,
        description=desc.DOWNLOADS_MAX_CONCURRENT,
    )
    retry_attempts: int = Field(
        default=defaults.DOWNLOADS_RETRY_ATTEMPTS,
        ge=0,
        description=desc.DOWNLOADS_RETRY_ATTEMPTS,
    )
    retry_delay: int = Field(
        default=defaults.DOWNLOADS_RETRY_DELAY,
        ge=0,
        description=desc.DOWNLOADS_RETRY_DELAY,
    )
    merge_subtitles: bool = Field(
        default=defaults.DOWNLOADS_MERGE_SUBTITLES,
        description=desc.DOWNLOADS_MERGE_SUBTITLES,
    )
    cleanup_after_merge: bool = Field(
        default=defaults.DOWNLOADS_CLEANUP_AFTER_MERGE,
        description=desc.DOWNLOADS_CLEANUP_AFTER_MERGE,
    )
class MediaRegistryConfig(OtherConfig):

View File

@@ -3,8 +3,8 @@ from abc import ABC, abstractmethod
import httpx
from ..config.model import DownloadsConfig
from .params import DownloadParams
from .model import DownloadResult
from .params import DownloadParams
class BaseDownloader(ABC):
@@ -13,7 +13,13 @@ class BaseDownloader(ABC):
    def __init__(self, config: DownloadsConfig):
        self.config = config
        self.client = httpx.Client()
        # Increase timeouts and add retries for robustness
        transport = httpx.HTTPTransport(retries=3)
        self.client = httpx.Client(
            transport=transport,
            timeout=httpx.Timeout(15.0, connect=60.0),
            follow_redirects=True,
        )

    @abstractmethod
    def download(self, params: DownloadParams) -> DownloadResult:
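
Two details of the new client are easy to misread: httpx.HTTPTransport(retries=...) retries only failures while establishing a connection (it never re-sends a request that dies mid-transfer or returns an HTTP error status), and httpx.Timeout(15.0, connect=60.0) sets 15 seconds as the default for read/write/pool operations with a separate 60-second connect budget. A minimal illustration of the same configuration in isolation (the URL is just an example):

import httpx

# Connection-level retries only; mid-transfer failures and HTTP error
# statuses are not retried by the transport.
transport = httpx.HTTPTransport(retries=3)
# 15s default for read/write/pool, 60s to establish the connection.
timeout = httpx.Timeout(15.0, connect=60.0)

with httpx.Client(
    transport=transport, timeout=timeout, follow_redirects=True
) as client:
    response = client.get("https://www.example.com")
    print(response.status_code, response.headers.get("content-type"))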

View File

@@ -9,8 +9,10 @@ dependencies = [
"click>=8.1.7",
"httpx>=0.28.1",
"inquirerpy>=0.3.4",
"pycryptodomex>=3.23.0",
"pydantic>=2.11.7",
"rich>=13.9.2",
"yt-dlp>=2025.7.21",
]
[project.scripts]

uv.lock generated
View File

@@ -103,8 +103,10 @@ dependencies = [
{ name = "click" },
{ name = "httpx" },
{ name = "inquirerpy" },
{ name = "pycryptodomex" },
{ name = "pydantic" },
{ name = "rich" },
{ name = "yt-dlp" },
]
[package.optional-dependencies]
@@ -155,11 +157,13 @@ requires-dist = [
{ name = "mpv", marker = "extra == 'standard'", specifier = ">=1.0.7" },
{ name = "plyer", marker = "extra == 'notifications'", specifier = ">=2.1.0" },
{ name = "plyer", marker = "extra == 'standard'", specifier = ">=2.1.0" },
{ name = "pycryptodomex", specifier = ">=3.23.0" },
{ name = "pydantic", specifier = ">=2.11.7" },
{ name = "pypresence", marker = "extra == 'discord'", specifier = ">=4.3.0" },
{ name = "pypresence", marker = "extra == 'standard'", specifier = ">=4.3.0" },
{ name = "rich", specifier = ">=13.9.2" },
{ name = "thefuzz", marker = "extra == 'standard'", specifier = ">=0.22.1" },
{ name = "yt-dlp", specifier = ">=2025.7.21" },
]
provides-extras = ["standard", "notifications", "mpv", "torrent", "lxml", "discord"]
@@ -509,6 +513,41 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/4f/5249960887b1fbe561d9ff265496d170b55a735b76724f10ef19f9e40716/prompt_toolkit-3.0.51-py3-none-any.whl", hash = "sha256:52742911fde84e2d423e2f9a4cf1de7d7ac4e51958f648d9540e0fb8db077b07", size = 387810, upload-time = "2025-04-15T09:18:44.753Z" },
]
[[package]]
name = "pycryptodomex"
version = "3.23.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/c9/85/e24bf90972a30b0fcd16c73009add1d7d7cd9140c2498a68252028899e41/pycryptodomex-3.23.0.tar.gz", hash = "sha256:71909758f010c82bc99b0abf4ea12012c98962fbf0583c2164f8b84533c2e4da", size = 4922157, upload-time = "2025-05-17T17:23:41.434Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/00/10edb04777069a42490a38c137099d4b17ba6e36a4e6e28bdc7470e9e853/pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7b37e08e3871efe2187bc1fd9320cc81d87caf19816c648f24443483005ff886", size = 2498764, upload-time = "2025-05-17T17:22:21.453Z" },
{ url = "https://files.pythonhosted.org/packages/6b/3f/2872a9c2d3a27eac094f9ceaa5a8a483b774ae69018040ea3240d5b11154/pycryptodomex-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:91979028227543010d7b2ba2471cf1d1e398b3f183cb105ac584df0c36dac28d", size = 1643012, upload-time = "2025-05-17T17:22:23.702Z" },
{ url = "https://files.pythonhosted.org/packages/70/af/774c2e2b4f6570fbf6a4972161adbb183aeeaa1863bde31e8706f123bf92/pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8962204c47464d5c1c4038abeadd4514a133b28748bcd9fa5b6d62e3cec6fa", size = 2187643, upload-time = "2025-05-17T17:22:26.37Z" },
{ url = "https://files.pythonhosted.org/packages/de/a3/71065b24cb889d537954cedc3ae5466af00a2cabcff8e29b73be047e9a19/pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a33986a0066860f7fcf7c7bd2bc804fa90e434183645595ae7b33d01f3c91ed8", size = 2273762, upload-time = "2025-05-17T17:22:28.313Z" },
{ url = "https://files.pythonhosted.org/packages/c9/0b/ff6f43b7fbef4d302c8b981fe58467b8871902cdc3eb28896b52421422cc/pycryptodomex-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7947ab8d589e3178da3d7cdeabe14f841b391e17046954f2fbcd941705762b5", size = 2313012, upload-time = "2025-05-17T17:22:30.57Z" },
{ url = "https://files.pythonhosted.org/packages/02/de/9d4772c0506ab6da10b41159493657105d3f8bb5c53615d19452afc6b315/pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c25e30a20e1b426e1f0fa00131c516f16e474204eee1139d1603e132acffc314", size = 2186856, upload-time = "2025-05-17T17:22:32.819Z" },
{ url = "https://files.pythonhosted.org/packages/28/ad/8b30efcd6341707a234e5eba5493700a17852ca1ac7a75daa7945fcf6427/pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:da4fa650cef02db88c2b98acc5434461e027dce0ae8c22dd5a69013eaf510006", size = 2347523, upload-time = "2025-05-17T17:22:35.386Z" },
{ url = "https://files.pythonhosted.org/packages/0f/02/16868e9f655b7670dbb0ac4f2844145cbc42251f916fc35c414ad2359849/pycryptodomex-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:58b851b9effd0d072d4ca2e4542bf2a4abcf13c82a29fd2c93ce27ee2a2e9462", size = 2272825, upload-time = "2025-05-17T17:22:37.632Z" },
{ url = "https://files.pythonhosted.org/packages/ca/18/4ca89ac737230b52ac8ffaca42f9c6f1fd07c81a6cd821e91af79db60632/pycryptodomex-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:a9d446e844f08299236780f2efa9898c818fe7e02f17263866b8550c7d5fb328", size = 1772078, upload-time = "2025-05-17T17:22:40Z" },
{ url = "https://files.pythonhosted.org/packages/73/34/13e01c322db027682e00986873eca803f11c56ade9ba5bbf3225841ea2d4/pycryptodomex-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:bc65bdd9fc8de7a35a74cab1c898cab391a4add33a8fe740bda00f5976ca4708", size = 1803656, upload-time = "2025-05-17T17:22:42.139Z" },
{ url = "https://files.pythonhosted.org/packages/54/68/9504c8796b1805d58f4425002bcca20f12880e6fa4dc2fc9a668705c7a08/pycryptodomex-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c885da45e70139464f082018ac527fdaad26f1657a99ee13eecdce0f0ca24ab4", size = 1707172, upload-time = "2025-05-17T17:22:44.704Z" },
{ url = "https://files.pythonhosted.org/packages/dd/9c/1a8f35daa39784ed8adf93a694e7e5dc15c23c741bbda06e1d45f8979e9e/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:06698f957fe1ab229a99ba2defeeae1c09af185baa909a31a5d1f9d42b1aaed6", size = 2499240, upload-time = "2025-05-17T17:22:46.953Z" },
{ url = "https://files.pythonhosted.org/packages/7a/62/f5221a191a97157d240cf6643747558759126c76ee92f29a3f4aee3197a5/pycryptodomex-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b2c2537863eccef2d41061e82a881dcabb04944c5c06c5aa7110b577cc487545", size = 1644042, upload-time = "2025-05-17T17:22:49.098Z" },
{ url = "https://files.pythonhosted.org/packages/8c/fd/5a054543c8988d4ed7b612721d7e78a4b9bf36bc3c5ad45ef45c22d0060e/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43c446e2ba8df8889e0e16f02211c25b4934898384c1ec1ec04d7889c0333587", size = 2186227, upload-time = "2025-05-17T17:22:51.139Z" },
{ url = "https://files.pythonhosted.org/packages/c8/a9/8862616a85cf450d2822dbd4fff1fcaba90877907a6ff5bc2672cafe42f8/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f489c4765093fb60e2edafdf223397bc716491b2b69fe74367b70d6999257a5c", size = 2272578, upload-time = "2025-05-17T17:22:53.676Z" },
{ url = "https://files.pythonhosted.org/packages/46/9f/bda9c49a7c1842820de674ab36c79f4fbeeee03f8ff0e4f3546c3889076b/pycryptodomex-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdc69d0d3d989a1029df0eed67cc5e8e5d968f3724f4519bd03e0ec68df7543c", size = 2312166, upload-time = "2025-05-17T17:22:56.585Z" },
{ url = "https://files.pythonhosted.org/packages/03/cc/870b9bf8ca92866ca0186534801cf8d20554ad2a76ca959538041b7a7cf4/pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6bbcb1dd0f646484939e142462d9e532482bc74475cecf9c4903d4e1cd21f003", size = 2185467, upload-time = "2025-05-17T17:22:59.237Z" },
{ url = "https://files.pythonhosted.org/packages/96/e3/ce9348236d8e669fea5dd82a90e86be48b9c341210f44e25443162aba187/pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:8a4fcd42ccb04c31268d1efeecfccfd1249612b4de6374205376b8f280321744", size = 2346104, upload-time = "2025-05-17T17:23:02.112Z" },
{ url = "https://files.pythonhosted.org/packages/a5/e9/e869bcee87beb89040263c416a8a50204f7f7a83ac11897646c9e71e0daf/pycryptodomex-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:55ccbe27f049743a4caf4f4221b166560d3438d0b1e5ab929e07ae1702a4d6fd", size = 2271038, upload-time = "2025-05-17T17:23:04.872Z" },
{ url = "https://files.pythonhosted.org/packages/8d/67/09ee8500dd22614af5fbaa51a4aee6e342b5fa8aecf0a6cb9cbf52fa6d45/pycryptodomex-3.23.0-cp37-abi3-win32.whl", hash = "sha256:189afbc87f0b9f158386bf051f720e20fa6145975f1e76369303d0f31d1a8d7c", size = 1771969, upload-time = "2025-05-17T17:23:07.115Z" },
{ url = "https://files.pythonhosted.org/packages/69/96/11f36f71a865dd6df03716d33bd07a67e9d20f6b8d39820470b766af323c/pycryptodomex-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:52e5ca58c3a0b0bd5e100a9fbc8015059b05cffc6c66ce9d98b4b45e023443b9", size = 1803124, upload-time = "2025-05-17T17:23:09.267Z" },
{ url = "https://files.pythonhosted.org/packages/f9/93/45c1cdcbeb182ccd2e144c693eaa097763b08b38cded279f0053ed53c553/pycryptodomex-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:02d87b80778c171445d67e23d1caef279bf4b25c3597050ccd2e13970b57fd51", size = 1707161, upload-time = "2025-05-17T17:23:11.414Z" },
{ url = "https://files.pythonhosted.org/packages/f3/b8/3e76d948c3c4ac71335bbe75dac53e154b40b0f8f1f022dfa295257a0c96/pycryptodomex-3.23.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ebfff755c360d674306e5891c564a274a47953562b42fb74a5c25b8fc1fb1cb5", size = 1627695, upload-time = "2025-05-17T17:23:17.38Z" },
{ url = "https://files.pythonhosted.org/packages/6a/cf/80f4297a4820dfdfd1c88cf6c4666a200f204b3488103d027b5edd9176ec/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eca54f4bb349d45afc17e3011ed4264ef1cc9e266699874cdd1349c504e64798", size = 1675772, upload-time = "2025-05-17T17:23:19.202Z" },
{ url = "https://files.pythonhosted.org/packages/d1/42/1e969ee0ad19fe3134b0e1b856c39bd0b70d47a4d0e81c2a8b05727394c9/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2596e643d4365e14d0879dc5aafe6355616c61c2176009270f3048f6d9a61f", size = 1668083, upload-time = "2025-05-17T17:23:21.867Z" },
{ url = "https://files.pythonhosted.org/packages/6e/c3/1de4f7631fea8a992a44ba632aa40e0008764c0fb9bf2854b0acf78c2cf2/pycryptodomex-3.23.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdfac7cda115bca3a5abb2f9e43bc2fb66c2b65ab074913643803ca7083a79ea", size = 1706056, upload-time = "2025-05-17T17:23:24.031Z" },
{ url = "https://files.pythonhosted.org/packages/f2/5f/af7da8e6f1e42b52f44a24d08b8e4c726207434e2593732d39e7af5e7256/pycryptodomex-3.23.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:14c37aaece158d0ace436f76a7bb19093db3b4deade9797abfc39ec6cd6cc2fe", size = 1806478, upload-time = "2025-05-17T17:23:26.066Z" },
]
[[package]]
name = "pydantic"
version = "2.11.7"
@@ -997,3 +1036,12 @@ sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc
wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload-time = "2024-01-06T02:10:55.763Z" },
]
[[package]]
name = "yt-dlp"
version = "2025.7.21"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7e/3a/343f7a0024ddd4c30f150e8d8f57fd7b924846f97d99fc0dcd75ea8d2773/yt_dlp-2025.7.21.tar.gz", hash = "sha256:46fbb53eab1afbe184c45b4c17e9a6eba614be680e4c09de58b782629d0d7f43", size = 3050219, upload-time = "2025-07-21T23:59:03.826Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e8/2f/abe59a3204c749fed494849ea29176bcefa186ec8898def9e43f649ddbcf/yt_dlp-2025.7.21-py3-none-any.whl", hash = "sha256:d7aa2b53f9b2f35453346360f41811a0dad1e956e70b35a4ae95039d4d815d15", size = 3288681, upload-time = "2025-07-21T23:59:01.788Z" },
]