mirror of https://github.com/Benexl/FastAnime.git
synced 2025-12-05 20:40:09 -08:00

feat: working with ai is a mess lol

This commit is contained in:
@@ -6,6 +6,8 @@ commands = {
     "trending": "trending.trending",
     "recent": "recent.recent",
     "search": "search.search",
+    "download": "download.download",
+    "downloads": "downloads.downloads",
 }
@@ -2,7 +2,7 @@ from typing import TYPE_CHECKING
 
 import click
 
-from fastanime.cli.utils.completion_functions import anime_titles_shell_complete
+from fastanime.cli.utils.completions import anime_titles_shell_complete
 from .data import (
     genres_available,
     media_formats_available,

178 fastanime/cli/commands/anilist/download.py Normal file
@@ -0,0 +1,178 @@
"""
|
||||
Single download command for the anilist CLI.
|
||||
|
||||
Handles downloading specific episodes or continuing from watch history.
|
||||
"""
|
||||
|
||||
import click
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
from ....core.config.model import AppConfig
|
||||
from ....libs.api.types import MediaItem
|
||||
from ...services.downloads import get_download_manager
|
||||
from ...services.watch_history.manager import WatchHistoryManager
|
||||
|
||||
|
||||
def parse_episode_range(range_str: str) -> List[int]:
|
||||
"""Parse episode range string into list of episode numbers."""
|
||||
episodes = []
|
||||
|
||||
for part in range_str.split(','):
|
||||
part = part.strip()
|
||||
if '-' in part:
|
||||
start, end = map(int, part.split('-', 1))
|
||||
episodes.extend(range(start, end + 1))
|
||||
else:
|
||||
episodes.append(int(part))
|
||||
|
||||
return sorted(set(episodes)) # Remove duplicates and sort
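
# Usage sketch: parse_episode_range("1-3,7,9-10") -> [1, 2, 3, 7, 9, 10]
# (duplicates collapse through set() and the result comes back sorted)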

@click.command(name="download")
@click.argument("query", required=False)
@click.option("--episode", "-e", type=int, help="Specific episode number")
@click.option("--range", "-r", help="Episode range (e.g., 1-12, 5,7,9)")
@click.option("--quality", "-q",
              type=click.Choice(["360", "480", "720", "1080", "best"]),
              help="Preferred download quality")
@click.option("--continue", "continue_watch", is_flag=True,
              help="Continue from watch history")
@click.option("--background", "-b", is_flag=True,
              help="Download in background")
@click.option("--path", type=click.Path(exists=True, file_okay=False, dir_okay=True),
              help="Custom download location")
@click.option("--subtitles/--no-subtitles", default=None,
              help="Include subtitles (overrides config)")
@click.option("--priority", type=int, default=0,
              help="Download priority (higher number = higher priority)")
@click.pass_context
def download(ctx: click.Context, query: Optional[str], episode: Optional[int],
             range: Optional[str], quality: Optional[str], continue_watch: bool,
             background: bool, path: Optional[str], subtitles: Optional[bool],
             priority: int):
    """
    Download anime episodes with tracking.

    Examples:

    \b
    # Download specific episode
    fastanime anilist download "Attack on Titan" --episode 1

    \b
    # Download episode range
    fastanime anilist download "Naruto" --range "1-5,10,15-20"

    \b
    # Continue from watch history
    fastanime anilist download --continue

    \b
    # Download with custom quality
    fastanime anilist download "One Piece" --episode 1000 --quality 720
    """

    config: AppConfig = ctx.obj
    download_manager = get_download_manager(config.downloads)

    try:
        # Handle continue from watch history
        if continue_watch:
            if query:
                click.echo("--continue flag cannot be used with a search query", err=True)
                ctx.exit(1)

            # Get current watching anime from history
            watch_manager = WatchHistoryManager()
            current_watching = watch_manager.get_currently_watching()

            if not current_watching:
                click.echo("No anime currently being watched found in history", err=True)
                ctx.exit(1)

            if len(current_watching) == 1:
                media_item = current_watching[0].media_item
                next_episode = current_watching[0].last_watched_episode + 1
                episodes_to_download = [next_episode]
            else:
                # Multiple anime, let user choose
                click.echo("Multiple anime found in watch history:")
                for i, entry in enumerate(current_watching):
                    title = entry.media_item.title.english or entry.media_item.title.romaji
                    next_ep = entry.last_watched_episode + 1
                    click.echo(f"  {i + 1}. {title} (next episode: {next_ep})")

                choice = click.prompt("Select anime to download", type=int)
                if choice < 1 or choice > len(current_watching):
                    click.echo("Invalid selection", err=True)
                    ctx.exit(1)

                selected_entry = current_watching[choice - 1]
                media_item = selected_entry.media_item
                next_episode = selected_entry.last_watched_episode + 1
                episodes_to_download = [next_episode]

        else:
            # Search for anime
            if not query:
                click.echo("Query is required when not using --continue", err=True)
                ctx.exit(1)

            # TODO: Integrate with search functionality
            # For now, this is a placeholder - you'll need to integrate with your existing search system
            click.echo(f"Searching for: {query}")
            click.echo("Note: Search integration not yet implemented in this example")
            ctx.exit(1)

        # Determine episodes to download
        if episode:
            episodes_to_download = [episode]
        elif range:
            try:
                episodes_to_download = parse_episode_range(range)
            except ValueError as e:
                click.echo(f"Invalid episode range: {e}", err=True)
                ctx.exit(1)
        elif not continue_watch:
            # Default to episode 1 if nothing specified
            episodes_to_download = [1]

        # Validate episodes
        if not episodes_to_download:
            click.echo("No episodes specified for download", err=True)
            ctx.exit(1)

        if media_item.episodes and max(episodes_to_download) > media_item.episodes:
            click.echo(f"Episode {max(episodes_to_download)} exceeds total episodes ({media_item.episodes})", err=True)
            ctx.exit(1)

        # Use quality from config if not specified
        if not quality:
            quality = config.downloads.preferred_quality

        # Add to download queue
        success = download_manager.add_to_queue(
            media_item=media_item,
            episodes=episodes_to_download,
            quality=quality,
            priority=priority
        )

        if success:
            title = media_item.title.english or media_item.title.romaji
            episode_text = f"episode {episodes_to_download[0]}" if len(episodes_to_download) == 1 else f"{len(episodes_to_download)} episodes"

            click.echo(f"✓ Added {episode_text} of '{title}' to download queue")

            if background:
                click.echo("Download will continue in the background")
            else:
                click.echo("Run 'fastanime anilist downloads status' to monitor progress")
        else:
            click.echo("Failed to add episodes to download queue", err=True)
            ctx.exit(1)

    except Exception as e:
        click.echo(f"Error: {e}", err=True)
        ctx.exit(1)

381 fastanime/cli/commands/anilist/downloads.py Normal file
@@ -0,0 +1,381 @@
"""
|
||||
Downloads management commands for the anilist CLI.
|
||||
|
||||
Provides comprehensive download management including listing, status monitoring,
|
||||
cleanup, and verification operations.
|
||||
"""
|
||||
|
||||
import click
|
||||
import json
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from ....core.config.model import AppConfig
|
||||
from ...services.downloads import get_download_manager
|
||||
from ...services.downloads.validator import DownloadValidator
|
||||
|
||||
|
||||
def format_size(size_bytes: int) -> str:
|
||||
"""Format file size in human-readable format."""
|
||||
for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
|
||||
if size_bytes < 1024.0:
|
||||
return f"{size_bytes:.1f} {unit}"
|
||||
size_bytes /= 1024.0
|
||||
return f"{size_bytes:.1f} PB"
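
# Usage sketch: format_size(1536) -> "1.5 KB"; format_size(3 * 1024**3) -> "3.0 GB"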


def format_duration(seconds: Optional[float]) -> str:
    """Format duration in human-readable format."""
    if seconds is None:
        return "Unknown"

    if seconds < 60:
        return f"{seconds:.0f}s"
    elif seconds < 3600:
        return f"{seconds/60:.0f}m {seconds%60:.0f}s"
    else:
        hours = seconds // 3600
        minutes = (seconds % 3600) // 60
        return f"{hours:.0f}h {minutes:.0f}m"
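
# Usage sketch: format_duration(75) -> "1m 15s"; format_duration(5400) -> "1h 30m"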


@click.group(name="downloads")
@click.pass_context
def downloads(ctx: click.Context):
    """Manage downloaded anime."""
    pass


@downloads.command()
@click.option("--status",
              type=click.Choice(["all", "completed", "active", "failed", "paused"]),
              default="all",
              help="Filter by download status")
@click.option("--format", "output_format",
              type=click.Choice(["table", "json", "simple"]),
              default="table",
              help="Output format")
@click.option("--limit", type=int, help="Limit number of results")
@click.pass_context
def list(ctx: click.Context, status: str, output_format: str, limit: Optional[int]):
    """List all downloads."""

    config: AppConfig = ctx.obj
    download_manager = get_download_manager(config.downloads)

    try:
        # Get download records
        status_filter = None if status == "all" else status
        records = download_manager.list_downloads(status_filter=status_filter, limit=limit)

        if not records:
            click.echo("No downloads found")
            return

        if output_format == "json":
            # JSON output
            output_data = []
            for record in records:
                output_data.append({
                    "media_id": record.media_item.id,
                    "title": record.display_title,
                    "status": record.status,
                    "episodes_downloaded": record.total_episodes_downloaded,
                    "total_episodes": record.media_item.episodes or 0,
                    "completion_percentage": record.completion_percentage,
                    "total_size_gb": record.total_size_gb,
                    "last_updated": record.last_updated.isoformat()
                })

            click.echo(json.dumps(output_data, indent=2))

        elif output_format == "simple":
            # Simple text output
            for record in records:
                title = record.display_title
                status_emoji = {
                    "completed": "✓",
                    "active": "⬇",
                    "failed": "✗",
                    "paused": "⏸"
                }.get(record.status, "?")

                click.echo(f"{status_emoji} {title} ({record.total_episodes_downloaded}/{record.media_item.episodes or 0} episodes)")

        else:
            # Table output (default)
            click.echo()
            click.echo("Downloads:")
            click.echo("=" * 80)

            # Header
            header = f"{'Title':<30} {'Status':<10} {'Episodes':<12} {'Size':<10} {'Updated':<15}"
            click.echo(header)
            click.echo("-" * 80)

            # Rows
            for record in records:
                title = record.display_title
                if len(title) > 28:
                    title = title[:25] + "..."

                status_display = record.status.capitalize()

                episodes_display = f"{record.total_episodes_downloaded}/{record.media_item.episodes or '?'}"

                size_display = format_size(record.total_size_bytes)

                updated_display = record.last_updated.strftime("%Y-%m-%d")

                row = f"{title:<30} {status_display:<10} {episodes_display:<12} {size_display:<10} {updated_display:<15}"
                click.echo(row)

            click.echo("-" * 80)
            click.echo(f"Total: {len(records)} anime")

    except Exception as e:
        click.echo(f"Error listing downloads: {e}", err=True)
        ctx.exit(1)


@downloads.command()
@click.pass_context
def status(ctx: click.Context):
    """Show download queue status and statistics."""

    config: AppConfig = ctx.obj
    download_manager = get_download_manager(config.downloads)

    try:
        # Get statistics
        stats = download_manager.get_download_stats()

        click.echo()
        click.echo("Download Statistics:")
        click.echo("=" * 40)
        click.echo(f"Total Anime: {stats.get('total_anime', 0)}")
        click.echo(f"Total Episodes: {stats.get('total_episodes', 0)}")
        click.echo(f"Total Size: {stats.get('total_size_gb', 0):.2f} GB")
        click.echo(f"Queue Size: {stats.get('queue_size', 0)}")

        # Show completion stats
        completion_stats = stats.get('completion_stats', {})
        if completion_stats:
            click.echo()
            click.echo("Status Breakdown:")
            click.echo("-" * 20)
            for status, count in completion_stats.items():
                click.echo(f"  {status.capitalize()}: {count}")

        # Show active downloads
        queue = download_manager._load_queue()
        if queue.items:
            click.echo()
            click.echo("Download Queue:")
            click.echo("-" * 30)
            for item in queue.items[:5]:  # Show first 5 items
                title = f"Media {item.media_id}"  # Would need to lookup title
                click.echo(f"  Episode {item.episode_number} of {title} ({item.quality_preference})")

            if len(queue.items) > 5:
                click.echo(f"  ... and {len(queue.items) - 5} more items")

    except Exception as e:
        click.echo(f"Error getting download status: {e}", err=True)
        ctx.exit(1)


@downloads.command()
@click.option("--dry-run", is_flag=True, help="Show what would be cleaned without doing it")
@click.pass_context
def clean(ctx: click.Context, dry_run: bool):
    """Clean up failed downloads and orphaned entries."""

    config: AppConfig = ctx.obj
    download_manager = get_download_manager(config.downloads)

    try:
        if dry_run:
            click.echo("Dry run mode - no changes will be made")
            click.echo()

        # Clean up failed downloads
        if not dry_run:
            failed_count = download_manager.cleanup_failed_downloads()
            click.echo(f"Cleaned up {failed_count} failed downloads")
        else:
            click.echo("Would clean up failed downloads older than retention period")

        # Clean up orphaned files
        validator = DownloadValidator(download_manager)
        if not dry_run:
            orphaned_count = validator.cleanup_orphaned_files()
            click.echo(f"Cleaned up {orphaned_count} orphaned files")
        else:
            click.echo("Would clean up orphaned files and fix index inconsistencies")

        if dry_run:
            click.echo()
            click.echo("Run without --dry-run to perform actual cleanup")

    except Exception as e:
        click.echo(f"Error during cleanup: {e}", err=True)
        ctx.exit(1)


@downloads.command()
@click.argument("media_id", type=int, required=False)
@click.option("--all", "verify_all", is_flag=True, help="Verify all downloads")
@click.pass_context
def verify(ctx: click.Context, media_id: Optional[int], verify_all: bool):
    """Verify download integrity for specific anime or all downloads."""

    config: AppConfig = ctx.obj
    download_manager = get_download_manager(config.downloads)

    try:
        validator = DownloadValidator(download_manager)

        if verify_all:
            click.echo("Generating comprehensive validation report...")
            report = validator.generate_validation_report()

            click.echo()
            click.echo("Validation Report:")
            click.echo("=" * 50)
            click.echo(f"Total Records: {report['total_records']}")
            click.echo(f"Valid Records: {report['valid_records']}")
            click.echo(f"Invalid Records: {report['invalid_records']}")
            click.echo(f"Integrity Issues: {report['integrity_issues']}")
            click.echo(f"Path Issues: {report['path_issues']}")
            click.echo(f"Orphaned Files: {report['orphaned_files']}")

            if report['details']['invalid_files']:
                click.echo()
                click.echo("Invalid Files:")
                for file_path in report['details']['invalid_files']:
                    click.echo(f"  - {file_path}")

            if report['details']['integrity_failures']:
                click.echo()
                click.echo("Integrity Failures:")
                for failure in report['details']['integrity_failures']:
                    click.echo(f"  - {failure['title']}: Episodes {failure['failed_episodes']}")

        elif media_id:
            record = download_manager.get_download_record(media_id)
            if not record:
                click.echo(f"No download record found for media ID {media_id}", err=True)
                ctx.exit(1)

            click.echo(f"Verifying downloads for: {record.display_title}")

            # Verify integrity
            integrity_results = validator.verify_file_integrity(record)

            # Verify paths
            path_issues = validator.validate_file_paths(record)

            # Display results
            click.echo()
            click.echo("Episode Verification:")
            click.echo("-" * 30)

            for episode_num, episode_download in record.episodes.items():
                status_emoji = "✓" if integrity_results.get(episode_num, False) else "✗"
                click.echo(f"  {status_emoji} Episode {episode_num} ({episode_download.status})")

                if not integrity_results.get(episode_num, False):
                    if not episode_download.file_path.exists():
                        click.echo(f"    - File missing: {episode_download.file_path}")
                    elif episode_download.checksum and not episode_download.verify_integrity():
                        click.echo(f"    - Checksum mismatch")

            if path_issues:
                click.echo()
                click.echo("Path Issues:")
                for issue in path_issues:
                    click.echo(f"  - {issue}")

        else:
            click.echo("Specify --all to verify all downloads or provide a media ID", err=True)
            ctx.exit(1)

    except Exception as e:
        click.echo(f"Error during verification: {e}", err=True)
        ctx.exit(1)


@downloads.command()
@click.argument("output_file", type=click.Path())
@click.option("--format", "export_format",
              type=click.Choice(["json", "csv"]),
              default="json",
              help="Export format")
@click.pass_context
def export(ctx: click.Context, output_file: str, export_format: str):
    """Export download list to a file."""

    config: AppConfig = ctx.obj
    download_manager = get_download_manager(config.downloads)

    try:
        records = download_manager.list_downloads()
        output_path = Path(output_file)

        if export_format == "json":
            export_data = []
            for record in records:
                export_data.append({
                    "media_id": record.media_item.id,
                    "title": record.display_title,
                    "status": record.status,
                    "episodes": {
                        str(ep_num): {
                            "episode_number": ep.episode_number,
                            "file_path": str(ep.file_path),
                            "file_size": ep.file_size,
                            "quality": ep.quality,
                            "status": ep.status,
                            "download_date": ep.download_date.isoformat()
                        }
                        for ep_num, ep in record.episodes.items()
                    },
                    "download_path": str(record.download_path),
                    "created_date": record.created_date.isoformat(),
                    "last_updated": record.last_updated.isoformat()
                })

            with open(output_path, 'w', encoding='utf-8') as f:
                json.dump(export_data, f, indent=2, ensure_ascii=False)

        elif export_format == "csv":
            import csv

            with open(output_path, 'w', newline='', encoding='utf-8') as f:
                writer = csv.writer(f)

                # Write header
                writer.writerow([
                    "Media ID", "Title", "Status", "Episodes Downloaded",
                    "Total Episodes", "Total Size (GB)", "Last Updated"
                ])

                # Write data
                for record in records:
                    writer.writerow([
                        record.media_item.id,
                        record.display_title,
                        record.status,
                        record.total_episodes_downloaded,
                        record.media_item.episodes or 0,
                        f"{record.total_size_gb:.2f}",
                        record.last_updated.strftime("%Y-%m-%d %H:%M:%S")
                    ])

        click.echo(f"Exported {len(records)} download records to {output_path}")

    except Exception as e:
        click.echo(f"Error exporting downloads: {e}", err=True)
        ctx.exit(1)

@@ -1,271 +0,0 @@
from typing import TYPE_CHECKING

import click

from ...core.config import AppConfig
from ...core.exceptions import FastAnimeError
from ..utils.completion_functions import anime_titles_shell_complete
from . import examples

if TYPE_CHECKING:
    from pathlib import Path
    from typing import TypedDict

    from typing_extensions import Unpack

    from ...libs.players.base import BasePlayer
    from ...libs.providers.anime.base import BaseAnimeProvider
    from ...libs.providers.anime.types import Anime
    from ...libs.selectors.base import BaseSelector

    class Options(TypedDict):
        anime_title: tuple
        episode_range: str
        file: Path | None
        force_unknown_ext: bool
        silent: bool
        verbose: bool
        merge: bool
        clean: bool
        wait_time: int
        prompt: bool
        force_ffmpeg: bool
        hls_use_mpegts: bool
        hls_use_h264: bool


@click.command(
    help="Download anime using the anime provider for a specified range",
    short_help="Download anime",
    epilog=examples.download,
)
@click.option(
    "--anime_title",
    "-t",
    required=True,
    shell_complete=anime_titles_shell_complete,
    multiple=True,
    help="Specify which anime to download",
)
@click.option(
    "--episode-range",
    "-r",
    help="A range of episodes to download (start-end)",
)
@click.option(
    "--file",
    "-f",
    type=click.File(),
    help="A file to read from all anime to download",
)
@click.option(
    "--force-unknown-ext",
    "-F",
    help="This option forces yt-dlp to download extensions its not aware of",
    is_flag=True,
)
@click.option(
    "--silent/--no-silent",
    "-q/-V",
    type=bool,
    help="Download silently (during download)",
    default=True,
)
@click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)")
@click.option(
    "--merge", "-m", is_flag=True, help="Merge the subfile with video using ffmpeg"
)
@click.option(
    "--clean",
    "-c",
    is_flag=True,
    help="After merging delete the original files",
)
@click.option(
    "--prompt/--no-prompt",
    help="Whether to prompt for anything instead just do the best thing",
    default=True,
)
@click.option(
    "--force-ffmpeg",
    is_flag=True,
    help="Force the use of FFmpeg for downloading (supports large variety of streams but slower)",
)
@click.option(
    "--hls-use-mpegts",
    is_flag=True,
    help="Use mpegts for hls streams, resulted in .ts file (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
)
@click.option(
    "--hls-use-h264",
    is_flag=True,
    help="Use H.264 (MP4) for hls streams, resulted in .mp4 file (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
)
@click.pass_obj
def download(config: AppConfig, **options: "Unpack[Options]"):
    from rich import print
    from rich.progress import Progress

    from ...core.exceptions import FastAnimeError
    from ...libs.players.player import create_player
    from ...libs.providers.anime.params import (
        AnimeParams,
        SearchParams,
    )
    from ...libs.providers.anime.provider import create_provider
    from ...libs.selectors.selector import create_selector

    provider = create_provider(config.general.provider)
    player = create_player(config)
    selector = create_selector(config)

    anime_titles = options["anime_title"]
    print(f"[green bold]Streaming:[/] {anime_titles}")
    for anime_title in anime_titles:
        # ---- search for anime ----
        print(f"[green bold]Searching for:[/] {anime_title}")
        with Progress() as progress:
            progress.add_task("Fetching Search Results...", total=None)
            search_results = provider.search(
                SearchParams(
                    query=anime_title, translation_type=config.stream.translation_type
                )
            )
        if not search_results:
            raise FastAnimeError("No results were found matching your query")

        _search_results = {
            search_result.title: search_result
            for search_result in search_results.results
        }

        selected_anime_title = selector.choose(
            "Select Anime", list(_search_results.keys())
        )
        if not selected_anime_title:
            raise FastAnimeError("No title selected")
        anime_result = _search_results[selected_anime_title]

        # ---- fetch selected anime ----
        with Progress() as progress:
            progress.add_task("Fetching Anime...", total=None)
            anime = provider.get(AnimeParams(id=anime_result.id))

        if not anime:
            raise FastAnimeError(f"Failed to fetch anime {anime_result.title}")
        episodes_range = []
        episodes: list[str] = sorted(
            getattr(anime.episodes, config.stream.translation_type), key=float
        )
        if options["episode_range"]:
            if ":" in options["episode_range"]:
                ep_range_tuple = options["episode_range"].split(":")
                if len(ep_range_tuple) == 3 and all(ep_range_tuple):
                    episodes_start, episodes_end, step = ep_range_tuple
                    episodes_range = episodes[
                        int(episodes_start) : int(episodes_end) : int(step)
                    ]

                elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
                    episodes_start, episodes_end = ep_range_tuple
                    episodes_range = episodes[int(episodes_start) : int(episodes_end)]
                else:
                    episodes_start, episodes_end = ep_range_tuple
                    if episodes_start.strip():
                        episodes_range = episodes[int(episodes_start) :]
                    elif episodes_end.strip():
                        episodes_range = episodes[: int(episodes_end)]
                    else:
                        episodes_range = episodes
            else:
                episodes_range = episodes[int(options["episode_range"]) :]
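
            # Note: these values are indices into the sorted episode list, not
            # episode numbers: "5:20:2" slices episodes[5:20:2], "5:" takes
            # episodes[5:], and a bare "5" is likewise treated as episodes[5:].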

            episodes_range = iter(episodes_range)

            for episode in episodes_range:
                download_anime(
                    config, options, provider, selector, player, anime, episode
                )
        else:
            episode = selector.choose(
                "Select Episode",
                getattr(anime.episodes, config.stream.translation_type),
            )
            if not episode:
                raise FastAnimeError("No episode selected")
            download_anime(config, options, provider, selector, player, anime, episode)


def download_anime(
    config: AppConfig,
    download_options: "Options",
    provider: "BaseAnimeProvider",
    selector: "BaseSelector",
    player: "BasePlayer",
    anime: "Anime",
    episode: str,
):
    from rich import print
    from rich.progress import Progress

    from ...core.downloader import DownloadParams, create_downloader
    from ...libs.players.params import PlayerParams
    from ...libs.providers.anime.params import EpisodeStreamsParams

    downloader = create_downloader(config.downloads)

    with Progress() as progress:
        progress.add_task("Fetching Episode Streams...", total=None)
        streams = provider.episode_streams(
            EpisodeStreamsParams(
                anime_id=anime.id,
                episode=episode,
                translation_type=config.stream.translation_type,
            )
        )
        if not streams:
            raise FastAnimeError(
                f"Failed to get streams for anime: {anime.title}, episode: {episode}"
            )

    if config.stream.server == "TOP":
        with Progress() as progress:
            progress.add_task("Fetching top server...", total=None)
            server = next(streams, None)
        if not server:
            raise FastAnimeError(
                f"Failed to get server for anime: {anime.title}, episode: {episode}"
            )
    else:
        with Progress() as progress:
            progress.add_task("Fetching servers", total=None)
            servers = {server.name: server for server in streams}
        servers_names = list(servers.keys())
        if config.stream.server in servers_names:
            server = servers[config.stream.server]
        else:
            server_name = selector.choose("Select Server", servers_names)
            if not server_name:
                raise FastAnimeError("Server not selected")
            server = servers[server_name]
    stream_link = server.links[0].link
    if not stream_link:
        raise FastAnimeError(
            f"Failed to get stream link for anime: {anime.title}, episode: {episode}"
        )
    print(f"[green bold]Now Downloading:[/] {anime.title} Episode: {episode}")
    downloader.download(
        DownloadParams(
            url=stream_link,
            anime_title=anime.title,
            episode_title=f"{anime.title}; Episode {episode}",
            subtitles=[sub.url for sub in server.subtitles],
            headers=server.headers,
            vid_format=config.stream.ytdlp_format,
            force_unknown_ext=download_options["force_unknown_ext"],
            verbose=download_options["verbose"],
            hls_use_mpegts=download_options["hls_use_mpegts"],
            hls_use_h264=download_options["hls_use_h264"],
            silent=download_options["silent"],
        )
    )

@@ -1,358 +0,0 @@
import logging
from typing import TYPE_CHECKING

import click

from ..utils.completion_functions import downloaded_anime_titles

logger = logging.getLogger(__name__)
if TYPE_CHECKING:
    from ..config import Config


@click.command(
    help="View and watch your downloads using mpv",
    short_help="Watch downloads",
    epilog="""
\b
\b\bExamples:
fastanime downloads
\b
# view individual episodes
fastanime downloads --view-episodes
# --- or ---
fastanime downloads -v
\b
# to set seek time when using ffmpegthumbnailer for local previews
# -1 means random and is the default
fastanime downloads --time-to-seek <intRange(-1,100)>
# --- or ---
fastanime downloads -t <intRange(-1,100)>
\b
# to watch a specific title
# be sure to get the completions for the best experience
fastanime downloads --title <title>
\b
# to get the path to the downloads folder set
fastanime downloads --path
# useful when you want to use the value for other programs
""",
)
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
@click.option(
    "--title",
    "-T",
    shell_complete=downloaded_anime_titles,
    help="watch a specific title",
)
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
@click.option(
    "--ffmpegthumbnailer-seek-time",
    "--time-to-seek",
    "-t",
    type=click.IntRange(-1, 100),
    help="ffmpegthumbnailer seek time",
)
@click.pass_obj
def downloads(
    config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
):
    import os

    from ...cli.utils.mpv import run_mpv
    from ...libs.fzf import fzf
    from ...libs.rofi import Rofi
    from ...Utility.utils import sort_by_episode_number
    from ..utils.tools import exit_app
    from ..utils.utils import fuzzy_inquirer

    if not ffmpegthumbnailer_seek_time:
        ffmpegthumbnailer_seek_time = config.ffmpegthumbnailer_seek_time
    USER_VIDEOS_DIR = config.downloads_dir
    if path:
        print(USER_VIDEOS_DIR)
        return
    if not os.path.exists(USER_VIDEOS_DIR):
        print("Downloads directory specified does not exist")
        return
    anime_downloads = sorted(
        os.listdir(USER_VIDEOS_DIR),
    )
    anime_downloads.append("Exit")

    def create_thumbnails(video_path, anime_title, downloads_thumbnail_cache_dir):
        import os
        import shutil
        import subprocess

        FFMPEG_THUMBNAILER = shutil.which("ffmpegthumbnailer")
        if not FFMPEG_THUMBNAILER:
            return

        out = os.path.join(downloads_thumbnail_cache_dir, anime_title)
        if ffmpegthumbnailer_seek_time == -1:
            import random

            seektime = str(random.randrange(0, 100))
        else:
            seektime = str(ffmpegthumbnailer_seek_time)
        _ = subprocess.run(
            [
                FFMPEG_THUMBNAILER,
                "-i",
                video_path,
                "-o",
                out,
                "-s",
                "0",
                "-t",
                seektime,
            ],
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
            check=False,
        )

    def get_previews_anime(workers=None, bg=True):
        import concurrent.futures
        import random
        import shutil
        from pathlib import Path

        if not shutil.which("ffmpegthumbnailer"):
            print("ffmpegthumbnailer not found")
            logger.error("ffmpegthumbnailer not found")
            return

        from ...constants import APP_CACHE_DIR
        from ..utils.scripts import bash_functions

        downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
        Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)

        def _worker():
            # use concurrency to download the images as fast as possible
            with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
                # load the jobs
                future_to_url = {}
                for anime_title in anime_downloads:
                    anime_path = os.path.join(USER_VIDEOS_DIR, anime_title)
                    if not os.path.isdir(anime_path):
                        continue
                    playlist = [
                        anime
                        for anime in sorted(
                            os.listdir(anime_path),
                        )
                        if "mp4" in anime
                    ]
                    if playlist:
                        # actual link to download image from
                        video_path = os.path.join(anime_path, random.choice(playlist))
                        future_to_url[
                            executor.submit(
                                create_thumbnails,
                                video_path,
                                anime_title,
                                downloads_thumbnail_cache_dir,
                            )
                        ] = anime_title

                # execute the jobs
                for future in concurrent.futures.as_completed(future_to_url):
                    url = future_to_url[future]
                    try:
                        future.result()
                    except Exception as e:
                        logger.error("%r generated an exception: %s" % (url, e))

        if bg:
            from threading import Thread

            worker = Thread(target=_worker)
            worker.daemon = True
            worker.start()
        else:
            _worker()
        os.environ["SHELL"] = shutil.which("bash") or "bash"
        preview = """
%s
if [ -s %s/{} ]; then
    if ! fzf-preview %s/{} 2>/dev/null; then
        echo Loading...
    fi
else echo Loading...
fi
""" % (
            bash_functions,
            downloads_thumbnail_cache_dir,
            downloads_thumbnail_cache_dir,
        )
        return preview
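
    # Presumably this snippet is handed to fzf as its --preview command, where
    # fzf substitutes "{}" with the highlighted entry, so each title previews
    # its cached thumbnail (assumption: fzf.run forwards `preview` unchanged).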

    def get_previews_episodes(anime_playlist_path, workers=None, bg=True):
        import shutil
        from pathlib import Path

        from ...constants import APP_CACHE_DIR
        from ..utils.scripts import bash_functions

        if not shutil.which("ffmpegthumbnailer"):
            print("ffmpegthumbnailer not found")
            logger.error("ffmpegthumbnailer not found")
            return

        downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
        Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)

        def _worker():
            import concurrent.futures

            # use concurrency to download the images as fast as possible
            # anime_playlist_path = os.path.join(USER_VIDEOS_DIR, anime_playlist_path)
            if not os.path.isdir(anime_playlist_path):
                return
            anime_episodes = sorted(
                os.listdir(anime_playlist_path), key=sort_by_episode_number
            )
            with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
                # load the jobs
                future_to_url = {}
                for episode_title in anime_episodes:
                    episode_path = os.path.join(anime_playlist_path, episode_title)

                    # actual link to download image from
                    future_to_url[
                        executor.submit(
                            create_thumbnails,
                            episode_path,
                            episode_title,
                            downloads_thumbnail_cache_dir,
                        )
                    ] = episode_title

                # execute the jobs
                for future in concurrent.futures.as_completed(future_to_url):
                    url = future_to_url[future]
                    try:
                        future.result()
                    except Exception as e:
                        logger.error("%r generated an exception: %s" % (url, e))

        if bg:
            from threading import Thread

            worker = Thread(target=_worker)
            worker.daemon = True
            worker.start()
        else:
            _worker()
        os.environ["SHELL"] = shutil.which("bash") or "bash"
        preview = """
%s
if [ -s %s/{} ]; then
    if ! fzf-preview %s/{} 2>/dev/null; then
        echo Loading...
    fi
else echo Loading...
fi
""" % (
            bash_functions,
            downloads_thumbnail_cache_dir,
            downloads_thumbnail_cache_dir,
        )
        return preview

    def stream_episode(
        anime_playlist_path,
    ):
        if view_episodes:
            if not os.path.isdir(anime_playlist_path):
                print(anime_playlist_path, "is not dir")
                exit_app(1)
                return
            episodes = sorted(
                os.listdir(anime_playlist_path), key=sort_by_episode_number
            )
            downloaded_episodes = [*episodes, "Back"]

            if config.use_fzf:
                if not config.preview:
                    episode_title = fzf.run(
                        downloaded_episodes,
                        "Enter Episode ",
                    )
                else:
                    preview = get_previews_episodes(anime_playlist_path)
                    episode_title = fzf.run(
                        downloaded_episodes,
                        "Enter Episode ",
                        preview=preview,
                    )
            elif config.use_rofi:
                episode_title = Rofi.run(downloaded_episodes, "Enter Episode")
            else:
                episode_title = fuzzy_inquirer(
                    downloaded_episodes,
                    "Enter Playlist Name",
                )
            if episode_title == "Back":
                stream_anime()
                return
            episode_path = os.path.join(anime_playlist_path, episode_title)
            if config.sync_play:
                from ..utils.syncplay import SyncPlayer

                SyncPlayer(episode_path)
            else:
                run_mpv(
                    episode_path,
                    player=config.player,
                )
            stream_episode(anime_playlist_path)

    def stream_anime(title=None):
        if title:
            from thefuzz import fuzz

            playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
        elif config.use_fzf:
            if not config.preview:
                playlist_name = fzf.run(
                    anime_downloads,
                    "Enter Playlist Name",
                )
            else:
                preview = get_previews_anime()
                playlist_name = fzf.run(
                    anime_downloads,
                    "Enter Playlist Name",
                    preview=preview,
                )
        elif config.use_rofi:
            playlist_name = Rofi.run(anime_downloads, "Enter Playlist Name")
        else:
            playlist_name = fuzzy_inquirer(
                anime_downloads,
                "Enter Playlist Name",
            )
        if playlist_name == "Exit":
            exit_app()
            return
        playlist = os.path.join(USER_VIDEOS_DIR, playlist_name)
        if view_episodes:
            stream_episode(
                playlist,
            )
        elif config.sync_play:
            from ..utils.syncplay import SyncPlayer

            SyncPlayer(playlist)
        else:
            run_mpv(
                playlist,
                player=config.player,
            )
        stream_anime()

    stream_anime(title)

@@ -2,7 +2,7 @@ from typing import TYPE_CHECKING
 
 import click
 
-from ..utils.completion_functions import anime_titles_shell_complete
+from ..utils.completions import anime_titles_shell_complete
 
 if TYPE_CHECKING:
     from ..config import Config

@@ -1,299 +0,0 @@
"""
|
||||
Queue command for manual download queue management.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
from rich.console import Console
|
||||
from rich.progress import Progress
|
||||
from rich.table import Table
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from fastanime.core.config import AppConfig
|
||||
|
||||
from ..utils.download_queue import DownloadJob, DownloadStatus, QueueManager
|
||||
from ..utils.feedback import create_feedback_manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Manage the download queue",
|
||||
short_help="Download queue management",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# Show queue status
|
||||
fastanime queue
|
||||
|
||||
# Add anime to download queue
|
||||
fastanime queue --add "Attack on Titan" --episode "1"
|
||||
|
||||
# Add with specific quality and priority
|
||||
fastanime queue --add "Demon Slayer" --episode "5" --quality "720" --priority 2
|
||||
|
||||
# Clear completed jobs
|
||||
fastanime queue --clean
|
||||
|
||||
# Remove specific job
|
||||
fastanime queue --remove <job-id>
|
||||
|
||||
# Show detailed queue information
|
||||
fastanime queue --detailed
|
||||
""",
|
||||
)
|
||||
@click.option(
|
||||
"--add", "-a",
|
||||
help="Add anime to download queue (anime title)"
|
||||
)
|
||||
@click.option(
|
||||
"--episode", "-e",
|
||||
help="Episode number to download (required with --add)"
|
||||
)
|
||||
@click.option(
|
||||
"--quality", "-q",
|
||||
type=click.Choice(["360", "480", "720", "1080"]),
|
||||
default="1080",
|
||||
help="Video quality preference"
|
||||
)
|
||||
@click.option(
|
||||
"--priority", "-p",
|
||||
type=click.IntRange(1, 10),
|
||||
default=5,
|
||||
help="Download priority (1=highest, 10=lowest)"
|
||||
)
|
||||
@click.option(
|
||||
"--translation-type", "-t",
|
||||
type=click.Choice(["sub", "dub"]),
|
||||
default="sub",
|
||||
help="Audio/subtitle preference"
|
||||
)
|
||||
@click.option(
|
||||
"--remove", "-r",
|
||||
help="Remove job from queue by ID"
|
||||
)
|
||||
@click.option(
|
||||
"--clean", "-c",
|
||||
is_flag=True,
|
||||
help="Remove completed/failed jobs older than 7 days"
|
||||
)
|
||||
@click.option(
|
||||
"--detailed", "-d",
|
||||
is_flag=True,
|
||||
help="Show detailed queue information"
|
||||
)
|
||||
@click.option(
|
||||
"--cancel",
|
||||
help="Cancel a specific job by ID"
|
||||
)
|
||||
@click.pass_obj
|
||||
def queue(
|
||||
config: "AppConfig",
|
||||
add: str,
|
||||
episode: str,
|
||||
quality: str,
|
||||
priority: int,
|
||||
translation_type: str,
|
||||
remove: str,
|
||||
clean: bool,
|
||||
detailed: bool,
|
||||
cancel: str
|
||||
):
|
||||
"""Manage the download queue for automated and manual downloads."""
|
||||
|
||||
console = Console()
|
||||
feedback = create_feedback_manager(config.general.icons)
|
||||
queue_manager = QueueManager()
|
||||
|
||||
try:
|
||||
# Add new job to queue
|
||||
if add:
|
||||
if not episode:
|
||||
feedback.error("Episode number is required when adding to queue",
|
||||
"Use --episode to specify the episode number")
|
||||
raise click.Abort()
|
||||
|
||||
job_id = str(uuid.uuid4())
|
||||
job = DownloadJob(
|
||||
id=job_id,
|
||||
anime_title=add,
|
||||
episode=episode,
|
||||
quality=quality,
|
||||
translation_type=translation_type,
|
||||
priority=priority,
|
||||
auto_added=False
|
||||
)
|
||||
|
||||
success = queue_manager.add_job(job)
|
||||
if success:
|
||||
feedback.success(
|
||||
f"Added to queue: {add} Episode {episode}",
|
||||
f"Job ID: {job_id[:8]}... Priority: {priority}"
|
||||
)
|
||||
else:
|
||||
feedback.error("Failed to add job to queue", "Check logs for details")
|
||||
raise click.Abort()
|
||||
return
|
||||
|
||||
# Remove job from queue
|
||||
if remove:
|
||||
# Allow partial job ID matching
|
||||
matching_jobs = [
|
||||
job_id for job_id in queue_manager.queue.jobs.keys()
|
||||
if job_id.startswith(remove)
|
||||
]
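            # e.g. `fastanime queue --remove 3fa8` matches the single job whose
            # UUID starts with "3fa8"; ambiguous prefixes are rejected below.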

            if not matching_jobs:
                feedback.error(f"No job found with ID starting with: {remove}")
                raise click.Abort()
            elif len(matching_jobs) > 1:
                feedback.error(f"Multiple jobs match ID: {remove}",
                               f"Be more specific. Matches: {[job_id[:8] for job_id in matching_jobs]}")
                raise click.Abort()

            job_id = matching_jobs[0]
            job = queue_manager.get_job_by_id(job_id)
            success = queue_manager.remove_job(job_id)

            if success:
                feedback.success(
                    f"Removed from queue: {job.anime_title} Episode {job.episode}",
                    f"Job ID: {job_id[:8]}..."
                )
            else:
                feedback.error("Failed to remove job from queue", "Check logs for details")
                raise click.Abort()
            return

        # Cancel job
        if cancel:
            # Allow partial job ID matching
            matching_jobs = [
                job_id for job_id in queue_manager.queue.jobs.keys()
                if job_id.startswith(cancel)
            ]

            if not matching_jobs:
                feedback.error(f"No job found with ID starting with: {cancel}")
                raise click.Abort()
            elif len(matching_jobs) > 1:
                feedback.error(f"Multiple jobs match ID: {cancel}",
                               f"Be more specific. Matches: {[job_id[:8] for job_id in matching_jobs]}")
                raise click.Abort()

            job_id = matching_jobs[0]
            job = queue_manager.get_job_by_id(job_id)
            success = queue_manager.update_job_status(job_id, DownloadStatus.CANCELLED)

            if success:
                feedback.success(
                    f"Cancelled job: {job.anime_title} Episode {job.episode}",
                    f"Job ID: {job_id[:8]}..."
                )
            else:
                feedback.error("Failed to cancel job", "Check logs for details")
                raise click.Abort()
            return

        # Clean old completed jobs
        if clean:
            with Progress() as progress:
                task = progress.add_task("Cleaning old jobs...", total=None)
                cleaned_count = queue_manager.clean_completed_jobs()
                progress.update(task, completed=True)

            if cleaned_count > 0:
                feedback.success(f"Cleaned {cleaned_count} old jobs from queue")
            else:
                feedback.info("No old jobs to clean")
            return

        # Show queue status (default action)
        _display_queue_status(console, queue_manager, detailed, config.general.icons)

    except Exception as e:
        feedback.error("An error occurred while managing the queue", str(e))
        logger.error(f"Queue command error: {e}")
        raise click.Abort()


def _display_queue_status(console: Console, queue_manager: QueueManager, detailed: bool, icons: bool):
    """Display the current queue status."""

    stats = queue_manager.get_queue_stats()

    # Display summary
    console.print()
    console.print(f"{'📥 ' if icons else ''}[bold cyan]Download Queue Status[/bold cyan]")
    console.print()

    summary_table = Table(title="Queue Summary")
    summary_table.add_column("Status", style="cyan")
    summary_table.add_column("Count", justify="right", style="green")

    summary_table.add_row("Total Jobs", str(stats["total"]))
    summary_table.add_row("Pending", str(stats["pending"]))
    summary_table.add_row("Downloading", str(stats["downloading"]))
    summary_table.add_row("Completed", str(stats["completed"]))
    summary_table.add_row("Failed", str(stats["failed"]))
    summary_table.add_row("Cancelled", str(stats["cancelled"]))

    console.print(summary_table)
    console.print()

    if detailed or stats["total"] > 0:
        _display_detailed_queue(console, queue_manager, icons)


def _display_detailed_queue(console: Console, queue_manager: QueueManager, icons: bool):
    """Display detailed information about jobs in the queue."""

    jobs = queue_manager.get_all_jobs()
    if not jobs:
        console.print(f"{'ℹ️ ' if icons else ''}[dim]No jobs in queue[/dim]")
        return

    # Sort jobs by status and creation time
    jobs.sort(key=lambda x: (x.status.value, x.created_at))

    table = Table(title="Job Details")
    table.add_column("ID", width=8)
    table.add_column("Anime", style="cyan")
    table.add_column("Episode", justify="center")
    table.add_column("Status", justify="center")
    table.add_column("Priority", justify="center")
    table.add_column("Quality", justify="center")
    table.add_column("Type", justify="center")
    table.add_column("Created", style="dim")

    status_colors = {
        DownloadStatus.PENDING: "yellow",
        DownloadStatus.DOWNLOADING: "blue",
        DownloadStatus.COMPLETED: "green",
        DownloadStatus.FAILED: "red",
        DownloadStatus.CANCELLED: "dim"
    }

    for job in jobs:
        status_color = status_colors.get(job.status, "white")
        auto_marker = f"{'🤖' if icons else 'A'}" if job.auto_added else f"{'👤' if icons else 'M'}"

        table.add_row(
            job.id[:8],
            job.anime_title[:30] + "..." if len(job.anime_title) > 30 else job.anime_title,
            job.episode,
            f"[{status_color}]{job.status.value}[/{status_color}]",
            str(job.priority),
            job.quality,
            f"{auto_marker} {job.translation_type}",
            job.created_at.strftime("%m-%d %H:%M")
        )

    console.print(table)

    if icons:
        console.print()
        console.print("[dim]🤖 = Auto-added, 👤 = Manual[/dim]")

@@ -4,7 +4,7 @@ import click
 
 from ...core.config import AppConfig
 from ...core.exceptions import FastAnimeError
-from ..utils.completion_functions import anime_titles_shell_complete
+from ..utils.completions import anime_titles_shell_complete
 from . import examples
 
 if TYPE_CHECKING:

@@ -1,547 +0,0 @@
"""
|
||||
Background service for automated download queue processing and episode monitoring.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Dict, List, Optional, Set, cast, Literal
|
||||
|
||||
import click
|
||||
from rich.console import Console
|
||||
from rich.progress import Progress
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from fastanime.core.config import AppConfig
|
||||
from fastanime.libs.api.base import BaseApiClient
|
||||
from fastanime.libs.api.types import MediaItem
|
||||
|
||||
from ..utils.download_queue import DownloadJob, DownloadStatus, QueueManager
|
||||
from ..utils.feedback import create_feedback_manager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DownloadService:
|
||||
"""Background service for processing download queue and monitoring new episodes."""
|
||||
|
||||
def __init__(self, config: "AppConfig"):
|
||||
self.config = config
|
||||
self.queue_manager = QueueManager()
|
||||
self.console = Console()
|
||||
self.feedback = create_feedback_manager(config.general.icons)
|
||||
self._running = False
|
||||
self._shutdown_event = threading.Event()
|
||||
|
||||
# Service state
|
||||
self.last_watchlist_check = datetime.now() - timedelta(hours=1) # Force initial check
|
||||
self.known_episodes: Dict[int, Set[str]] = {} # media_id -> set of episode numbers
|
||||
self.last_notification_check = datetime.now() - timedelta(minutes=10)
|
||||
|
||||
# Configuration
|
||||
self.watchlist_check_interval = self.config.service.watchlist_check_interval * 60 # Convert to seconds
|
||||
self.queue_process_interval = self.config.service.queue_process_interval * 60 # Convert to seconds
|
||||
self.notification_check_interval = 2 * 60 # 2 minutes in seconds
|
||||
self.max_concurrent_downloads = self.config.service.max_concurrent_downloads
|
||||
|
||||
# State file for persistence
|
||||
from fastanime.core.constants import APP_DATA_DIR
|
||||
self.state_file = APP_DATA_DIR / "service_state.json"
|
||||
|
||||
def _load_state(self):
|
||||
"""Load service state from file."""
|
||||
try:
|
||||
if self.state_file.exists():
|
||||
with open(self.state_file, 'r') as f:
|
||||
data = json.load(f)
|
||||
self.known_episodes = {
|
||||
int(k): set(v) for k, v in data.get('known_episodes', {}).items()
|
||||
}
|
||||
self.last_watchlist_check = datetime.fromisoformat(
|
||||
data.get('last_watchlist_check', datetime.now().isoformat())
|
||||
)
|
||||
logger.info("Service state loaded successfully")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to load service state: {e}")
|
||||
|
||||
def _save_state(self):
|
||||
"""Save service state to file."""
|
||||
try:
|
||||
data = {
|
||||
'known_episodes': {
|
||||
str(k): list(v) for k, v in self.known_episodes.items()
|
||||
},
|
||||
'last_watchlist_check': self.last_watchlist_check.isoformat(),
|
||||
'last_saved': datetime.now().isoformat()
|
||||
}
|
||||
with open(self.state_file, 'w') as f:
|
||||
json.dump(data, f, indent=2)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to save service state: {e}")
|
||||
|
||||
def start(self):
|
||||
"""Start the background service."""
|
||||
logger.info("Starting FastAnime download service...")
|
||||
self.console.print(f"{'🚀 ' if self.config.general.icons else ''}[bold green]Starting FastAnime Download Service[/bold green]")
|
||||
|
||||
# Load previous state
|
||||
self._load_state()
|
||||
|
||||
# Set up signal handlers for graceful shutdown
|
||||
signal.signal(signal.SIGINT, self._signal_handler)
|
||||
signal.signal(signal.SIGTERM, self._signal_handler)
|
||||
|
||||
self._running = True
|
||||
|
||||
# Start worker threads
|
||||
watchlist_thread = threading.Thread(target=self._watchlist_monitor, daemon=True)
|
||||
queue_thread = threading.Thread(target=self._queue_processor, daemon=True)
|
||||
|
||||
watchlist_thread.start()
|
||||
queue_thread.start()
|
||||
|
||||
self.console.print(f"{'✅ ' if self.config.general.icons else ''}Service started successfully")
|
||||
self.console.print(f"{'📊 ' if self.config.general.icons else ''}Monitoring watchlist every {self.watchlist_check_interval // 60} minutes")
|
||||
self.console.print(f"{'⚙️ ' if self.config.general.icons else ''}Processing queue every {self.queue_process_interval} seconds")
|
||||
self.console.print(f"{'🛑 ' if self.config.general.icons else ''}Press Ctrl+C to stop")
|
||||
|
||||
try:
|
||||
# Main loop - just wait for shutdown
|
||||
while self._running and not self._shutdown_event.wait(timeout=10):
|
||||
self._save_state() # Periodic state saving
|
||||
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
self._shutdown()
|
||||
|
||||
    def _signal_handler(self, signum, frame):
        """Handle shutdown signals."""
        logger.info(f"Received signal {signum}, shutting down...")
        self._running = False
        self._shutdown_event.set()

    def _shutdown(self):
        """Gracefully shut down the service."""
        logger.info("Shutting down download service...")
        self.console.print(f"{'🛑 ' if self.config.general.icons else ''}[yellow]Shutting down service...[/yellow]")

        self._running = False
        self._shutdown_event.set()

        # Save final state
        self._save_state()

        # Cancel any running downloads
        active_jobs = self.queue_manager.get_active_jobs()
        for job in active_jobs:
            self.queue_manager.update_job_status(job.id, DownloadStatus.CANCELLED)

        self.console.print(f"{'✅ ' if self.config.general.icons else ''}Service stopped")
        logger.info("Download service shutdown complete")
    def _watchlist_monitor(self):
        """Monitor the user's AniList watching list for new episodes."""
        logger.info("Starting watchlist monitor thread")

        while self._running:
            try:
                if (datetime.now() - self.last_watchlist_check).total_seconds() >= self.watchlist_check_interval:
                    self._check_for_new_episodes()
                    self.last_watchlist_check = datetime.now()

                # Check for notifications (like the existing notifier)
                if (datetime.now() - self.last_notification_check).total_seconds() >= self.notification_check_interval:
                    self._check_notifications()
                    self.last_notification_check = datetime.now()
            except Exception as e:
                logger.error(f"Error in watchlist monitor: {e}")

            # Sleep, but wake early on shutdown
            if self._shutdown_event.wait(timeout=60):
                break

        logger.info("Watchlist monitor thread stopped")

    def _queue_processor(self):
        """Process the download queue."""
        logger.info("Starting queue processor thread")

        while self._running:
            try:
                self._process_download_queue()
            except Exception as e:
                logger.error(f"Error in queue processor: {e}")

            # Sleep, but wake early on shutdown
            if self._shutdown_event.wait(timeout=self.queue_process_interval):
                break

        logger.info("Queue processor thread stopped")
    def _check_for_new_episodes(self):
        """Check the user's watching list for newly released episodes."""
        try:
            logger.info("Checking for new episodes in watchlist...")

            # Get authenticated API client
            from fastanime.libs.api.factory import create_api_client
            from fastanime.libs.api.params import UserListParams

            api_client = create_api_client(self.config.general.api_client, self.config)

            # Check if the user is authenticated
            user_profile = api_client.get_viewer_profile()
            if not user_profile:
                logger.warning("User not authenticated, skipping watchlist check")
                return

            # Fetch currently watching anime
            with Progress() as progress:
                task = progress.add_task("Checking watchlist...", total=None)

                list_params = UserListParams(
                    status="CURRENT",  # Currently watching
                    page=1,
                    per_page=50
                )
                user_list = api_client.fetch_user_list(list_params)
                progress.update(task, completed=True)

            if not user_list or not user_list.media:
                logger.info("No anime found in watching list")
                return

            new_episodes_found = 0

            for media_item in user_list.media:
                try:
                    media_id = media_item.id

                    # Get available episodes from the provider
                    available_episodes = self._get_available_episodes(media_item)
                    if not available_episodes:
                        continue

                    # Check whether there are new episodes
                    known_eps = self.known_episodes.get(media_id, set())
                    new_episodes = set(available_episodes) - known_eps

                    if new_episodes:
                        logger.info(f"Found {len(new_episodes)} new episodes for {media_item.title.romaji or media_item.title.english}")

                        # Add new episodes to the download queue
                        # (the sort key tolerates fractional episodes like "10.5")
                        for episode in sorted(new_episodes, key=lambda x: float(x) if x.replace('.', '').isdigit() else 0):
                            self._add_episode_to_queue(media_item, episode)
                            new_episodes_found += 1

                    # Update known episodes either way (in case some were removed)
                    self.known_episodes[media_id] = set(available_episodes)
                except Exception as e:
                    logger.error(f"Error checking episodes for {media_item.title.romaji}: {e}")

            if new_episodes_found > 0:
                logger.info(f"Added {new_episodes_found} new episodes to download queue")
                self.console.print(f"{'📺 ' if self.config.general.icons else ''}Found {new_episodes_found} new episodes, added to queue")
            else:
                logger.info("No new episodes found")
        except Exception as e:
            logger.error(f"Error checking for new episodes: {e}")
    def _get_available_episodes(self, media_item: "MediaItem") -> List[str]:
        """Get the episodes a provider has available for a media item."""
        try:
            from fastanime.libs.providers.anime.provider import create_provider
            from fastanime.libs.providers.anime.params import AnimeParams, SearchParams

            provider = create_provider(self.config.general.provider)

            # Search for the anime
            search_results = provider.search(SearchParams(
                query=media_item.title.romaji or media_item.title.english or "Unknown",
                translation_type=self.config.stream.translation_type
            ))

            if not search_results or not search_results.results:
                return []

            # Take the first result (assumed to be the best match)
            anime_result = search_results.results[0]

            # Get anime details
            anime = provider.get(AnimeParams(id=anime_result.id))
            if not anime or not anime.episodes:
                return []

            # Get episodes for the configured translation type
            episodes = getattr(anime.episodes, self.config.stream.translation_type, [])
            return sorted(episodes, key=lambda x: float(x) if x.replace('.', '').isdigit() else 0)
        except Exception as e:
            logger.error(f"Error getting available episodes: {e}")
            return []
    def _add_episode_to_queue(self, media_item: "MediaItem", episode: str):
        """Add an episode to the download queue."""
        try:
            job_id = str(uuid.uuid4())
            job = DownloadJob(
                id=job_id,
                anime_title=media_item.title.romaji or media_item.title.english or "Unknown",
                episode=episode,
                media_id=media_item.id,
                quality=self.config.stream.quality,
                translation_type=self.config.stream.translation_type,
                priority=1,  # High priority for auto-added episodes (lower number = higher priority)
                auto_added=True
            )

            success = self.queue_manager.add_job(job)
            if success:
                logger.info(f"Auto-queued: {job.anime_title} Episode {episode}")
        except Exception as e:
            logger.error(f"Error adding episode to queue: {e}")

    def _check_notifications(self):
        """Check for AniList notifications (similar to the existing notifier)."""
        try:
            # This mirrors the existing notifier functionality;
            # its logic can be reused here if needed.
            pass
        except Exception as e:
            logger.error(f"Error checking notifications: {e}")
    def _process_download_queue(self):
        """Process pending downloads in the queue."""
        try:
            # Get currently active downloads
            active_jobs = self.queue_manager.get_active_jobs()
            available_slots = max(0, self.max_concurrent_downloads - len(active_jobs))

            if available_slots == 0:
                return  # All slots busy

            # Get pending jobs
            pending_jobs = self.queue_manager.get_pending_jobs(limit=available_slots)
            if not pending_jobs:
                return  # No pending jobs

            logger.info(f"Processing {len(pending_jobs)} download jobs")

            # Process jobs concurrently
            with ThreadPoolExecutor(max_workers=available_slots) as executor:
                futures = {
                    executor.submit(self._download_episode, job): job
                    for job in pending_jobs
                }

                for future in as_completed(futures):
                    job = futures[future]
                    try:
                        success = future.result()
                        if success:
                            logger.info(f"Successfully downloaded: {job.anime_title} Episode {job.episode}")
                        else:
                            logger.error(f"Failed to download: {job.anime_title} Episode {job.episode}")
                    except Exception as e:
                        logger.error(f"Error downloading {job.anime_title} Episode {job.episode}: {e}")
                        self.queue_manager.update_job_status(job.id, DownloadStatus.FAILED, str(e))
        except Exception as e:
            logger.error(f"Error processing download queue: {e}")
    def _download_episode(self, job: DownloadJob) -> bool:
        """Download a specific episode."""
        try:
            logger.info(f"Starting download: {job.anime_title} Episode {job.episode}")

            # Update job status to downloading
            self.queue_manager.update_job_status(job.id, DownloadStatus.DOWNLOADING)

            # Import download functionality
            from fastanime.libs.providers.anime.provider import create_provider
            from fastanime.libs.providers.anime.params import AnimeParams, SearchParams, EpisodeStreamsParams
            from fastanime.core.downloader import create_downloader, DownloadParams

            # Create required components
            provider = create_provider(self.config.general.provider)
            downloader = create_downloader(self.config.downloads)

            # Ensure translation_type is a valid Literal value
            translation_type = cast(
                Literal["sub", "dub"],
                job.translation_type if job.translation_type in ["sub", "dub"] else "sub",
            )

            # Search for the anime
            search_results = provider.search(SearchParams(
                query=job.anime_title,
                translation_type=translation_type
            ))

            if not search_results or not search_results.results:
                raise Exception("No search results found")

            # Get anime details
            anime_result = search_results.results[0]
            anime = provider.get(AnimeParams(id=anime_result.id))

            if not anime:
                raise Exception("Failed to get anime details")

            # Get episode streams
            streams = provider.episode_streams(EpisodeStreamsParams(
                anime_id=anime.id,
                episode=job.episode,
                translation_type=translation_type
            ))

            if not streams:
                raise Exception("No streams found")

            # Get the first available server
            server = next(streams, None)
            if not server:
                raise Exception("No server available")

            # Download using the first available link
            if not server.links:
                raise Exception("No download links available")

            link = server.links[0]
            logger.info(f"Starting download: {link.link} for {job.anime_title} Episode {job.episode}")

            # Prepare download parameters
            download_params = DownloadParams(
                url=link.link,
                anime_title=job.anime_title,
                episode_title=f"Episode {job.episode}",
                silent=True,  # Run silently in the background
                headers=server.headers,  # Use server headers
                subtitles=[sub.url for sub in server.subtitles],  # Extract subtitle URLs
                merge=False,
                clean=False,
                prompt=False,  # No prompts in a background service
                force_ffmpeg=False,
                hls_use_mpegts=False,
                hls_use_h264=False
            )

            # Download the episode
            downloader.download(download_params)
            logger.info(f"Successfully downloaded: {job.anime_title} Episode {job.episode}")
            self.queue_manager.update_job_status(job.id, DownloadStatus.COMPLETED)
            return True
        except Exception as e:
            logger.error(f"Download failed for {job.anime_title} Episode {job.episode}: {e}")

            # Handle retry logic
            job.retry_count += 1
            if job.retry_count < self.queue_manager.queue.auto_retry_count:
                # Reset to pending for retry
                self.queue_manager.update_job_status(job.id, DownloadStatus.PENDING, f"Retry {job.retry_count}: {str(e)}")
            else:
                # Mark as failed after max retries
                self.queue_manager.update_job_status(job.id, DownloadStatus.FAILED, f"Max retries exceeded: {str(e)}")

            return False
@click.command(
    help="Run background service for automated downloads and episode monitoring",
    short_help="Background download service",
    epilog="""
\b
\b\bExamples:
  # Start the service
  fastanime service

  # Run in the background (Linux/macOS)
  nohup fastanime service > /dev/null 2>&1 &

  # Run with logging
  fastanime --log service

  # Run with file logging
  fastanime --log-to-file service
""",
)
@click.option(
    "--watchlist-interval",
    type=int,
    help="Minutes between watchlist checks (default from config)"
)
@click.option(
    "--queue-interval",
    type=int,
    help="Minutes between queue processing (default from config)"
)
@click.option(
    "--max-concurrent",
    type=int,
    help="Maximum concurrent downloads (default from config)"
)
@click.pass_obj
def service(config: "AppConfig", watchlist_interval: Optional[int], queue_interval: Optional[int], max_concurrent: Optional[int]):
    """
    Run the FastAnime background service for automated downloads.

    The service will:
    - Monitor your AniList watching list for new episodes
    - Automatically queue new episodes for download
    - Process the download queue
    - Provide notifications for new episodes
    """
    try:
        # Override configuration with command-line options if provided
        service_instance = DownloadService(config)
        if watchlist_interval is not None:
            service_instance.watchlist_check_interval = watchlist_interval * 60
        if queue_interval is not None:
            service_instance.queue_process_interval = queue_interval * 60
        if max_concurrent is not None:
            service_instance.max_concurrent_downloads = max_concurrent

        # Start the service
        service_instance.start()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        console = Console()
        console.print(f"[red]Service error: {e}[/red]")
        logger.error(f"Service error: {e}")
        sys.exit(1)
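
# For long-running use, a user-level systemd unit is one option (illustrative
# sketch only; no unit file ships with the project):
#
#   [Unit]
#   Description=FastAnime background download service
#
#   [Service]
#   ExecStart=%h/.local/bin/fastanime service
#   Restart=on-failure
#
#   [Install]
#   WantedBy=default.target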
@@ -6,7 +6,7 @@ from rich.console import Console
from ....libs.api.params import ApiSearchParams, UserListParams
from ....libs.api.types import MediaSearchResult, MediaStatus, UserListStatusType
from ...utils.feedback import create_feedback_manager, execute_with_feedback
from ...utils.auth_utils import format_auth_menu_header, check_authentication_required
from ...utils.auth.utils import format_auth_menu_header, check_authentication_required
from ..session import Context, session
from ..state import ControlFlow, MediaApiState, State


@@ -7,7 +7,7 @@ from ....libs.api.params import UpdateListEntryParams
from ....libs.api.types import MediaItem
from ....libs.players.params import PlayerParams
from ...utils.feedback import create_feedback_manager, execute_with_feedback
from ...utils.auth_utils import check_authentication_required, get_auth_status_indicator
from ...utils.auth.utils import check_authentication_required, get_auth_status_indicator
from ..session import Context, session
from ..state import ControlFlow, ProviderState, State


@@ -2,7 +2,7 @@ from rich.console import Console

from ....libs.api.types import MediaItem
from ....libs.api.params import ApiSearchParams, UserListParams
from ...utils.auth_utils import get_auth_status_indicator
from ...utils.auth.utils import get_auth_status_indicator
from ...utils.feedback import create_feedback_manager, execute_with_feedback
from ..session import Context, session
from ..state import ControlFlow, MediaApiState, State

@@ -15,7 +15,7 @@ from ...libs.players.base import BasePlayer
from ...libs.providers.anime.base import BaseAnimeProvider
from ...libs.selectors.base import BaseSelector
from ..config import ConfigLoader
from ..utils.session_manager import SessionManager
from ..utils.session.manager import SessionManager
from .state import ControlFlow, State

logger = logging.getLogger(__name__)

@@ -5,9 +5,9 @@ Provides functions to check authentication status and display user information.

from typing import Optional

from ...libs.api.base import BaseApiClient
from ...libs.api.types import UserProfile
from .feedback import FeedbackManager
from ....libs.api.base import BaseApiClient
from ....libs.api.types import UserProfile
from ..feedback import FeedbackManager


def get_auth_status_indicator(
7
fastanime/cli/services/integration/__init__.py
Normal file
@@ -0,0 +1,7 @@
"""
Integration services for synchronizing watch history and download tracking.
"""

from .sync import HistoryDownloadSync

__all__ = ["HistoryDownloadSync"]
301
fastanime/cli/services/integration/sync.py
Normal file
@@ -0,0 +1,301 @@
"""
Synchronization service between watch history and download tracking.

This module provides functionality to keep watch history and download status
in sync, enabling features like offline availability markers and smart
download suggestions based on viewing patterns.
"""

from __future__ import annotations

import logging
from typing import List, Optional

from ....libs.api.types import MediaItem
from ..downloads.manager import DownloadManager
from ..watch_history.manager import WatchHistoryManager
from ..watch_history.types import WatchHistoryEntry

logger = logging.getLogger(__name__)


class HistoryDownloadSync:
    """
    Service to synchronize watch history and download tracking.

    Provides bidirectional synchronization between viewing history and
    download status, enabling features like offline availability and
    smart download recommendations.
    """

    def __init__(self, watch_manager: WatchHistoryManager, download_manager: DownloadManager):
        self.watch_manager = watch_manager
        self.download_manager = download_manager

    def sync_download_status(self, media_id: int) -> bool:
        """
        Update watch history with download availability status.

        Args:
            media_id: The media ID to sync

        Returns:
            True if the sync was successful
        """
        try:
            # Get download record
            download_record = self.download_manager.get_download_record(media_id)
            if not download_record:
                return False

            # Get watch history entry
            watch_entry = self.watch_manager.get_entry_by_media_id(media_id)
            if not watch_entry:
                return False

            # Check if any episodes are downloaded
            has_downloads = any(
                ep.is_completed for ep in download_record.episodes.values()
            )

            # Check if the current/next episode is available offline
            current_episode = watch_entry.last_watched_episode
            next_episode = current_episode + 1

            offline_available = (
                current_episode in download_record.episodes and
                download_record.episodes[current_episode].is_completed
            ) or (
                next_episode in download_record.episodes and
                download_record.episodes[next_episode].is_completed
            )

            # Update watch history entry
            updated_entry = watch_entry.model_copy(update={
                "has_downloads": has_downloads,
                "offline_available": offline_available
            })

            return self.watch_manager.save_entry(updated_entry)
        except Exception as e:
            logger.error(f"Failed to sync download status for media {media_id}: {e}")
            return False
    def mark_episodes_offline_available(self, media_id: int, episodes: List[int]) -> bool:
        """
        Mark specific episodes as available offline in watch history.

        Args:
            media_id: The media ID
            episodes: List of episode numbers that are available offline

        Returns:
            True if successful
        """
        try:
            watch_entry = self.watch_manager.get_entry_by_media_id(media_id)
            if not watch_entry:
                return False

            # Check if the current or next episode is among the available episodes
            current_episode = watch_entry.last_watched_episode
            next_episode = current_episode + 1

            offline_available = (
                current_episode in episodes or
                next_episode in episodes or
                len(episodes) > 0  # Any episodes available
            )

            updated_entry = watch_entry.model_copy(update={
                "has_downloads": len(episodes) > 0,
                "offline_available": offline_available
            })

            return self.watch_manager.save_entry(updated_entry)
        except Exception as e:
            logger.error(f"Failed to mark episodes offline available for media {media_id}: {e}")
            return False
    def suggest_downloads_for_watching(self, media_id: int, lookahead: int = 3) -> List[int]:
        """
        Suggest episodes to download based on watch history.

        Args:
            media_id: The media ID
            lookahead: Number of episodes ahead to suggest

        Returns:
            List of episode numbers to download
        """
        try:
            watch_entry = self.watch_manager.get_entry_by_media_id(media_id)
            if not watch_entry or watch_entry.status != "watching":
                return []

            download_record = self.download_manager.get_download_record(media_id)
            if not download_record:
                return []

            # Get currently downloaded episodes
            downloaded_episodes = set(
                ep_num for ep_num, ep in download_record.episodes.items()
                if ep.is_completed
            )

            # Suggest the next episodes
            current_episode = watch_entry.last_watched_episode
            total_episodes = watch_entry.media_item.episodes or 999

            suggestions = []
            for i in range(1, lookahead + 1):
                next_episode = current_episode + i
                if (next_episode <= total_episodes and
                        next_episode not in downloaded_episodes):
                    suggestions.append(next_episode)

            return suggestions
        except Exception as e:
            logger.error(f"Failed to suggest downloads for media {media_id}: {e}")
            return []
    def suggest_downloads_for_completed(self, limit: int = 5) -> List[MediaItem]:
        """
        Suggest anime to download based on completed watch history.

        Args:
            limit: Maximum number of suggestions

        Returns:
            List of MediaItems to consider for download
        """
        try:
            # Get completed anime from watch history
            completed_entries = self.watch_manager.get_entries_by_status("completed")

            suggestions = []
            for entry in completed_entries[:limit]:
                # Skip anime that are already fully downloaded
                download_record = self.download_manager.get_download_record(entry.media_item.id)

                if not download_record:
                    suggestions.append(entry.media_item)
                elif download_record.completion_percentage < 100:
                    suggestions.append(entry.media_item)

            return suggestions
        except Exception as e:
            logger.error(f"Failed to suggest downloads for completed anime: {e}")
            return []
    def sync_all_entries(self) -> int:
        """
        Sync download status for all watch history entries.

        Returns:
            Number of entries successfully synced
        """
        try:
            watch_entries = self.watch_manager.get_all_entries()
            synced_count = 0

            for entry in watch_entries:
                if self.sync_download_status(entry.media_item.id):
                    synced_count += 1

            logger.info(f"Synced download status for {synced_count}/{len(watch_entries)} entries")
            return synced_count
        except Exception as e:
            logger.error(f"Failed to sync all entries: {e}")
            return 0
    def update_watch_progress_from_downloads(self, media_id: int) -> bool:
        """
        Update watch progress based on downloaded episodes.

        Useful when episodes are watched outside the app but the files exist.

        Args:
            media_id: The media ID to update

        Returns:
            True if successful
        """
        try:
            download_record = self.download_manager.get_download_record(media_id)
            if not download_record:
                return False

            watch_entry = self.watch_manager.get_entry_by_media_id(media_id)
            if not watch_entry:
                # Create a new watch entry if none exists
                watch_entry = WatchHistoryEntry(
                    media_item=download_record.media_item,
                    status="watching"
                )

            # Find the highest downloaded episode
            downloaded_episodes = [
                ep_num for ep_num, ep in download_record.episodes.items()
                if ep.is_completed
            ]

            if downloaded_episodes:
                max_downloaded = max(downloaded_episodes)

                # Only update if more episodes are downloaded than watched
                if max_downloaded > watch_entry.last_watched_episode:
                    updated_entry = watch_entry.model_copy(update={
                        "last_watched_episode": max_downloaded,
                        "watch_progress": 1.0,  # Assume completed if downloaded
                        "has_downloads": True,
                        "offline_available": True
                    })

                    return self.watch_manager.save_entry(updated_entry)

            return True
        except Exception as e:
            logger.error(f"Failed to update watch progress from downloads for media {media_id}: {e}")
            return False
    def get_offline_watchable_anime(self) -> List[WatchHistoryEntry]:
        """
        Get the list of anime that can be watched offline.

        Returns:
            List of watch history entries with offline episodes available
        """
        try:
            watch_entries = self.watch_manager.get_all_entries()
            offline_entries = []

            for entry in watch_entries:
                if entry.offline_available:
                    offline_entries.append(entry)
                else:
                    # Double-check by looking at downloads
                    download_record = self.download_manager.get_download_record(entry.media_item.id)
                    if download_record:
                        next_episode = entry.last_watched_episode + 1
                        if (next_episode in download_record.episodes and
                                download_record.episodes[next_episode].is_completed):
                            offline_entries.append(entry)

            return offline_entries
        except Exception as e:
            logger.error(f"Failed to get offline watchable anime: {e}")
            return []


def create_sync_service(watch_manager: WatchHistoryManager,
                        download_manager: DownloadManager) -> HistoryDownloadSync:
    """Factory function to create a synchronization service."""
    return HistoryDownloadSync(watch_manager, download_manager)
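
# A minimal usage sketch (hypothetical wiring; how the managers are actually
# constructed may differ elsewhere in the codebase):
#
#   watch_manager = WatchHistoryManager()
#   download_manager = DownloadManager()
#   sync = create_sync_service(watch_manager, download_manager)
#   sync.sync_all_entries()
#   for episode in sync.suggest_downloads_for_watching(media_id=21, lookahead=3):
#       ...  # queue each suggested episode for download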
0
fastanime/cli/services/session/__init__.py
Normal file
@@ -8,8 +8,8 @@ from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional

from ...core.constants import APP_DATA_DIR
from ..interactive.state import State
from ....core.constants import APP_DATA_DIR
from ...interactive.state import State

logger = logging.getLogger(__name__)
0
fastanime/cli/services/watch_history/__init__.py
Normal file
@@ -8,9 +8,9 @@ import logging
from pathlib import Path
from typing import List, Optional

from ...core.constants import USER_WATCH_HISTORY_PATH
from ...libs.api.types import MediaItem
from .watch_history_types import WatchHistoryData, WatchHistoryEntry
from ....core.constants import USER_WATCH_HISTORY_PATH
from ....libs.api.types import MediaItem
from .types import WatchHistoryData, WatchHistoryEntry

logger = logging.getLogger(__name__)

@@ -327,3 +327,16 @@ class WatchHistoryManager:
        except Exception as e:
            logger.error(f"Failed to backup watch history: {e}")
            return False

    def get_entry_by_media_id(self, media_id: int) -> Optional[WatchHistoryEntry]:
        """Get a watch history entry by media ID (alias for get_entry)."""
        return self.get_entry(media_id)

    def save_entry(self, entry: WatchHistoryEntry) -> bool:
        """Save a watch history entry (alias for add_or_update_entry)."""
        return self.add_or_update_entry(entry.media_item, entry.status,
                                        entry.last_watched_episode, entry.watch_progress)

    def get_currently_watching(self) -> List[WatchHistoryEntry]:
        """Get entries that are currently being watched."""
        return self.get_watching_entries()
@@ -6,8 +6,8 @@ Provides automatic watch history updates during episode viewing.
import logging
from typing import Optional

from ...libs.api.types import MediaItem
from ..utils.watch_history_manager import WatchHistoryManager
from ....libs.api.types import MediaItem
from .manager import WatchHistoryManager

logger = logging.getLogger(__name__)

@@ -11,7 +11,7 @@ from typing import Dict, List, Optional

from pydantic import BaseModel, Field

from ...libs.api.types import MediaItem
from ....libs.api.types import MediaItem

logger = logging.getLogger(__name__)

@@ -31,6 +31,10 @@ class WatchHistoryEntry(BaseModel):
    status: str = "watching"  # watching, completed, dropped, paused
    notes: str = ""

    # Download integration fields
    has_downloads: bool = Field(default=False, description="Whether episodes are downloaded")
    offline_available: bool = Field(default=False, description="Can watch offline")

    # With Pydantic, serialization is automatic!
    # No need for manual to_dict() and from_dict() methods
    # Use: entry.model_dump() and WatchHistoryEntry.model_validate(data)
@@ -2,9 +2,9 @@
Utility modules for the FastAnime CLI.
"""

from .watch_history_manager import WatchHistoryManager
from .watch_history_tracker import WatchHistoryTracker, watch_tracker
from .watch_history_types import WatchHistoryEntry, WatchHistoryData
from ..services.watch_history.manager import WatchHistoryManager
from ..services.watch_history.tracker import WatchHistoryTracker, watch_tracker
from ..services.watch_history.types import WatchHistoryEntry, WatchHistoryData

__all__ = [
    "WatchHistoryManager",
@@ -1,208 +0,0 @@
"""
Download queue management system for FastAnime.
Handles queuing, processing, and tracking of download jobs.
"""

import json
import logging
from datetime import datetime, timedelta
from enum import Enum
from pathlib import Path
from typing import Dict, List, Optional

from pydantic import BaseModel, Field

from ...core.constants import APP_DATA_DIR

logger = logging.getLogger(__name__)


class DownloadStatus(str, Enum):
    """Status of a download job."""
    PENDING = "pending"
    DOWNLOADING = "downloading"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"


class DownloadJob(BaseModel):
    """Represents a single download job in the queue."""
    id: str = Field(description="Unique identifier for the job")
    anime_title: str = Field(description="Title of the anime")
    episode: str = Field(description="Episode number or identifier")
    media_id: Optional[int] = Field(default=None, description="AniList media ID if available")
    provider_id: Optional[str] = Field(default=None, description="Provider-specific anime ID")
    quality: str = Field(default="1080", description="Preferred quality")
    translation_type: str = Field(default="sub", description="sub or dub")
    priority: int = Field(default=5, description="Priority level (1-10, lower is higher priority)")
    status: DownloadStatus = Field(default=DownloadStatus.PENDING)
    created_at: datetime = Field(default_factory=datetime.now)
    started_at: Optional[datetime] = Field(default=None)
    completed_at: Optional[datetime] = Field(default=None)
    error_message: Optional[str] = Field(default=None)
    retry_count: int = Field(default=0)
    auto_added: bool = Field(default=False, description="Whether this was auto-added by the service")


class DownloadQueue(BaseModel):
    """Container for all download jobs."""
    jobs: Dict[str, DownloadJob] = Field(default_factory=dict)
    max_concurrent: int = Field(default=3, description="Maximum concurrent downloads")
    auto_retry_count: int = Field(default=3, description="Maximum retry attempts")
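
# Illustrative only (values are made up): a manually constructed job with the
# defaults above serializes into the queue file roughly like this:
#
#   job = DownloadJob(id="abc123", anime_title="One Piece", episode="1075")
#   queue = DownloadQueue(jobs={job.id: job})
#   queue.model_dump()  # -> {"jobs": {"abc123": {...}}, "max_concurrent": 3, "auto_retry_count": 3}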


class QueueManager:
    """Manages the download queue operations."""

    def __init__(self, queue_file_path: Optional[Path] = None):
        self.queue_file_path = queue_file_path or APP_DATA_DIR / "download_queue.json"
        self._queue: Optional[DownloadQueue] = None

    def _load_queue(self) -> DownloadQueue:
        """Load the queue from file."""
        if self.queue_file_path.exists():
            try:
                with open(self.queue_file_path, 'r', encoding='utf-8') as f:
                    data = json.load(f)
                return DownloadQueue.model_validate(data)
            except (json.JSONDecodeError, ValueError) as e:
                logger.error(f"Failed to load queue from {self.queue_file_path}: {e}")
                return DownloadQueue()
        return DownloadQueue()

    def _save_queue(self, queue: DownloadQueue) -> bool:
        """Save the queue to file."""
        try:
            with open(self.queue_file_path, 'w', encoding='utf-8') as f:
                json.dump(queue.model_dump(), f, indent=2, default=str)
            return True
        except Exception as e:
            logger.error(f"Failed to save queue to {self.queue_file_path}: {e}")
            return False

    @property
    def queue(self) -> DownloadQueue:
        """Get the current queue, loading it if necessary."""
        if self._queue is None:
            self._queue = self._load_queue()
        return self._queue
    def add_job(self, job: DownloadJob) -> bool:
        """Add a new download job to the queue."""
        try:
            self.queue.jobs[job.id] = job
            success = self._save_queue(self.queue)
            if success:
                logger.info(f"Added download job: {job.anime_title} Episode {job.episode}")
            return success
        except Exception as e:
            logger.error(f"Failed to add job to queue: {e}")
            return False

    def remove_job(self, job_id: str) -> bool:
        """Remove a job from the queue."""
        try:
            if job_id in self.queue.jobs:
                job = self.queue.jobs.pop(job_id)
                success = self._save_queue(self.queue)
                if success:
                    logger.info(f"Removed download job: {job.anime_title} Episode {job.episode}")
                return success
            return False
        except Exception as e:
            logger.error(f"Failed to remove job from queue: {e}")
            return False

    def update_job_status(self, job_id: str, status: DownloadStatus, error_message: Optional[str] = None) -> bool:
        """Update the status of a job."""
        try:
            if job_id in self.queue.jobs:
                job = self.queue.jobs[job_id]
                job.status = status
                if error_message:
                    job.error_message = error_message

                if status == DownloadStatus.DOWNLOADING:
                    job.started_at = datetime.now()
                elif status in (DownloadStatus.COMPLETED, DownloadStatus.FAILED, DownloadStatus.CANCELLED):
                    job.completed_at = datetime.now()

                return self._save_queue(self.queue)
            return False
        except Exception as e:
            logger.error(f"Failed to update job status: {e}")
            return False
    def get_pending_jobs(self, limit: Optional[int] = None) -> List[DownloadJob]:
        """Get pending jobs sorted by priority and creation time."""
        pending = [
            job for job in self.queue.jobs.values()
            if job.status == DownloadStatus.PENDING
        ]
        # Sort by priority (lower number = higher priority), then by creation time
        pending.sort(key=lambda x: (x.priority, x.created_at))

        if limit:
            return pending[:limit]
        return pending

    def get_active_jobs(self) -> List[DownloadJob]:
        """Get currently downloading jobs."""
        return [
            job for job in self.queue.jobs.values()
            if job.status == DownloadStatus.DOWNLOADING
        ]

    def get_job_by_id(self, job_id: str) -> Optional[DownloadJob]:
        """Get a specific job by ID."""
        return self.queue.jobs.get(job_id)

    def get_all_jobs(self) -> List[DownloadJob]:
        """Get all jobs."""
        return list(self.queue.jobs.values())
    def clean_completed_jobs(self, max_age_days: int = 7) -> int:
        """Remove completed jobs older than the specified number of days."""
        # Use timedelta arithmetic; naive day subtraction via .replace() raises
        # ValueError near month boundaries (e.g. day 3 minus 7 days).
        cutoff_date = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=max_age_days)

        jobs_to_remove = []
        for job_id, job in self.queue.jobs.items():
            if (job.status in (DownloadStatus.COMPLETED, DownloadStatus.FAILED, DownloadStatus.CANCELLED)
                    and job.completed_at and job.completed_at < cutoff_date):
                jobs_to_remove.append(job_id)

        for job_id in jobs_to_remove:
            del self.queue.jobs[job_id]

        if jobs_to_remove:
            self._save_queue(self.queue)
            logger.info(f"Cleaned {len(jobs_to_remove)} old completed jobs")

        return len(jobs_to_remove)
    def get_queue_stats(self) -> Dict[str, int]:
        """Get statistics about the queue."""
        stats = {
            "total": len(self.queue.jobs),
            "pending": 0,
            "downloading": 0,
            "completed": 0,
            "failed": 0,
            "cancelled": 0
        }

        for job in self.queue.jobs.values():
            if job.status == DownloadStatus.PENDING:
                stats["pending"] += 1
            elif job.status == DownloadStatus.DOWNLOADING:
                stats["downloading"] += 1
            elif job.status == DownloadStatus.COMPLETED:
                stats["completed"] += 1
            elif job.status == DownloadStatus.FAILED:
                stats["failed"] += 1
            elif job.status == DownloadStatus.CANCELLED:
                stats["cancelled"] += 1

        return stats
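
# Typical lifecycle, sketched (paths and values illustrative):
#
#   manager = QueueManager()                  # uses APP_DATA_DIR / "download_queue.json"
#   manager.add_job(job)                      # persists immediately
#   for pending in manager.get_pending_jobs(limit=3):
#       manager.update_job_status(pending.id, DownloadStatus.DOWNLOADING)
#   manager.get_queue_stats()                 # {"total": 1, "pending": 0, ...}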

@@ -1,68 +0,0 @@
bash_functions = r"""
generate_sha256() {
  local input

  # Check if input is passed as an argument or piped
  if [ -n "$1" ]; then
    input="$1"
  else
    input=$(cat)
  fi

  if command -v sha256sum &>/dev/null; then
    echo -n "$input" | sha256sum | awk '{print $1}'
  elif command -v shasum &>/dev/null; then
    echo -n "$input" | shasum -a 256 | awk '{print $1}'
  elif command -v sha256 &>/dev/null; then
    echo -n "$input" | sha256 | awk '{print $1}'
  elif command -v openssl &>/dev/null; then
    echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
  else
    echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
  fi
}
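
# Example (illustrative): both forms should yield the same digest
#   generate_sha256 "hello"
#   echo -n "hello" | generate_sha256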

fzf_preview() {
  file=$1

  dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
  if [ "$dim" = x ]; then
    dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
  fi
  if ! [ "$FASTANIME_IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
    dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
  fi

  if [ "$FASTANIME_IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
    if command -v kitten >/dev/null 2>&1; then
      kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    elif command -v icat >/dev/null 2>&1; then
      icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    else
      kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    fi

  elif [ -n "$GHOSTTY_BIN_DIR" ]; then
    if command -v kitten >/dev/null 2>&1; then
      kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    elif command -v icat >/dev/null 2>&1; then
      icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    else
      chafa -s "$dim" "$file"
    fi
  elif command -v chafa >/dev/null 2>&1; then
    case "$PLATFORM" in
    android) chafa -s "$dim" "$file" ;;
    windows) chafa -f sixel -s "$dim" "$file" ;;
    *) chafa -s "$dim" "$file" ;;
    esac
    echo

  elif command -v imgcat >/dev/null; then
    imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

  else
    echo "Please install a terminal image viewer:"
    echo "either icat (for kitty and wezterm), imgcat, or chafa"
  fi
}
"""
@@ -1,15 +0,0 @@
import logging

from requests import get

logger = logging.getLogger(__name__)


def fetch_anime_info_from_bal(anilist_id):
    try:
        url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{anilist_id}.json"
        response = get(url, timeout=11)
        if response.status_code == 200:
            return response.json()
    except Exception as e:
        logger.error(e)
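# Example (illustrative): returns the parsed JSON mapping on success, None on
# any failure or non-200 response:
#   info = fetch_anime_info_from_bal(21)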
@@ -1,323 +0,0 @@
import logging
from typing import TYPE_CHECKING

from requests import post
from thefuzz import fuzz

if TYPE_CHECKING:
    from ..anilist.types import AnilistDataSchema
logger = logging.getLogger(__name__)

ANILIST_ENDPOINT = "https://graphql.anilist.co"
"""
query ($query: String) {
  Page(perPage: 50) {
    pageInfo {
      total
      currentPage
      hasNextPage
    }
    media(search: $query, type: ANIME) {
      id
      idMal
      title {
        romaji
        english
      }
      episodes
      status
      nextAiringEpisode {
        timeUntilAiring
        airingAt
        episode
      }
    }
  }
}
"""
def search_for_manga_with_anilist(manga_title: str):
    query = """
    query ($query: String) {
      Page(perPage: 50) {
        pageInfo {
          currentPage
        }
        media(search: $query, type: MANGA, genre_not_in: ["hentai"]) {
          id
          idMal
          title {
            romaji
            english
          }
          chapters
          status
          coverImage {
            medium
            large
          }
        }
      }
    }
    """
    response = post(
        ANILIST_ENDPOINT,
        json={"query": query, "variables": {"query": manga_title}},
        timeout=10,
    )
    if response.status_code == 200:
        anilist_data: AnilistDataSchema = response.json()
        return {
            "pageInfo": anilist_data["data"]["Page"]["pageInfo"],
            "results": [
                {
                    "id": anime_result["id"],
                    "poster": anime_result["coverImage"]["large"],
                    "title": (
                        anime_result["title"]["romaji"]
                        or anime_result["title"]["english"]
                    )
                    + f" [Chapters: {anime_result['chapters']}]",
                    "type": "manga",
                    "availableChapters": list(
                        range(
                            1,
                            (
                                anime_result["chapters"]
                                if anime_result["chapters"]
                                else 0
                            ),
                        )
                    ),
                }
                for anime_result in anilist_data["data"]["Page"]["media"]
            ],
        }
def search_for_anime_with_anilist(anime_title: str, prefer_eng_titles=False):
    query = """
    query ($query: String) {
      Page(perPage: 50) {
        pageInfo {
          total
          currentPage
          hasNextPage
        }
        media(search: $query, type: ANIME, genre_not_in: ["hentai"]) {
          id
          idMal
          title {
            romaji
            english
          }
          episodes
          status
          synonyms
          nextAiringEpisode {
            timeUntilAiring
            airingAt
            episode
          }
          coverImage {
            large
          }
        }
      }
    }
    """
    response = post(
        ANILIST_ENDPOINT,
        json={"query": query, "variables": {"query": anime_title}},
        timeout=10,
    )
    if response.status_code == 200:
        anilist_data: AnilistDataSchema = response.json()
        return {
            "pageInfo": anilist_data["data"]["Page"]["pageInfo"],
            "results": [
                {
                    "id": str(anime_result["id"]),
                    "title": (
                        (
                            anime_result["title"]["english"]
                            or anime_result["title"]["romaji"]
                        )
                        if prefer_eng_titles
                        else (
                            anime_result["title"]["romaji"]
                            or anime_result["title"]["english"]
                        )
                    ),
                    "otherTitles": [
                        (
                            (
                                anime_result["title"]["romaji"]
                                or anime_result["title"]["english"]
                            )
                            if prefer_eng_titles
                            else (
                                anime_result["title"]["english"]
                                or anime_result["title"]["romaji"]
                            )
                        ),
                        *(anime_result["synonyms"] or []),
                    ],
                    "type": "anime",
                    "poster": anime_result["coverImage"]["large"],
                    "availableEpisodes": list(
                        map(
                            str,
                            range(
                                1,
                                (
                                    anime_result["episodes"]
                                    if not anime_result["status"] == "RELEASING"
                                    and anime_result["episodes"]
                                    else (
                                        (
                                            anime_result["nextAiringEpisode"]["episode"]
                                            - 1
                                            if anime_result["nextAiringEpisode"]
                                            else 0
                                        )
                                        if not anime_result["episodes"]
                                        else anime_result["episodes"]
                                    )
                                )
                                + 1,
                            ),
                        )
                    ),
                }
                for anime_result in anilist_data["data"]["Page"]["media"]
            ],
        }
def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
    """Look up the AniList and MAL IDs for the closest fuzzy match of a title.

    Args:
        anime_title: the anime title to search for

    Returns:
        a dict with the AniList and MAL IDs of the best match, or None on failure
    """
    query = """
    query ($query: String) {
      Page(perPage: 50) {
        pageInfo {
          total
          currentPage
          hasNextPage
        }
        media(search: $query, type: ANIME) {
          id
          idMal
          title {
            romaji
            english
          }
        }
      }
    }
    """

    try:
        variables = {"query": anime_title}
        response = post(
            ANILIST_ENDPOINT,
            json={"query": query, "variables": variables},
            timeout=10,
        )
        anilist_data: AnilistDataSchema = response.json()
        if response.status_code == 200:
            anime = max(
                anilist_data["data"]["Page"]["media"],
                key=lambda candidate: max(
                    (
                        fuzz.ratio(anime_title, str(candidate["title"]["romaji"])),
                        fuzz.ratio(anime_title, str(candidate["title"]["english"])),
                    )
                ),
            )
            return {"id_anilist": anime["id"], "id_mal": anime["idMal"]}
    except Exception as e:
        logger.error(f"Something unexpected occurred {e}")
def get_basic_anime_info_by_title(anime_title: str):
    """Fetch basic AniList info (IDs, titles, streaming-episode titles) for the
    closest fuzzy match of the given title.

    Args:
        anime_title: the anime title to search for

    Returns:
        a dict with IDs, titles and episode titles for the best match, or None on failure
    """
    query = """
    query ($query: String) {
      Page(perPage: 50) {
        pageInfo {
          total
        }
        media(search: $query, type: ANIME, genre_not_in: ["hentai"]) {
          id
          idMal
          title {
            romaji
            english
          }
          streamingEpisodes {
            title
          }
        }
      }
    }
    """

    from ...Utility.data import anime_normalizer

    # normalize the title
    anime_title = anime_normalizer.get(anime_title, anime_title)
    try:
        variables = {"query": anime_title}
        response = post(
            ANILIST_ENDPOINT,
            json={"query": query, "variables": variables},
            timeout=10,
        )
        anilist_data: AnilistDataSchema = response.json()
        if response.status_code == 200:
            anime = max(
                anilist_data["data"]["Page"]["media"],
                key=lambda candidate: max(
                    (
                        fuzz.ratio(
                            anime_title.lower(), str(candidate["title"]["romaji"]).lower()
                        ),
                        fuzz.ratio(
                            anime_title.lower(), str(candidate["title"]["english"]).lower()
                        ),
                    )
                ),
            )
            return {
                "idAnilist": anime["id"],
                "idMal": anime["idMal"],
                "title": {
                    "english": anime["title"]["english"],
                    "romaji": anime["title"]["romaji"],
                },
                "episodes": [
                    {"title": episode["title"]}
                    for episode in anime["streamingEpisodes"]
                    if episode
                ],
            }
    except Exception as e:
        logger.error(f"Something unexpected occurred {e}")
@@ -1,221 +0,0 @@
import json
import logging
import os
import re
import time
from datetime import datetime
from urllib.parse import urlencode

import requests

from .sqlitedb_helper import SqliteDB

logger = logging.getLogger(__name__)

caching_mimetypes = {
    "application": {
        "json",
        "xml",
        "x-www-form-urlencoded",
        "x-javascript",
        "javascript",
    },
    "text": {"html", "css", "javascript", "plain", "xml", "xsl", "x-javascript"},
}


class CachedRequestsSession(requests.Session):
    __request_functions__ = (
        "get",
        "options",
        "head",
        "post",
        "put",
        "patch",
        "delete",
    )

    def __new__(cls, *args, **kwargs):
        # Rebind every HTTP verb method (get, post, ...) so it routes through
        # cls.request with the verb name filled in, letting request() handle
        # caching for all verbs in one place.
        def caching_params(name: str):
            def wrapper(self, *args, **kwargs):
                return cls.request(self, name, *args, **kwargs)

            return wrapper

        for func in cls.__request_functions__:
            setattr(cls, func, caching_params(func))

        return super().__new__(cls)
    def __init__(
        self,
        cache_db_path: str,
        max_lifetime: int = 259200,
        max_size: int = (1024**2) * 10,
        table_name: str = "fastanime_requests_cache",
        clean_db=False,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)

        self.cache_db_path = cache_db_path
        self.max_lifetime = max_lifetime
        self.max_size = max_size
        self.table_name = table_name
        self.sqlite_db_connection = SqliteDB(self.cache_db_path)

        # Prepare the cache table if it doesn't exist
        self._create_cache_table()

    def _create_cache_table(self):
        """Create the cache table if it doesn't exist."""
        with self.sqlite_db_connection as conn:
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {self.table_name} (
                    url TEXT,
                    status_code INTEGER,
                    request_headers TEXT,
                    response_headers TEXT,
                    data BLOB,
                    redirection_policy INT,
                    cache_expiry INTEGER,
                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )"""
            )
    def request(
        self,
        method,
        url,
        params=None,
        force_caching=False,
        fresh=int(os.environ.get("FASTANIME_FRESH_REQUESTS", 0)),
        *args,
        **kwargs,
    ):
        if params:
            url += "?" + urlencode(params)

        redirection_policy = int(kwargs.get("force_redirects", False))

        with self.sqlite_db_connection as conn:
            cursor = conn.cursor()
            time_before_access_db = datetime.now()

            logger.debug("Checking for existing request in cache")
            cursor.execute(
                f"""
                SELECT
                    status_code,
                    request_headers,
                    response_headers,
                    data,
                    redirection_policy
                FROM {self.table_name}
                WHERE
                    url = ?
                    AND redirection_policy = ?
                    AND cache_expiry > ?
                ORDER BY created_at DESC
                LIMIT 1
                """,
                (url, redirection_policy, int(time.time())),
            )
            cached_request = cursor.fetchone()
            time_after_access_db = datetime.now()

            if cached_request and not fresh:
                logger.debug("Found existing request in cache")
                (
                    status_code,
                    request_headers,
                    response_headers,
                    data,
                    redirection_policy,
                ) = cached_request

                response = requests.Response()
                response.headers.update(json.loads(response_headers))
                response.status_code = status_code
                response._content = data

                if "timeout" in kwargs:
                    kwargs.pop("timeout")
                if "headers" in kwargs:
                    kwargs.pop("headers")
                _request = requests.Request(
                    method, url, headers=json.loads(request_headers), *args, **kwargs
                )
                response.request = _request.prepare()
                response.elapsed = time_after_access_db - time_before_access_db

                return response

            # Perform the request and cache it
            response = super().request(method, url, *args, **kwargs)
            if response.ok and (
                force_caching
                or (
                    self.is_content_type_cachable(
                        response.headers.get("content-type"), caching_mimetypes
                    )
                    and len(response.content) < self.max_size
                )
            ):
                logger.debug("Caching the current request")
                cursor.execute(
                    f"""
                    INSERT INTO {self.table_name} (
                        url,
                        status_code,
                        request_headers,
                        response_headers,
                        data,
                        redirection_policy,
                        cache_expiry
                    ) VALUES (?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        url,
                        response.status_code,
                        json.dumps(dict(response.request.headers)),
                        json.dumps(dict(response.headers)),
                        response.content,
                        redirection_policy,
                        int(time.time()) + self.max_lifetime,
                    ),
                )

            return response
    @staticmethod
    def is_content_type_cachable(content_type, caching_mimetypes):
        """Check whether the given content type is supported by the cacher."""
        if content_type is None:
            return True

        mime, contents = content_type.split("/")

        contents = re.sub(r";.*$", "", contents)

        return mime in caching_mimetypes and any(
            content in caching_mimetypes[mime] for content in contents.split("+")
        )
|
||||
|
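As a quick illustration of this check (the mapping below is hypothetical — the module's real caching_mimetypes is defined elsewhere and not shown here):

# Hypothetical mimetype mapping, for illustration only
mimetypes = {"application": {"json", "xml"}, "text": {"html"}}

CachedRequestsSession.is_content_type_cachable("application/json; charset=utf-8", mimetypes)  # True
CachedRequestsSession.is_content_type_cachable("application/vnd.api+json", mimetypes)         # True ("+json" suffix matches)
CachedRequestsSession.is_content_type_cachable("image/png", mimetypes)                        # False
CachedRequestsSession.is_content_type_cachable(None, mimetypes)                               # True (no header, assume cachable)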

if __name__ == "__main__":
    with CachedRequestsSession("cache.db") as session:
        response = session.get(
            "https://google.com",
        )

        response_b = session.get(
            "https://google.com",
        )

        # The second call should hit the cache, so its elapsed time
        # reflects a local database lookup, not a network round trip.
        print("A: ", response.elapsed)
        print("B: ", response_b.elapsed)

        print(response_b.text[0:30])
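To force a network hit for a single call, the fresh parameter can be passed explicitly (its default comes from the FASTANIME_FRESH_REQUESTS environment variable, read once when the method is defined); a minimal sketch:

# Bypass the cache for this call only; any truthy fresh value skips the cached row
response_fresh = session.request("GET", "https://google.com", fresh=1)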
@@ -1,32 +0,0 @@
import logging
import sqlite3
import time

logger = logging.getLogger(__name__)


class SqliteDB:
    def __init__(self, db_path: str) -> None:
        self.db_path = db_path
        # Open a throwaway connection once so WAL mode is persisted in the db file
        self.connection = sqlite3.connect(self.db_path)
        logger.debug("Enabling WAL mode for concurrent access")
        self.connection.execute("PRAGMA journal_mode=WAL;")
        self.connection.close()
        self.connection = None

    def __enter__(self):
        logger.debug("Starting new connection...")
        start_time = time.time()
        self.connection = sqlite3.connect(self.db_path)
        logger.debug(
            f"Successfully got a new connection in {time.time() - start_time} seconds"
        )
        return self.connection

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.connection:
            logger.debug("Closing connection to cache db")
            self.connection.commit()
            self.connection.close()
            self.connection = None
            logger.debug("Successfully closed connection to cache db")
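For reference, a minimal sketch of how this now-removed context manager was used — construction enables WAL once, and each with block opens a short-lived connection that is committed and closed on exit:

db = SqliteDB("cache.db")  # WAL journal mode is enabled here, at construction
with db as conn:
    conn.execute("CREATE TABLE IF NOT EXISTS kv (k TEXT, v TEXT)")
with db as conn:  # a second block gets its own fresh connection
    conn.execute("INSERT INTO kv VALUES (?, ?)", ("a", "1"))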
@@ -1,6 +1,6 @@
import os
from pathlib import Path
from typing import Literal
from typing import List, Literal

from pydantic import BaseModel, Field, PrivateAttr, computed_field, field_validator
@@ -157,6 +157,62 @@ class DownloadsConfig(OtherConfig):
        default_factory=lambda: USER_VIDEOS_DIR,
        description="The default directory to save downloaded anime.",
    )

    # Download tracking configuration
    enable_tracking: bool = Field(
        default=True, description="Enable download tracking and management"
    )
    auto_organize: bool = Field(
        default=True, description="Automatically organize downloads by anime title"
    )
    max_concurrent: int = Field(
        default=3, gt=0, le=10, description="Maximum concurrent downloads"
    )
    auto_cleanup_failed: bool = Field(
        default=True, description="Automatically clean up failed downloads"
    )
    retention_days: int = Field(
        default=30, gt=0, description="Days to keep failed downloads before cleanup"
    )

    # Integration with watch history
    sync_with_watch_history: bool = Field(
        default=True, description="Sync download status with watch history"
    )
    auto_mark_offline: bool = Field(
        default=True,
        description="Automatically mark downloaded episodes as available offline",
    )

    # File organization
    naming_template: str = Field(
        default="{title}/Season {season:02d}/{episode:02d} - {episode_title}.{ext}",
        description="File naming template for downloaded episodes",
    )

    # Quality and subtitles
    preferred_quality: Literal["360", "480", "720", "1080", "best"] = Field(
        default="1080", description="Preferred download quality"
    )
    download_subtitles: bool = Field(
        default=True, description="Download subtitles when available"
    )
    subtitle_languages: List[str] = Field(
        default=["en"], description="Preferred subtitle languages"
    )

    # Queue management
    queue_max_size: int = Field(
        default=100, gt=0, description="Maximum number of items in the download queue"
    )
    auto_start_downloads: bool = Field(
        default=True, description="Automatically start downloads when items are queued"
    )
    retry_attempts: int = Field(
        default=3, ge=0, description="Number of retry attempts for failed downloads"
    )
    retry_delay: int = Field(
        default=300, ge=0, description="Delay between retry attempts in seconds"
    )


class GeneralConfig(BaseModel):
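A sketch of how these new settings might be consumed (illustrative only — DownloadsConfig inherits from OtherConfig, whose other fields are not shown, so standalone construction is an assumption here):

# Hypothetical standalone construction; in FastAnime this normally
# arrives as part of the loaded AppConfig.
config = DownloadsConfig(preferred_quality="720", subtitle_languages=["en", "ja"])

# Pydantic enforces the declared bounds, e.g. max_concurrent must be in 1..10:
# DownloadsConfig(max_concurrent=0)  # raises ValidationError (violates gt=0)

# The naming template expands with str.format-style fields:
config.naming_template.format(
    title="Attack on Titan", season=1, episode=5,
    episode_title="First Battle", ext="mkv",
)
# -> "Attack on Titan/Season 01/05 - First Battle.mkv"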