chore: correct package issues
viu_cli/cli/utils/__init__.py (new file, 0 lines)
viu_cli/cli/utils/ansi.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# Define ANSI escape codes as constants
RESET = "\033[0m"
BOLD = "\033[1m"
INVISIBLE_CURSOR = "\033[?25l"
VISIBLE_CURSOR = "\033[?25h"
UNDERLINE = "\033[4m"


def get_true_fg(color: list[str], bold: bool = True) -> str:
    """Custom helper function that enables colored text in the terminal.

    Args:
        color: the [r, g, b] channel values, each given as a string
        bold: whether to also apply the bold attribute

    Returns:
        the ANSI escape sequence that sets the requested foreground color
    """
    # NOTE: Currently only supports terminals that support true color
    r = color[0]
    g = color[1]
    b = color[2]
    if bold:
        return f"{BOLD}\033[38;2;{r};{g};{b}m"
    else:
        return f"\033[38;2;{r};{g};{b}m"
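A brief usage sketch (not part of the commit) of how `get_true_fg` could be combined with the constants above; the color value is a placeholder, standing in for a comma-separated config string:

```python
from viu_cli.cli.utils.ansi import RESET, UNDERLINE, get_true_fg

# get_true_fg expects the channels as a list of strings, e.g. split from a
# config value such as "215,0,95".
accent = get_true_fg("215,0,95".split(","), bold=True)
print(f"{accent}Now Playing{RESET} {UNDERLINE}Episode 1{RESET}")
```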
viu_cli/cli/utils/completion.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import logging

logger = logging.getLogger(__name__)

ANILIST_ENDPOINT = "https://graphql.anilist.co"


anime_title_query = """
query ($query: String) {
  Page(perPage: 50) {
    pageInfo {
      total
    }
    media(search: $query, type: ANIME) {
      id
      idMal
      title {
        romaji
        english
      }
    }
  }
}
"""


def get_anime_titles(query: str, variables: dict = {}):
    """Run an unauthenticated AniList query and collect the matching anime titles.

    Args:
        query: the AniList GraphQL query
        variables: the AniList API variables

    Returns:
        a flat list of English and romaji titles, or an empty list on failure
    """
    from httpx import post

    try:
        response = post(
            ANILIST_ENDPOINT,
            json={"query": query, "variables": variables},
            timeout=10,
        )
        anilist_data = response.json()

        if response.status_code == 200:
            eng_titles = [
                anime["title"]["english"]
                for anime in anilist_data["data"]["Page"]["media"]
                if anime["title"]["english"]
            ]
            romaji_titles = [
                anime["title"]["romaji"]
                for anime in anilist_data["data"]["Page"]["media"]
                if anime["title"]["romaji"]
            ]
            return [*eng_titles, *romaji_titles]
        else:
            return []
    except Exception as e:
        logger.error(f"Something unexpected occurred: {e}")
        return []


def anime_titles_shell_complete(ctx, param, incomplete):
    incomplete = incomplete.strip()
    if not incomplete:
        incomplete = None
        variables = {}
    else:
        variables = {"query": incomplete}
    return get_anime_titles(anime_title_query, variables)


if __name__ == "__main__":
    t = input("Enter title: ")
    results = get_anime_titles(anime_title_query, {"query": t})
    print(results)
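As a hedged sketch of how the completer might be wired into a command (the command and argument names below are placeholders, not the package's real CLI), Click 8.x passes `(ctx, param, incomplete)` to a `shell_complete` callback and accepts a plain list of strings in return:

```python
import click

from viu_cli.cli.utils.completion import anime_titles_shell_complete


@click.command()
@click.argument("title", shell_complete=anime_titles_shell_complete)
def search(title: str):
    """Hypothetical command: search for an anime by TITLE."""
    click.echo(f"Searching for {title}...")
```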
viu_cli/cli/utils/exception.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import sys

from rich.traceback import install as rich_install


def custom_exception_hook(exc_type, exc_value, exc_traceback):
    """Print uncaught exceptions as a single `Type: message` line."""
    print(f"{exc_type.__name__}: {exc_value}")


default_exception_hook = sys.excepthook


def setup_exceptions_handler(
    trace: bool | None,
    dev: bool | None,
    rich_traceback: bool | None,
    rich_traceback_theme: str,
):
    """Install the appropriate excepthook based on the CLI flags."""
    if trace or dev:
        # Full tracebacks requested: restore the default hook and optionally
        # let rich render them.
        sys.excepthook = default_exception_hook
        if rich_traceback:
            rich_install(show_locals=True, theme=rich_traceback_theme)
    else:
        # Otherwise keep the output terse: one `Type: message` line.
        sys.excepthook = custom_exception_hook
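A minimal sketch of the wiring a CLI entry point might do; the flag values and theme name are placeholders, not the package's actual defaults:

```python
from viu_cli.cli.utils.exception import setup_exceptions_handler

setup_exceptions_handler(
    trace=False,
    dev=False,
    rich_traceback=False,
    rich_traceback_theme="monokai",
)

# From here on, an uncaught error prints a single line such as
# "FileNotFoundError: [Errno 2] No such file or directory" instead of a traceback.
```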
viu_cli/cli/utils/feh.py (new file, 12 lines)
@@ -0,0 +1,12 @@
import shutil
import subprocess
from sys import exit


def feh_manga_viewer(image_links: list[str], window_title: str):
    """Open the given images in feh with the provided window title."""
    FEH_EXECUTABLE = shutil.which("feh")
    if not FEH_EXECUTABLE:
        print("feh not found")
        exit(1)
    commands = [FEH_EXECUTABLE, *image_links, "--title", window_title]
    subprocess.run(commands, check=False)
viu_cli/cli/utils/icat.py (new file, 103 lines)
@@ -0,0 +1,103 @@
import shutil
import subprocess
import sys
import termios
import tty
from sys import exit

from rich.align import Align
from rich.console import Console
from rich.panel import Panel
from rich.text import Text

console = Console()


def get_key():
    """Read a single keypress (including arrows)."""
    fd = sys.stdin.fileno()
    old = termios.tcgetattr(fd)
    try:
        tty.setraw(fd)
        ch1 = sys.stdin.read(1)
        if ch1 == "\x1b":
            ch2 = sys.stdin.read(2)
            return ch1 + ch2
        return ch1
    finally:
        termios.tcsetattr(fd, termios.TCSADRAIN, old)


def draw_banner_at(msg: str, row: int):
    """Move cursor to `row`, then render a centered, cyan-bordered panel."""
    sys.stdout.write(f"\x1b[{row};1H")
    text = Text(msg, justify="center")
    panel = Panel(Align(text, align="center"), border_style="cyan", padding=(1, 2))
    console.print(panel)


def icat_manga_viewer(image_links: list[str], window_title: str):
    ICAT = shutil.which("kitty")
    if not ICAT:
        console.print("[bold red]kitty (for icat) not found[/]")
        exit(1)

    idx, total = 0, len(image_links)
    title = f"{window_title} ({total} images)"
    show_banner = True

    try:
        while True:
            console.clear()
            term_width, term_height = shutil.get_terminal_size((80, 24))
            panel_height = 0

            # Calculate space for image based on banner visibility
            if show_banner:
                msg_lines = 3  # Title + blank + controls
                panel_height = msg_lines + 4  # Padding and borders
                image_height = term_height - panel_height - 1
            else:
                image_height = term_height

            subprocess.run(
                [
                    ICAT,
                    "+kitten",
                    "icat",
                    "--clear",
                    "--scale-up",
                    "--place",
                    f"{term_width}x{image_height}@0x0",
                    "--z-index",
                    "-1",
                    image_links[idx],
                ],
                check=False,
            )

            if show_banner:
                controls = (
                    f"[{idx + 1}/{total}] Prev: [h/←] Next: [l/→] "
                    f"Toggle Banner: [b] Quit: [q/Ctrl-C]"
                )
                msg = f"{title}\n\n{controls}"
                start_row = term_height - panel_height
                draw_banner_at(msg, start_row)

            # key handling
            key = get_key()
            if key in ("l", "\x1b[C"):
                idx = (idx + 1) % total
            elif key in ("h", "\x1b[D"):
                idx = (idx - 1) % total
            elif key == "b":
                show_banner = not show_banner
            elif key in ("q", "\x03"):
                break

    except KeyboardInterrupt:
        pass
    finally:
        console.clear()
        console.print("Exited viewer.", style="bold")
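For reference, a hedged usage sketch of the viewer; the page paths are placeholders, and this only works inside a kitty terminal since frames are drawn with `kitty +kitten icat`:

```python
from viu_cli.cli.utils.icat import icat_manga_viewer

pages = [
    "/tmp/chapter-001/page-01.png",
    "/tmp/chapter-001/page-02.png",
]
icat_manga_viewer(pages, window_title="My Manga - Chapter 1")
```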
viu_cli/cli/utils/image.py (new file, 180 lines)
@@ -0,0 +1,180 @@
import logging
import shutil
import subprocess
from pathlib import Path
from typing import Optional

import click
import httpx

logger = logging.getLogger(__name__)


def resize_image_from_url(
    client: httpx.Client,
    url: str,
    new_width: int,
    new_height: int,
    output_path: Optional[Path] = None,
    maintain_aspect_ratio: bool = False,
    return_bytes: bool = True,
) -> bytes | None:
    """
    Fetches an image from a URL using a provided synchronous httpx.Client and
    resizes it with Pillow. Can either save the resized image to a file or
    return its bytes.

    Args:
        client (httpx.Client): An initialized synchronous httpx.Client instance.
        url (str): The URL of the image.
        new_width (int): The desired new width of the image.
        new_height (int): The desired new height of the image.
        output_path (Path, optional): The path to save the resized image.
            Required if return_bytes is False.
        maintain_aspect_ratio (bool, optional): If True, resizes while maintaining
            the aspect ratio using thumbnail(). Defaults to False.
        return_bytes (bool, optional): If True, returns the resized image as bytes.
            If False, saves to output_path. Defaults to True.

    Returns:
        bytes | None: The bytes of the resized image if return_bytes is True,
            otherwise None.
    """
    from io import BytesIO

    from PIL import Image

    if not return_bytes and output_path is None:
        raise ValueError("output_path must be provided if return_bytes is False.")

    try:
        # Use the provided synchronous client
        response = client.get(url)
        response.raise_for_status()  # Raise an exception for bad status codes

        image_bytes = response.content
        image_stream = BytesIO(image_bytes)
        img = Image.open(image_stream)

        if maintain_aspect_ratio:
            img_copy = img.copy()
            img_copy.thumbnail((new_width, new_height), Image.Resampling.LANCZOS)
            resized_img = img_copy
        else:
            resized_img = img.resize((new_width, new_height), Image.Resampling.LANCZOS)

        if return_bytes:
            # Determine the output format. Default to JPEG if original is unknown or problematic.
            # Handle RGBA to RGB conversion for JPEG output.
            output_format = (
                img.format if img.format in ["JPEG", "PNG", "WEBP"] else "JPEG"
            )
            if output_format == "JPEG":
                if resized_img.mode in ("RGBA", "P"):
                    resized_img = resized_img.convert("RGB")

            byte_arr = BytesIO()
            resized_img.save(byte_arr, format=output_format)
            logger.info(
                f"Image from {url} resized to {resized_img.width}x{resized_img.height} and returned as bytes ({output_format} format)."
            )
            return byte_arr.getvalue()
        else:
            # Ensure the directory exists before saving
            if output_path:
                output_path.parent.mkdir(parents=True, exist_ok=True)
                resized_img.save(output_path)
                logger.info(
                    f"Image from {url} resized to {resized_img.width}x{resized_img.height} and saved as '{output_path}'"
                )
            return None

    except httpx.RequestError as e:
        logger.error(f"An error occurred while requesting {url}: {e}")
        return None
    except httpx.HTTPStatusError as e:
        logger.error(
            f"HTTP error occurred: {e.response.status_code} - {e.response.text}"
        )
        return None
    except ValueError as e:
        logger.error(f"Configuration error: {e}")
        return None
    except Exception as e:
        logger.error(f"An unexpected error occurred: {e}")
        return None


def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str]:
    """
    Renders an image from a URL in the terminal using icat or chafa.

    This function automatically detects the best available tool.

    Args:
        url: The URL of the image to render.
        capture: If True, returns the terminal-formatted image as a string
            instead of printing it. Defaults to False.
        size: The size parameter to pass to the rendering tool (e.g., "WxH").

    Returns:
        If capture is True, returns the image data as a string.
        If capture is False, prints directly to the terminal and returns None.
        Returns None on any failure.
    """
    # --- Common subprocess arguments ---
    subprocess_kwargs = {
        "check": False,  # We will handle errors manually
        "capture_output": capture,
        "text": capture,  # Decode stdout/stderr as text if capturing
    }

    # --- Try icat (Kitty terminal) first ---
    if icat_executable := shutil.which("icat"):
        process = subprocess.run(
            [icat_executable, "--align", "left", url], **subprocess_kwargs
        )
        if process.returncode == 0:
            return process.stdout if capture else None
        logger.warning(f"icat failed for URL {url} with code {process.returncode}")

    # --- Fallback to chafa ---
    if chafa_executable := shutil.which("chafa"):
        try:
            # Chafa requires downloading the image data first
            with httpx.Client() as client:
                response = client.get(url, follow_redirects=True, timeout=20)
                response.raise_for_status()
                img_bytes = response.content

            # Add stdin input to the subprocess arguments
            subprocess_kwargs["input"] = img_bytes

            process = subprocess.run(
                [chafa_executable, f"--size={size}", "-"], **subprocess_kwargs
            )
            if process.returncode == 0:
                return process.stdout if capture else None
            logger.warning(f"chafa failed for URL {url} with code {process.returncode}")

        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error fetching image for chafa: {e.response.status_code}"
            )
            click.echo(
                f"[dim]Error fetching image: {e.response.status_code}[/dim]", err=True
            )
        except Exception as e:
            logger.error(f"An exception occurred while running chafa: {e}")

        return None

    # --- Final fallback if no tool is found ---
    if not capture:
        # Only show this message if the user expected to see something.
        click.echo(
            "[dim](Image preview skipped: icat or chafa not found)[/dim]", err=True
        )

    return None
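An illustrative call of `resize_image_from_url` under the signature above; the URL and output path are placeholders:

```python
from pathlib import Path

import httpx

from viu_cli.cli.utils.image import resize_image_from_url

with httpx.Client(follow_redirects=True, timeout=20) as client:
    # return_bytes defaults to True, so this hands back the resized bytes
    data = resize_image_from_url(client, "https://example.com/cover.jpg", 300, 450)

    # Or write straight to disk instead
    resize_image_from_url(
        client,
        "https://example.com/cover.jpg",
        300,
        450,
        output_path=Path("/tmp/cover_small.png"),
        maintain_aspect_ratio=True,
        return_bytes=False,
    )
```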
viu_cli/cli/utils/lazyloader.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import importlib

import click


# TODO: the command structure is predictable, so eventually only a mapping of
# command names to their function names should be required (some commands have
# special names, e.g. `import`).
class LazyGroup(click.Group):
    def __init__(self, root: str, *args, lazy_subcommands=None, **kwargs):
        super().__init__(*args, **kwargs)
        # lazy_subcommands is a map of the form:
        #
        #   {command-name} -> {module-name}.{command-object-name}
        #
        self.root = root
        self.lazy_subcommands = lazy_subcommands or {}

    def list_commands(self, ctx):
        base = super().list_commands(ctx)
        lazy = sorted(self.lazy_subcommands.keys())
        return base + lazy

    def get_command(self, ctx, cmd_name):  # pyright:ignore
        if cmd_name in self.lazy_subcommands:
            return self._lazy_load(cmd_name)
        return super().get_command(ctx, cmd_name)

    def _lazy_load(self, cmd_name: str):
        # lazily loading a command: first get the module name and attribute name
        import_path: str = self.lazy_subcommands[cmd_name]
        modname, cmd_object_name = import_path.rsplit(".", 1)
        # do the import
        mod = importlib.import_module(f".{modname}", package=self.root)
        # get the Command object from that module
        cmd_object = getattr(mod, cmd_object_name)
        # check the result to make debugging easier
        if not isinstance(cmd_object, click.Command):
            raise ValueError(
                f"Lazy loading of {import_path} failed by returning "
                "a non-command object"
            )
        return cmd_object
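A hedged sketch of how a group might be built on `LazyGroup`; the root package and command entries are placeholders, not the package's real command registry:

```python
import click

from viu_cli.cli.utils.lazyloader import LazyGroup


@click.group(
    cls=LazyGroup,
    root="viu_cli.cli.commands",
    lazy_subcommands={
        "search": "search.search",      # -> viu_cli.cli.commands.search : search
        "download": "download.download",
    },
)
def cli():
    """Top-level CLI group; subcommand modules are imported only when invoked."""
```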
viu_cli/cli/utils/logging.py (new file, 42 lines)
@@ -0,0 +1,42 @@
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path

from ...core.constants import LOG_FILE

root_logger = logging.getLogger()
logger = logging.getLogger(__name__)


def setup_logging(log: bool | None) -> None:
    """Configures the application's logging based on CLI flags."""

    _setup_default_logger()
    if log:
        from rich.logging import RichHandler

        root_logger.addHandler(RichHandler())
        logger.info("Rich logging initialized.")


def _setup_default_logger(
    log_file_path: Path = LOG_FILE,
    max_bytes=10 * 1024 * 1024,  # 10 MB
    backup_count=5,
    level=logging.DEBUG,
):
    root_logger.setLevel(level)

    formatter = logging.Formatter(
        "%(asctime)s - [%(process)d:%(thread)d] - %(levelname)-8s - %(name)s - %(filename)s:%(lineno)d - %(message)s"
    )

    file_handler = RotatingFileHandler(
        log_file_path,
        maxBytes=max_bytes,
        backupCount=backup_count,
        encoding="utf-8",
    )
    file_handler.setLevel(level)
    file_handler.setFormatter(formatter)
    root_logger.addHandler(file_handler)
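A short sketch of how an entry point might use this module: the rotating file handler is always installed, and passing a truthy `log` flag adds a `RichHandler` on top. The logger name below is a placeholder.

```python
import logging

from viu_cli.cli.utils.logging import setup_logging

setup_logging(log=True)

logging.getLogger("viu_cli.demo").debug("written to the rotating log file")
logging.getLogger("viu_cli.demo").info("also echoed to the terminal via Rich")
```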
viu_cli/cli/utils/parser.py (new file, 134 lines)
@@ -0,0 +1,134 @@
"""Episode range parsing utilities for Viu CLI commands."""

from typing import Iterator


def parse_episode_range(
    episode_range_str: str | None, available_episodes: list[str]
) -> Iterator[str]:
    """
    Parse an episode range string and return an iterator of episode numbers.

    This function handles various episode range formats:
    - Single episode: "5" -> episodes from index 5 onwards
    - Range with start and end: "5:10" -> episodes from index 5 to 10 (exclusive)
    - Range with step: "5:10:2" -> episodes from index 5 to 10 with step 2
    - Start only: "5:" -> episodes from index 5 onwards
    - End only: ":10" -> episodes from beginning to index 10
    - All episodes: ":" -> all episodes

    Args:
        episode_range_str: The episode range string to parse (e.g., "5:10", "5:", ":10", "5")
        available_episodes: List of available episode numbers/identifiers

    Returns:
        Iterator over the selected episode numbers

    Raises:
        ValueError: If the episode range format is invalid
        IndexError: If the specified indices are out of range

    Examples:
        >>> episodes = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"]
        >>> list(parse_episode_range("2:5", episodes))
        ['3', '4', '5']
        >>> list(parse_episode_range("5:", episodes))
        ['6', '7', '8', '9', '10']
        >>> list(parse_episode_range(":3", episodes))
        ['1', '2', '3']
        >>> list(parse_episode_range("2:8:2", episodes))
        ['3', '5', '7']
    """
    if not episode_range_str:
        # No range specified, return all episodes
        return iter(available_episodes)

    # Sort episodes numerically for consistent ordering
    episodes = sorted(available_episodes, key=float)

    if ":" in episode_range_str:
        # Handle colon-separated ranges
        parts = episode_range_str.split(":")

        if len(parts) == 3:
            # Format: start:end:step
            start_str, end_str, step_str = parts
            if not all([start_str, end_str, step_str]):
                raise ValueError(
                    f"Invalid episode range format: '{episode_range_str}'. "
                    "When using 3 parts (start:end:step), all parts must be non-empty."
                )

            try:
                start_idx = int(start_str)
                end_idx = int(end_str)
                step = int(step_str)

                if step <= 0:
                    raise ValueError("Step value must be positive")

                return iter(episodes[start_idx:end_idx:step])
            except ValueError as e:
                if "invalid literal" in str(e):
                    raise ValueError(
                        f"Invalid episode range format: '{episode_range_str}'. "
                        "All parts must be valid integers."
                    ) from e
                raise

        elif len(parts) == 2:
            # Format: start:end or start: or :end
            start_str, end_str = parts

            if start_str and end_str:
                # Both start and end specified: start:end
                try:
                    start_idx = int(start_str)
                    end_idx = int(end_str)
                    return iter(episodes[start_idx:end_idx])
                except ValueError as e:
                    raise ValueError(
                        f"Invalid episode range format: '{episode_range_str}'. "
                        "Start and end must be valid integers."
                    ) from e

            elif start_str and not end_str:
                # Only start specified: start:
                try:
                    start_idx = int(start_str)
                    return iter(episodes[start_idx:])
                except ValueError as e:
                    raise ValueError(
                        f"Invalid episode range format: '{episode_range_str}'. "
                        "Start must be a valid integer."
                    ) from e

            elif not start_str and end_str:
                # Only end specified: :end
                try:
                    end_idx = int(end_str)
                    return iter(episodes[:end_idx])
                except ValueError as e:
                    raise ValueError(
                        f"Invalid episode range format: '{episode_range_str}'. "
                        "End must be a valid integer."
                    ) from e

            else:
                # Both empty: ":"
                return iter(episodes)
        else:
            raise ValueError(
                f"Invalid episode range format: '{episode_range_str}'. "
                "Too many colon separators."
            )
    else:
        # Single number: start from that index onwards
        try:
            start_idx = int(episode_range_str)
            return iter(episodes[start_idx:])
        except ValueError as e:
            raise ValueError(
                f"Invalid episode range format: '{episode_range_str}'. "
                "Must be a valid integer."
            ) from e
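A small consumption sketch of the parser; the episode list is a placeholder, and because entries are strings sorted with `key=float`, fractional specials such as "6.5" slot in correctly:

```python
from viu_cli.cli.utils.parser import parse_episode_range

available = ["1", "2", "3", "4", "5", "6.5", "7"]

# Index-based slicing after sorting: "2:6" yields "3", "4", "5", "6.5"
for episode in parse_episode_range("2:6", available):
    print(f"queueing episode {episode}")
```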
viu_cli/cli/utils/preview.py (new file, 580 lines)
@@ -0,0 +1,580 @@
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from hashlib import sha256
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from ...core.config import AppConfig
|
||||
from ...core.constants import APP_CACHE_DIR, PLATFORM, SCRIPTS_DIR
|
||||
from ...core.utils.file import AtomicWriter
|
||||
from ...libs.media_api.types import (
|
||||
AiringScheduleResult,
|
||||
Character,
|
||||
MediaItem,
|
||||
MediaReview,
|
||||
)
|
||||
from . import ansi
|
||||
from .preview_workers import PreviewWorkerManager
|
||||
|
||||
|
||||
def get_rofi_preview(
|
||||
media_items: List[MediaItem], titles: List[str], config: AppConfig
|
||||
) -> str:
|
||||
# Ensure cache directories exist on startup
|
||||
IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
return (
|
||||
"".join(
|
||||
[
|
||||
f"{title}\0icon\x1f{_get_image(item)}\n"
|
||||
for item, title in zip(media_items, titles)
|
||||
]
|
||||
)
|
||||
+ "Back\nExit"
|
||||
)
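For context, `get_rofi_preview` relies on rofi's extended dmenu row format, where the visible text is followed by a NUL byte and an `icon` property pointing at the cached cover image. A hedged illustration of one generated row (title and path are placeholders):

```python
# One rofi row: "<display text>\0icon\x1f<path to cached cover>\n"
row = "Example Anime\0icon\x1f/home/user/.cache/viu/previews/images/abc123.png\n"
```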
|
||||
|
||||
|
||||
def _get_image(item: MediaItem) -> str:
|
||||
if not item.cover_image:
|
||||
return ""
|
||||
|
||||
hash_id = sha256(item.title.english.encode("utf-8")).hexdigest()
|
||||
image_path = IMAGES_CACHE_DIR / f"{hash_id}.png"
|
||||
|
||||
if image_path.exists():
|
||||
return str(image_path)
|
||||
|
||||
if not item.cover_image.large:
|
||||
return ""
|
||||
|
||||
try:
|
||||
with httpx.stream(
|
||||
"GET", item.cover_image.large, follow_redirects=True
|
||||
) as response:
|
||||
response.raise_for_status()
|
||||
with AtomicWriter(image_path, "wb", encoding=None) as f:
|
||||
for chunk in response.iter_bytes():
|
||||
f.write(chunk)
|
||||
return str(image_path)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to download image {item.cover_image.large}: {e}")
|
||||
return ""
|
||||
|
||||
|
||||
def get_rofi_episode_preview(
|
||||
episodes: List[str], media_item: MediaItem, config: AppConfig
|
||||
) -> str:
|
||||
# Ensure cache directories exist on startup
|
||||
IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
return (
|
||||
"".join(
|
||||
[
|
||||
f"{episode}\0icon\x1f{_get_episode_image(episode, media_item)}\n"
|
||||
for episode in episodes
|
||||
]
|
||||
)
|
||||
+ "Back\nExit"
|
||||
)
|
||||
|
||||
|
||||
def _get_episode_image(episode: str, media_item: MediaItem) -> str:
|
||||
if media_item.streaming_episodes and media_item.streaming_episodes.get(episode):
|
||||
stream = media_item.streaming_episodes[episode]
|
||||
image_url = stream.thumbnail
|
||||
else:
|
||||
if not media_item.cover_image:
|
||||
return ""
|
||||
image_url = media_item.cover_image.large
|
||||
if not image_url:
|
||||
return ""
|
||||
|
||||
hash_id = sha256(
|
||||
f"{media_item.title.english}_Episode_{episode}".encode("utf-8")
|
||||
).hexdigest()
|
||||
image_path = IMAGES_CACHE_DIR / f"{hash_id}.png"
|
||||
|
||||
if image_path.exists():
|
||||
return str(image_path)
|
||||
|
||||
try:
|
||||
with httpx.stream("GET", image_url, follow_redirects=True) as response:
|
||||
response.raise_for_status()
|
||||
with AtomicWriter(image_path, "wb", encoding=None) as f:
|
||||
for chunk in response.iter_bytes():
|
||||
f.write(chunk)
|
||||
return str(image_path)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to download image {image_url} for {media_item.title.english}: {e}"
|
||||
)
|
||||
return ""
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
os.environ["SHELL"] = "bash"
|
||||
|
||||
PREVIEWS_CACHE_DIR = APP_CACHE_DIR / "previews"
|
||||
IMAGES_CACHE_DIR = PREVIEWS_CACHE_DIR / "images"
|
||||
INFO_CACHE_DIR = PREVIEWS_CACHE_DIR / "info"
|
||||
REVIEWS_CACHE_DIR = PREVIEWS_CACHE_DIR / "reviews"
|
||||
CHARACTERS_CACHE_DIR = PREVIEWS_CACHE_DIR / "characters"
|
||||
AIRING_SCHEDULE_CACHE_DIR = PREVIEWS_CACHE_DIR / "airing_schedule"
|
||||
|
||||
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
|
||||
TEMPLATE_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "preview.template.sh").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
TEMPLATE_REVIEW_PREVIEW_SCRIPT = (
|
||||
FZF_SCRIPTS_DIR / "review-preview.template.sh"
|
||||
).read_text(encoding="utf-8")
|
||||
TEMPLATE_CHARACTER_PREVIEW_SCRIPT = (
|
||||
FZF_SCRIPTS_DIR / "character-preview.template.sh"
|
||||
).read_text(encoding="utf-8")
|
||||
TEMPLATE_AIRING_SCHEDULE_PREVIEW_SCRIPT = (
|
||||
FZF_SCRIPTS_DIR / "airing-schedule-preview.template.sh"
|
||||
).read_text(encoding="utf-8")
|
||||
DYNAMIC_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "dynamic-preview.template.sh").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
|
||||
EPISODE_PATTERN = re.compile(r"^Episode\s+(\d+)\s-\s.*")
|
||||
|
||||
# Global preview worker manager instance
|
||||
_preview_manager: Optional[PreviewWorkerManager] = None
|
||||
|
||||
|
||||
def create_preview_context():
|
||||
"""
|
||||
Create a context manager for preview operations.
|
||||
|
||||
This can be used in menu functions to ensure proper cleanup:
|
||||
|
||||
```python
|
||||
with create_preview_context() as preview_ctx:
|
||||
preview_script = preview_ctx.get_anime_preview(items, titles, config)
|
||||
# ... use preview_script
|
||||
# Workers are automatically cleaned up here
|
||||
```
|
||||
|
||||
Returns:
|
||||
PreviewContext: A context manager for preview operations
|
||||
"""
|
||||
return PreviewContext()
|
||||
|
||||
|
||||
class PreviewContext:
|
||||
"""Context manager for preview operations with automatic cleanup."""
|
||||
|
||||
def __init__(self):
|
||||
self._manager = None
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
if self._manager:
|
||||
try:
|
||||
self._manager.shutdown_all(wait=False, timeout=3.0)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to cleanup preview context: {e}")
|
||||
|
||||
def get_anime_preview(
|
||||
self, items: List[MediaItem], titles: List[str], config: AppConfig
|
||||
) -> str:
|
||||
"""Get anime preview script with managed workers."""
|
||||
if not self._manager:
|
||||
self._manager = _get_preview_manager()
|
||||
return get_anime_preview(items, titles, config)
|
||||
|
||||
def get_episode_preview(
|
||||
self, episodes: List[str], media_item: MediaItem, config: AppConfig
|
||||
) -> str:
|
||||
"""Get episode preview script with managed workers."""
|
||||
if not self._manager:
|
||||
self._manager = _get_preview_manager()
|
||||
return get_episode_preview(episodes, media_item, config)
|
||||
|
||||
def get_dynamic_anime_preview(self, config: AppConfig) -> str:
|
||||
"""Get dynamic anime preview script for search functionality."""
|
||||
if not self._manager:
|
||||
self._manager = _get_preview_manager()
|
||||
return get_dynamic_anime_preview(config)
|
||||
|
||||
def get_review_preview(
|
||||
self, choice_map: Dict[str, MediaReview], config: AppConfig
|
||||
) -> str:
|
||||
"""Get review preview script with managed workers."""
|
||||
if not self._manager:
|
||||
self._manager = _get_preview_manager()
|
||||
return get_review_preview(choice_map, config)
|
||||
|
||||
def get_character_preview(
|
||||
self, choice_map: Dict[str, Character], config: AppConfig
|
||||
) -> str:
|
||||
"""Get character preview script with managed workers."""
|
||||
if not self._manager:
|
||||
self._manager = _get_preview_manager()
|
||||
return get_character_preview(choice_map, config)
|
||||
|
||||
def get_airing_schedule_preview(
|
||||
self,
|
||||
schedule_result: AiringScheduleResult,
|
||||
config: AppConfig,
|
||||
anime_title: str = "Anime",
|
||||
) -> str:
|
||||
"""Get airing schedule preview script with managed workers."""
|
||||
if not self._manager:
|
||||
self._manager = _get_preview_manager()
|
||||
return get_airing_schedule_preview(schedule_result, config, anime_title)
|
||||
|
||||
def cancel_all_tasks(self) -> int:
|
||||
"""Cancel all running preview tasks."""
|
||||
if not self._manager:
|
||||
return 0
|
||||
|
||||
cancelled = 0
|
||||
if self._manager._preview_worker:
|
||||
cancelled += self._manager._preview_worker.cancel_all_tasks()
|
||||
if self._manager._episode_worker:
|
||||
cancelled += self._manager._episode_worker.cancel_all_tasks()
|
||||
if self._manager._review_worker:
|
||||
cancelled += self._manager._review_worker.cancel_all_tasks()
|
||||
if self._manager._character_worker:
|
||||
cancelled += self._manager._character_worker.cancel_all_tasks()
|
||||
if self._manager._airing_schedule_worker:
|
||||
cancelled += self._manager._airing_schedule_worker.cancel_all_tasks()
|
||||
return cancelled
|
||||
|
||||
def get_status(self) -> dict:
|
||||
"""Get status of workers in this context."""
|
||||
if self._manager:
|
||||
return self._manager.get_status()
|
||||
return {
|
||||
"preview_worker": None,
|
||||
"episode_worker": None,
|
||||
"review_worker": None,
|
||||
"character_worker": None,
|
||||
"airing_schedule_worker": None,
|
||||
}
|
||||
|
||||
|
||||
def get_anime_preview(
    items: List[MediaItem], titles: List[str], config: AppConfig
) -> str:
    """
    Generate anime preview script and start background caching.

    Args:
        items: List of media items to preview
        titles: Corresponding titles for each media item
        config: Application configuration

    Returns:
        Preview script content for fzf
    """
    if config.general.selector == "rofi":
        return get_rofi_preview(items, titles, config)
# Ensure cache directories exist on startup
|
||||
IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
HEADER_COLOR = config.fzf.preview_header_color.split(",")
|
||||
SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
|
||||
|
||||
preview_script = TEMPLATE_PREVIEW_SCRIPT
|
||||
|
||||
# Start the managed background caching
|
||||
try:
|
||||
preview_manager = _get_preview_manager()
|
||||
worker = preview_manager.get_preview_worker()
|
||||
worker.cache_anime_previews(items, titles, config)
|
||||
logger.debug("Started background caching for anime previews")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to start background caching: {e}")
|
||||
# Continue with script generation even if caching fails
|
||||
|
||||
# Prepare values to inject into the template
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Format the template with the dynamic values
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
|
||||
"INFO_CACHE_PATH": str(INFO_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"IMAGE_RENDERER": config.general.image_renderer,
|
||||
# Color codes
|
||||
"C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
|
||||
"RESET": ansi.RESET,
|
||||
"PREFIX": "",
|
||||
"SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
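The template substitution above is plain string replacement of `{KEY}` placeholders. A minimal illustration with a stand-in template (not the real preview.template.sh contents):

```python
template = 'echo "{C_TITLE}{TITLE}{RESET}"'
replacements = {"C_TITLE": "\033[1m", "TITLE": "Example", "RESET": "\033[0m"}

# Each {KEY} placeholder is replaced literally with its value
for key, value in replacements.items():
    template = template.replace(f"{{{key}}}", value)

# template is now: echo "\x1b[1mExample\x1b[0m"
```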
|
||||
|
||||
|
||||
def get_episode_preview(
|
||||
episodes: List[str], media_item: MediaItem, config: AppConfig
|
||||
) -> str:
|
||||
"""
|
||||
Generate episode preview script and start background caching.
|
||||
|
||||
Args:
|
||||
episodes: List of episode identifiers
|
||||
media_item: Media item containing episode data
|
||||
config: Application configuration
|
||||
|
||||
Returns:
|
||||
Preview script content for fzf
|
||||
"""
|
||||
if config.general.selector == "rofi":
|
||||
return get_rofi_episode_preview(episodes, media_item, config)
|
||||
IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
HEADER_COLOR = config.fzf.preview_header_color.split(",")
|
||||
SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
|
||||
|
||||
preview_script = TEMPLATE_PREVIEW_SCRIPT
|
||||
|
||||
# Start managed background caching for episodes
|
||||
try:
|
||||
preview_manager = _get_preview_manager()
|
||||
worker = preview_manager.get_episode_worker()
|
||||
worker.cache_episode_previews(episodes, media_item, config)
|
||||
logger.debug("Started background caching for episode previews")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to start episode background caching: {e}")
|
||||
# Continue with script generation even if caching fails
|
||||
|
||||
# Prepare values to inject into the template
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Format the template with the dynamic values
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
|
||||
"INFO_CACHE_PATH": str(INFO_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"IMAGE_RENDERER": config.general.image_renderer,
|
||||
# Color codes
|
||||
"C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
|
||||
"RESET": ansi.RESET,
|
||||
"PREFIX": f"{media_item.title.english}_Episode_",
|
||||
"SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
|
||||
|
||||
|
||||
def get_dynamic_anime_preview(config: AppConfig) -> str:
|
||||
"""
|
||||
Generate dynamic anime preview script for search functionality.
|
||||
|
||||
This is different from regular anime preview because:
|
||||
1. We don't have media items upfront
|
||||
2. The preview needs to work with search results as they come in
|
||||
3. Preview is handled entirely in shell by parsing JSON results
|
||||
|
||||
Args:
|
||||
config: Application configuration
|
||||
|
||||
Returns:
|
||||
Preview script content for fzf dynamic search
|
||||
"""
|
||||
# Ensure cache directories exist
|
||||
IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
HEADER_COLOR = config.fzf.preview_header_color.split(",")
|
||||
SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
|
||||
|
||||
# Use the dynamic preview script template
|
||||
preview_script = DYNAMIC_PREVIEW_SCRIPT
|
||||
|
||||
search_cache_dir = APP_CACHE_DIR / "search"
|
||||
search_results_file = search_cache_dir / "current_search_results.json"
|
||||
|
||||
# Prepare values to inject into the template
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Format the template with the dynamic values
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
|
||||
"INFO_CACHE_PATH": str(INFO_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"IMAGE_RENDERER": config.general.image_renderer,
|
||||
"SEARCH_RESULTS_FILE": str(search_results_file),
|
||||
# Color codes
|
||||
"C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
|
||||
"RESET": ansi.RESET,
|
||||
"SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
|
||||
|
||||
|
||||
def _get_preview_manager() -> PreviewWorkerManager:
|
||||
"""Get or create the global preview worker manager."""
|
||||
global _preview_manager
|
||||
if _preview_manager is None:
|
||||
_preview_manager = PreviewWorkerManager(
|
||||
IMAGES_CACHE_DIR, INFO_CACHE_DIR, REVIEWS_CACHE_DIR
|
||||
)
|
||||
return _preview_manager
|
||||
|
||||
|
||||
def shutdown_preview_workers(wait: bool = True, timeout: Optional[float] = 5.0) -> None:
|
||||
"""
|
||||
Shutdown all preview workers.
|
||||
|
||||
Args:
|
||||
wait: Whether to wait for tasks to complete
|
||||
timeout: Maximum time to wait for shutdown
|
||||
"""
|
||||
global _preview_manager
|
||||
if _preview_manager:
|
||||
_preview_manager.shutdown_all(wait=wait, timeout=timeout)
|
||||
_preview_manager = None
|
||||
|
||||
|
||||
def get_preview_worker_status() -> dict:
|
||||
"""Get status of all preview workers."""
|
||||
global _preview_manager
|
||||
if _preview_manager:
|
||||
return _preview_manager.get_status()
|
||||
return {"preview_worker": None, "episode_worker": None}
|
||||
|
||||
|
||||
def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) -> str:
|
||||
"""
|
||||
Generate the generic loader script for review previews and start background caching.
|
||||
"""
|
||||
|
||||
REVIEWS_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
preview_manager = _get_preview_manager()
|
||||
worker = preview_manager.get_review_worker()
|
||||
worker.cache_review_previews(choice_map, config)
|
||||
logger.debug("Started background caching for review previews")
|
||||
|
||||
# Use the generic loader script
|
||||
preview_script = TEMPLATE_REVIEW_PREVIEW_SCRIPT
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Inject the correct cache path and color codes
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"INFO_CACHE_DIR": str(REVIEWS_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_RULE": ansi.get_true_fg(
|
||||
config.fzf.preview_separator_color.split(","), bold=True
|
||||
),
|
||||
"RESET": ansi.RESET,
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
|
||||
|
||||
|
||||
def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -> str:
|
||||
"""
|
||||
Generate the generic loader script for character previews and start background caching.
|
||||
"""
|
||||
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
preview_manager = _get_preview_manager()
|
||||
worker = preview_manager.get_character_worker()
|
||||
worker.cache_character_previews(choice_map, config)
|
||||
logger.debug("Started background caching for character previews")
|
||||
|
||||
# Use the generic loader script
|
||||
preview_script = TEMPLATE_CHARACTER_PREVIEW_SCRIPT
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Inject the correct cache path and color codes
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"INFO_CACHE_DIR": str(INFO_CACHE_DIR),
|
||||
"IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_RULE": ansi.get_true_fg(
|
||||
config.fzf.preview_separator_color.split(","), bold=True
|
||||
),
|
||||
"RESET": ansi.RESET,
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
|
||||
|
||||
|
||||
def get_airing_schedule_preview(
|
||||
schedule_result: AiringScheduleResult, config: AppConfig, anime_title: str = "Anime"
|
||||
) -> str:
|
||||
"""
|
||||
Generate the generic loader script for airing schedule previews and start background caching.
|
||||
"""
|
||||
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
preview_manager = _get_preview_manager()
|
||||
worker = preview_manager.get_airing_schedule_worker()
|
||||
worker.cache_airing_schedule_preview(anime_title, schedule_result, config)
|
||||
logger.debug("Started background caching for airing schedule previews")
|
||||
|
||||
# Use the generic loader script
|
||||
preview_script = TEMPLATE_AIRING_SCHEDULE_PREVIEW_SCRIPT
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Inject the correct cache path and color codes
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"INFO_CACHE_DIR": str(INFO_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
|
||||
"C_RULE": ansi.get_true_fg(
|
||||
config.fzf.preview_separator_color.split(","), bold=True
|
||||
),
|
||||
"RESET": ansi.RESET,
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
|
||||
viu_cli/cli/utils/preview_workers.py (new file, 900 lines)
@@ -0,0 +1,900 @@
|
||||
"""
|
||||
Preview-specific background workers for caching anime and episode data.
|
||||
|
||||
This module provides specialized workers for handling anime preview caching,
|
||||
including image downloads and info text generation with proper lifecycle management.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from ...core.config import AppConfig
|
||||
from ...core.constants import SCRIPTS_DIR
|
||||
from ...core.utils import formatter
|
||||
from ...core.utils.concurrency import (
|
||||
ManagedBackgroundWorker,
|
||||
WorkerTask,
|
||||
thread_manager,
|
||||
)
|
||||
from ...core.utils.file import AtomicWriter
|
||||
from ...libs.media_api.types import (
|
||||
AiringScheduleResult,
|
||||
Character,
|
||||
MediaItem,
|
||||
MediaReview,
|
||||
)
|
||||
from . import image
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
|
||||
TEMPLATE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "info.template.sh").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
TEMPLATE_EPISODE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "episode-info.template.sh").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
TEMPLATE_REVIEW_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "review-info.template.sh").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
TEMPLATE_CHARACTER_INFO_SCRIPT = (
|
||||
FZF_SCRIPTS_DIR / "character-info.template.sh"
|
||||
).read_text(encoding="utf-8")
|
||||
TEMPLATE_AIRING_SCHEDULE_INFO_SCRIPT = (
|
||||
FZF_SCRIPTS_DIR / "airing-schedule-info.template.sh"
|
||||
).read_text(encoding="utf-8")
|
||||
|
||||
|
||||
class PreviewCacheWorker(ManagedBackgroundWorker):
|
||||
"""
|
||||
Specialized background worker for caching anime preview data.
|
||||
|
||||
Handles downloading images and generating info text for anime previews
|
||||
with proper error handling and resource management.
|
||||
"""
|
||||
|
||||
def __init__(self, images_cache_dir, info_cache_dir, max_workers: int = 10):
|
||||
"""
|
||||
Initialize the preview cache worker.
|
||||
|
||||
Args:
|
||||
images_cache_dir: Directory to cache images
|
||||
info_cache_dir: Directory to cache info text
|
||||
max_workers: Maximum number of concurrent workers
|
||||
"""
|
||||
super().__init__(max_workers=max_workers, name="PreviewCacheWorker")
|
||||
self.images_cache_dir = images_cache_dir
|
||||
self.info_cache_dir = info_cache_dir
|
||||
self._http_client: Optional[httpx.Client] = None
|
||||
|
||||
def start(self) -> None:
|
||||
"""Start the worker and initialize HTTP client."""
|
||||
super().start()
|
||||
self._http_client = httpx.Client(
|
||||
timeout=20.0,
|
||||
follow_redirects=True,
|
||||
limits=httpx.Limits(max_connections=self.max_workers),
|
||||
)
|
||||
logger.debug("PreviewCacheWorker HTTP client initialized")
|
||||
|
||||
def shutdown(self, wait: bool = True, timeout: Optional[float] = 30.0) -> None:
|
||||
"""Shutdown the worker and cleanup HTTP client."""
|
||||
super().shutdown(wait=wait, timeout=timeout)
|
||||
if self._http_client:
|
||||
self._http_client.close()
|
||||
self._http_client = None
|
||||
logger.debug("PreviewCacheWorker HTTP client closed")
|
||||
|
||||
def cache_anime_previews(
|
||||
self, media_items: List[MediaItem], titles: List[str], config: AppConfig
|
||||
) -> None:
|
||||
"""
|
||||
Cache preview data for multiple anime items.
|
||||
|
||||
Args:
|
||||
media_items: List of media items to cache
|
||||
titles: Corresponding titles for each media item
|
||||
config: Application configuration
|
||||
"""
|
||||
if not self.is_running():
|
||||
raise RuntimeError("PreviewCacheWorker is not running")
|
||||
|
||||
for media_item, title_str in zip(media_items, titles):
|
||||
hash_id = self._get_cache_hash(title_str)
|
||||
|
||||
# Submit image download task if needed
|
||||
if config.general.preview in ("full", "image") and media_item.cover_image:
|
||||
image_path = self.images_cache_dir / f"{hash_id}.png"
|
||||
if not image_path.exists():
|
||||
self.submit_function(
|
||||
self._download_and_save_image,
|
||||
media_item.cover_image.large,
|
||||
hash_id,
|
||||
)
|
||||
|
||||
# Submit info generation task if needed
|
||||
if config.general.preview in ("full", "text"):
|
||||
info_text = self._generate_info_text(media_item, config)
|
||||
self.submit_function(self._save_info_text, info_text, hash_id)
|
||||
|
||||
def _download_and_save_image(self, url: str, hash_id: str) -> None:
|
||||
"""Download an image and save it to cache."""
|
||||
if not self._http_client:
|
||||
raise RuntimeError("HTTP client not initialized")
|
||||
|
||||
image_path = self.images_cache_dir / f"{hash_id}.png"
|
||||
|
||||
try:
|
||||
with self._http_client.stream("GET", url) as response:
|
||||
response.raise_for_status()
|
||||
|
||||
with AtomicWriter(image_path, "wb", encoding=None) as f:
|
||||
for chunk in response.iter_bytes():
|
||||
f.write(chunk)
|
||||
|
||||
logger.debug(f"Successfully cached image: {hash_id}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to download image {url}: {e}")
|
||||
raise
|
||||
|
||||
def _generate_info_text(self, media_item: MediaItem, config: AppConfig) -> str:
|
||||
"""Generate formatted info text for a media item."""
|
||||
# Import here to avoid circular imports
|
||||
info_script = TEMPLATE_INFO_SCRIPT
|
||||
description = formatter.clean_html(
|
||||
media_item.description or "No description available."
|
||||
)
|
||||
|
||||
# Escape all variables before injecting them into the script
|
||||
replacements = {
|
||||
"TITLE": formatter.shell_safe(
|
||||
media_item.title.english or media_item.title.romaji
|
||||
),
|
||||
"STATUS": formatter.shell_safe(media_item.status.value),
|
||||
"FORMAT": formatter.shell_safe(
|
||||
media_item.format.value if media_item.format else "UNKNOWN"
|
||||
),
|
||||
"NEXT_EPISODE": formatter.shell_safe(
|
||||
f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X)')}"
|
||||
if media_item.next_airing
|
||||
else "N/A"
|
||||
),
|
||||
"EPISODES": formatter.shell_safe(str(media_item.episodes)),
|
||||
"DURATION": formatter.shell_safe(
|
||||
formatter.format_media_duration(media_item.duration)
|
||||
),
|
||||
"SCORE": formatter.shell_safe(
|
||||
formatter.format_score_stars_full(media_item.average_score)
|
||||
),
|
||||
"FAVOURITES": formatter.shell_safe(
|
||||
formatter.format_number_with_commas(media_item.favourites)
|
||||
),
|
||||
"POPULARITY": formatter.shell_safe(
|
||||
formatter.format_number_with_commas(media_item.popularity)
|
||||
),
|
||||
"GENRES": formatter.shell_safe(
|
||||
formatter.format_list_with_commas([v.value for v in media_item.genres])
|
||||
),
|
||||
"TAGS": formatter.shell_safe(
|
||||
formatter.format_list_with_commas(
|
||||
[t.name.value for t in media_item.tags]
|
||||
)
|
||||
),
|
||||
"STUDIOS": formatter.shell_safe(
|
||||
formatter.format_list_with_commas(
|
||||
[t.name for t in media_item.studios if t.name]
|
||||
)
|
||||
),
|
||||
"SYNONYMNS": formatter.shell_safe(
|
||||
formatter.format_list_with_commas(media_item.synonymns)
|
||||
),
|
||||
"USER_STATUS": formatter.shell_safe(
|
||||
media_item.user_status.status.value
|
||||
if media_item.user_status and media_item.user_status.status
|
||||
else "NOT_ON_LIST"
|
||||
),
|
||||
"USER_PROGRESS": formatter.shell_safe(
|
||||
f"Episode {media_item.user_status.progress}"
|
||||
if media_item.user_status
|
||||
else "0"
|
||||
),
|
||||
"START_DATE": formatter.shell_safe(
|
||||
formatter.format_date(media_item.start_date)
|
||||
),
|
||||
"END_DATE": formatter.shell_safe(
|
||||
formatter.format_date(media_item.end_date)
|
||||
),
|
||||
"SYNOPSIS": formatter.shell_safe(description),
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
info_script = info_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return info_script
|
||||
|
||||
def _save_info_text(self, info_text: str, hash_id: str) -> None:
|
||||
"""Save info text to cache."""
|
||||
try:
|
||||
info_path = self.info_cache_dir / hash_id
|
||||
with AtomicWriter(info_path) as f:
|
||||
f.write(info_text)
|
||||
logger.debug(f"Successfully cached info: {hash_id}")
|
||||
except IOError as e:
|
||||
logger.error(f"Failed to write info cache for {hash_id}: {e}")
|
||||
raise
|
||||
|
||||
def _get_cache_hash(self, text: str) -> str:
|
||||
"""Generate a cache hash for the given text."""
|
||||
from hashlib import sha256
|
||||
|
||||
return sha256(text.encode("utf-8")).hexdigest()
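The cache key is simply the SHA-256 hex digest of the displayed title, so the fzf preview script can recompute the same file names from the selected line. A small illustration (the title is a placeholder):

```python
from hashlib import sha256

title = "Example Anime"
hash_id = sha256(title.encode("utf-8")).hexdigest()
print(f"image file: {hash_id}.png")   # saved under the images cache dir
print(f"info file:  {hash_id}")       # saved under the info cache dir
```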
|
||||
|
||||
def _on_task_completed(self, task: WorkerTask, future) -> None:
|
||||
"""Handle task completion with enhanced logging."""
|
||||
super()._on_task_completed(task, future)
|
||||
|
||||
if future.exception():
|
||||
logger.warning(f"Preview cache task failed: {future.exception()}")
|
||||
else:
|
||||
logger.debug("Preview cache task completed successfully")
|
||||
|
||||
|
||||
class EpisodeCacheWorker(ManagedBackgroundWorker):
|
||||
"""
|
||||
Specialized background worker for caching episode preview data.
|
||||
|
||||
Handles episode-specific caching including thumbnails and episode info
|
||||
with proper error handling and resource management.
|
||||
"""
|
||||
|
||||
def __init__(self, images_cache_dir, info_cache_dir, max_workers: int = 5):
|
||||
"""
|
||||
Initialize the episode cache worker.
|
||||
|
||||
Args:
|
||||
images_cache_dir: Directory to cache images
|
||||
info_cache_dir: Directory to cache info text
|
||||
max_workers: Maximum number of concurrent workers
|
||||
"""
|
||||
super().__init__(max_workers=max_workers, name="EpisodeCacheWorker")
|
||||
self.images_cache_dir = images_cache_dir
|
||||
self.info_cache_dir = info_cache_dir
|
||||
self._http_client: Optional[httpx.Client] = None
|
||||
|
||||
def start(self) -> None:
|
||||
"""Start the worker and initialize HTTP client."""
|
||||
super().start()
|
||||
self._http_client = httpx.Client(
|
||||
timeout=20.0,
|
||||
follow_redirects=True,
|
||||
limits=httpx.Limits(max_connections=self.max_workers),
|
||||
)
|
||||
logger.debug("EpisodeCacheWorker HTTP client initialized")
|
||||
|
||||
def shutdown(self, wait: bool = True, timeout: Optional[float] = 30.0) -> None:
|
||||
"""Shutdown the worker and cleanup HTTP client."""
|
||||
super().shutdown(wait=wait, timeout=timeout)
|
||||
if self._http_client:
|
||||
self._http_client.close()
|
||||
self._http_client = None
|
||||
logger.debug("EpisodeCacheWorker HTTP client closed")
|
||||
|
||||
def cache_episode_previews(
|
||||
self, episodes: List[str], media_item: MediaItem, config: AppConfig
|
||||
) -> None:
|
||||
"""
|
||||
Cache preview data for multiple episodes.
|
||||
|
||||
Args:
|
||||
episodes: List of episode identifiers
|
||||
media_item: Media item containing episode data
|
||||
config: Application configuration
|
||||
"""
|
||||
if not self.is_running():
|
||||
raise RuntimeError("EpisodeCacheWorker is not running")
|
||||
|
||||
streaming_episodes = media_item.streaming_episodes
|
||||
|
||||
for episode_str in episodes:
|
||||
hash_id = self._get_cache_hash(
|
||||
f"{media_item.title.english}_Episode_{episode_str}"
|
||||
)
|
||||
|
||||
# Find episode data
|
||||
episode_data = streaming_episodes.get(episode_str)
|
||||
title = episode_data.title if episode_data else f"Episode {episode_str}"
|
||||
thumbnail = None
|
||||
|
||||
if episode_data and episode_data.thumbnail:
|
||||
thumbnail = episode_data.thumbnail
|
||||
elif media_item.cover_image:
|
||||
thumbnail = media_item.cover_image.large
|
||||
|
||||
# Submit thumbnail download task
|
||||
if thumbnail:
|
||||
self.submit_function(self._download_and_save_image, thumbnail, hash_id)
|
||||
|
||||
# Submit episode info generation task
|
||||
episode_info = self._generate_episode_info(config, title, media_item)
|
||||
self.submit_function(self._save_info_text, episode_info, hash_id)
|
||||
|
||||
def _download_and_save_image(self, url: str, hash_id: str) -> None:
|
||||
"""Download an image and save it to cache."""
|
||||
if not self._http_client:
|
||||
raise RuntimeError("HTTP client not initialized")
|
||||
|
||||
image_path = self.images_cache_dir / f"{hash_id}.png"
|
||||
|
||||
try:
|
||||
with self._http_client.stream("GET", url) as response:
|
||||
response.raise_for_status()
|
||||
|
||||
with AtomicWriter(image_path, "wb", encoding=None) as f:
|
||||
for chunk in response.iter_bytes():
|
||||
f.write(chunk)
|
||||
|
||||
logger.debug(f"Successfully cached episode image: {hash_id}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to download episode image {url}: {e}")
|
||||
raise
|
||||
|
||||
def _generate_episode_info(
|
||||
self, config: AppConfig, title: str, media_item: MediaItem
|
||||
) -> str:
|
||||
"""Generate formatted episode info text."""
|
||||
episode_info_script = TEMPLATE_EPISODE_INFO_SCRIPT
|
||||
|
||||
replacements = {
|
||||
"TITLE": formatter.shell_safe(title),
|
||||
"NEXT_EPISODE": formatter.shell_safe(
|
||||
f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X)')}"
|
||||
if media_item.next_airing
|
||||
else "N/A"
|
||||
),
|
||||
"DURATION": formatter.format_media_duration(media_item.duration),
|
||||
"STATUS": formatter.shell_safe(media_item.status.value),
|
||||
"EPISODES": formatter.shell_safe(str(media_item.episodes)),
|
||||
"USER_STATUS": formatter.shell_safe(
|
||||
media_item.user_status.status.value
|
||||
if media_item.user_status and media_item.user_status.status
|
||||
else "NOT_ON_LIST"
|
||||
),
|
||||
"USER_PROGRESS": formatter.shell_safe(
|
||||
f"Episode {media_item.user_status.progress}"
|
||||
if media_item.user_status
|
||||
else "0"
|
||||
),
|
||||
"START_DATE": formatter.shell_safe(
|
||||
formatter.format_date(media_item.start_date)
|
||||
),
|
||||
"END_DATE": formatter.shell_safe(
|
||||
formatter.format_date(media_item.end_date)
|
||||
),
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
episode_info_script = episode_info_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return episode_info_script
|
||||
|
||||
def _save_info_text(self, info_text: str, hash_id: str) -> None:
|
||||
"""Save episode info text to cache."""
|
||||
try:
|
||||
info_path = self.info_cache_dir / hash_id
|
||||
with AtomicWriter(info_path) as f:
|
||||
f.write(info_text)
|
||||
logger.debug(f"Successfully cached episode info: {hash_id}")
|
||||
except IOError as e:
|
||||
logger.error(f"Failed to write episode info cache for {hash_id}: {e}")
|
||||
raise
|
||||
|
||||
def _get_cache_hash(self, text: str) -> str:
|
||||
"""Generate a cache hash for the given text."""
|
||||
from hashlib import sha256
|
||||
|
||||
return sha256(text.encode("utf-8")).hexdigest()
|
||||
|
||||
def _on_task_completed(self, task: WorkerTask, future) -> None:
|
||||
"""Handle task completion with enhanced logging."""
|
||||
super()._on_task_completed(task, future)
|
||||
|
||||
if future.exception():
|
||||
logger.warning(f"Episode cache task failed: {future.exception()}")
|
||||
else:
|
||||
logger.debug("Episode cache task completed successfully")
|
||||
|
||||
|
||||
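# Illustrative lifecycle for the worker above (a minimal sketch, not part of the
# module; it assumes an existing MediaItem `media_item`, an AppConfig `config`,
# and pathlib.Path cache directories):
#
#     worker = EpisodeCacheWorker(images_cache_dir, info_cache_dir)
#     worker.start()
#     worker.cache_episode_previews(["1", "2", "3"], media_item, config)
#     worker.shutdown(wait=True)
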
class ReviewCacheWorker(ManagedBackgroundWorker):
    """
    Specialized background worker for caching fully-rendered media review previews.
    """

    def __init__(self, reviews_cache_dir, max_workers: int = 10):
        super().__init__(max_workers=max_workers, name="ReviewCacheWorker")
        self.reviews_cache_dir = reviews_cache_dir

    def cache_review_previews(
        self, choice_map: Dict[str, MediaReview], config: AppConfig
    ) -> None:
        """
        Creates cache files containing the final, formatted preview content for each review.

        Args:
            choice_map: Dictionary mapping the fzf choice string to the MediaReview object.
            config: The application configuration.
        """
        if not self.is_running():
            raise RuntimeError("ReviewCacheWorker is not running")

        for choice_str, review in choice_map.items():
            hash_id = self._get_cache_hash(choice_str)

            preview_content = self._generate_review_preview_content(review, config)
            self.submit_function(self._save_preview_content, preview_content, hash_id)

    def _generate_review_preview_content(
        self, review: MediaReview, config: AppConfig
    ) -> str:
        """
        Generates the final, formatted preview content by injecting data into the template.
        """
        # Prepare the data for injection
        reviewer = review.user.name
        summary = review.summary or "N/A"
        body = review.body

        # Inject data into the presentation template
        template = TEMPLATE_REVIEW_INFO_SCRIPT
        replacements = {
            "REVIEWER_NAME": formatter.shell_safe(reviewer),
            "REVIEW_SUMMARY": formatter.shell_safe(summary),
            "REVIEW_BODY": formatter.shell_safe(body),
        }
        for key, value in replacements.items():
            template = template.replace(f"{{{key}}}", value)

        return template

    def _save_preview_content(self, content: str, hash_id: str) -> None:
        """Saves the final preview content to the cache."""
        try:
            info_path = self.reviews_cache_dir / hash_id
            with AtomicWriter(info_path) as f:
                f.write(content)
            logger.debug(f"Successfully cached review preview: {hash_id}")
        except IOError as e:
            logger.error(f"Failed to write review preview cache for {hash_id}: {e}")
            raise

    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
        if future.exception():
            logger.warning(f"Review cache task failed: {future.exception()}")

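# Note on the preview templates used throughout this module (non-authoritative
# aside): each TEMPLATE_*_INFO_SCRIPT is assumed to contain literal "{KEY}"
# placeholders, which the workers fill with shell-safe values before writing the
# script to the cache. The template below is purely hypothetical:
#
#     template = 'echo "Reviewed by {REVIEWER_NAME}"'
#     template = template.replace("{REVIEWER_NAME}", formatter.shell_safe("someuser"))
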
class CharacterCacheWorker(ManagedBackgroundWorker):
    """
    Specialized background worker for caching character preview data.
    """

    def __init__(self, characters_cache_dir, image_cache_dir, max_workers: int = 10):
        super().__init__(max_workers=max_workers, name="CharacterCacheWorker")
        self.characters_cache_dir = characters_cache_dir
        self.image_cache_dir = image_cache_dir

        self._http_client: Optional[httpx.Client] = None

    def start(self) -> None:
        """Start the worker and initialize the HTTP client."""
        super().start()
        self._http_client = httpx.Client(
            timeout=20.0,
            follow_redirects=True,
            limits=httpx.Limits(max_connections=self.max_workers),
        )
        logger.debug("CharacterCacheWorker HTTP client initialized")

    def cache_character_previews(
        self, choice_map: Dict[str, Character], config: AppConfig
    ) -> None:
        """
        Creates cache files containing the final, formatted preview content for each character.

        Args:
            choice_map: Dictionary mapping the fzf choice string to the Character object.
            config: The application configuration.
        """
        if not self.is_running():
            raise RuntimeError("CharacterCacheWorker is not running")

        for choice_str, character in choice_map.items():
            hash_id = self._get_cache_hash(choice_str)

            preview_content = self._generate_character_preview_content(
                character, config
            )
            # NOTE: Disabled due to issue of the text overlapping with the image
            if (
                character.image
                and (character.image.medium or character.image.large)
                and False
            ):
                image_url = character.image.medium or character.image.large
                self.submit_function(self._download_and_save_image, image_url, hash_id)
            self.submit_function(self._save_preview_content, preview_content, hash_id)

    def _generate_character_preview_content(
        self, character: Character, config: AppConfig
    ) -> str:
        """
        Generates the final, formatted preview content by injecting character data into the template.
        """
        character_name = (
            character.name.full or character.name.first or "Unknown Character"
        )
        native_name = character.name.native or "N/A"
        gender = character.gender or "Unknown"
        age = str(character.age) if character.age else "Unknown"
        blood_type = character.blood_type or "N/A"
        favourites = f"{character.favourites:,}" if character.favourites else "0"
        birthday = (
            character.date_of_birth.strftime("%B %d, %Y")
            if character.date_of_birth
            else "N/A"
        )

        # Clean and format description
        description = character.description or "No description available"
        if description:
            description = formatter.clean_html(description)

        # Inject data into the presentation template
        template = TEMPLATE_CHARACTER_INFO_SCRIPT
        replacements = {
            "CHARACTER_NAME": formatter.shell_safe(character_name),
            "CHARACTER_NATIVE_NAME": formatter.shell_safe(native_name),
            "CHARACTER_GENDER": formatter.shell_safe(gender),
            "CHARACTER_AGE": formatter.shell_safe(age),
            "CHARACTER_BLOOD_TYPE": formatter.shell_safe(blood_type),
            "CHARACTER_BIRTHDAY": formatter.shell_safe(birthday),
            "CHARACTER_FAVOURITES": formatter.shell_safe(favourites),
            "CHARACTER_DESCRIPTION": formatter.shell_safe(description),
        }
        for key, value in replacements.items():
            template = template.replace(f"{{{key}}}", value)

        return template

    def _download_and_save_image(self, url: str, hash_id: str) -> None:
        """Download an image and save it to the cache."""
        if not self._http_client:
            raise RuntimeError("HTTP client not initialized")

        image_path = self.image_cache_dir / f"{hash_id}.png"

        try:
            if img_bytes := image.resize_image_from_url(
                self._http_client, url, 300, 300
            ):
                with AtomicWriter(image_path, "wb", encoding=None) as f:
                    f.write(img_bytes)

                logger.debug(f"Successfully cached image: {hash_id}")

        except Exception as e:
            logger.error(f"Failed to download image {url}: {e}")
            raise

    def _save_preview_content(self, content: str, hash_id: str) -> None:
        """Saves the final preview content to the cache."""
        try:
            info_path = self.characters_cache_dir / hash_id
            with AtomicWriter(info_path) as f:
                f.write(content)
            logger.debug(f"Successfully cached character preview: {hash_id}")
        except IOError as e:
            logger.error(f"Failed to write character preview cache for {hash_id}: {e}")
            raise

    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
        if future.exception():
            logger.warning(f"Character cache task failed: {future.exception()}")

class AiringScheduleCacheWorker(ManagedBackgroundWorker):
    """
    Specialized background worker for caching airing schedule preview data.
    """

    def __init__(self, airing_schedule_cache_dir, max_workers: int = 10):
        super().__init__(max_workers=max_workers, name="AiringScheduleCacheWorker")
        self.airing_schedule_cache_dir = airing_schedule_cache_dir

    def cache_airing_schedule_preview(
        self, anime_title: str, schedule_result: AiringScheduleResult, config: AppConfig
    ) -> None:
        """
        Creates cache files containing the final, formatted preview content for the airing schedule.

        Args:
            anime_title: The title of the anime
            schedule_result: The airing schedule result object
            config: The application configuration.
        """
        if not self.is_running():
            raise RuntimeError("AiringScheduleCacheWorker is not running")

        hash_id = self._get_cache_hash(anime_title)

        preview_content = self._generate_airing_schedule_preview_content(
            anime_title, schedule_result, config
        )
        self.submit_function(self._save_preview_content, preview_content, hash_id)

    def _generate_airing_schedule_preview_content(
        self, anime_title: str, schedule_result: AiringScheduleResult, config: AppConfig
    ) -> str:
        """
        Generates the final, formatted preview content by injecting schedule data into the template.
        """
        from datetime import datetime

        total_episodes = len(schedule_result.schedule_items)
        upcoming_episodes = sum(
            1
            for ep in schedule_result.schedule_items
            if ep.airing_at and ep.airing_at > datetime.now()
        )

        # Generate schedule table text
        schedule_lines = []
        sorted_episodes = sorted(
            schedule_result.schedule_items, key=lambda x: x.episode
        )

        for episode in sorted_episodes[:10]:  # Show the first 10 episodes
            ep_num = str(episode.episode)

            if episode.airing_at:
                formatted_date = episode.airing_at.strftime("%Y-%m-%d %H:%M")
                now = datetime.now()
                if episode.airing_at < now:
                    status = "Aired"
                else:
                    status = "Upcoming"
            else:
                formatted_date = "Unknown"
                status = "TBA"

            # Format time until airing
            if episode.time_until_airing and episode.time_until_airing > 0:
                time_until = episode.time_until_airing
                days = time_until // 86400
                hours = (time_until % 86400) // 3600
                if days > 0:
                    time_str = f"{days}d {hours}h"
                elif hours > 0:
                    time_str = f"{hours}h"
                else:
                    time_str = "<1h"
            elif episode.airing_at and episode.airing_at < datetime.now():
                time_str = "Aired"
            else:
                time_str = "Unknown"

            schedule_lines.append(
                f"Episode {ep_num:>3}: {formatted_date} ({time_str}) - {status}"
            )

        schedule_table = "\n".join(schedule_lines)

        # Inject data into the presentation template
        template = TEMPLATE_AIRING_SCHEDULE_INFO_SCRIPT
        replacements = {
            "ANIME_TITLE": formatter.shell_safe(anime_title),
            "TOTAL_EPISODES": formatter.shell_safe(str(total_episodes)),
            "UPCOMING_EPISODES": formatter.shell_safe(str(upcoming_episodes)),
            "SCHEDULE_TABLE": formatter.shell_safe(schedule_table),
        }
        for key, value in replacements.items():
            template = template.replace(f"{{{key}}}", value)

        return template

    def _save_preview_content(self, content: str, hash_id: str) -> None:
        """Saves the final preview content to the cache."""
        try:
            info_path = self.airing_schedule_cache_dir / hash_id
            with AtomicWriter(info_path) as f:
                f.write(content)
            logger.debug(f"Successfully cached airing schedule preview: {hash_id}")
        except IOError as e:
            logger.error(
                f"Failed to write airing schedule preview cache for {hash_id}: {e}"
            )
            raise

    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
        if future.exception():
            logger.warning(f"Airing schedule cache task failed: {future.exception()}")

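# Worked example for the time_until_airing formatting above (assuming the field is
# a countdown in seconds): 90061 seconds -> days = 90061 // 86400 = 1,
# hours = (90061 % 86400) // 3600 = 1, so the schedule line shows "1d 1h".
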
class PreviewWorkerManager:
    """
    High-level manager for preview caching workers.

    Provides a simple interface for managing the preview, episode, review,
    character, and airing schedule caching workers with automatic lifecycle
    management.
    """

    def __init__(self, images_cache_dir, info_cache_dir, reviews_cache_dir):
        """
        Initialize the preview worker manager.

        Args:
            images_cache_dir: Directory to cache images
            info_cache_dir: Directory to cache info text
            reviews_cache_dir: Directory to cache reviews
        """
        self.images_cache_dir = images_cache_dir
        self.info_cache_dir = info_cache_dir
        self.reviews_cache_dir = reviews_cache_dir
        self._preview_worker: Optional[PreviewCacheWorker] = None
        self._episode_worker: Optional[EpisodeCacheWorker] = None
        self._review_worker: Optional[ReviewCacheWorker] = None
        self._character_worker: Optional[CharacterCacheWorker] = None
        self._airing_schedule_worker: Optional[AiringScheduleCacheWorker] = None

    def get_preview_worker(self) -> PreviewCacheWorker:
        """Get or create the preview cache worker."""
        if self._preview_worker is None or not self._preview_worker.is_running():
            if self._preview_worker:
                # Clean up old worker
                thread_manager.shutdown_worker("preview_cache_worker")

            self._preview_worker = PreviewCacheWorker(
                self.images_cache_dir, self.info_cache_dir
            )
            self._preview_worker.start()
            thread_manager.register_worker("preview_cache_worker", self._preview_worker)

        return self._preview_worker

    def get_episode_worker(self) -> EpisodeCacheWorker:
        """Get or create the episode cache worker."""
        if self._episode_worker is None or not self._episode_worker.is_running():
            if self._episode_worker:
                # Clean up old worker
                thread_manager.shutdown_worker("episode_cache_worker")

            self._episode_worker = EpisodeCacheWorker(
                self.images_cache_dir, self.info_cache_dir
            )
            self._episode_worker.start()
            thread_manager.register_worker("episode_cache_worker", self._episode_worker)

        return self._episode_worker

    def get_review_worker(self) -> ReviewCacheWorker:
        """Get or create the review cache worker."""
        if self._review_worker is None or not self._review_worker.is_running():
            if self._review_worker:
                # Clean up old worker
                thread_manager.shutdown_worker("review_cache_worker")

            self._review_worker = ReviewCacheWorker(self.reviews_cache_dir)
            self._review_worker.start()
            thread_manager.register_worker("review_cache_worker", self._review_worker)

        return self._review_worker

    def get_character_worker(self) -> CharacterCacheWorker:
        """Get or create the character cache worker."""
        if self._character_worker is None or not self._character_worker.is_running():
            if self._character_worker:
                # Clean up old worker
                thread_manager.shutdown_worker("character_cache_worker")

            self._character_worker = CharacterCacheWorker(
                self.info_cache_dir, self.images_cache_dir
            )
            self._character_worker.start()
            thread_manager.register_worker(
                "character_cache_worker", self._character_worker
            )

        return self._character_worker

    def get_airing_schedule_worker(self) -> AiringScheduleCacheWorker:
        """Get or create the airing schedule cache worker."""
        if (
            self._airing_schedule_worker is None
            or not self._airing_schedule_worker.is_running()
        ):
            if self._airing_schedule_worker:
                # Clean up old worker
                thread_manager.shutdown_worker("airing_schedule_cache_worker")

            self._airing_schedule_worker = AiringScheduleCacheWorker(
                self.info_cache_dir
            )
            self._airing_schedule_worker.start()
            thread_manager.register_worker(
                "airing_schedule_cache_worker", self._airing_schedule_worker
            )

        return self._airing_schedule_worker

    def shutdown_all(self, wait: bool = True, timeout: Optional[float] = 30.0) -> None:
        """Shutdown all managed workers."""
        thread_manager.shutdown_worker(
            "preview_cache_worker", wait=wait, timeout=timeout
        )
        thread_manager.shutdown_worker(
            "episode_cache_worker", wait=wait, timeout=timeout
        )
        thread_manager.shutdown_worker(
            "review_cache_worker", wait=wait, timeout=timeout
        )
        thread_manager.shutdown_worker(
            "character_cache_worker", wait=wait, timeout=timeout
        )
        thread_manager.shutdown_worker(
            "airing_schedule_cache_worker", wait=wait, timeout=timeout
        )
        self._preview_worker = None
        self._episode_worker = None
        self._review_worker = None
        self._character_worker = None
        self._airing_schedule_worker = None

    def get_status(self) -> dict:
        """Get status of all managed workers."""
        return {
            "preview_worker": self._preview_worker.get_completion_stats()
            if self._preview_worker
            else None,
            "episode_worker": self._episode_worker.get_completion_stats()
            if self._episode_worker
            else None,
            "review_worker": self._review_worker.get_completion_stats()
            if self._review_worker
            else None,
            "character_worker": self._character_worker.get_completion_stats()
            if self._character_worker
            else None,
            "airing_schedule_worker": self._airing_schedule_worker.get_completion_stats()
            if self._airing_schedule_worker
            else None,
        }

    def __enter__(self):
        """Context manager entry - workers are created on demand."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit with automatic cleanup."""
        self.shutdown_all(wait=False, timeout=5.0)
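
# Illustrative usage of the manager above (a minimal sketch, not part of the module;
# it assumes pathlib.Path cache directories plus an existing MediaItem and AppConfig):
#
#     with PreviewWorkerManager(images_dir, info_dir, reviews_dir) as manager:
#         worker = manager.get_episode_worker()
#         worker.cache_episode_previews(["1", "2"], media_item, config)
#     # all registered workers are shut down when the block exits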
150
viu_cli/cli/utils/update.py
Normal file
@@ -0,0 +1,150 @@
import os
import pathlib
import re
import shlex
import shutil
import subprocess
import sys

from httpx import get
from rich import print

from ...core.constants import AUTHOR, GIT_REPO, PROJECT_NAME_LOWER, __version__

API_URL = f"https://api.{GIT_REPO}/repos/{AUTHOR}/{PROJECT_NAME_LOWER}/releases/latest"


def check_for_updates():
    USER_AGENT = f"{PROJECT_NAME_LOWER} user"
    try:
        response = get(
            API_URL,
            headers={
                "User-Agent": USER_AGENT,
                "X-GitHub-Api-Version": "2022-11-28",
                "Accept": "application/vnd.github+json",
            },
        )
    except Exception:
        print("You are not connected to the internet")
        return True, {}

    if response.status_code == 200:
        release_json = response.json()
        remote_tag = list(
            map(int, release_json["tag_name"].replace("v", "").split("."))
        )
        local_tag = list(map(int, __version__.replace("v", "").split(".")))
        if (
            (remote_tag[0] > local_tag[0])
            or (remote_tag[1] > local_tag[1] and remote_tag[0] == local_tag[0])
            or (
                remote_tag[2] > local_tag[2]
                and remote_tag[0] == local_tag[0]
                and remote_tag[1] == local_tag[1]
            )
        ):
            is_latest = False
        else:
            is_latest = True

        return (is_latest, release_json)
    else:
        print("Failed to check for updates")
        print(response.text)
        return (True, {})

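# The nested comparison in check_for_updates is a plain major/minor/patch check.
# For three-part numeric tags it is equivalent to Python's tuple ordering, so a
# more compact sketch (not what the code uses) would be:
#
#     is_latest = tuple(remote_tag) <= tuple(local_tag)
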
def is_git_repo(author, repository):
    # Check if the current directory contains a .git folder
    git_dir = pathlib.Path(".git")
    if not git_dir.exists() or not git_dir.is_dir():
        return False

    # Check if the config file exists
    config_path = git_dir / "config"
    if not config_path.exists():
        return False

    try:
        # Read the .git/config file to find the remote repository URL
        with config_path.open("r") as git_config:
            git_config_content = git_config.read()
    except (FileNotFoundError, PermissionError):
        return False

    # Use regex to find the repository URL in the config file
    repo_name_pattern = r"url\s*=\s*.+/([^/]+/[^/]+)\.git"
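    # For illustration (hypothetical config line, not read from this repository):
    # "url = https://github.com/author/repo.git" would yield
    # match.group(1) == "author/repo" under this pattern.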
    match = re.search(repo_name_pattern, git_config_content)

    # Return True if match found and repository name matches
    return bool(match) and match.group(1) == f"{author}/{repository}"


def update_app(force=False):
    is_latest, release_json = check_for_updates()
    if is_latest and not force:
        print("[green]App is up to date[/]")
        return False, release_json
    tag_name = release_json["tag_name"]

    print("[cyan]Updating app to version %s[/]" % tag_name)
    if os.path.exists("/nix/store") and os.path.exists("/run/current-system"):
        NIX = shutil.which("nix")
        if not NIX:
            print("[red]Cannot find nix, it looks like your system is broken.[/]")
            return False, release_json

        process = subprocess.run(
            [NIX, "profile", "upgrade", PROJECT_NAME_LOWER], check=False
        )
    elif is_git_repo(AUTHOR, PROJECT_NAME_LOWER):
        GIT_EXECUTABLE = shutil.which("git")
        if not GIT_EXECUTABLE:
            print("[red]Cannot find git, please install it.[/]")
            return False, release_json

        args = [
            GIT_EXECUTABLE,
            "pull",
        ]

        print(f"Pulling latest changes from the repository via git: {shlex.join(args)}")

        process = subprocess.run(
            args,
            check=False,
        )

    elif UV := shutil.which("uv"):
        process = subprocess.run(
            [UV, "tool", "upgrade", PROJECT_NAME_LOWER], check=False
        )
    elif PIPX := shutil.which("pipx"):
        process = subprocess.run([PIPX, "upgrade", PROJECT_NAME_LOWER], check=False)
    else:
        PYTHON_EXECUTABLE = sys.executable

        args = [
            PYTHON_EXECUTABLE,
            "-m",
            "pip",
            "install",
            PROJECT_NAME_LOWER,
            "-U",
            "--no-warn-script-location",
        ]
        if sys.prefix == sys.base_prefix:
            # ensure we are NOT in a venv, where the --user flag can cause an error.
            # TODO: Get value of 'include-system-site-packages' in pyvenv.cfg.
            args.append("--user")

        process = subprocess.run(args, check=False)
    if process.returncode == 0:
        print(
            "[green]It's recommended to run the following after updating:\n\tviu config --update (to get the latest config docs)\n\tviu cache --clean (to get rid of any potential issues)[/]",
            file=sys.stderr,
        )
        return True, release_json
    else:
        return False, release_json