Mirror of https://github.com/Benexl/FastAnime.git (synced 2025-12-09 06:10:39 -08:00)
Compare commits (152 commits)

Author, date, and message columns were not captured in this mirror view; the commit SHA1s, in the order listed, are:

14e1f44696, 36b71c0751, 6a5d7a0116, 91efee9065, 69d3d2e032, 29ba77f795, a4950efa02, bbd7931790,
c3ae5f9053, bf06d7ee2c, 41aaf92bae, d38dc3194f, 54233aca79, 6b8dfba57e, 3b008696d5, ece1f77e99,
7b9de8620b, 725754ea1a, 80771f65ea, c8c4e1b2c0, f4958cc0cc, 1f72e0a579, 803c8316a7, 26bc84e2eb,
901d1e87c5, 523766868c, bd9bf24e1c, f27c0b8548, 76c1dcd5ac, 25a46bd242, a70db611f7, 091edb3a9b,
9050dd7787, 393b9e6ed6, 5193df2197, 6ccd96d252, e8387f3db9, 23ebff3f42, 8e803e8ecb, 61fcd39188,
313f8369d7, bee73b3f9a, f647b7419a, 901c4422b5, 08ae8786c3, 64093204ad, 8440ffb5e5, 6e287d320d,
a7b0f21deb, 71b668894b, 8b3a57ed07, b2f9c8349a, 25fe1e5e01, 45ff463f7a, 29ce664e4c, 2217f011af,
5960a7c502, bd0309ee85, 3724f06e33, d20af89fc8, 3872b4c8a8, 9545b893e1, 1519c8be17, 9a619b41f4,
0c3a963cc4, 192818362b, 2d8c1d3569, e37f9213f6, 097db713bc, 106278e386, 44b3663644, 925c30c06e,
7401a1ad8f, 9a0bb65e52, 1d129a5771, 515660b0f6, 9f5c895bf5, 5634214fb8, 66c0ada29d, 02465b4ddb,
5ffd94ac24, d2864df6d0, 2a28e3b9a3, 7b8027a8b3, 2a36152c38, 2048c7b743, 133fd4c1c8, e22120fe99,
44e6220662, 1fea1335c6, 8b664fae36, 19a85511b4, 205299108b, 7670bdd2f3, cd3f7f7fb8, 5be03ed5b8,
6581179336, 2bb674f4a0, 642e77f601, a5e99122f5, 39bd7bed61, 869072633b, cbd788a573, 11fe54b146,
a13bdb1aa0, 627b09a723, aecec5c75b, 49b298ed52, 9a90fa196b, 4ac059e873, 8b39a28e32, 066cc89b74,
db16758d9f, 78e17b2ba0, c5326eb8d9, 4a2d95e75e, 3a92ba69df, cf59f4822e, 1cea6d0179, 4bc1edcc4e,
0c546af99c, 1b49e186c8, fe831f9658, 72f0e2e5b9, 8530da23ef, 1e01b6e54a, aa6ba9018d, 354ba6256a,
eae31420f9, 01432a0fec, c158d3fb99, 877bc043a0, 4968f8030a, c5c7644d0d, ff2a5d635a, 8626d1991c,
75d15a100d, 25d9895c52, f1b796d72b, 3f63198563, 8d61463156, 2daa51d384, 43a0d77e1b, eaedf3268d,
ade0465ea4, 5e82db4ea8, a10e56cb6f, fbd95e1966, d37a441ccf, cbc1ceccbb, 249a207cad, c8a42c4920
.envrc (7 changes)

@@ -1,3 +1,6 @@
-if command -v nix >/dev/null;then
-use flake
+VIU_APP_NAME="viu-dev"
+PATH="$PWD/.venv/bin:$PATH"
+export PATH VIU_APP_NAME
+if command -v nix >/dev/null; then
+  use flake
 fi
.github/workflows/stale.yml (new file, 57 lines, vendored)

@@ -0,0 +1,57 @@
name: Mark Stale Issues and Pull Requests

on:
  schedule:
    # Runs every day at 6:30 UTC
    - cron: "30 6 * * *"
  # Allows you to run this workflow manually from the Actions tab for testing
  workflow_dispatch:

jobs:
  stale:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write

    steps:
      - uses: actions/stale@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}

          stale-issue-message: |
            Greetings @{{author}},

            This bug report is like an ancient scroll detailing a legendary beast. Our small guild of developers is often on many quests at once, so our response times can be slower than a tortoise in a time-stop spell. We deeply appreciate your patience!

            **Seeking Immediate Help or Discussion?**
            Our **[Discord Tavern](https://discord.gg/HBEmAwvbHV)** is the best place to get a quick response from the community for general questions or setup help!

            **Want to Be the Hero?**
            You could try to tame this beast yourself! With modern grimoires (like AI coding assistants) and our **[Contribution Guide](https://github.com/viu-media/Viu/blob/master/CONTRIBUTIONS.md)**, you might just be the hero we're waiting for. We would be thrilled to review your solution!

            ---
            To keep our quest board tidy, we need to know if this creature is still roaming the lands in the latest version of `viu`. If we don't get an update within **7 days**, we'll assume it has vanished and archive the scroll.

            Thanks for being our trusted scout!

          stale-pr-message: |
            Hello @{{author}}, it looks like this powerful contribution has been left in the middle of its training arc! 💪

            Our review dojo is managed by just a few senseis who are sometimes away on long missions, so thank you for your patience as we work through the queue.

            We were excited to see this new technique being developed. Are you still planning to complete its training, or have you embarked on a different quest? If you need a sparring partner (reviewer) or some guidance from a senpai, just let us know!

            To keep our dojo tidy, we'll be archiving unfinished techniques. If we don't hear back within **7 days**, we'll assume it's time to close this PR for now. You can always resume your training and reopen it when you're ready.

            Thank you for your incredible effort!

          # --- Labels and Timing ---
          stale-issue-label: "stale"
          stale-pr-label: "stale"

          # How many days of inactivity before an issue/PR is marked as stale.
          days-before-stale: 14

          # How many days of inactivity to wait before closing a stale issue/PR.
          days-before-close: 7
.github/workflows/test.yml (8 changes, vendored)

@@ -13,7 +13,7 @@ jobs:

     strategy:
       matrix:
-        python-version: ["3.10", "3.11"] # List the Python versions you want to test
+        python-version: ["3.11", "3.12"]

     steps:
       - uses: actions/checkout@v4
@@ -41,5 +41,7 @@ jobs:
       - name: Run type checking
         run: uv run pyright

-      - name: Run tests
-        run: uv run pytest tests
+      # TODO: write tests
+
+      # - name: Run tests
+      #   run: uv run pytest tests
.pre-commit-config.yaml (file name not shown in this view; inferred from content)

@@ -1,33 +1,10 @@
 default_language_version:
   python: python3.12

 repos:
-  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.14.2
     hooks:
-      - id: isort
-        name: isort (python)
-        args: ["--profile", "black"]
-
-  - repo: https://github.com/PyCQA/autoflake
-    rev: v2.2.1
-    hooks:
-      - id: autoflake
-        args:
-          [
-            "--in-place",
-            "--remove-unused-variables",
-            "--remove-all-unused-imports",
-          ]
-  # - repo: https://github.com/astral-sh/ruff-pre-commit
-  #   rev: v0.4.10
-  #   hooks:
-  #     - id: ruff
-  #       args: [--fix]
-
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 24.4.2
-    hooks:
-      - id: black
-        name: black
-        #language_version: python3.10
+      # Run the linter.
+      - id: ruff-check
+        args: [--fix]
+      # Run the formatter.
+      - id: ruff-format
.python-version (new file, 1 line)

@@ -0,0 +1 @@
3.11
.repomixignore (new file, 1 line)

@@ -0,0 +1 @@
**/generated/**/*
CONTRIBUTIONS.md (file name not shown in this view; inferred from content)

@@ -6,7 +6,7 @@ First off, thank you for considering contributing to Viu! We welcome any help, w

 There are many ways to contribute to the Viu project:

-* **Reporting Bugs:** If you find a bug, please create an issue in our [issue tracker](https://github.com/Benexl/Viu/issues).
+* **Reporting Bugs:** If you find a bug, please create an issue in our [issue tracker](https://github.com/viu-media/Viu/issues).
 * **Suggesting Enhancements:** Have an idea for a new feature or an improvement to an existing one? We'd love to hear it.
 * **Writing Code:** Help us fix bugs or implement new features.
 * **Improving Documentation:** Enhance our README, add examples, or clarify our contribution guidelines.
@@ -16,7 +16,7 @@ There are many ways to contribute to the Viu project:

 We follow the standard GitHub Fork & Pull Request workflow.

-1. **Create an Issue:** Before starting work on a new feature or a significant bug fix, please [create an issue](https://github.com/Benexl/Viu/issues/new/choose) to discuss your idea. This allows us to give feedback and prevent duplicate work. For small bugs or documentation typos, you can skip this step.
+1. **Create an Issue:** Before starting work on a new feature or a significant bug fix, please [create an issue](https://github.com/viu-media/Viu/issues/new/choose) to discuss your idea. This allows us to give feedback and prevent duplicate work. For small bugs or documentation typos, you can skip this step.

 2. **Fork the Repository:** Create your own fork of the Viu repository.
README.md (60 changes)

@@ -1,15 +1,3 @@
->[!IMPORTANT]
-> looking for a new project name
->
->if you have any that is not already being used by someone on pypi please share on discord
->
->and let me warn yah am not good at naming things so help before disaster strikes again lol
->
->i dont want it to end up like viu where i added cli lol since viu was taken
->
->
-
-
 <p align="center">
   <h1 align="center">Viu</h1>
 </p>
@@ -22,10 +10,10 @@

 [](https://pypi.org/project/viu-media/)
 [](https://pypi.org/project/viu-media/)
-[](https://github.com/Benexl/Viu/actions)
+[](https://github.com/viu-media/Viu/actions)
 [](https://discord.gg/HBEmAwvbHV)
-[](https://github.com/Benexl/Viu/issues)
-[](https://github.com/Benexl/Viu/blob/master/LICENSE)
+[](https://github.com/viu-media/Viu/issues)
+[](https://github.com/viu-media/Viu/blob/master/LICENSE)

 </div>

@@ -35,6 +23,19 @@
   </a>
 </p>

+[viu-showcase.webm](https://github.com/user-attachments/assets/5da0ec87-7780-4310-9ca2-33fae7cadd5f)
+
+<details>
+<summary>Rofi</summary>
+
+[viu-showcase-rofi.webm](https://github.com/user-attachments/assets/01f197d9-5ac9-45e6-a00b-8e8cd5ab459c)
+
+</details>
+
+> [!IMPORTANT]
+> This project scrapes public-facing websites for its streaming / downloading capabilities and primarily acts as an anilist, jikan and many other media apis tui client. The developer(s) of this application have no affiliation with these content providers. This application hosts zero content and is intended for educational and personal use only. Use at your own risk.
+>
+> [**Read the Full Disclaimer**](DISCLAIMER.md)

 ## Core Features

@@ -84,18 +85,32 @@ uv tool install "viu-media[notifications]" # For desktop notifications
 <summary><b>Platform-Specific and Alternative Installers</b></summary>

 #### Nix / NixOS
+##### Ephemeral / One-Off Run (No Installation)
 ```bash
-nix profile install github:Benexl/viu
+nix run github:viu-media/viu
+```
+##### Imperative Installation
+```bash
+nix profile install github:viu-media/viu
+```
+##### Declarative Installation
+###### in your flake.nix
+```nix
+viu.url = "github:viu-media/viu";
+```
+###### in your system or home-manager packages
+```nix
+inputs.viu.packages.${pkgs.system}.default
 ```

 #### Arch Linux (AUR)
 Use an AUR helper like `yay` or `paru`.
 ```bash
 # Stable version (recommended)
-yay -S viu
+yay -S viu-media

 # Git version (latest commit)
-yay -S viu-git
+yay -S viu-media-git
 ```

 #### Using pipx (for isolated environments)
@@ -114,7 +129,7 @@ uv tool install "viu-media[notifications]" # For desktop notifications

 Requires [Git](https://git-scm.com/), [Python 3.10+](https://www.python.org/), and [uv](https://astral.sh/blog/uv).
 ```bash
-git clone https://github.com/Benexl/Viu.git --depth 1
+git clone https://github.com/viu-media/Viu.git --depth 1
 cd Viu
 uv tool install .
 viu --version
@@ -317,10 +332,3 @@ You can run the background worker as a systemd service for persistence.
 ## Contributing

 Contributions are welcome! Whether it's reporting a bug, proposing a feature, or writing code, your help is appreciated. Please read our [**Contributing Guidelines**](CONTRIBUTIONS.md) to get started.
-
-## Disclaimer
-
-> [!IMPORTANT]
-> This project scrapes public-facing websites. The developer(s) of this application have no affiliation with these content providers. This application hosts zero content and is intended for educational and personal use only. Use at your own risk.
->
-> [**Read the Full Disclaimer**](DISCLAIMER.md)
dev/generate_anilist_media_tags.py (new executable file, 66 lines)

@@ -0,0 +1,66 @@
#!/usr/bin/env -S uv run --script
import json
from collections import defaultdict
from pathlib import Path

import httpx

from viu_media.core.utils.graphql import execute_graphql

DEV_DIR = Path(__file__).resolve().parent
media_tags_type_py = (
    DEV_DIR.parent / "viu_media" / "libs" / "media_api" / "_media_tags.py"
)
media_tags_gql = DEV_DIR / "graphql" / "anilist" / "media_tags.gql"
generated_tags_json = DEV_DIR / "generated" / "anilist" / "tags.json"

media_tags_response = execute_graphql(
    "https://graphql.anilist.co", httpx.Client(), media_tags_gql, {}
)
media_tags_response.raise_for_status()

template = """\
# DO NOT EDIT THIS FILE !!! ( 。 •̀ ᴖ •́ 。)
# ITS AUTOMATICALLY GENERATED BY RUNNING ./dev/generate_anilist_media_tags.py
# FROM THE PROJECT ROOT
# SO RUN THAT INSTEAD TO UPDATE THE FILE WITH THE LATEST MEDIA TAGS :)


from enum import Enum


class MediaTag(Enum):\
"""

# 4 spaces
tab = "    "
tags = defaultdict(list)
for tag in media_tags_response.json()["data"]["MediaTagCollection"]:
    tags[tag["category"]].append(
        {
            "name": tag["name"],
            "description": tag["description"],
            "is_adult": tag["isAdult"],
        }
    )
# save copy of data used to generate the class
json.dump(tags, generated_tags_json.open("w", encoding="utf-8"), indent=2)

for key, value in tags.items():
    template = f"{template}\n{tab}#\n{tab}# {key.upper()}\n{tab}#\n"
    for tag in value:
        name = tag["name"]
        _tag_name = name.replace("-", "_").replace(" ", "_").upper()
        if _tag_name.startswith(("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")):
            _tag_name = f"_{_tag_name}"

        tag_name = ""
        # sanitize invalid characters for attribute names
        for char in _tag_name:
            if char.isidentifier() or char.isdigit():
                tag_name += char

        desc = tag["description"].replace("\n", "")
        is_adult = tag["is_adult"]
        template = f'{template}\n{tab}# {desc} (is_adult: {is_adult})\n{tab}{tag_name} = "{name}"\n'

media_tags_type_py.write_text(template, "utf-8")
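For orientation, the `_media_tags.py` file this script writes follows the template above: a header comment, then one Enum member per tag, each preceded by a comment carrying the tag's description and is_adult flag. A hypothetical fragment of the output (the category and tag shown here are illustrative placeholders, not copied from AniList):

# DO NOT EDIT THIS FILE !!! ( 。 •̀ ᴖ •́ 。)
# ITS AUTOMATICALLY GENERATED BY RUNNING ./dev/generate_anilist_media_tags.py
# FROM THE PROJECT ROOT
# SO RUN THAT INSTEAD TO UPDATE THE FILE WITH THE LATEST MEDIA TAGS :)


from enum import Enum


class MediaTag(Enum):
    #
    # THEME
    #

    # Adventures on the high seas. (is_adult: False)  <- hypothetical entry
    PIRATES = "Pirates"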
(One file diff suppressed because it is too large.)
dev/graphql/anilist/media_tags.gql (new file, 8 lines)

@@ -0,0 +1,8 @@
query {
  MediaTagCollection {
    name
    description
    category
    isAdult
  }
}
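For anyone exploring this query outside the dev script, a minimal stand-alone sketch using httpx directly (AniList's public GraphQL endpoint accepts a POST with a JSON body of {"query": ...}; this bypasses viu_media's execute_graphql helper):

import httpx

QUERY = """
query {
  MediaTagCollection {
    name
    description
    category
    isAdult
  }
}
"""

# Send the query and print each tag grouped by its category.
resp = httpx.post("https://graphql.anilist.co", json={"query": QUERY})
resp.raise_for_status()
for tag in resp.json()["data"]["MediaTagCollection"]:
    print(tag["category"], "/", tag["name"])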
dev/make_release (mode change: Normal file → Executable file, 0 content changes)
flake.lock (8 changes, generated)

@@ -20,17 +20,17 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1753345091,
-        "narHash": "sha256-CdX2Rtvp5I8HGu9swBmYuq+ILwRxpXdJwlpg8jvN4tU=",
+        "lastModified": 1756386758,
+        "narHash": "sha256-1wxxznpW2CKvI9VdniaUnTT2Os6rdRJcRUf65ZK9OtE=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "3ff0e34b1383648053bba8ed03f201d3466f90c9",
+        "rev": "dfb2f12e899db4876308eba6d93455ab7da304cd",
         "type": "github"
       },
       "original": {
         "owner": "nixos",
+        "ref": "nixos-unstable",
         "repo": "nixpkgs",
-        "rev": "3ff0e34b1383648053bba8ed03f201d3466f90c9",
         "type": "github"
       }
     },
flake.nix (15 changes)

@@ -2,8 +2,7 @@
   description = "Viu Project Flake";

   inputs = {
-    # The nixpkgs unstable latest commit breaks the plyer python package
-    nixpkgs.url = "github:nixos/nixpkgs/3ff0e34b1383648053bba8ed03f201d3466f90c9";
+    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
     flake-utils.url = "github:numtide/flake-utils";
   };

@@ -17,21 +16,21 @@
       system:
       let
         pkgs = nixpkgs.legacyPackages.${system};
-        inherit (pkgs) lib python3Packages;
+        inherit (pkgs) lib python312Packages;

         version = "3.1.0";
       in
       {
-        packages.default = python3Packages.buildPythonApplication {
+        packages.default = python312Packages.buildPythonApplication {
           pname = "viu";
           inherit version;
           pyproject = true;

           src = self;

-          build-system = with python3Packages; [ hatchling ];
+          build-system = with python312Packages; [ hatchling ];

-          dependencies = with python3Packages; [
+          dependencies = with python312Packages; [
             click
             inquirerpy
             requests
@@ -69,8 +68,8 @@
           meta = {
             description = "Your browser anime experience from the terminal";
-            homepage = "https://github.com/Benexl/Viu";
-            changelog = "https://github.com/Benexl/Viu/releases/tag/v${version}";
+            homepage = "https://github.com/viu-media/Viu";
+            changelog = "https://github.com/viu-media/Viu/releases/tag/v${version}";
             mainProgram = "viu";
             license = lib.licenses.unlicense;
             maintainers = with lib.maintainers; [ theobori ];
pyproject.toml (file name not shown in this view; inferred from content)

@@ -1,10 +1,10 @@
 [project]
 name = "viu-media"
-version = "3.2.7"
+version = "3.3.1"
 description = "A browser anime site experience from the terminal"
 license = "UNLICENSE"
 readme = "README.md"
-requires-python = ">=3.10"
+requires-python = ">=3.11"
 dependencies = [
     "click>=8.1.7",
     "httpx>=0.28.1",
@@ -49,8 +49,8 @@ torrents = [
 requires = ["hatchling"]
 build-backend = "hatchling.build"

-[tool.uv]
-dev-dependencies = [
+[dependency-groups]
+dev = [
     "pre-commit>=4.0.1",
     "pyinstaller>=6.11.1",
     "pyright>=1.1.384",
pyrightconfig.json (file name not shown in this view; inferred from content)

@@ -1,5 +1,5 @@
 {
   "venvPath": ".",
   "venv": ".venv",
-  "pythonVersion": "3.10"
+  "pythonVersion": "3.12"
 }
tox.ini (2 changes)

@@ -1,7 +1,7 @@
 [tox]
 requires =
     tox>=4
-env_list = lint, pyright, py{310,311}
+env_list = lint, pyright, py{311,312}

 [testenv]
 description = run unit tests
(file name not captured in this view; Python version guard)

@@ -1,6 +1,6 @@
 import sys

-if sys.version_info < (3, 10):
+if sys.version_info < (3, 11):
     raise ImportError(
         "You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by Viu"
     )  # noqa: F541
(file name not captured in this view; ASCII-art banner, leading blank line removed)

@@ -1,4 +1,3 @@
-
 ██╗░░░██╗██╗██╗░░░██╗
 ██║░░░██║██║██║░░░██║
 ╚██╗░██╔╝██║██║░░░██║
(file name not captured in this view; provider title-normalization JSON)

@@ -4,7 +4,8 @@
     "Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
     "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
     "Hazurewaku no \"Joutai Ijou Skill\" de Saikyou ni Natta Ore ga Subete wo Juurin suru made": "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
-    "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season"
+    "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
+    "Hanka×Hanka (2011)": "Hunter × Hunter (2011)"
   },
   "hianime": {
     "My Star": "Oshi no Ko"
@@ -13,5 +14,12 @@
     "Azumanga Daiou The Animation": "Azumanga Daioh",
     "Mairimashita! Iruma-kun 2nd Season": "Mairimashita! Iruma-kun 2",
     "Mairimashita! Iruma-kun 3rd Season": "Mairimashita! Iruma-kun 3"
-  }
+  },
+  "animeunity": {
+    "Kaiju No. 8": "Kaiju No.8",
+    "Naruto Shippuden": "Naruto: Shippuden",
+    "Psycho-Pass: Sinners of the System Case.1 - Crime and Punishment": "PSYCHO-PASS Sinners of the System: Case.1 Crime and Punishment",
+    "Psycho-Pass: Sinners of the System Case.2 - First Guardian": "PSYCHO-PASS Sinners of the System: Case.2 First Guardian",
+    "Psycho-Pass: Sinners of the System Case.3 - On the Other Side of Love and Hate": "PSYCHO-PASS Sinners of the System: Case.3 Beyond the Pale of Vengeance"
+  }
 }
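This JSON maps provider-specific titles to their canonical titles, keyed by provider. A minimal sketch of how such a mapping could be applied (the file path and function name here are hypothetical; the real lookup lives inside viu_media's provider code):

import json

# Hypothetical loader; path is an assumption for illustration.
with open("normalizer.json", encoding="utf-8") as f:
    NORMALIZER = json.load(f)

def normalize_title(provider: str, title: str) -> str:
    """Return the canonical title for a provider-specific one, falling back to the input."""
    return NORMALIZER.get(provider, {}).get(title, title)

# e.g. normalize_title("animeunity", "Naruto Shippuden") -> "Naruto: Shippuden"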
viu_media/assets/scripts/fzf/_ansi_utils.py (new file, 202 lines)

@@ -0,0 +1,202 @@
"""
ANSI utilities for FZF preview scripts.

Lightweight stdlib-only utilities to replace Rich dependency in preview scripts.
Provides RGB color formatting, table rendering, and markdown stripping.
"""

import os
import re
import shutil
import textwrap
import unicodedata


def get_terminal_width() -> int:
    """
    Get terminal width, prioritizing FZF preview environment variables.

    Returns:
        Terminal width in columns
    """
    fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
    if fzf_cols:
        return int(fzf_cols)
    return shutil.get_terminal_size((80, 24)).columns


def display_width(text: str) -> int:
    """
    Calculate the actual display width of text, accounting for wide characters.

    Args:
        text: Text to measure

    Returns:
        Display width in terminal columns
    """
    width = 0
    for char in text:
        # East Asian Width property: 'F' (Fullwidth) and 'W' (Wide) take 2 columns
        if unicodedata.east_asian_width(char) in ("F", "W"):
            width += 2
        else:
            width += 1
    return width


def rgb_color(r: int, g: int, b: int, text: str, bold: bool = False) -> str:
    """
    Format text with RGB color using ANSI escape codes.

    Args:
        r: Red component (0-255)
        g: Green component (0-255)
        b: Blue component (0-255)
        text: Text to colorize
        bold: Whether to make text bold

    Returns:
        ANSI-escaped colored text
    """
    color_code = f"\x1b[38;2;{r};{g};{b}m"
    bold_code = "\x1b[1m" if bold else ""
    reset = "\x1b[0m"
    return f"{color_code}{bold_code}{text}{reset}"


def parse_color(color_csv: str) -> tuple[int, int, int]:
    """
    Parse RGB color from comma-separated string.

    Args:
        color_csv: Color as 'R,G,B' string

    Returns:
        Tuple of (r, g, b) integers
    """
    parts = color_csv.split(",")
    return int(parts[0]), int(parts[1]), int(parts[2])


def print_rule(sep_color: str) -> None:
    """
    Print a horizontal rule line.

    Args:
        sep_color: Color as 'R,G,B' string
    """
    width = get_terminal_width()
    r, g, b = parse_color(sep_color)
    print(rgb_color(r, g, b, "─" * width))


def print_table_row(
    key: str, value: str, header_color: str, key_width: int, value_width: int
) -> None:
    """
    Print a two-column table row with left-aligned key and right-aligned value.

    Args:
        key: Left column text (header/key)
        value: Right column text (value)
        header_color: Color for key as 'R,G,B' string
        key_width: Width for key column
        value_width: Width for value column
    """
    r, g, b = parse_color(header_color)
    key_styled = rgb_color(r, g, b, key, bold=True)

    # Get actual terminal width
    term_width = get_terminal_width()

    # Calculate display widths accounting for wide characters
    key_display_width = display_width(key)

    # Calculate actual value width based on terminal and key display width
    actual_value_width = max(20, term_width - key_display_width - 2)

    # Wrap value if it's too long (use character count, not display width for wrapping)
    value_lines = textwrap.wrap(str(value), width=actual_value_width) if value else [""]

    if not value_lines:
        value_lines = [""]

    # Print first line with properly aligned value
    first_line = value_lines[0]
    first_line_display_width = display_width(first_line)

    # Use manual spacing to right-align based on display width
    spacing = term_width - key_display_width - first_line_display_width - 2
    if spacing > 0:
        print(f"{key_styled} {' ' * spacing}{first_line}")
    else:
        print(f"{key_styled} {first_line}")

    # Print remaining wrapped lines (left-aligned, indented)
    for line in value_lines[1:]:
        print(f"{' ' * (key_display_width + 2)}{line}")


def strip_markdown(text: str) -> str:
    """
    Strip markdown formatting from text.

    Removes:
    - Headers (# ## ###)
    - Bold (**text** or __text__)
    - Italic (*text* or _text_)
    - Links ([text](url))
    - Code blocks (```code```)
    - Inline code (`code`)

    Args:
        text: Markdown-formatted text

    Returns:
        Plain text with markdown removed
    """
    if not text:
        return ""

    # Remove code blocks first
    text = re.sub(r"```[\s\S]*?```", "", text)

    # Remove inline code
    text = re.sub(r"`([^`]+)`", r"\1", text)

    # Remove headers
    text = re.sub(r"^#{1,6}\s+", "", text, flags=re.MULTILINE)

    # Remove bold (** or __)
    text = re.sub(r"\*\*(.+?)\*\*", r"\1", text)
    text = re.sub(r"__(.+?)__", r"\1", text)

    # Remove italic (* or _)
    text = re.sub(r"\*(.+?)\*", r"\1", text)
    text = re.sub(r"_(.+?)_", r"\1", text)

    # Remove links, keep text
    text = re.sub(r"\[(.+?)\]\(.+?\)", r"\1", text)

    # Remove images
    text = re.sub(r"!\[.*?\]\(.+?\)", "", text)

    return text.strip()


def wrap_text(text: str, width: int | None = None) -> str:
    """
    Wrap text to terminal width.

    Args:
        text: Text to wrap
        width: Width to wrap to (defaults to terminal width)

    Returns:
        Wrapped text
    """
    if width is None:
        width = get_terminal_width()

    return textwrap.fill(text, width=width)
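A minimal usage sketch of these utilities (the color values and row data below are made up for illustration):

from _ansi_utils import display_width, print_rule, print_table_row, rgb_color

HEADER = "215,153,33"  # hypothetical 'R,G,B' key color
RULE = "69,133,136"    # hypothetical rule color

# Bold truecolor title, then a rule, then a right-aligned key/value row.
print(rgb_color(215, 153, 33, "Fullmetal Alchemist", bold=True))
print_rule(RULE)
print_table_row("Episodes", "64", HEADER, 15, 60)

# Fullwidth CJK characters count as two terminal columns each,
# which is what keeps the value column aligned for Japanese titles.
assert display_width("鋼の錬金術師") == 12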
(deleted file, 22 lines; file name not captured in this view)

@@ -1,22 +0,0 @@
#!/bin/sh
#
# Viu Airing Schedule Info Script Template
# This script formats and displays airing schedule details in the FZF preview pane.
# Python injects the actual data values into the placeholders.

draw_rule

print_kv "Anime Title" "{ANIME_TITLE}"

draw_rule

print_kv "Total Episodes" "{TOTAL_EPISODES}"
print_kv "Upcoming Episodes" "{UPCOMING_EPISODES}"

draw_rule

echo "{C_KEY}Next Episodes:{RESET}"
echo
echo "{SCHEDULE_TABLE}" | fold -s -w "$WIDTH"

draw_rule
(deleted file, 75 lines; file name not captured in this view)

@@ -1,75 +0,0 @@
#!/bin/sh
#
# FZF Airing Schedule Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}


print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

title={}
hash=$(generate_sha256 "$title")

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "📅 Loading airing schedule..."
    fi
fi
viu_media/assets/scripts/fzf/airing_schedule_info.py (new file, 36 lines)

@@ -0,0 +1,36 @@
import sys

from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{ANIME_TITLE}".center(term_width))

rows = [
    ("Total Episodes", "{TOTAL_EPISODES}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Upcoming Episodes", "{UPCOMING_EPISODES}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{SCHEDULE_TABLE}"""), term_width))
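Note that these preview files are templates rather than directly runnable scripts: tokens like {ANIME_TITLE} are substituted by the caller before execution, as the in-file comments say ("injected by Python using .replace()"). A rough sketch of that injection step (the surrounding flow and values are assumptions for illustration; the real code lives elsewhere in viu_media):

# Hypothetical sketch of filling the {PLACEHOLDER} tokens in a preview template.
template = open("airing_schedule_info.py", encoding="utf-8").read()

script = (
    template
    .replace("{ANIME_TITLE}", "One Piece")
    .replace("{TOTAL_EPISODES}", "1100")
    .replace("{UPCOMING_EPISODES}", "3")
    .replace("{SCHEDULE_TABLE}", "Ep 1101: Sunday ...")
)
# The filled-in script would then be written to the cache and
# invoked by fzf as the preview command.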
(deleted file, 41 lines; file name not captured in this view)

@@ -1,41 +0,0 @@
#!/bin/sh
#
# Viu Character Info Script Template
# This script formats and displays character details in the FZF preview pane.
# Python injects the actual data values into the placeholders.

draw_rule

print_kv "Character Name" "{CHARACTER_NAME}"

if [ -n "{CHARACTER_NATIVE_NAME}" ] && [ "{CHARACTER_NATIVE_NAME}" != "N/A" ]; then
    print_kv "Native Name" "{CHARACTER_NATIVE_NAME}"
fi

draw_rule

if [ -n "{CHARACTER_GENDER}" ] && [ "{CHARACTER_GENDER}" != "Unknown" ]; then
    print_kv "Gender" "{CHARACTER_GENDER}"
fi

if [ -n "{CHARACTER_AGE}" ] && [ "{CHARACTER_AGE}" != "Unknown" ]; then
    print_kv "Age" "{CHARACTER_AGE}"
fi

if [ -n "{CHARACTER_BLOOD_TYPE}" ] && [ "{CHARACTER_BLOOD_TYPE}" != "N/A" ]; then
    print_kv "Blood Type" "{CHARACTER_BLOOD_TYPE}"
fi

if [ -n "{CHARACTER_BIRTHDAY}" ] && [ "{CHARACTER_BIRTHDAY}" != "N/A" ]; then
    print_kv "Birthday" "{CHARACTER_BIRTHDAY}"
fi

if [ -n "{CHARACTER_FAVOURITES}" ] && [ "{CHARACTER_FAVOURITES}" != "0" ]; then
    print_kv "Favorites" "{CHARACTER_FAVOURITES}"
fi

draw_rule

echo "{CHARACTER_DESCRIPTION}" | fold -s -w "$WIDTH"

draw_rule
(deleted file, 130 lines; file name not captured in this view)

@@ -1,130 +0,0 @@
#!/bin/sh
#
# FZF Character Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}

fzf_preview() {
    file=$1

    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi

    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo

    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

title={}
hash=$(generate_sha256 "$title")


# FIXME: Disabled since they cover the text perhaps its aspect ratio related or image format not sure
# if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
#     image_file="{IMAGE_CACHE_DIR}{PATH_SEP}$hash.png"
#     if [ -f "$image_file" ]; then
#         fzf_preview "$image_file"
#         echo # Add a newline for spacing
#     fi
# fi

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "👤 Loading character details..."
    fi
fi
viu_media/assets/scripts/fzf/character_info.py (new file, 47 lines)

@@ -0,0 +1,47 @@
import sys

from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{CHARACTER_NAME}".center(term_width))

rows = [
    ("Native Name", "{CHARACTER_NATIVE_NAME}"),
    ("Gender", "{CHARACTER_GENDER}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Age", "{CHARACTER_AGE}"),
    ("Blood Type", "{CHARACTER_BLOOD_TYPE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Birthday", "{CHARACTER_BIRTHDAY}"),
    ("Favourites", "{CHARACTER_FAVOURITES}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{CHARACTER_DESCRIPTION}"""), term_width))
(deleted file, 315 lines; file name not captured in this view)

@@ -1,315 +0,0 @@
#!/bin/bash
#
# FZF Dynamic Preview Script Template
#
# This script handles previews for dynamic search results by parsing the JSON
# search results file and extracting info for the selected item.
# The placeholders in curly braces are dynamically filled by Python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80}
IMAGE_RENDERER="{IMAGE_RENDERER}"
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
IMAGE_CACHE_PATH="{IMAGE_CACHE_PATH}"
INFO_CACHE_PATH="{INFO_CACHE_PATH}"
PATH_SEP="{PATH_SEP}"

# Color codes injected by Python
C_TITLE="{C_TITLE}"
C_KEY="{C_KEY}"
C_VALUE="{C_VALUE}"
C_RULE="{C_RULE}"
RESET="{RESET}"

# Selected item from fzf
SELECTED_ITEM={}

generate_sha256() {
    local input="$1"
    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}

fzf_preview() {
    file=$1
    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi
    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo
    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}

print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}

draw_rule() {
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

clean_html() {
    echo "$1" | sed 's/<[^>]*>//g' | sed 's/&lt;/</g' | sed 's/&gt;/>/g' | sed 's/&amp;/\&/g' | sed 's/&quot;/"/g' | sed "s/&#39;/'/g"
}

format_date() {
    local date_obj="$1"
    if [ "$date_obj" = "null" ] || [ -z "$date_obj" ]; then
        echo "N/A"
        return
    fi

    # Extract year, month, day from the date object
    if command -v jq >/dev/null 2>&1; then
        year=$(echo "$date_obj" | jq -r '.year // "N/A"' 2>/dev/null || echo "N/A")
        month=$(echo "$date_obj" | jq -r '.month // ""' 2>/dev/null || echo "")
        day=$(echo "$date_obj" | jq -r '.day // ""' 2>/dev/null || echo "")
    else
        year=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('year', 'N/A'))" 2>/dev/null || echo "N/A")
        month=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('month', ''))" 2>/dev/null || echo "")
        day=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('day', ''))" 2>/dev/null || echo "")
    fi

    if [ "$year" = "N/A" ] || [ "$year" = "null" ]; then
        echo "N/A"
    elif [ -n "$month" ] && [ "$month" != "null" ] && [ -n "$day" ] && [ "$day" != "null" ]; then
        echo "$day/$month/$year"
    elif [ -n "$month" ] && [ "$month" != "null" ]; then
        echo "$month/$year"
    else
        echo "$year"
    fi
}

# If no selection or search results file doesn't exist, show placeholder
if [ -z "$SELECTED_ITEM" ] || [ ! -f "$SEARCH_RESULTS_FILE" ]; then
    echo "${C_TITLE}Dynamic Search Preview${RESET}"
    draw_rule
    echo "Type to search for anime..."
    echo "Results will appear here as you type."
    echo
    echo "DEBUG:"
    echo "SELECTED_ITEM='$SELECTED_ITEM'"
    echo "SEARCH_RESULTS_FILE='$SEARCH_RESULTS_FILE'"
    if [ -f "$SEARCH_RESULTS_FILE" ]; then
        echo "Search results file exists"
    else
        echo "Search results file missing"
    fi
    exit 0
fi
# Parse the search results JSON and find the matching item
if command -v jq >/dev/null 2>&1; then
    MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | jq --arg anime_title "$SELECTED_ITEM" '
        .data.Page.media[]? |
        select((.title.english // .title.romaji // .title.native // "Unknown") == $anime_title )
    ' )
else
    # Fallback to Python for JSON parsing
    MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)
    selected_item = '''$SELECTED_ITEM'''

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        sys.exit(1)

    media_list = data['data']['Page']['media']

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'
        display_format = f'{english_title} ({year}) [{status}] - {genres}'
        # Debug output for matching
        print(f"DEBUG: selected_item='{selected_item.strip()}' display_format='{display_format.strip()}'", file=sys.stderr)
        if selected_item.strip() == display_format.strip():
            json.dump(media, sys.stdout, indent=2)
            sys.exit(0)
    print(f"DEBUG: No match found for selected_item='{selected_item.strip()}'", file=sys.stderr)
    sys.exit(1)
except Exception as e:
    print(f'Error: {e}', file=sys.stderr)
    sys.exit(1)
" 2>/dev/null)
fi

# If we couldn't find the media data, show error
if [ $? -ne 0 ] || [ -z "$MEDIA_DATA" ]; then
    echo "${C_TITLE}Preview Error${RESET}"
    draw_rule
    echo "Could not load preview data for:"
    echo "$SELECTED_ITEM"
    echo
    echo "DEBUG INFO:"
    echo "Search results file: $SEARCH_RESULTS_FILE"
    if [ -f "$SEARCH_RESULTS_FILE" ]; then
        echo "File exists, size: $(wc -c < "$SEARCH_RESULTS_FILE") bytes"
        echo "First few lines of search results:"
        head -3 "$SEARCH_RESULTS_FILE" 2>/dev/null || echo "Cannot read file"
    else
        echo "Search results file does not exist"
    fi
    exit 0
fi

# Extract information from the media data
if command -v jq >/dev/null 2>&1; then
    # Use jq for faster extraction
    TITLE=$(echo "$MEDIA_DATA" | jq -r '.title.english // .title.romaji // .title.native // "Unknown"' 2>/dev/null || echo "Unknown")
    STATUS=$(echo "$MEDIA_DATA" | jq -r '.status // "Unknown"' 2>/dev/null || echo "Unknown")
    FORMAT=$(echo "$MEDIA_DATA" | jq -r '.format // "Unknown"' 2>/dev/null || echo "Unknown")
    EPISODES=$(echo "$MEDIA_DATA" | jq -r '.episodes // "Unknown"' 2>/dev/null || echo "Unknown")
    DURATION=$(echo "$MEDIA_DATA" | jq -r 'if .duration then "\(.duration) min" else "Unknown" end' 2>/dev/null || echo "Unknown")
    SCORE=$(echo "$MEDIA_DATA" | jq -r 'if .averageScore then "\(.averageScore)/100" else "N/A" end' 2>/dev/null || echo "N/A")
    FAVOURITES=$(echo "$MEDIA_DATA" | jq -r '.favourites // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
    POPULARITY=$(echo "$MEDIA_DATA" | jq -r '.popularity // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
    GENRES=$(echo "$MEDIA_DATA" | jq -r '(.genres[:5] // []) | join(", ") | if . == "" then "Unknown" else . end' 2>/dev/null || echo "Unknown")
    DESCRIPTION=$(echo "$MEDIA_DATA" | jq -r '.description // "No description available."' 2>/dev/null || echo "No description available.")

    # Get start and end dates as JSON objects
    START_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.startDate' 2>/dev/null || echo "null")
    END_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.endDate' 2>/dev/null || echo "null")

    # Get cover image URL
    COVER_IMAGE=$(echo "$MEDIA_DATA" | jq -r '.coverImage.large // ""' 2>/dev/null || echo "")
else
    # Fallback to Python for extraction
    TITLE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); title=data.get('title',{}); print(title.get('english') or title.get('romaji') or title.get('native', 'Unknown'))" 2>/dev/null || echo "Unknown")
    STATUS=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('status', 'Unknown'))" 2>/dev/null || echo "Unknown")
    FORMAT=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('format', 'Unknown'))" 2>/dev/null || echo "Unknown")
    EPISODES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('episodes', 'Unknown'))" 2>/dev/null || echo "Unknown")
    DURATION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); duration=data.get('duration'); print(f'{duration} min' if duration else 'Unknown')" 2>/dev/null || echo "Unknown")
    SCORE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); score=data.get('averageScore'); print(f'{score}/100' if score else 'N/A')" 2>/dev/null || echo "N/A")
    FAVOURITES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('favourites', 0):,}\")" 2>/dev/null || echo "0")
    POPULARITY=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('popularity', 0):,}\")" 2>/dev/null || echo "0")
    GENRES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(', '.join(data.get('genres', [])[:5]))" 2>/dev/null || echo "Unknown")
    DESCRIPTION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('description', 'No description available.'))" 2>/dev/null || echo "No description available.")

    # Get start and end dates
    START_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('startDate'), sys.stdout)" 2>/dev/null || echo "null")
    END_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('endDate'), sys.stdout)" 2>/dev/null || echo "null")

    # Get cover image URL
    COVER_IMAGE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); cover=data.get('coverImage',{}); print(cover.get('large', ''))" 2>/dev/null || echo "")
fi

# Format the dates
START_DATE=$(format_date "$START_DATE_OBJ")
END_DATE=$(format_date "$END_DATE_OBJ")

# Generate cache hash for this item (using selected item like regular preview)
CACHE_HASH=$(generate_sha256 "$SELECTED_ITEM")

# Try to show image if available
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
    image_file="{IMAGE_CACHE_PATH}{PATH_SEP}${CACHE_HASH}.png"

    # If image not cached and we have a URL, try to download it quickly
    if [ ! -f "$image_file" ] && [ -n "$COVER_IMAGE" ]; then
        if command -v curl >/dev/null 2>&1; then
            # Quick download with timeout
            curl -s -m 3 -L "$COVER_IMAGE" -o "$image_file" 2>/dev/null || rm -f "$image_file" 2>/dev/null
        fi
    fi

    if [ -f "$image_file" ]; then
        fzf_preview "$image_file"
    else
        echo "🖼️ Loading image..."
    fi
    echo
fi

# Display text info if configured
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    draw_rule
    print_kv "Title" "$TITLE"
    draw_rule

    print_kv "Score" "$SCORE"
    print_kv "Favourites" "$FAVOURITES"
    print_kv "Popularity" "$POPULARITY"
    print_kv "Status" "$STATUS"

    draw_rule

    print_kv "Episodes" "$EPISODES"
    print_kv "Duration" "$DURATION"
    print_kv "Format" "$FORMAT"

    draw_rule

    print_kv "Genres" "$GENRES"
    print_kv "Start Date" "$START_DATE"
    print_kv "End Date" "$END_DATE"

    draw_rule

    # Clean and display description
    CLEAN_DESCRIPTION=$(clean_html "$DESCRIPTION")
    echo "$CLEAN_DESCRIPTION" | fold -s -w "$WIDTH"
fi
434
viu_media/assets/scripts/fzf/dynamic_preview.py
Executable file
434
viu_media/assets/scripts/fzf/dynamic_preview.py
Executable file
@@ -0,0 +1,434 @@
#!/usr/bin/env python3
#
# FZF Dynamic Preview Script for Search Results
#
# This script handles previews for dynamic search by reading from the cached
# search results JSON and generating preview content on-the-fly.
# Template variables are injected by Python using .replace()

import json
import os
import shutil
import subprocess
import sys
from hashlib import sha256
from pathlib import Path

# Import the utility functions
from _ansi_utils import (
    get_terminal_width,
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
)


# --- Template Variables (Injected by Python) ---
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
IMAGE_CACHE_DIR = Path("{IMAGE_CACHE_DIR}")
PREVIEW_MODE = "{PREVIEW_MODE}"
IMAGE_RENDERER = "{IMAGE_RENDERER}"
HEADER_COLOR = "{HEADER_COLOR}"
SEPARATOR_COLOR = "{SEPARATOR_COLOR}"
SCALE_UP = "{SCALE_UP}" == "True"

# --- Arguments ---
# sys.argv[1] is the selected anime title from fzf
SELECTED_TITLE = sys.argv[1] if len(sys.argv) > 1 else ""


def format_number(num):
    """Format number with thousand separators."""
    if num is None:
        return "N/A"
    return f"{num:,}"


def format_date(date_obj):
    """Format date object to string."""
    if not date_obj or date_obj == "null":
        return "N/A"

    year = date_obj.get("year")
    month = date_obj.get("month")
    day = date_obj.get("day")

    if not year:
        return "N/A"
    if month and day:
        return f"{day}/{month}/{year}"
    if month:
        return f"{month}/{year}"
    return str(year)


def get_media_from_results(title):
    """Find media item in search results by title."""
    if not SEARCH_RESULTS_FILE.exists():
        return None

    try:
        with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
            data = json.load(f)

        media_list = data.get("data", {}).get("Page", {}).get("media", [])

        for media in media_list:
            title_obj = media.get("title", {})
            eng = title_obj.get("english")
            rom = title_obj.get("romaji")
            nat = title_obj.get("native")

            if title in (eng, rom, nat):
                return media

        return None
    except Exception as e:
        print(f"Error reading search results: {e}", file=sys.stderr)
        return None


def download_image(url: str, output_path: Path) -> bool:
    """Download image from URL and save to file."""
    try:
        # Try using urllib (stdlib)
        from urllib import request

        req = request.Request(url, headers={"User-Agent": "viu/1.0"})
        with request.urlopen(req, timeout=5) as response:
            data = response.read()
        output_path.write_bytes(data)
        return True
    except Exception:
        # Silently fail - preview will just not show image
        return False


def which(cmd):
    """Check if command exists."""
    return shutil.which(cmd)


def get_terminal_dimensions():
    """Get terminal dimensions from FZF environment."""
    fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
    fzf_lines = os.environ.get("FZF_PREVIEW_LINES")

    if fzf_cols and fzf_lines:
        return int(fzf_cols), int(fzf_lines)

    try:
        rows, cols = (
            subprocess.check_output(
                ["stty", "size"], text=True, stderr=subprocess.DEVNULL
            )
            .strip()
            .split()
        )
        return int(cols), int(rows)
    except Exception:
        return 80, 24


def render_kitty(file_path, width, height, scale_up):
    """Render using the Kitty Graphics Protocol (kitten/icat)."""
    cmd = []
    if which("kitten"):
        cmd = ["kitten", "icat"]
    elif which("icat"):
        cmd = ["icat"]
    elif which("kitty"):
        cmd = ["kitty", "+kitten", "icat"]

    if not cmd:
        return False

    args = [
        "--clear",
        "--transfer-mode=memory",
        "--unicode-placeholder",
        "--stdin=no",
        f"--place={width}x{height}@0x0",
    ]

    if scale_up:
        args.append("--scale-up")

    args.append(file_path)

    subprocess.run(cmd + args, stdout=sys.stdout, stderr=sys.stderr)
    return True


def render_sixel(file_path, width, height):
    """Render using Sixel."""
    if which("chafa"):
        subprocess.run(
            ["chafa", "-f", "sixel", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    if which("img2sixel"):
        pixel_width = width * 10
        pixel_height = height * 20
        subprocess.run(
            [
                "img2sixel",
                f"--width={pixel_width}",
                f"--height={pixel_height}",
                file_path,
            ],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    return False


def render_iterm(file_path, width, height):
    """Render using iTerm2 Inline Image Protocol."""
    if which("imgcat"):
        subprocess.run(
            ["imgcat", "-W", str(width), "-H", str(height), file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    if which("chafa"):
        subprocess.run(
            ["chafa", "-f", "iterm", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_timg(file_path, width, height):
    """Render using timg."""
    if which("timg"):
        subprocess.run(
            ["timg", f"-g{width}x{height}", "--upscale", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_chafa_auto(file_path, width, height):
    """Render using Chafa in auto mode."""
    if which("chafa"):
        subprocess.run(
            ["chafa", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def fzf_image_preview(file_path: str):
    """Main dispatch function to choose the best renderer."""
    cols, lines = get_terminal_dimensions()
    width = cols
    height = lines

    # Check explicit configuration
    if IMAGE_RENDERER == "icat" or IMAGE_RENDERER == "system-kitty":
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    elif IMAGE_RENDERER == "sixel" or IMAGE_RENDERER == "system-sixels":
        if render_sixel(file_path, width, height):
            return

    elif IMAGE_RENDERER == "imgcat":
        if render_iterm(file_path, width, height):
            return

    elif IMAGE_RENDERER == "timg":
        if render_timg(file_path, width, height):
            return

    elif IMAGE_RENDERER == "chafa":
        if render_chafa_auto(file_path, width, height):
            return

    # Auto-detection / Fallback
    if os.environ.get("KITTY_WINDOW_ID") or os.environ.get("GHOSTTY_BIN_DIR"):
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    if os.environ.get("TERM_PROGRAM") == "iTerm.app":
        if render_iterm(file_path, width, height):
            return

    # Try standard tools in order of quality/preference
    if render_kitty(file_path, width, height, SCALE_UP):
        return
    if render_sixel(file_path, width, height):
        return
    if render_timg(file_path, width, height):
        return
    if render_chafa_auto(file_path, width, height):
        return

    print("⚠️ No suitable image renderer found (icat, chafa, timg, img2sixel).")


def main():
    if not SELECTED_TITLE:
        print("No selection")
        return

    # Get the media data from cached search results
    media = get_media_from_results(SELECTED_TITLE)

    if not media:
        print("Loading preview...")
        return

    term_width = get_terminal_width()

    # Extract media information
    title_obj = media.get("title", {})
    title = (
        title_obj.get("english")
        or title_obj.get("romaji")
        or title_obj.get("native")
        or "Unknown"
    )

    # Show image if in image or full mode
    if PREVIEW_MODE in ("image", "full"):
        cover_image = media.get("coverImage", {}).get("large", "")
        if cover_image:
            # Ensure image cache directory exists
            IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)

            # Generate hash matching the preview worker pattern
            # Use "anime-" prefix and hash of just the title (no KEY prefix for dynamic search)
            hash_id = f"anime-{sha256(SELECTED_TITLE.encode('utf-8')).hexdigest()}"
            image_file = IMAGE_CACHE_DIR / f"{hash_id}.png"

            # Download image if not cached
            if not image_file.exists():
                download_image(cover_image, image_file)

            # Try to render the image
            if image_file.exists():
                fzf_image_preview(str(image_file))
                print()  # Spacer
            else:
                print("🖼️ Loading image...")
                print()

    # Show text info if in text or full mode
    if PREVIEW_MODE in ("text", "full"):
        # Separator line
        r, g, b = map(int, SEPARATOR_COLOR.split(","))
        separator = f"\x1b[38;2;{r};{g};{b}m" + ("─" * term_width) + "\x1b[0m"
        print(separator, flush=True)

        # Title centered
        print(title.center(term_width))

        # Extract data
        status = media.get("status", "Unknown")
        format_type = media.get("format", "Unknown")
        episodes = media.get("episodes", "?")
        duration = media.get("duration")
        duration_str = f"{duration} min" if duration else "Unknown"

        score = media.get("averageScore")
        score_str = f"{score}/100" if score else "N/A"

        favourites = format_number(media.get("favourites", 0))
        popularity = format_number(media.get("popularity", 0))

        genres = ", ".join(media.get("genres", [])[:5]) or "Unknown"

        start_date = format_date(media.get("startDate"))
        end_date = format_date(media.get("endDate"))

        studios_list = media.get("studios", {}).get("nodes", [])
        studios = ", ".join([s.get("name", "") for s in studios_list[:3]]) or "Unknown"

        synonyms_list = media.get("synonyms", [])
        synonyms = ", ".join(synonyms_list[:3]) or "N/A"

        description = media.get("description", "No description available.")
        description = strip_markdown(description)

        # Print sections matching media_info.py structure
        rows = [
            ("Score", score_str),
            ("Favorites", favourites),
            ("Popularity", popularity),
            ("Status", status),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 0, 0)

        rows = [
            ("Episodes", str(episodes)),
            ("Duration", duration_str),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 0, 0)

        rows = [
            ("Genres", genres),
            ("Format", format_type),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 0, 0)

        rows = [
            ("Start Date", start_date),
            ("End Date", end_date),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 0, 0)

        rows = [
            ("Studios", studios),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 0, 0)

        rows = [
            ("Synonyms", synonyms),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 0, 0)

        print_rule(SEPARATOR_COLOR)
        print(wrap_text(description, term_width))


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print(f"Preview Error: {e}", file=sys.stderr)
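Note on the template mechanism: as the header comment above says, the {NAME} placeholders in this file are filled by plain string replacement before the script is handed to fzf. A minimal sketch of that injection step (render_template and the literal values here are illustrative, not the project's actual API):

    from pathlib import Path

    def render_template(template_path: Path, values: dict) -> str:
        # Fill {NAME}-style placeholders by simple string replacement.
        script = template_path.read_text(encoding="utf-8")
        for name, value in values.items():
            script = script.replace("{" + name + "}", value)
        return script

    # e.g. render the template above with concrete settings
    rendered = render_template(
        Path("dynamic_preview.py"),
        {"PREVIEW_MODE": "full", "IMAGE_RENDERER": "icat", "SCALE_UP": "True"},
    )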
@@ -1,31 +0,0 @@
#!/bin/sh
#
# Episode Preview Info Script Template
# This script formats and displays episode information in the FZF preview pane.
# Some values are injected by python those with '{name}' syntax using .replace()

draw_rule

echo "{TITLE}" | fold -s -w "$WIDTH"

draw_rule

print_kv "Duration" "{DURATION}"
print_kv "Status" "{STATUS}"

draw_rule

print_kv "Total Episodes" "{EPISODES}"
print_kv "Next Episode" "{NEXT_EPISODE}"

draw_rule

print_kv "Progress" "{USER_PROGRESS}"
print_kv "List Status" "{USER_STATUS}"

draw_rule

print_kv "Start Date" "{START_DATE}"
print_kv "End Date" "{END_DATE}"

draw_rule
49
viu_media/assets/scripts/fzf/episode_info.py
Normal file
@@ -0,0 +1,49 @@
import sys
from _ansi_utils import print_rule, print_table_row, get_terminal_width

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{TITLE}".center(term_width))

rows = [
    ("Duration", "{DURATION}"),
    ("Status", "{STATUS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Total Episodes", "{EPISODES}"),
    ("Next Episode", "{NEXT_EPISODE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Progress", "{USER_PROGRESS}"),
    ("List Status", "{USER_STATUS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Start Date", "{START_DATE}"),
    ("End Date", "{END_DATE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
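The print_rule/print_table_row helpers come from the project's _ansi_utils module, which is not part of this diff; the call sites above fix the key column at 15 cells and give the value the remaining term_width - 20. A rough stand-in consistent with those calls (illustrative only; the real implementation may differ):

    def print_rule(rgb: str, width: int = 80) -> None:
        # rgb is an "r,g,b" string; draw a 24-bit-color horizontal rule.
        r, g, b = map(int, rgb.split(","))
        print(f"\x1b[38;2;{r};{g};{b}m" + "─" * width + "\x1b[0m")

    def print_table_row(key, value, rgb, key_width, value_width) -> None:
        # Colored key padded to key_width, value truncated to value_width.
        r, g, b = map(int, rgb.split(","))
        key_col = f"\x1b[38;2;{r};{g};{b}m{key:<{key_width}}\x1b[0m" if key_width else f"{key}:"
        print(f"{key_col} {value[:value_width] if value_width else value}")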
@@ -1,54 +0,0 @@
#!/bin/sh
#
# Viu Preview Info Script Template
# This script formats and displays the textual information in the FZF preview pane.
# Some values are injected by python those with '{name}' syntax using .replace()

draw_rule

print_kv "Title" "{TITLE}"

draw_rule

# Emojis take up double the space
score_multiplier=1
if ! [ "{SCORE}" = "N/A" ]; then
    score_multiplier=2
fi
print_kv "Score" "{SCORE}" $score_multiplier

print_kv "Favourites" "{FAVOURITES}"
print_kv "Popularity" "{POPULARITY}"
print_kv "Status" "{STATUS}"

draw_rule

print_kv "Episodes" "{EPISODES}"
print_kv "Next Episode" "{NEXT_EPISODE}"
print_kv "Duration" "{DURATION}"

draw_rule

print_kv "Genres" "{GENRES}"
print_kv "Format" "{FORMAT}"

draw_rule

print_kv "List Status" "{USER_STATUS}"
print_kv "Progress" "{USER_PROGRESS}"

draw_rule

print_kv "Start Date" "{START_DATE}"
print_kv "End Date" "{END_DATE}"

draw_rule

print_kv "Studios" "{STUDIOS}"
print_kv "Synonymns" "{SYNONYMNS}"
print_kv "Tags" "{TAGS}"

draw_rule

# Synopsis
echo "{SYNOPSIS}" | fold -s -w "$WIDTH"
93
viu_media/assets/scripts/fzf/media_info.py
Normal file
@@ -0,0 +1,93 @@
import sys
from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{TITLE}".center(term_width))

# Define table data
rows = [
    ("Score", "{SCORE}"),
    ("Favorites", "{FAVOURITES}"),
    ("Popularity", "{POPULARITY}"),
    ("Status", "{STATUS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Episodes", "{EPISODES}"),
    ("Duration", "{DURATION}"),
    ("Next Episode", "{NEXT_EPISODE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Genres", "{GENRES}"),
    ("Format", "{FORMAT}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("List Status", "{USER_STATUS}"),
    ("Progress", "{USER_PROGRESS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Start Date", "{START_DATE}"),
    ("End Date", "{END_DATE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Studios", "{STUDIOS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Synonyms", "{SYNONYMNS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Tags", "{TAGS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{SYNOPSIS}"""), term_width))
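One caveat with this template: the synopsis is pasted into a triple-quoted literal by .replace(), which only stays valid Python while the text contains no triple-quote sequence or trailing backslash. A more defensive injection would substitute a pre-escaped literal instead, e.g. (a sketch; the project may already handle this elsewhere):

    import json

    template = 'print(wrap_text(strip_markdown({SYNOPSIS}), term_width))'
    synopsis = 'He said """hi"""'
    # json.dumps produces a quoted, escaped literal that Python can also parse.
    rendered = template.replace("{SYNOPSIS}", json.dumps(synopsis))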
288
viu_media/assets/scripts/fzf/preview.py
Normal file
@@ -0,0 +1,288 @@
#!/usr/bin/env python3
#
# FZF Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace() during runtime.

import os
import shutil
import subprocess
import sys
from hashlib import sha256
from pathlib import Path

# --- Template Variables (Injected by Python) ---
PREVIEW_MODE = "{PREVIEW_MODE}"
IMAGE_CACHE_DIR = Path("{IMAGE_CACHE_DIR}")
INFO_CACHE_DIR = Path("{INFO_CACHE_DIR}")
IMAGE_RENDERER = "{IMAGE_RENDERER}"
HEADER_COLOR = "{HEADER_COLOR}"
SEPARATOR_COLOR = "{SEPARATOR_COLOR}"
PREFIX = "{PREFIX}"
SCALE_UP = "{SCALE_UP}" == "True"

# --- Arguments ---
# sys.argv[1] is usually the raw line from FZF (the anime title/key)
TITLE = sys.argv[1] if len(sys.argv) > 1 else ""
KEY = """{KEY}"""
KEY = KEY + "-" if KEY else KEY

# Generate the hash to find the cached files
hash_id = f"{PREFIX}-{sha256((KEY + TITLE).encode('utf-8')).hexdigest()}"


def get_terminal_dimensions():
    """
    Determine the available dimensions (cols x lines) for the preview window.
    Prioritizes FZF environment variables.
    """
    fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
    fzf_lines = os.environ.get("FZF_PREVIEW_LINES")

    if fzf_cols and fzf_lines:
        return int(fzf_cols), int(fzf_lines)

    # Fallback to stty if FZF vars aren't set (unlikely in preview)
    try:
        rows, cols = (
            subprocess.check_output(
                ["stty", "size"], text=True, stderr=subprocess.DEVNULL
            )
            .strip()
            .split()
        )
        return int(cols), int(rows)
    except Exception:
        return 80, 24


def which(cmd):
    """Alias for shutil.which"""
    return shutil.which(cmd)


def render_kitty(file_path, width, height, scale_up):
    """Render using the Kitty Graphics Protocol (kitten/icat)."""
    # 1. Try 'kitten icat' (Modern)
    # 2. Try 'icat' (Legacy/Alias)
    # 3. Try 'kitty +kitten icat' (Fallback)

    cmd = []
    if which("kitten"):
        cmd = ["kitten", "icat"]
    elif which("icat"):
        cmd = ["icat"]
    elif which("kitty"):
        cmd = ["kitty", "+kitten", "icat"]

    if not cmd:
        return False

    # Build Arguments
    args = [
        "--clear",
        "--transfer-mode=memory",
        "--unicode-placeholder",
        "--stdin=no",
        f"--place={width}x{height}@0x0",
    ]

    if scale_up:
        args.append("--scale-up")

    args.append(file_path)

    subprocess.run(cmd + args, stdout=sys.stdout, stderr=sys.stderr)
    return True


def render_sixel(file_path, width, height):
    """
    Render using Sixel.
    Prioritizes 'chafa' for Sixel as it handles text-cell sizing better than img2sixel.
    """

    # Option A: Chafa (Best for Sixel sizing)
    if which("chafa"):
        # Chafa automatically detects Sixel support if terminal reports it,
        # but we force it here if specifically requested via logic flow.
        subprocess.run(
            ["chafa", "-f", "sixel", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    # Option B: img2sixel (Libsixel)
    # Note: img2sixel uses pixels, not cells. We estimate 1 cell ~= 10px width, 20px height
    if which("img2sixel"):
        pixel_width = width * 10
        pixel_height = height * 20
        subprocess.run(
            [
                "img2sixel",
                f"--width={pixel_width}",
                f"--height={pixel_height}",
                file_path,
            ],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    return False


def render_iterm(file_path, width, height):
    """Render using iTerm2 Inline Image Protocol."""
    if which("imgcat"):
        subprocess.run(
            ["imgcat", "-W", str(width), "-H", str(height), file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    # Chafa also supports iTerm
    if which("chafa"):
        subprocess.run(
            ["chafa", "-f", "iterm", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_timg(file_path, width, height):
    """Render using timg (supports half-blocks, quarter-blocks, sixel, kitty, etc)."""
    if which("timg"):
        subprocess.run(
            ["timg", f"-g{width}x{height}", "--upscale", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_chafa_auto(file_path, width, height):
    """
    Render using Chafa in auto mode.
    It supports Sixel, Kitty, iTerm, and various unicode block modes.
    """
    if which("chafa"):
        subprocess.run(
            ["chafa", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def fzf_image_preview(file_path: str):
    """
    Main dispatch function to choose the best renderer.
    """
    cols, lines = get_terminal_dimensions()

    # Heuristic: Reserve 1 line for prompt/status if needed, though FZF handles this.
    # Some renderers behave better with a tiny bit of padding.
    width = cols
    height = lines

    # --- 1. Check Explicit Configuration ---

    if IMAGE_RENDERER == "icat" or IMAGE_RENDERER == "system-kitty":
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    elif IMAGE_RENDERER == "sixel" or IMAGE_RENDERER == "system-sixels":
        if render_sixel(file_path, width, height):
            return

    elif IMAGE_RENDERER == "imgcat":
        if render_iterm(file_path, width, height):
            return

    elif IMAGE_RENDERER == "timg":
        if render_timg(file_path, width, height):
            return

    elif IMAGE_RENDERER == "chafa":
        if render_chafa_auto(file_path, width, height):
            return

    # --- 2. Auto-Detection / Fallback Strategy ---

    # If explicit failed or set to 'auto'/'system-default', try detecting environment

    # Ghostty / Kitty Environment
    if os.environ.get("KITTY_WINDOW_ID") or os.environ.get("GHOSTTY_BIN_DIR"):
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    # iTerm Environment
    if os.environ.get("TERM_PROGRAM") == "iTerm.app":
        if render_iterm(file_path, width, height):
            return

    # Try standard tools in order of quality/preference
    if render_kitty(file_path, width, height, SCALE_UP):
        return  # Try kitty just in case
    if render_sixel(file_path, width, height):
        return
    if render_timg(file_path, width, height):
        return
    if render_chafa_auto(file_path, width, height):
        return

    print("⚠️ No suitable image renderer found (icat, chafa, timg, img2sixel).")


def fzf_text_info_render():
    """Renders the text-based info via the cached python script."""
    # Get terminal dimensions from FZF environment or fallback
    cols, lines = get_terminal_dimensions()

    # Print simple separator line with proper width
    r, g, b = map(int, SEPARATOR_COLOR.split(","))
    separator = f"\x1b[38;2;{r};{g};{b}m" + ("─" * cols) + "\x1b[0m"
    print(separator, flush=True)

    if PREVIEW_MODE == "text" or PREVIEW_MODE == "full":
        preview_info_path = INFO_CACHE_DIR / f"{hash_id}.py"
        if preview_info_path.exists():
            subprocess.run(
                [sys.executable, str(preview_info_path), HEADER_COLOR, SEPARATOR_COLOR]
            )
        else:
            # Print dim text
            print("\x1b[2m📝 Loading details...\x1b[0m")


def main():
    # 1. Image Preview
    if (PREVIEW_MODE == "image" or PREVIEW_MODE == "full") and (
        PREFIX not in ("character", "review", "airing-schedule")
    ):
        preview_image_path = IMAGE_CACHE_DIR / f"{hash_id}.png"
        if preview_image_path.exists():
            fzf_image_preview(str(preview_image_path))
            print()  # Spacer
        else:
            print("🖼️ Loading image...")

    # 2. Text Info Preview
    fzf_text_info_render()


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print(f"Preview Error: {e}")
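A detail worth calling out: this preview never generates data itself; it only looks up files named {PREFIX}-<sha256(KEY + TITLE)> that a background worker is expected to have written. Cache hits therefore depend on both sides deriving the name identically. A minimal sketch of a shared helper (the function name is hypothetical):

    from hashlib import sha256

    def preview_cache_name(prefix: str, key: str, title: str) -> str:
        # Mirror the template: KEY gets a trailing "-" only when non-empty.
        key = key + "-" if key else key
        return f"{prefix}-{sha256((key + title).encode('utf-8')).hexdigest()}"

    # The worker writes IMAGE_CACHE_DIR / f"{preview_cache_name('anime', '', title)}.png"
    # and the preview above reads the very same path back.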
@@ -1,147 +0,0 @@
#!/bin/sh
#
# FZF Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}

fzf_preview() {
    file=$1

    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi

    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        dim=$((FZF_PREVIEW_COLUMNS - 1))x${FZF_PREVIEW_LINES}
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo

    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}


# --- Helper function for printing a key-value pair, aligning the value to the right ---
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo "$value" | fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}

# --- Draw a rule across the screen ---
# TODO: figure out why this method does not work in fzf
draw_rule() {
    local rule
    # Generate the line of '─' characters, removing the trailing newline `tr` adds.
    rule=$(printf '%*s' "$WIDTH" | tr ' ' '─' | tr -d '\n')
    # Print the rule with colors and a single, clean newline.
    printf "{C_RULE}%s{RESET}\\n" "$rule"
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

# Generate the same cache key that the Python worker uses
# {PREFIX} is used only on episode previews to make sure they are unique
title={}
hash=$(generate_sha256 "{PREFIX}$title")

#
# --- Display image if configured and the cached file exists ---
#
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
    image_file="{IMAGE_CACHE_PATH}{PATH_SEP}$hash.png"
    if [ -f "$image_file" ]; then
        fzf_preview "$image_file"
    else
        echo "🖼️ Loading image..."
    fi
    echo # Add a newline for spacing
fi
# Display text info if configured and the cached file exists
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_PATH}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "📝 Loading details..."
    fi
fi
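Incidentally, the shell generate_sha256 above and the Python workers had to agree on the digest byte-for-byte, which is why the script pipes through echo -n (no trailing newline). The equivalence, as a quick Python check (illustrative):

    from hashlib import sha256

    # Matches: printf '%s' 'Some Title' | sha256sum
    print(sha256("Some Title".encode("utf-8")).hexdigest())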
@@ -1,19 +0,0 @@
#!/bin/sh
#
# Viu Review Info Script Template
# This script formats and displays review details in the FZF preview pane.
# Python injects the actual data values into the placeholders.

draw_rule

print_kv "Review By" "{REVIEWER_NAME}"

draw_rule

print_kv "Summary" "{REVIEW_SUMMARY}"

draw_rule

echo "{REVIEW_BODY}" | fold -s -w "$WIDTH"

draw_rule
@@ -1,75 +0,0 @@
#!/bin/sh
#
# FZF Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}


print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value | fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

title={}
hash=$(generate_sha256 "$title")

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "📝 Loading details..."
    fi
fi
28
viu_media/assets/scripts/fzf/review_info.py
Normal file
@@ -0,0 +1,28 @@
import sys
from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{REVIEWER_NAME}".center(term_width))

rows = [
    ("Summary", "{REVIEW_SUMMARY}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{REVIEW_BODY}"""), term_width))
145
viu_media/assets/scripts/fzf/search.py
Executable file
@@ -0,0 +1,145 @@
#!/usr/bin/env python3
#
# FZF Dynamic Search Script Template
#
# This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {GRAPHQL_ENDPOINT} are dynamically
# filled by Python using .replace() during runtime.

import json
import sys
from pathlib import Path
from urllib import request
from urllib.error import URLError

# --- Template Variables (Injected by Python) ---
GRAPHQL_ENDPOINT = "{GRAPHQL_ENDPOINT}"
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
AUTH_HEADER = "{AUTH_HEADER}"

# The GraphQL query is injected as a properly escaped JSON string
GRAPHQL_QUERY = "{GRAPHQL_QUERY}"

# --- Get Query from fzf ---
# fzf passes the current query as the first argument when using --bind change:reload
QUERY = sys.argv[1] if len(sys.argv) > 1 else ""

# If query is empty, exit with empty results
if not QUERY.strip():
    print("")
    sys.exit(0)


def make_graphql_request(
    endpoint: str, query: str, variables: dict, auth_token: str = ""
) -> dict | None:
    """
    Make a GraphQL request to the specified endpoint.

    Args:
        endpoint: GraphQL API endpoint URL
        query: GraphQL query string
        variables: Query variables as a dictionary
        auth_token: Optional authorization token (Bearer token)

    Returns:
        Response JSON as a dictionary, or None if request fails
    """
    payload = {"query": query, "variables": variables}

    headers = {"Content-Type": "application/json", "User-Agent": "viu/1.0"}

    if auth_token:
        headers["Authorization"] = auth_token

    try:
        req = request.Request(
            endpoint,
            data=json.dumps(payload).encode("utf-8"),
            headers=headers,
            method="POST",
        )

        with request.urlopen(req, timeout=10) as response:
            return json.loads(response.read().decode("utf-8"))
    except (URLError, json.JSONDecodeError, Exception) as e:
        print(f"❌ Request failed: {e}", file=sys.stderr)
        return None


def extract_title(media_item: dict) -> str:
    """
    Extract the best available title from a media item.

    Args:
        media_item: Media object from GraphQL response

    Returns:
        Title string (english > romaji > native > "Unknown")
    """
    title_obj = media_item.get("title", {})
    return (
        title_obj.get("english")
        or title_obj.get("romaji")
        or title_obj.get("native")
        or "Unknown"
    )


def main():
    # Ensure parent directory exists
    SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)

    # Create GraphQL variables
    variables = {
        "query": QUERY,
        "type": "ANIME",
        "per_page": 50,
        "genre_not_in": ["Hentai"],
    }

    # Make the GraphQL request
    response = make_graphql_request(
        GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER
    )

    if response is None:
        print("❌ Search failed")
        sys.exit(1)

    # Save the raw response for later processing by dynamic_search.py
    try:
        with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f:
            json.dump(response, f, ensure_ascii=False, indent=2)
    except IOError as e:
        print(f"❌ Failed to save results: {e}", file=sys.stderr)
        sys.exit(1)

    # Parse and display results
    if "errors" in response:
        print(f"❌ Search error: {response['errors']}")
        sys.exit(1)

    # Navigate the response structure
    data = response.get("data", {})
    page = data.get("Page", {})
    media_list = page.get("media", [])

    if not media_list:
        print("❌ No results found")
        sys.exit(0)

    # Output titles for fzf (one per line)
    for media in media_list:
        title = extract_title(media)
        print(title)


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception as e:
        print(f"❌ Unexpected error: {e}", file=sys.stderr)
        sys.exit(1)
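For context, a script like this is meant to sit behind fzf's change:reload binding, so every keystroke re-runs it with the current query string ({q}); fzf's own filtering is disabled because the API does the searching. A minimal launcher sketch (the script paths here are assumptions):

    import subprocess

    subprocess.run([
        "fzf",
        "--disabled",  # don't filter locally; the reload produces the list
        "--bind", "change:reload(python3 search.py {q})",
        "--preview", "python3 dynamic_preview.py {}",
    ])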
@@ -1,118 +0,0 @@
#!/bin/bash
#
# FZF Dynamic Search Script Template
#
# This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {QUERY} are dynamically filled by Python using .replace()

# Configuration variables (injected by Python)
GRAPHQL_ENDPOINT="{GRAPHQL_ENDPOINT}"
CACHE_DIR="{CACHE_DIR}"
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
AUTH_HEADER="{AUTH_HEADER}"

# Get the current query from fzf
QUERY="{{q}}"

# If query is empty, exit with empty results
if [ -z "$QUERY" ]; then
    echo ""
    exit 0
fi

# Create GraphQL variables
VARIABLES=$(cat <<EOF
{
  "query": "$QUERY",
  "type": "ANIME",
  "per_page": 50,
  "genre_not_in": ["Hentai"]
}
EOF
)

# The GraphQL query is injected here as a properly escaped string
GRAPHQL_QUERY='{GRAPHQL_QUERY}'

# Create the GraphQL request payload
PAYLOAD=$(cat <<EOF
{
  "query": $GRAPHQL_QUERY,
  "variables": $VARIABLES
}
EOF
)

# Make the GraphQL request and save raw results
if [ -n "$AUTH_HEADER" ]; then
    RESPONSE=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -H "Authorization: $AUTH_HEADER" \
        -d "$PAYLOAD" \
        "$GRAPHQL_ENDPOINT")
else
    RESPONSE=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$PAYLOAD" \
        "$GRAPHQL_ENDPOINT")
fi

# Check if the request was successful
if [ $? -ne 0 ] || [ -z "$RESPONSE" ]; then
    echo "❌ Search failed"
    exit 1
fi

# Save the raw response for later processing
echo "$RESPONSE" > "$SEARCH_RESULTS_FILE"

# Parse and display results
if command -v jq >/dev/null 2>&1; then
    # Use jq for faster and more reliable JSON parsing
    echo "$RESPONSE" | jq -r '
    if .errors then
        "❌ Search error: " + (.errors | tostring)
    elif (.data.Page.media // []) | length == 0 then
        "❌ No results found"
    else
        .data.Page.media[] | (.title.english // .title.romaji // .title.native // "Unknown")
    end
    ' 2>/dev/null || echo "❌ Parse error"
else
    # Fallback to Python for JSON parsing
    echo "$RESPONSE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)

    if 'errors' in data:
        print('❌ Search error: ' + str(data['errors']))
        sys.exit(1)

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        print('❌ No results found')
        sys.exit(0)

    media_list = data['data']['Page']['media']

    if not media_list:
        print('❌ No results found')
        sys.exit(0)

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'

        # Format: Title (Year) [Status] - Genres
        print(f'{english_title} ({year}) [{status}] - {genres}')

except Exception as e:
    print(f'❌ Parse error: {str(e)}')
    sys.exit(1)
"
fi
@@ -6,7 +6,7 @@ import click
from click.core import ParameterSource

from ..core.config import AppConfig
-from ..core.constants import PROJECT_NAME, USER_CONFIG, __version__
+from ..core.constants import CLI_NAME, USER_CONFIG, __version__
from .config import ConfigLoader
from .options import options_from_model
from .utils.exception import setup_exceptions_handler
@@ -47,7 +47,7 @@ commands = {
    root="viu_media.cli.commands",
    invoke_without_command=True,
    lazy_subcommands=commands,
-    context_settings=dict(auto_envvar_prefix=PROJECT_NAME),
+    context_settings=dict(auto_envvar_prefix=CLI_NAME),
)
@click.version_option(__version__, "--version")
@click.option("--no-config", is_flag=True, help="Don't load the user config file.")
@@ -108,6 +108,139 @@ def cli(ctx: click.Context, **options: "Unpack[Options]"):
        else loader.load(cli_overrides)
    )
    ctx.obj = config

+    if config.general.welcome_screen:
+        import time
+
+        from ..core.constants import APP_CACHE_DIR, USER_NAME, SUPPORT_PROJECT_URL
+
+        last_welcomed_at_file = APP_CACHE_DIR / ".last_welcome"
+        should_welcome = False
+        if last_welcomed_at_file.exists():
+            try:
+                last_welcomed_at = float(
+                    last_welcomed_at_file.read_text(encoding="utf-8")
+                )
+                # runs once a day
+                if (time.time() - last_welcomed_at) > 24 * 3600:
+                    should_welcome = True
+
+            except Exception as e:
+                logger.warning(f"Failed to read welcome screen timestamp: {e}")
+
+        else:
+            should_welcome = True
+        if should_welcome:
+            last_welcomed_at_file.write_text(str(time.time()), encoding="utf-8")
+
+            from rich.prompt import Confirm
+
+            if Confirm.ask(f"""\
+[green]How are you, {USER_NAME} 🙂?
+If you enjoy the project and would like to support it, you can buy me a coffee at {SUPPORT_PROJECT_URL}.
+Would you like to open the support page? Select yes to continue — otherwise, enjoy your terminal-anime browsing experience 😁.[/]
+You can disable this message by turning off the welcome_screen option in the config. It only appears once every 24 hours.
+"""):
+                from webbrowser import open
+
+                open(SUPPORT_PROJECT_URL)
+
+    if config.general.show_new_release:
+        import time
+
+        from ..core.constants import APP_CACHE_DIR
+
+        last_release_file = APP_CACHE_DIR / ".last_release"
+        should_print_release_notes = False
+        if last_release_file.exists():
+            last_release = last_release_file.read_text(encoding="utf-8")
+            current_version = list(map(int, __version__.replace("v", "").split(".")))
+            last_saved_version = list(
+                map(int, last_release.replace("v", "").split("."))
+            )
+            if (
+                (current_version[0] > last_saved_version[0])
+                or (
+                    current_version[1] > last_saved_version[1]
+                    and current_version[0] == last_saved_version[0]
+                )
+                or (
+                    current_version[2] > last_saved_version[2]
+                    and current_version[0] == last_saved_version[0]
+                    and current_version[1] == last_saved_version[1]
+                )
+            ):
+                should_print_release_notes = True
+
+        else:
+            should_print_release_notes = True
+        if should_print_release_notes:
+            last_release_file.write_text(__version__, encoding="utf-8")
+            from .service.feedback import FeedbackService
+            from .utils.update import check_for_updates, print_release_json, update_app
+            from rich.prompt import Confirm
+
+            feedback = FeedbackService(config)
+            feedback.info("Getting release notes...")
+            is_latest, release_json = check_for_updates()
+            if Confirm.ask(
+                "Would you also like to update your config with the latest options and config notes?"
+            ):
+                import subprocess
+
+                cmd = ["viu", "config", "--update"]
+                print(f"running '{' '.join(cmd)}'...")
+                subprocess.run(cmd)
+
+            if is_latest:
+                print_release_json(release_json)
+            else:
+                print_release_json(release_json)
+                print("It seems there's another update waiting for you as well 😁")
+            click.pause("Press Any Key To Proceed...")
+
+    if config.general.check_for_updates:
+        import time
+
+        from ..core.constants import APP_CACHE_DIR
+
+        last_updated_at_file = APP_CACHE_DIR / ".last_update"
+        should_check_for_update = False
+        if last_updated_at_file.exists():
+            try:
+                last_updated_at_time = float(
+                    last_updated_at_file.read_text(encoding="utf-8")
+                )
+                if (
+                    time.time() - last_updated_at_time
+                ) > config.general.update_check_interval * 3600:
+                    should_check_for_update = True
+
+            except Exception as e:
+                logger.warning(f"Failed to check for update: {e}")
+
+        else:
+            should_check_for_update = True
+        if should_check_for_update:
+            last_updated_at_file.write_text(str(time.time()), encoding="utf-8")
+            from .service.feedback import FeedbackService
+            from .utils.update import check_for_updates, print_release_json, update_app
+
+            feedback = FeedbackService(config)
+            feedback.info("Checking for updates...")
+            is_latest, release_json = check_for_updates()
+            if not is_latest:
+                from ..libs.selectors.selector import create_selector
+
+                selector = create_selector(config)
+                if release_json and selector.confirm(
+                    "There's an update available. Would you like to see the release notes before deciding to update?"
+                ):
+                    print_release_json(release_json)
+                    selector.ask("Enter to continue...")
+                if selector.confirm("Would you like to update?"):
+                    update_app()

    if ctx.invoked_subcommand is None:
        from .commands.anilist import cmd
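Aside: the three-branch major/minor/patch comparison above is exactly lexicographic tuple comparison, so it could be collapsed; a sketch of the equivalent check:

    def _parse(v: str) -> tuple:
        return tuple(map(int, v.replace("v", "").split(".")))

    def is_newer(current: str, last: str) -> bool:
        # Tuple comparison is lexicographic: major, then minor, then patch.
        return _parse(current) > _parse(last)

    assert is_newer("v1.2.10", "v1.2.9")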
@@ -72,7 +72,7 @@ def config(
):
    from ...core.constants import USER_CONFIG
    from ..config.editor import InteractiveConfigEditor
-    from ..config.generate import generate_config_ini_from_app_model
+    from ..config.generate import generate_config_toml_from_app_model

    if path:
        print(USER_CONFIG)
@@ -81,9 +81,9 @@
        from rich.syntax import Syntax

        console = Console()
-        config_ini = generate_config_ini_from_app_model(user_config)
+        config_toml = generate_config_toml_from_app_model(user_config)
        syntax = Syntax(
-            config_ini,
+            config_toml,
            "ini",
            theme=user_config.general.pygment_style,
            line_numbers=True,
@@ -99,12 +99,14 @@
    elif interactive:
        editor = InteractiveConfigEditor(current_config=user_config)
        new_config = editor.run()
-        with open(USER_CONFIG, "w", encoding="utf-8") as file:
-            file.write(generate_config_ini_from_app_model(new_config))
+        USER_CONFIG.write_text(
+            generate_config_toml_from_app_model(new_config), encoding="utf-8"
+        )
        click.echo(f"Configuration saved successfully to {USER_CONFIG}")
    elif update:
-        with open(USER_CONFIG, "w", encoding="utf-8") as file:
-            file.write(generate_config_ini_from_app_model(user_config))
+        USER_CONFIG.write_text(
+            generate_config_toml_from_app_model(user_config), encoding="utf-8"
+        )
        print("update successful")
    else:
        click.edit(filename=str(USER_CONFIG))
@@ -123,9 +125,9 @@ def _generate_desktop_entry():
    from rich.prompt import Confirm

    from ...core.constants import (
+        CLI_NAME,
        ICON_PATH,
        PLATFORM,
-        PROJECT_NAME,
        USER_APPLICATIONS,
        __version__,
    )
@@ -149,7 +151,7 @@ def _generate_desktop_entry():
    desktop_entry = dedent(
        f"""
        [Desktop Entry]
-        Name={PROJECT_NAME.title()}
+        Name={CLI_NAME.title()}
        Type=Application
        version={__version__}
        Path={Path().home()}
@@ -160,7 +162,7 @@ def _generate_desktop_entry():
        Categories=Entertainment
        """
    )
-    desktop_entry_path = USER_APPLICATIONS / f"{PROJECT_NAME}.desktop"
+    desktop_entry_path = USER_APPLICATIONS / f"{CLI_NAME}.desktop"
    if desktop_entry_path.exists():
        if not Confirm.ask(
            f"The file already exists {desktop_entry_path}; would you like to rewrite it",
@@ -113,6 +113,7 @@ def _create_tar_backup(
    api: str,
):
    """Create a tar-based backup."""
+    # TODO: Add support for bz2/xz compression if needed
    mode = "w:gz" if compress else "w"

    with tarfile.open(output_path, mode) as tar:
@@ -5,6 +5,7 @@ Registry restore command - restore registry from backup files
|
||||
import json
|
||||
import shutil
|
||||
import tarfile
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
@@ -25,6 +26,11 @@ from ....service.registry.service import MediaRegistryService
|
||||
is_flag=True,
|
||||
help="Create backup of current registry before restoring",
|
||||
)
|
||||
@click.option(
|
||||
"--backup-current-tar-compression-fmt",
|
||||
type=click.Choice(["gz", "bz2", "xz"], case_sensitive=False),
|
||||
help="The compression format to use for the current registry backup (if enabled)",
|
||||
)
|
||||
@click.option("--verify", is_flag=True, help="Verify backup integrity before restoring")
|
||||
@click.option(
|
||||
"--api",
|
||||
@@ -38,6 +44,7 @@ def restore(
|
||||
backup_file: Path,
|
||||
force: bool,
|
||||
backup_current: bool,
|
||||
backup_current_compression_fmt: str,
|
||||
verify: bool,
|
||||
api: str,
|
||||
):
|
||||
@@ -61,7 +68,7 @@ def restore(
|
||||
"Verification Failed",
|
||||
"Backup file appears to be corrupted or invalid",
|
||||
)
|
||||
raise click.Abort()
|
||||
return
|
||||
feedback.success("Verification", "Backup file integrity verified")
|
||||
|
||||
# Check if current registry exists
|
||||
@@ -77,7 +84,13 @@ def restore(
|
||||
|
||||
# Create backup of current registry if requested
|
||||
if backup_current and registry_exists:
|
||||
_backup_current_registry(registry_service, api, feedback)
|
||||
_backup_current_registry(
|
||||
registry_service,
|
||||
api,
|
||||
feedback,
|
||||
backup_format=backup_format,
|
||||
compression_fmt=backup_current_compression_fmt,
|
||||
)
|
||||
|
||||
# Show restore summary
|
||||
_show_restore_summary(backup_file, backup_format, feedback)
|
||||
@@ -110,7 +123,13 @@ def restore(
|
||||
def _detect_backup_format(backup_file: Path) -> str:
|
||||
"""Detect backup file format."""
|
||||
suffixes = "".join(backup_file.suffixes).lower()
|
||||
if ".tar" in suffixes or ".gz" in suffixes or ".tgz" in suffixes:
|
||||
if (
|
||||
".tar" in suffixes
|
||||
or ".gz" in suffixes
|
||||
or ".tgz" in suffixes
|
||||
or ".bz2" in suffixes
|
||||
or ".xz" in suffixes
|
||||
):
|
||||
return "tar"
|
||||
elif ".zip" in suffixes:
|
||||
return "zip"
|
||||
@@ -122,25 +141,38 @@ def _verify_backup(
) -> bool:
    """Verify backup file integrity."""
    try:
        metadata = {}
        has_registry = has_index = has_metadata = False
        if format_type == "tar":
            with tarfile.open(backup_file, "r:*") as tar:
                names = tar.getnames()
                has_registry = any("registry/" in name for name in names)
                has_index = any("index/" in name for name in names)
                has_metadata = "backup_metadata.json" in names
                for name in names:
                    if name == "registry/":
                        has_registry = True
                        continue
                    if name == "index/":
                        has_index = True
                        continue
                    if name == "backup_metadata.json":
                        has_metadata = True
                        continue
                if has_metadata:
                    metadata_member = tar.getmember("backup_metadata.json")
                    if metadata_file := tar.extractfile(metadata_member):
                        metadata = json.load(metadata_file)
        else:  # zip
            import zipfile

            with zipfile.ZipFile(backup_file, "r") as zip_file:
                names = zip_file.namelist()
                has_registry = any("registry/" in name for name in names)
                has_index = any("index/" in name for name in names)
                has_metadata = "backup_metadata.json" in names
                for name in names:
                    if name == "registry/":
                        has_registry = True
                        continue
                    if name == "index/":
                        has_index = True
                        continue
                    if name == "backup_metadata.json":
                        has_metadata = True
                        continue
                if has_metadata:
                    with zip_file.open("backup_metadata.json") as metadata_file:
                        metadata = json.load(metadata_file)
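The hunk above swaps substring membership for exact entry-name comparison. A small illustration of the false positive the old check permitted (the archive names are made up for the demo):

names = ["backups/old-registry/readme.txt", "backup_metadata.json"]

# Old check: matches because "registry/" is a substring of an unrelated path.
print(any("registry/" in n for n in names))   # True (false positive)

# New check: only an entry literally named "registry/" counts.
print(any(n == "registry/" for n in names))   # False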
@@ -163,27 +195,42 @@ def _verify_backup(

def _check_registry_exists(registry_service: MediaRegistryService) -> bool:
    """Check if a registry already exists."""
    try:
        stats = registry_service.get_registry_stats()
        return stats.get("total_media", 0) > 0
    except Exception:
        return False
    # TODO: Improve this check to be more robust
    return registry_service.media_registry_dir.exists() and any(
        registry_service.media_registry_dir.iterdir()
    )


def _backup_current_registry(
    registry_service: MediaRegistryService, api: str, feedback: FeedbackService
    registry_service: MediaRegistryService,
    api: str,
    feedback: FeedbackService,
    backup_format: str,
    compression_fmt: str,
):
    """Create backup of current registry before restoring."""
    from .backup import _create_tar_backup

    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    backup_path = Path(f"viu_registry_pre_restore_{api}_{timestamp}.tar.gz")
    if backup_format == "tar":
        from .backup import _create_tar_backup

    try:
        _create_tar_backup(registry_service, backup_path, True, False, feedback, api)
        feedback.success("Current Registry Backed Up", f"Saved to {backup_path}")
    except Exception as e:
        feedback.warning("Backup Warning", f"Failed to backup current registry: {e}")
        backup_path = Path(f"viu_registry_pre_restore_{api}_{timestamp}.tar.gz")

        try:
            _create_tar_backup(
                registry_service, backup_path, True, False, feedback, api
            )
            feedback.success("Current Registry Backed Up", f"Saved to {backup_path}")
        except Exception as e:
            feedback.warning(
                "Backup Warning", f"Failed to backup current registry: {e}"
            )
    else:
        from .backup import _create_zip_backup

        backup_path = Path(f"viu_registry_pre_restore_{api}_{timestamp}.zip")

        _create_zip_backup(registry_service, backup_path, True, feedback, api)


def _show_restore_summary(
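The pre-restore backup name embeds the API name and a timestamp; a quick sketch of the resulting file name, with the api value illustrative:

from datetime import datetime
from pathlib import Path

api = "anilist"  # illustrative value
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
print(Path(f"viu_registry_pre_restore_{api}_{timestamp}.tar.gz"))
# e.g. viu_registry_pre_restore_anilist_20250101_120000.tar.gz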
@@ -2,6 +2,7 @@ from typing import TYPE_CHECKING

import click

from ...core.config import AppConfig
from ...core.exceptions import ViuError
from ..utils.completion import anime_titles_shell_complete
@@ -30,7 +31,6 @@ if TYPE_CHECKING:
@click.option(
    "--anime-title",
    "-t",
    required=True,
    shell_complete=anime_titles_shell_complete,
    multiple=True,
    help="Specify which anime to download",
@@ -50,8 +50,13 @@ def search(config: AppConfig, **options: "Unpack[Options]"):
        SearchParams,
    )
    from ...libs.provider.anime.provider import create_provider
    from viu_media.core.utils.normalizer import normalize_title
    from ...libs.selectors.selector import create_selector

    if not options["anime_title"]:
        raw = click.prompt("What are you in the mood for? (comma-separated)")
        options["anime_title"] = [a.strip() for a in raw.split(",") if a.strip()]

    feedback = FeedbackService(config)
    provider = create_provider(config.general.provider)
    selector = create_selector(config)
@@ -64,7 +69,10 @@ def search(config: AppConfig, **options: "Unpack[Options]"):
        with feedback.progress(f"Fetching anime search results for {anime_title}"):
            search_results = provider.search(
                SearchParams(
                    query=anime_title, translation_type=config.stream.translation_type
                    query=normalize_title(
                        anime_title, config.general.provider.value, True
                    ).lower(),
                    translation_type=config.stream.translation_type,
                )
            )
            if not search_results:
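The call pattern for query normalization, as used in the hunk above. The title and provider value here are illustrative; the exact normalization rules live in viu_media.core.utils.normalizer:

from viu_media.core.utils.normalizer import normalize_title

query = normalize_title("Frieren: Beyond Journey's End", "allanime", True).lower()
# The lowered, provider-normalized string is what gets sent in SearchParams.query.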
@@ -173,6 +181,22 @@ def stream_anime(
    if not server_name:
        raise ViuError("Server not selected")
    server = servers[server_name]
    quality = [
        ep_stream.link
        for ep_stream in server.links
        if ep_stream.quality == config.stream.quality
    ]
    if not quality:
        feedback.warning("Preferred quality not found, selecting quality...")
        stream_link = selector.choose(
            "Select Quality", [link.quality for link in server.links]
        )
        if not stream_link:
            raise ViuError("Quality not selected")
        stream_link = next(
            (link.link for link in server.links if link.quality == stream_link), None
        )

    stream_link = server.links[0].link
    if not stream_link:
        raise ViuError(
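A self-contained sketch of the preferred-quality-then-prompt fallback introduced above, with plain named tuples standing in for the provider's link objects:

from typing import NamedTuple, Optional

class Link(NamedTuple):  # stand-in for the provider's link type
    quality: str
    link: str

def pick_link(links: list[Link], preferred: str, chosen: Optional[str]) -> Optional[str]:
    # Prefer the configured quality; otherwise fall back to an explicit choice.
    match = [ln.link for ln in links if ln.quality == preferred]
    if match:
        return match[0]
    return next((ln.link for ln in links if ln.quality == chosen), None)

links = [Link("720", "u720"), Link("1080", "u1080")]
print(pick_link(links, "480", "1080"))  # -> "u1080"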
@@ -1,14 +1,11 @@
"""Update command for Viu CLI."""

import sys
from typing import TYPE_CHECKING

import click
from rich import print
from rich.console import Console
from rich.markdown import Markdown

from ..utils.update import check_for_updates, update_app
from ..utils.update import check_for_updates, print_release_json, update_app

if TYPE_CHECKING:
    from ...core.config import AppConfig
@@ -74,87 +71,46 @@ def update(
        check_only: Whether to only check for updates without updating
        release_notes: Whether to show release notes for the latest version
    """
    try:
        if release_notes:
            print("[cyan]Fetching latest release notes...[/]")
            is_latest, release_json = check_for_updates()
    if release_notes:
        print("[cyan]Fetching latest release notes...[/]")
        is_latest, release_json = check_for_updates()

            if not release_json:
                print(
                    "[yellow]Could not fetch release information. Please check your internet connection.[/]"
                )
                sys.exit(1)

            version = release_json.get("tag_name", "unknown")
            release_name = release_json.get("name", version)
            release_body = release_json.get("body", "No release notes available.")
            published_at = release_json.get("published_at", "unknown")

            console = Console()

            print(f"[bold cyan]Release: {release_name}[/]")
            print(f"[dim]Version: {version}[/]")
            print(f"[dim]Published: {published_at}[/]")
            print()

            # Display release notes as markdown if available
            if release_body.strip():
                markdown = Markdown(release_body)
                console.print(markdown)
            else:
                print("[dim]No release notes available for this version.[/]")

            return

        elif check_only:
            print("[cyan]Checking for updates...[/]")
            is_latest, release_json = check_for_updates()

            if not release_json:
                print(
                    "[yellow]Could not check for updates. Please check your internet connection.[/]"
                )
                sys.exit(1)

            if is_latest:
                print("[green]Viu is up to date![/]")
                print(
                    f"[dim]Current version: {release_json.get('tag_name', 'unknown')}[/]"
                )
            else:
                latest_version = release_json.get("tag_name", "unknown")
                print(f"[yellow]Update available: {latest_version}[/]")
                print("[dim]Run 'viu update' to update[/]")
                sys.exit(1)
        if not release_json:
            print(
                "[yellow]Could not fetch release information. Please check your internet connection.[/]"
            )
        else:
            print("[cyan]Checking for updates and updating if necessary...[/]")
            success, release_json = update_app(force=force)
            print_release_json(release_json)

            if not release_json:
                print(
                    "[red]Could not check for updates. Please check your internet connection.[/]"
                )
                sys.exit(1)
        return

            if success:
                latest_version = release_json.get("tag_name", "unknown")
                print(f"[green]Successfully updated to version {latest_version}![/]")
            else:
                if force:
                    print(
                        "[red]Update failed. Please check the error messages above.[/]"
                    )
                    sys.exit(1)
                # If not forced and update failed, it might be because already up to date
                # The update_app function already prints appropriate messages
    elif check_only:
        print("[cyan]Checking for updates...[/]")
        is_latest, release_json = check_for_updates()

    except KeyboardInterrupt:
        print("\n[yellow]Update cancelled by user.[/]")
        sys.exit(1)
    except Exception as e:
        print(f"[red]An error occurred during update: {e}[/]")
        # Get trace option from parent context
        trace = ctx.parent.params.get("trace", False) if ctx.parent else False
        if trace:
            raise
        sys.exit(1)
        if not release_json:
            print(
                "[yellow]Could not check for updates. Please check your internet connection.[/]"
            )

        if is_latest:
            print("[green]Viu is up to date![/]")
            print(f"[dim]Current version: {release_json.get('tag_name', 'unknown')}[/]")
        else:
            latest_version = release_json.get("tag_name", "unknown")
            print(f"[yellow]Update available: {latest_version}[/]")
            print("[dim]Run 'viu update' to update[/]")
    else:
        print("[cyan]Checking for updates and updating if necessary...[/]")
        success, release_json = update_app(force=force)

        if not release_json:
            print(
                "[red]Could not check for updates. Please check your internet connection.[/]"
            )
        if success:
            latest_version = release_json.get("tag_name", "unknown")
            print(f"[green]Successfully updated to version {latest_version}![/]")
        else:
            if force:
                print("[red]Update failed. Please check the error messages above.[/]")
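From the diff, check_for_updates() returns an (is_latest, release_json) pair where release_json is the GitHub release payload, or None on network failure. A minimal consumer sketch; the absolute import path is inferred from the relative import in the diff and may differ:

from viu_media.cli.utils.update import check_for_updates  # path inferred, not confirmed

is_latest, release_json = check_for_updates()
if release_json is None:
    print("network problem; could not reach the release endpoint")
elif is_latest:
    print(f"up to date at {release_json.get('tag_name', 'unknown')}")
else:
    print(f"update available: {release_json.get('tag_name', 'unknown')}")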
@@ -1,4 +1,4 @@
from .generate import generate_config_ini_from_app_model
from .generate import generate_config_toml_from_app_model
from .loader import ConfigLoader

__all__ = ["ConfigLoader", "generate_config_ini_from_app_model"]
__all__ = ["ConfigLoader", "generate_config_toml_from_app_model"]
@@ -2,6 +2,7 @@ import textwrap
from pathlib import Path
from typing import Any, Literal, get_args, get_origin

# TODO: should we maintain a separate dependency for InquirerPy or write our own simple prompt system?
from InquirerPy import inquirer
from InquirerPy.validator import NumberValidator
from pydantic import BaseModel
@@ -28,7 +29,7 @@ class InteractiveConfigEditor:
            if not isinstance(section_model, BaseModel):
                continue

            if not inquirer.confirm(
            if not inquirer.confirm(  # pyright: ignore[reportPrivateImportUsage]
                message=f"Configure '{section_name.title()}' settings?",
                default=True,
            ).execute():
@@ -83,14 +84,14 @@ class InteractiveConfigEditor:

        # Boolean fields
        if field_type is bool:
            return inquirer.confirm(
            return inquirer.confirm(  # pyright: ignore[reportPrivateImportUsage]
                message=message, default=current_value, long_instruction=help_text
            )

        # Literal (Choice) fields
        if hasattr(field_type, "__origin__") and get_origin(field_type) is Literal:
            choices = list(get_args(field_type))
            return inquirer.select(
            return inquirer.select(  # pyright: ignore[reportPrivateImportUsage]
                message=message,
                choices=choices,
                default=current_value,
@@ -99,7 +100,7 @@ class InteractiveConfigEditor:

        # Numeric fields
        if field_type is int:
            return inquirer.number(
            return inquirer.number(  # pyright: ignore[reportPrivateImportUsage]
                message=message,
                default=int(current_value),
                long_instruction=help_text,
@@ -110,7 +111,7 @@ class InteractiveConfigEditor:
                validate=NumberValidator(),
            )
        if field_type is float:
            return inquirer.number(
            return inquirer.number(  # pyright: ignore[reportPrivateImportUsage]
                message=message,
                default=float(current_value),
                float_allowed=True,
@@ -120,7 +121,7 @@ class InteractiveConfigEditor:
        # Path fields
        if field_type is Path:
            # Use text prompt for paths to allow '~' expansion, as FilePathPrompt can be tricky
            return inquirer.text(
            return inquirer.text(  # pyright: ignore[reportPrivateImportUsage]
                message=message, default=str(current_value), long_instruction=help_text
            )

@@ -128,13 +129,13 @@ class InteractiveConfigEditor:
        if field_type is str:
            # Check for 'examples' to provide choices
            if hasattr(field_info, "examples") and field_info.examples:
                return inquirer.fuzzy(
                return inquirer.fuzzy(  # pyright: ignore[reportPrivateImportUsage]
                    message=message,
                    choices=field_info.examples,
                    default=str(current_value),
                    long_instruction=help_text,
                )
            return inquirer.text(
            return inquirer.text(  # pyright: ignore[reportPrivateImportUsage]
                message=message, default=str(current_value), long_instruction=help_text
            )
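The editor above dispatches on the field's annotation to pick a prompt type. A self-contained sketch of that inspection logic (the returned strings are just labels here, not InquirerPy calls):

from pathlib import Path
from typing import Literal, get_args, get_origin

def prompt_kind(field_type) -> str:
    if field_type is bool:
        return "confirm"
    if get_origin(field_type) is Literal:
        return f"select from {list(get_args(field_type))}"
    if field_type in (int, float):
        return "number"
    if field_type is Path:
        return "text (paths keep '~' expandable)"
    return "text"

print(prompt_kind(Literal["sub", "dub"]))  # select from ['sub', 'dub']
print(prompt_kind(bool))                   # confirm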
@@ -1,4 +1,6 @@
# viu_media/cli/config/generate.py
import itertools
import json
import textwrap
from enum import Enum
from pathlib import Path
@@ -8,7 +10,13 @@ from pydantic.fields import ComputedFieldInfo, FieldInfo
from pydantic_core import PydanticUndefined

from ...core.config import AppConfig
from ...core.constants import APP_ASCII_ART, DISCORD_INVITE, PROJECT_NAME, REPO_HOME
from ...core.constants import (
    APP_ASCII_ART,
    CLI_NAME,
    DISCORD_INVITE,
    REPO_HOME,
    SUPPORT_PROJECT_URL,
)

# The header for the config file.
config_asci = "\n".join(
@@ -20,40 +28,46 @@ CONFIG_HEADER = f"""
{config_asci}
#
# ==============================================================================
# This file was auto-generated from the application's configuration model.
# This is the Viu configuration file. It uses the TOML format.
# You can modify these values to customize the behavior of Viu.
# For path-based options, you can use '~' for your home directory.
# For more information on the available options, please refer to the
# official documentation on GitHub.
# ==============================================================================
""".lstrip()

CONFIG_FOOTER = f"""
# ==============================================================================
#
# HOPE YOU ENJOY {PROJECT_NAME} AND BE SURE TO STAR THE PROJECT ON GITHUB
# HOPE YOU ENJOY {CLI_NAME} AND BE SURE TO STAR THE PROJECT ON GITHUB
# {REPO_HOME}
#
# Also join the discord server
# where the anime tech community lives :)
# {DISCORD_INVITE}
# If you like the project and are able to support it please consider buying me a coffee at {SUPPORT_PROJECT_URL}.
# If you would like to connect with me join the discord server from there you can dm for hackathons, or even to tell me a joke 😂
# Otherwise enjoy your terminal anime browser experience 😁
#
# ==============================================================================
""".lstrip()


def generate_config_ini_from_app_model(app_model: AppConfig) -> str:
    """Generate a configuration file content from a Pydantic model."""
def generate_config_toml_from_app_model(app_model: AppConfig) -> str:
    """Generate a TOML configuration file content from a Pydantic model with comments."""

    config_ini_content = [CONFIG_HEADER]
    config_content_parts = [CONFIG_HEADER]

    for section_name, section_model in app_model:
        section_comment = section_model.model_config.get("title", "")
        section_title = section_model.model_config.get("title", section_name.title())

        config_ini_content.append(f"\n#\n# {section_comment}\n#")
        config_ini_content.append(f"[{section_name}]")
        config_content_parts.append(f"\n#\n# {section_title}\n#")
        config_content_parts.append(f"[{section_name}]")

        for field_name, field_info in itertools.chain(
            section_model.model_fields.items(),
            section_model.model_computed_fields.items(),
        ):
            # --- Generate Comments ---
            description = field_info.description or ""
            if description:
                wrapped_comment = textwrap.fill(
@@ -62,7 +76,7 @@ def generate_config_ini_from_app_model(app_model: AppConfig) -> str:
                    initial_indent="# ",
                    subsequent_indent="# ",
                )
                config_ini_content.append(f"\n{wrapped_comment}")
                config_content_parts.append(f"\n{wrapped_comment}")

            field_type_comment = _get_field_type_comment(field_info)
            if field_type_comment:
@@ -72,35 +86,65 @@ def generate_config_ini_from_app_model(app_model: AppConfig) -> str:
                    initial_indent="# ",
                    subsequent_indent="# ",
                )
                config_ini_content.append(wrapped_comment)
                config_content_parts.append(wrapped_comment)

            if (
                hasattr(field_info, "default")
                and field_info.default != PydanticUndefined
                and field_info.default is not PydanticUndefined
            ):
                default_val = (
                    field_info.default.value
                    if isinstance(field_info.default, Enum)
                    else field_info.default
                )
                wrapped_comment = textwrap.fill(
                    f"Default: {field_info.default.value if isinstance(field_info.default, Enum) else field_info.default}",
                    f"Default: {_format_toml_value(default_val)}",
                    width=78,
                    initial_indent="# ",
                    subsequent_indent="# ",
                )
                config_ini_content.append(wrapped_comment)
                config_content_parts.append(wrapped_comment)

            # --- Generate Key-Value Pair ---
            field_value = getattr(section_model, field_name)
            if isinstance(field_value, bool):
                value_str = str(field_value).lower()
            elif isinstance(field_value, Path):
                value_str = str(field_value)
            elif field_value is None:
                value_str = ""
            elif isinstance(field_value, Enum):
                value_str = field_value.value

            if field_value is None:
                config_content_parts.append(f"# {field_name} =")
            else:
                value_str = str(field_value)
                value_str = _format_toml_value(field_value)
                config_content_parts.append(f"{field_name} = {value_str}")

            config_ini_content.append(f"{field_name} = {value_str}")
    config_content_parts.extend(["\n", CONFIG_FOOTER])
    return "\n".join(config_content_parts)

    config_ini_content.extend(["\n", CONFIG_FOOTER])
    return "\n".join(config_ini_content)
def _format_toml_value(value: Any) -> str:
    """
    Manually formats a Python value into a TOML-compliant string.
    This avoids needing an external TOML writer dependency.
    """
    if isinstance(value, bool):
        return str(value).lower()
    if isinstance(value, (int, float)):
        return str(value)
    if isinstance(value, Enum):
        return f'"{value.value}"'

    # Handle strings and Paths, differentiating between single and multi-line
    if isinstance(value, (str, Path)):
        str_val = str(value)
        if "\n" in str_val:
            # For multi-line strings, use triple quotes.
            # Also, escape any triple quotes that might be in the string itself.
            escaped_val = str_val.replace('"""', '\\"\\"\\"')
            return f'"""\n{escaped_val}"""'
        else:
            # For single-line strings, use double quotes and escape relevant characters.
            escaped_val = str_val.replace("\\", "\\\\").replace('"', '\\"')
            return f'"{escaped_val}"'

    # Fallback for any other types
    return f'"{str(value)}"'
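Given the branches above, these calls would produce the following TOML fragments (illustrative, assuming the function is in scope):

print(_format_toml_value(True))          # true
print(_format_toml_value(42))            # 42
print(_format_toml_value("C:\\anime"))   # "C:\\anime"  (backslashes escaped)
print(_format_toml_value("a\nb"))        # triple-quoted multi-line string, starting on a new line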
def _get_field_type_comment(field_info: FieldInfo | ComputedFieldInfo) -> str:
@@ -111,7 +155,6 @@ def _get_field_type_comment(field_info: FieldInfo | ComputedFieldInfo) -> str:
        else field_info.return_type
    )

    # Handle Literal and Enum types
    possible_values = []
    if field_type is not None:
        if isinstance(field_type, type) and issubclass(field_type, Enum):
@@ -122,9 +165,8 @@ def _get_field_type_comment(field_info: FieldInfo | ComputedFieldInfo) -> str:
            possible_values = list(args)

    if possible_values:
        return f"Possible values: [ {', '.join(map(str, possible_values))} ]"

    # Handle basic types and numeric ranges
        formatted_values = ", ".join(json.dumps(v) for v in possible_values)
        return f"Possible values: [ {formatted_values} ]"
    type_name = _get_type_name(field_type)
    range_info = _get_range_info(field_info)
@@ -1,4 +1,5 @@
import configparser
import logging
import tomllib
from pathlib import Path
from typing import Dict

@@ -9,12 +10,14 @@ from ...core.config import AppConfig
from ...core.constants import USER_CONFIG
from ...core.exceptions import ConfigError

logger = logging.getLogger(__name__)


class ConfigLoader:
    """
    Handles loading the application configuration from an .ini file.
    Handles loading the application configuration from a .toml file.

    It ensures a default configuration exists, reads the .ini file,
    It ensures a default configuration exists, reads the .toml file,
    and uses Pydantic to parse and validate the data into a type-safe
    AppConfig object.
    """
@@ -24,26 +27,19 @@ class ConfigLoader:
        Initializes the loader with the path to the configuration file.

        Args:
            config_path: The path to the user's config.ini file.
            config_path: The path to the user's config.toml file.
        """
        self.config_path = config_path
        self.parser = configparser.ConfigParser(
            interpolation=None,
            # Allow boolean values without a corresponding value (e.g., `enabled` vs `enabled = true`)
            allow_no_value=True,
            # Behave like a dictionary, preserving case sensitivity of keys
            dict_type=dict,
        )

    def _handle_first_run(self) -> AppConfig:
        """Handles the configuration process when no config file is found."""
        """Handles the configuration process when no config.toml file is found."""
        click.echo(
            "[bold yellow]Welcome to Viu![/bold yellow] No configuration file found."
        )
        from InquirerPy import inquirer

        from .editor import InteractiveConfigEditor
        from .generate import generate_config_ini_from_app_model
        from .generate import generate_config_toml_from_app_model

        choice = inquirer.select(  # type: ignore
            message="How would you like to proceed?",
@@ -60,16 +56,17 @@ class ConfigLoader:
        else:
            app_config = AppConfig()

        config_ini_content = generate_config_ini_from_app_model(app_config)
        config_toml_content = generate_config_toml_from_app_model(app_config)
        try:
            self.config_path.parent.mkdir(parents=True, exist_ok=True)
            self.config_path.write_text(config_ini_content, encoding="utf-8")
            self.config_path.write_text(config_toml_content, encoding="utf-8")
            click.echo(
                f"Configuration file created at: [green]{self.config_path}[/green]"
            )
        except Exception as e:
            raise ConfigError(
                f"Could not create configuration file at {self.config_path!s}. Please check permissions. Error: {e}",
                f"Could not create configuration file at {self.config_path!s}. "
                f"Please check permissions. Error: {e}",
            )

        return app_config
@@ -78,32 +75,34 @@ class ConfigLoader:
        """
        Loads the configuration and returns a populated, validated AppConfig object.

        Args:
            update: A dictionary of CLI overrides to apply to the loaded config.

        Returns:
            An instance of AppConfig with values from the user's .ini file.
            An instance of AppConfig with values from the user's .toml file.

        Raises:
            click.ClickException: If the configuration file contains validation errors.
            ConfigError: If the configuration file contains validation or parsing errors.
        """
        if not self.config_path.exists():
            return self._handle_first_run()

        try:
            self.parser.read(self.config_path, encoding="utf-8")
        except configparser.Error as e:
            with self.config_path.open("rb") as f:
                config_dict = tomllib.load(f)
        except tomllib.TOMLDecodeError as e:
            raise ConfigError(
                f"Error parsing configuration file '{self.config_path}':\n{e}"
            )

        # Convert the configparser object into a nested dictionary that mirrors
        # the structure of our AppConfig Pydantic model.
        config_dict = {
            section: dict(self.parser.items(section))
            for section in self.parser.sections()
        }
        # Apply CLI overrides on top of the loaded configuration
        if update:
            for key in config_dict:
                if key in update:
                    config_dict[key].update(update[key])
            for section, values in update.items():
                if section in config_dict:
                    config_dict[section].update(values)
                else:
                    config_dict[section] = values

        try:
            app_config = AppConfig.model_validate(config_dict)
            return app_config
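The new override merge also creates sections that exist only in the CLI overrides, which the old loop (iterating over config_dict) silently skipped. A tiny standalone demo with illustrative dicts:

config_dict = {"stream": {"quality": "720"}}
update = {"stream": {"quality": "1080"}, "general": {"provider": "allanime"}}

for section, values in update.items():
    if section in config_dict:
        config_dict[section].update(values)
    else:
        config_dict[section] = values

print(config_dict)
# {'stream': {'quality': '1080'}, 'general': {'provider': 'allanime'}}
# The old loop skipped 'general' because it only visited sections already present.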
@@ -1,5 +1,6 @@
import json
import logging
import sys

from .....core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from .....libs.media_api.params import MediaSearchParams
@@ -11,9 +12,7 @@ logger = logging.getLogger(__name__)
SEARCH_CACHE_DIR = APP_CACHE_DIR / "search"
SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json"
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.template.sh").read_text(
    encoding="utf-8"
)
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(encoding="utf-8")


@session.menu
@@ -29,8 +28,8 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
    from .....libs.media_api.anilist import gql

    search_query = gql.SEARCH_MEDIA.read_text(encoding="utf-8")
    # Properly escape the GraphQL query for JSON
    search_query_escaped = json.dumps(search_query)
    # Escape the GraphQL query as a JSON string literal for the Python script
    search_query_json = json.dumps(search_query).replace('"', "")

    # Prepare the search script
    auth_header = ""
@@ -42,8 +41,7 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:

    replacements = {
        "GRAPHQL_ENDPOINT": "https://graphql.anilist.co",
        "GRAPHQL_QUERY": search_query_escaped,
        "CACHE_DIR": str(SEARCH_CACHE_DIR),
        "GRAPHQL_QUERY": search_query_json,
        "SEARCH_RESULTS_FILE": str(SEARCH_RESULTS_FILE),
        "AUTH_HEADER": auth_header,
    }
@@ -51,6 +49,14 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
    for key, value in replacements.items():
        search_command = search_command.replace(f"{{{key}}}", str(value))

    # Write the filled template to a cache file
    search_script_file = SEARCH_CACHE_DIR / "search-script.py"
    search_script_file.write_text(search_command, encoding="utf-8")

    # Make the search script executable by calling it with the current interpreter.
    # fzf will pass the query as {q}, which becomes the first argument.
    search_command_final = f"{sys.executable} {search_script_file} {{q}}"

    try:
        # Prepare preview functionality
        preview_command = None
@@ -62,13 +68,13 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:

            choice = ctx.selector.search(
                prompt="Search Anime",
                search_command=search_command,
                search_command=search_command_final,
                preview=preview_command,
            )
        else:
            choice = ctx.selector.search(
                prompt="Search Anime",
                search_command=search_command,
                search_command=search_command_final,
            )
    except NotImplementedError:
        feedback.error("Dynamic search is not supported by your current selector")
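The template mechanism above is plain string substitution of {KEY} placeholders, with fzf's own {q} left intact for query passthrough. A minimal demo:

template = "endpoint = {GRAPHQL_ENDPOINT}; query = {q}"
replacements = {"GRAPHQL_ENDPOINT": "https://graphql.anilist.co"}

for key, value in replacements.items():
    template = template.replace(f"{{{key}}}", str(value))

print(template)
# endpoint = https://graphql.anilist.co; query = {q}
# {q} is untouched because it is not in replacements; fzf expands it at runtime.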
@@ -308,6 +308,8 @@ def _change_provider(ctx: Context, state: State) -> MenuAction:
            "Select Provider", [provider.value for provider in ProviderName]
        )
        ctx.config.general.provider = ProviderName(new_provider)
        # force a reset of the provider
        ctx._provider = None
        return InternalDirective.RELOAD

    return action

@@ -249,7 +249,8 @@ def _change_quality(ctx: Context, state: State) -> MenuAction:
            return InternalDirective.BACK

        new_quality = selector.choose(
            "Select a different server:", list(["360", "480", "720", "1080"])
            "Select a different quality:",
            [link.quality for link in state.provider.server.links],
        )
        if new_quality:
            ctx.config.stream.quality = new_quality  # type:ignore

@@ -28,7 +28,9 @@ def provider_search(ctx: Context, state: State) -> State | InternalDirective:

    provider_search_results = provider.search(
        SearchParams(
            query=normalize_title(media_title, config.general.provider.value, True),
            query=normalize_title(
                media_title, config.general.provider.value, True
            ).lower(),
            translation_type=config.stream.translation_type,
        )
    )
@@ -66,13 +66,13 @@ class MediaApiState(StateModel):

    @property
    def search_result(self) -> dict[int, MediaItem]:
        if not self.search_result_:
        if self.search_result_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.search_result_

    @property
    def search_params(self) -> Union[MediaSearchParams, UserMediaListSearchParams]:
        if not self.search_params_:
        if self.search_params_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.search_params_

@@ -84,7 +84,7 @@ class MediaApiState(StateModel):

    @property
    def media_id(self) -> int:
        if not self.media_id_:
        if self.media_id_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.media_id_

@@ -105,13 +105,13 @@ class ProviderState(StateModel):

    @property
    def search_results(self) -> SearchResults:
        if not self.search_results_:
        if self.search_results_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.search_results_

    @property
    def anime(self) -> Anime:
        if not self.anime_:
        if self.anime_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.anime_

@@ -123,13 +123,13 @@ class ProviderState(StateModel):

    @property
    def servers(self) -> Dict[str, Server]:
        if not self.servers_:
        if self.servers_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.servers_

    @property
    def server_name(self) -> str:
        if not self.server_name_:
        if self.server_name_ is None:
            raise RuntimeError("Malformed state, please report")
        return self.server_name_
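Why `is None` matters in these guards: legitimate empty values (an empty dict, 0, an empty string) are falsy and would wrongly trip the old truthiness check. A quick demonstration:

search_result_ = {}   # a valid "no results" payload
media_id_ = 0         # a valid (if unusual) identifier

print(not search_result_)        # True  -> old check raises on valid state
print(search_result_ is None)    # False -> new check passes it through
print(not media_id_)             # True  -> same pitfall for integer 0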
@@ -296,8 +296,7 @@ class DownloadService:
                    message=message,
                    app_name="Viu",
                    app_icon=app_icon,
                    timeout=self.app_config.general.desktop_notification_duration
                    * 60,
                    timeout=self.app_config.general.desktop_notification_duration,
                )
            except:  # noqa: E722
                pass
@@ -318,7 +317,7 @@ class DownloadService:
                    message=message,
                    app_name="Viu",
                    app_icon=app_icon,
                    timeout=self.app_config.general.desktop_notification_duration * 60,
                    timeout=self.app_config.general.desktop_notification_duration,
                )
            except:  # noqa: E722
                pass
@@ -34,14 +34,14 @@ class FeedbackService:
        try:
            from plyer import notification

            from ....core.constants import ICON_PATH, PROJECT_NAME
            from ....core.constants import CLI_NAME, ICON_PATH

            notification.notify(  # type: ignore
                title=f"{PROJECT_NAME} notification".title(),
                title=f"{CLI_NAME} notification".title(),
                message=message,
                app_name=PROJECT_NAME,
                app_name=CLI_NAME,
                app_icon=str(ICON_PATH),
                timeout=self.app_config.general.desktop_notification_duration * 60,
                timeout=self.app_config.general.desktop_notification_duration,
            )
            return
        except:  # noqa: E722
@@ -60,14 +60,14 @@ class FeedbackService:
        try:
            from plyer import notification

            from ....core.constants import ICON_PATH, PROJECT_NAME
            from ....core.constants import CLI_NAME, ICON_PATH

            notification.notify(  # type: ignore
                title=f"{PROJECT_NAME} notification".title(),
                title=f"{CLI_NAME} notification".title(),
                message=message,
                app_name=PROJECT_NAME,
                app_name=CLI_NAME,
                app_icon=str(ICON_PATH),
                timeout=self.app_config.general.desktop_notification_duration * 60,
                timeout=self.app_config.general.desktop_notification_duration,
            )
            return
        except:  # noqa: E722
@@ -87,14 +87,14 @@ class FeedbackService:
        try:
            from plyer import notification

            from ....core.constants import ICON_PATH, PROJECT_NAME
            from ....core.constants import CLI_NAME, ICON_PATH

            notification.notify(  # type: ignore
                title=f"{PROJECT_NAME} notification".title(),
                title=f"{CLI_NAME} notification".title(),
                message=message,
                app_name=PROJECT_NAME,
                app_name=CLI_NAME,
                app_icon=str(ICON_PATH),
                timeout=self.app_config.general.desktop_notification_duration * 60,
                timeout=self.app_config.general.desktop_notification_duration,
            )
            return
        except:  # noqa: E722
@@ -113,14 +113,14 @@ class FeedbackService:
        try:
            from plyer import notification

            from ....core.constants import ICON_PATH, PROJECT_NAME
            from ....core.constants import CLI_NAME, ICON_PATH

            notification.notify(  # type: ignore
                title=f"{PROJECT_NAME} notification".title(),
                title=f"{CLI_NAME} notification".title(),
                message=message,
                app_name=PROJECT_NAME,
                app_name=CLI_NAME,
                app_icon=str(ICON_PATH),
                timeout=self.app_config.general.desktop_notification_duration * 60,
                timeout=self.app_config.general.desktop_notification_duration,
            )
            return
        except:  # noqa: E722
@@ -169,14 +169,14 @@ class FeedbackService:
        try:
            from plyer import notification

            from ....core.constants import ICON_PATH, PROJECT_NAME
            from ....core.constants import CLI_NAME, ICON_PATH

            notification.notify(  # type: ignore
                title=f"{PROJECT_NAME} notification".title(),
                title=f"{CLI_NAME} notification".title(),
                message="No current way to display info in rofi, use fzf and the terminal instead",
                app_name=PROJECT_NAME,
                app_name=CLI_NAME,
                app_icon=str(ICON_PATH),
                timeout=self.app_config.general.desktop_notification_duration * 60,
                timeout=self.app_config.general.desktop_notification_duration,
            )
            return
        except:  # noqa: E722
@@ -101,7 +101,7 @@ class NotificationService:
                message=message,
                app_name="Viu",
                app_icon=app_icon,  # plyer supports file paths or URLs depending on platform
                timeout=self.app_config.general.desktop_notification_duration * 60,
                timeout=self.app_config.general.desktop_notification_duration,
            )
            logger.info(f"Displayed notification: {message}")
            self._mark_seen(
@@ -3,6 +3,8 @@ import shutil
import subprocess
from pathlib import Path
from typing import Optional
from viu_media.core.exceptions import DependencyNotFoundError
import importlib.util

import click
import httpx
@@ -43,67 +45,74 @@ def resize_image_from_url(
    """
    from io import BytesIO

    from PIL import Image
    if importlib.util.find_spec("PIL"):
        from PIL import Image  # pyright: ignore[reportMissingImports]

    if not return_bytes and output_path is None:
        raise ValueError("output_path must be provided if return_bytes is False.")
        if not return_bytes and output_path is None:
            raise ValueError("output_path must be provided if return_bytes is False.")

    try:
        # Use the provided synchronous client
        response = client.get(url)
        response.raise_for_status()  # Raise an exception for bad status codes
        try:
            # Use the provided synchronous client
            response = client.get(url)
            response.raise_for_status()  # Raise an exception for bad status codes

        image_bytes = response.content
        image_stream = BytesIO(image_bytes)
        img = Image.open(image_stream)
            image_bytes = response.content
            image_stream = BytesIO(image_bytes)
            img = Image.open(image_stream)

    if maintain_aspect_ratio:
        img_copy = img.copy()
        img_copy.thumbnail((new_width, new_height), Image.Resampling.LANCZOS)
        resized_img = img_copy
    else:
        resized_img = img.resize((new_width, new_height), Image.Resampling.LANCZOS)

    if return_bytes:
        # Determine the output format. Default to JPEG if original is unknown or problematic.
        # Handle RGBA to RGB conversion for JPEG output.
        output_format = (
            img.format if img.format in ["JPEG", "PNG", "WEBP"] else "JPEG"
        )
        if output_format == "JPEG":
            if resized_img.mode in ("RGBA", "P"):
                resized_img = resized_img.convert("RGB")

        byte_arr = BytesIO()
        resized_img.save(byte_arr, format=output_format)
        logger.info(
            f"Image from {url} resized to {resized_img.width}x{resized_img.height} and returned as bytes ({output_format} format)."
        )
        return byte_arr.getvalue()
    else:
        # Ensure the directory exists before saving
        if output_path:
            output_path.parent.mkdir(parents=True, exist_ok=True)
        resized_img.save(output_path)
        logger.info(
            f"Image from {url} resized to {resized_img.width}x{resized_img.height} and saved as '{output_path}'"
            if maintain_aspect_ratio:
                img_copy = img.copy()
                img_copy.thumbnail((new_width, new_height), Image.Resampling.LANCZOS)
                resized_img = img_copy
            else:
                resized_img = img.resize(
                    (new_width, new_height), Image.Resampling.LANCZOS
                )
        return None

    except httpx.RequestError as e:
        logger.error(f"An error occurred while requesting {url}: {e}")
        return None
    except httpx.HTTPStatusError as e:
        logger.error(
            f"HTTP error occurred: {e.response.status_code} - {e.response.text}"
            if return_bytes:
                # Determine the output format. Default to JPEG if original is unknown or problematic.
                # Handle RGBA to RGB conversion for JPEG output.
                output_format = (
                    img.format if img.format in ["JPEG", "PNG", "WEBP"] else "JPEG"
                )
                if output_format == "JPEG":
                    if resized_img.mode in ("RGBA", "P"):
                        resized_img = resized_img.convert("RGB")

                byte_arr = BytesIO()
                resized_img.save(byte_arr, format=output_format)
                logger.info(
                    f"Image from {url} resized to {resized_img.width}x{resized_img.height} and returned as bytes ({output_format} format)."
                )
                return byte_arr.getvalue()
            else:
                # Ensure the directory exists before saving
                if output_path:
                    output_path.parent.mkdir(parents=True, exist_ok=True)
                resized_img.save(output_path)
                logger.info(
                    f"Image from {url} resized to {resized_img.width}x{resized_img.height} and saved as '{output_path}'"
                )
                return None

        except httpx.RequestError as e:
            logger.error(f"An error occurred while requesting {url}: {e}")
            return None
        except httpx.HTTPStatusError as e:
            logger.error(
                f"HTTP error occurred: {e.response.status_code} - {e.response.text}"
            )
            return None
        except ValueError as e:
            logger.error(f"Configuration error: {e}")
            return None
        except Exception as e:
            logger.error(f"An unexpected error occurred: {e}")
            return None
    else:
        raise DependencyNotFoundError(
            "Pillow library is required for image processing. Please install it via 'uv pip install Pillow'."
        )
        return None
    except ValueError as e:
        logger.error(f"Configuration error: {e}")
        return None
    except Exception as e:
        logger.error(f"An unexpected error occurred: {e}")
        return None
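The diff gates Pillow usage behind importlib.util.find_spec instead of importing it unconditionally. A compact version of the pattern; the function and RuntimeError below are illustrative stand-ins for the repo's DependencyNotFoundError path:

import importlib.util

def thumbnail_or_raise(path: str) -> None:
    if importlib.util.find_spec("PIL") is None:
        # Mirrors the diff's DependencyNotFoundError path; RuntimeError keeps
        # this sketch self-contained.
        raise RuntimeError("Pillow is required: pip install Pillow")
    from PIL import Image  # imported only once we know it resolves

    with Image.open(path) as img:
        img.thumbnail((30, 30))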
def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str]:
@@ -123,17 +132,12 @@ def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str
    If capture is False, prints directly to the terminal and returns None.
    Returns None on any failure.
    """
    # --- Common subprocess arguments ---
    subprocess_kwargs = {
        "check": False,  # We will handle errors manually
        "capture_output": capture,
        "text": capture,  # Decode stdout/stderr as text if capturing
    }

    # --- Try icat (Kitty terminal) first ---
    if icat_executable := shutil.which("icat"):
        process = subprocess.run(
            [icat_executable, "--align", "left", url], **subprocess_kwargs
            [icat_executable, "--align", "left", url],
            capture_output=capture,
            text=capture,
        )
        if process.returncode == 0:
            return process.stdout if capture else None
@@ -148,11 +152,11 @@ def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str
            response.raise_for_status()
            img_bytes = response.content

            # Add stdin input to the subprocess arguments
            subprocess_kwargs["input"] = img_bytes

            process = subprocess.run(
                [chafa_executable, f"--size={size}", "-"], **subprocess_kwargs
                [chafa_executable, f"--size={size}", "-"],
                capture_output=capture,
                text=capture,
                input=img_bytes,
            )
            if process.returncode == 0:
                return process.stdout if capture else None
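render() probes terminal image renderers in order: kitty's icat first (which is handed the URL directly, as in the diff), then chafa fed the downloaded bytes on stdin. A stripped-down version of that probe-and-fallback, with the tool list as the only assumption:

import shutil
import subprocess

import httpx

def render_image(url: str, size: str = "30x30") -> bool:
    # Prefer kitty's icat, which can fetch the URL itself.
    if icat := shutil.which("icat"):
        if subprocess.run([icat, "--align", "left", url]).returncode == 0:
            return True
    # Fall back to chafa, feeding it the downloaded bytes on stdin.
    if chafa := shutil.which("chafa"):
        img_bytes = httpx.get(url).content
        return subprocess.run([chafa, f"--size={size}", "-"], input=img_bytes).returncode == 0
    return False  # no renderer found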
@@ -1,13 +1,15 @@
import logging
import os
import re
from hashlib import sha256
import sys
from typing import Dict, List, Optional

import httpx

from viu_media.core.utils import formatter

from ...core.config import AppConfig
from ...core.constants import APP_CACHE_DIR, PLATFORM, SCRIPTS_DIR
from ...core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from ...core.utils.file import AtomicWriter
from ...libs.media_api.types import (
    AiringScheduleResult,
@@ -15,7 +17,6 @@ from ...libs.media_api.types import (
    MediaItem,
    MediaReview,
)
from . import ansi
from .preview_workers import PreviewWorkerManager


@@ -117,29 +118,15 @@ def _get_episode_image(episode: str, media_item: MediaItem) -> str:

logger = logging.getLogger(__name__)

os.environ["SHELL"] = "bash"
# os.environ["SHELL"] = sys.executable

PREVIEWS_CACHE_DIR = APP_CACHE_DIR / "previews"
IMAGES_CACHE_DIR = PREVIEWS_CACHE_DIR / "images"
INFO_CACHE_DIR = PREVIEWS_CACHE_DIR / "info"
REVIEWS_CACHE_DIR = PREVIEWS_CACHE_DIR / "reviews"
CHARACTERS_CACHE_DIR = PREVIEWS_CACHE_DIR / "characters"
AIRING_SCHEDULE_CACHE_DIR = PREVIEWS_CACHE_DIR / "airing_schedule"

FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
TEMPLATE_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "preview.template.sh").read_text(
    encoding="utf-8"
)
TEMPLATE_REVIEW_PREVIEW_SCRIPT = (
    FZF_SCRIPTS_DIR / "review-preview.template.sh"
).read_text(encoding="utf-8")
TEMPLATE_CHARACTER_PREVIEW_SCRIPT = (
    FZF_SCRIPTS_DIR / "character-preview.template.sh"
).read_text(encoding="utf-8")
TEMPLATE_AIRING_SCHEDULE_PREVIEW_SCRIPT = (
    FZF_SCRIPTS_DIR / "airing-schedule-preview.template.sh"
).read_text(encoding="utf-8")
DYNAMIC_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "dynamic-preview.template.sh").read_text(
TEMPLATE_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "preview.py").read_text(encoding="utf-8")
DYNAMIC_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "dynamic_preview.py").read_text(
    encoding="utf-8"
)
@@ -149,6 +136,23 @@ EPISODE_PATTERN = re.compile(r"^Episode\s+(\d+)\s-\s.*")
|
||||
_preview_manager: Optional[PreviewWorkerManager] = None
|
||||
|
||||
|
||||
def _ensure_ansi_utils_in_cache():
|
||||
"""Copy _ansi_utils.py to the info cache directory so cached scripts can import it."""
|
||||
source = FZF_SCRIPTS_DIR / "_ansi_utils.py"
|
||||
dest = INFO_CACHE_DIR / "_ansi_utils.py"
|
||||
|
||||
if source.exists() and (
|
||||
not dest.exists() or source.stat().st_mtime > dest.stat().st_mtime
|
||||
):
|
||||
try:
|
||||
import shutil
|
||||
|
||||
shutil.copy2(source, dest)
|
||||
logger.debug(f"Copied _ansi_utils.py to {INFO_CACHE_DIR}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to copy _ansi_utils.py to cache: {e}")
|
||||
|
||||
|
||||
def create_preview_context():
|
||||
"""
|
||||
Create a context manager for preview operations.
|
||||
@@ -284,6 +288,7 @@ def get_anime_preview(
    # Ensure cache directories exist on startup
    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    _ensure_ansi_utils_in_cache()

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
@@ -300,30 +305,28 @@ def get_anime_preview(
        logger.error(f"Failed to start background caching: {e}")
        # Continue with script generation even if caching fails

    # Prepare values to inject into the template
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Format the template with the dynamic values
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_PATH": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
        "RESET": ansi.RESET,
        "PREFIX": "",
        "SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "search-result",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script
    preview_file = PREVIEWS_CACHE_DIR / "search-result-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = f"{sys.executable} {preview_file} {{}}"
    return preview_script_final


def get_episode_preview(
@@ -360,30 +363,169 @@
        logger.error(f"Failed to start episode background caching: {e}")
        # Continue with script generation even if caching fails

    # Prepare values to inject into the template
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Format the template with the dynamic values
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_PATH": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
        "RESET": ansi.RESET,
        "PREFIX": f"{media_item.title.english}_Episode_",
        "SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "episode",
        "KEY": f"{media_item.title.english.replace(formatter.DOUBLE_QUOTE, formatter.SINGLE_QUOTE)}",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script
    preview_file = PREVIEWS_CACHE_DIR / "episode-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = f"{sys.executable} {preview_file} {{}}"
    return preview_script_final
def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -> str:
    """
    Generate the generic loader script for character previews and start background caching.
    """

    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Start managed background caching for character previews
    try:
        preview_manager = _get_preview_manager()
        worker = preview_manager.get_character_worker()
        worker.cache_character_previews(choice_map, config)
        logger.debug("Started background caching for character previews")
    except Exception as e:
        logger.error(f"Failed to start character background caching: {e}")

    # Use the generic loader script
    preview_script = TEMPLATE_PREVIEW_SCRIPT

    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "character",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    preview_file = PREVIEWS_CACHE_DIR / "character-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = f"{sys.executable} {preview_file} {{}}"
    return preview_script_final
def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) -> str:
    """
    Generate the generic loader script for review previews and start background caching.
    """

    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Start managed background caching for review previews
    try:
        preview_manager = _get_preview_manager()
        worker = preview_manager.get_review_worker()
        worker.cache_review_previews(choice_map, config)
        logger.debug("Started background caching for review previews")
    except Exception as e:
        logger.error(f"Failed to start review background caching: {e}")

    # Use the generic loader script
    preview_script = TEMPLATE_PREVIEW_SCRIPT

    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "review",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    preview_file = PREVIEWS_CACHE_DIR / "review-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = f"{sys.executable} {preview_file} {{}}"
    return preview_script_final
def get_airing_schedule_preview(
    schedule_result: AiringScheduleResult, config: AppConfig, anime_title: str = "Anime"
) -> str:
    """
    Generate the generic loader script for airing schedule previews and start background caching.
    """

    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Start managed background caching for airing schedule previews
    try:
        preview_manager = _get_preview_manager()
        worker = preview_manager.get_airing_schedule_worker()
        worker.cache_airing_schedule_preview(anime_title, schedule_result, config)
        logger.debug("Started background caching for airing schedule previews")
    except Exception as e:
        logger.error(f"Failed to start airing schedule background caching: {e}")

    # Use the generic loader script
    preview_script = TEMPLATE_PREVIEW_SCRIPT

    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "airing-schedule",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    preview_file = PREVIEWS_CACHE_DIR / "airing-schedule-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    # preview_script_final = f"{sys.executable} {preview_file} {{}}"
    # NOTE: disabled because it is not very useful
    return ""
def get_dynamic_anime_preview(config: AppConfig) -> str:
|
||||
@@ -393,17 +535,30 @@ def get_dynamic_anime_preview(config: AppConfig) -> str:
|
||||
This is different from regular anime preview because:
|
||||
1. We don't have media items upfront
|
||||
2. The preview needs to work with search results as they come in
|
||||
3. Preview is handled entirely in shell by parsing JSON results
|
||||
3. Preview script dynamically loads data from search results JSON
|
||||
|
||||
Args:
|
||||
config: Application configuration
|
||||
|
||||
Returns:
|
||||
Preview script content for fzf dynamic search
|
||||
Preview script command for fzf dynamic search
|
||||
"""
|
||||
# Ensure cache directories exist
|
||||
IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
source = FZF_SCRIPTS_DIR / "_ansi_utils.py"
|
||||
dest = PREVIEWS_CACHE_DIR / "_ansi_utils.py"
|
||||
|
||||
if source.exists() and (
|
||||
not dest.exists() or source.stat().st_mtime > dest.stat().st_mtime
|
||||
):
|
||||
try:
|
||||
import shutil
|
||||
|
||||
shutil.copy2(source, dest)
|
||||
logger.debug(f"Copied _ansi_utils.py to {INFO_CACHE_DIR}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to copy _ansi_utils.py to cache: {e}")
|
||||
|
||||
HEADER_COLOR = config.fzf.preview_header_color.split(",")
|
||||
SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
|
||||
@@ -414,39 +569,34 @@ def get_dynamic_anime_preview(config: AppConfig) -> str:
|
||||
search_cache_dir = APP_CACHE_DIR / "search"
|
||||
search_results_file = search_cache_dir / "current_search_results.json"
|
||||
|
||||
# Prepare values to inject into the template
|
||||
path_sep = "\\" if PLATFORM == "win32" else "/"
|
||||
|
||||
# Format the template with the dynamic values
|
||||
# Prepare replacements for the template
|
||||
replacements = {
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
|
||||
"INFO_CACHE_PATH": str(INFO_CACHE_DIR),
|
||||
"PATH_SEP": path_sep,
|
||||
"IMAGE_RENDERER": config.general.image_renderer,
|
||||
"SEARCH_RESULTS_FILE": str(search_results_file),
|
||||
# Color codes
|
||||
"C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
|
||||
"C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
|
||||
"RESET": ansi.RESET,
|
||||
"SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
|
||||
"IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
|
||||
"PREVIEW_MODE": config.general.preview,
|
||||
"IMAGE_RENDERER": config.general.image_renderer,
|
||||
"HEADER_COLOR": ",".join(HEADER_COLOR),
|
||||
"SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
|
||||
"SCALE_UP": str(config.general.preview_scale_up),
|
||||
}
|
||||
|
||||
for key, value in replacements.items():
|
||||
preview_script = preview_script.replace(f"{{{key}}}", value)
|
||||
|
||||
return preview_script
|
||||
# Write the preview script to cache
|
||||
preview_file = PREVIEWS_CACHE_DIR / "dynamic-search-preview-script.py"
|
||||
preview_file.write_text(preview_script, encoding="utf-8")
|
||||
|
||||
# Return the command to execute the preview script
|
||||
preview_script_final = f"{sys.executable} {preview_file} {{}}"
|
||||
return preview_script_final
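
Note: the _ansi_utils.py copy above is a freshness check on st_mtime, so the cached helper is only rewritten when the shipped copy is newer. The same copy-if-newer idiom as a standalone sketch (paths are illustrative):

    import shutil
    from pathlib import Path

    def copy_if_newer(source: Path, dest: Path) -> bool:
        """Copy source over dest only when dest is missing or stale."""
        if source.exists() and (
            not dest.exists() or source.stat().st_mtime > dest.stat().st_mtime
        ):
            dest.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(source, dest)  # copy2 preserves mtime, keeping the check stable
            return True
        return False
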

def _get_preview_manager() -> PreviewWorkerManager:
    """Get or create the global preview worker manager."""
    global _preview_manager
    if _preview_manager is None:
        _preview_manager = PreviewWorkerManager(
            IMAGES_CACHE_DIR, INFO_CACHE_DIR, REVIEWS_CACHE_DIR
        )
        _preview_manager = PreviewWorkerManager(IMAGES_CACHE_DIR, INFO_CACHE_DIR)
    return _preview_manager
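
Note: this lazy singleton is initialized on first use. If previews can ever be requested from several threads at once, a lock would make initialization race-free; a hedged sketch of that variant (the Lock is our addition, not part of the code above):

    import threading

    _preview_manager = None
    _manager_lock = threading.Lock()

    def get_manager():
        global _preview_manager
        if _preview_manager is None:
            with _manager_lock:  # double-checked locking keeps the fast path cheap
                if _preview_manager is None:
                    _preview_manager = object()  # stand-in for PreviewWorkerManager(...)
        return _preview_manager
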

@@ -470,111 +620,3 @@ def get_preview_worker_status() -> dict:
    if _preview_manager:
        return _preview_manager.get_status()
    return {"preview_worker": None, "episode_worker": None}


def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) -> str:
    """
    Generate the generic loader script for review previews and start background caching.
    """

    REVIEWS_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    preview_manager = _get_preview_manager()
    worker = preview_manager.get_review_worker()
    worker.cache_review_previews(choice_map, config)
    logger.debug("Started background caching for review previews")

    # Use the generic loader script
    preview_script = TEMPLATE_REVIEW_PREVIEW_SCRIPT
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Inject the correct cache path and color codes
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "INFO_CACHE_DIR": str(REVIEWS_CACHE_DIR),
        "PATH_SEP": path_sep,
        "C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_RULE": ansi.get_true_fg(
            config.fzf.preview_separator_color.split(","), bold=True
        ),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script


def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -> str:
    """
    Generate the generic loader script for character previews and start background caching.
    """

    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    preview_manager = _get_preview_manager()
    worker = preview_manager.get_character_worker()
    worker.cache_character_previews(choice_map, config)
    logger.debug("Started background caching for character previews")

    # Use the generic loader script
    preview_script = TEMPLATE_CHARACTER_PREVIEW_SCRIPT
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Inject the correct cache path and color codes
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "PATH_SEP": path_sep,
        "C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_RULE": ansi.get_true_fg(
            config.fzf.preview_separator_color.split(","), bold=True
        ),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script


def get_airing_schedule_preview(
    schedule_result: AiringScheduleResult, config: AppConfig, anime_title: str = "Anime"
) -> str:
    """
    Generate the generic loader script for airing schedule previews and start background caching.
    """

    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    preview_manager = _get_preview_manager()
    worker = preview_manager.get_airing_schedule_worker()
    worker.cache_airing_schedule_preview(anime_title, schedule_result, config)
    logger.debug("Started background caching for airing schedule previews")

    # Use the generic loader script
    preview_script = TEMPLATE_AIRING_SCHEDULE_PREVIEW_SCRIPT
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Inject the correct cache path and color codes
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_RULE": ansi.get_true_fg(
            config.fzf.preview_separator_color.split(","), bold=True
        ),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script

@@ -6,6 +6,7 @@ including image downloads and info text generation with proper lifecycle managem
"""

import logging
from pathlib import Path
from typing import Dict, List, Optional

import httpx
@@ -31,20 +32,20 @@ logger = logging.getLogger(__name__)

FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
TEMPLATE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "info.template.sh").read_text(
TEMPLATE_MEDIA_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "media_info.py").read_text(
    encoding="utf-8"
)
TEMPLATE_EPISODE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "episode-info.template.sh").read_text(
TEMPLATE_EPISODE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "episode_info.py").read_text(
    encoding="utf-8"
)
TEMPLATE_REVIEW_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "review-info.template.sh").read_text(
TEMPLATE_REVIEW_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "review_info.py").read_text(
    encoding="utf-8"
)
TEMPLATE_CHARACTER_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "character_info.py").read_text(
    encoding="utf-8"
)
TEMPLATE_CHARACTER_INFO_SCRIPT = (
    FZF_SCRIPTS_DIR / "character-info.template.sh"
).read_text(encoding="utf-8")
TEMPLATE_AIRING_SCHEDULE_INFO_SCRIPT = (
    FZF_SCRIPTS_DIR / "airing-schedule-info.template.sh"
    FZF_SCRIPTS_DIR / "airing_schedule_info.py"
).read_text(encoding="utf-8")

@@ -103,29 +104,29 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
            raise RuntimeError("PreviewCacheWorker is not running")

        for media_item, title_str in zip(media_items, titles):
            hash_id = self._get_cache_hash(title_str)
            selection_title = self._get_selection_title(title_str)

            # Submit image download task if needed
            if config.general.preview in ("full", "image") and media_item.cover_image:
                image_path = self.images_cache_dir / f"{hash_id}.png"
                image_path = self.images_cache_dir / f"{selection_title}.png"
                if not image_path.exists():
                    self.submit_function(
                        self._download_and_save_image,
                        media_item.cover_image.large,
                        hash_id,
                        selection_title,
                    )

            # Submit info generation task if needed
            if config.general.preview in ("full", "text"):
                info_text = self._generate_info_text(media_item, config)
                self.submit_function(self._save_info_text, info_text, hash_id)
                self.submit_function(self._save_info_text, info_text, selection_title)

    def _download_and_save_image(self, url: str, hash_id: str) -> None:
    def _download_and_save_image(self, url: str, selection_title: str) -> None:
        """Download an image and save it to cache."""
        if not self._http_client:
            raise RuntimeError("HTTP client not initialized")

        image_path = self.images_cache_dir / f"{hash_id}.png"
        image_path = self.images_cache_dir / f"{selection_title}.png"

        try:
            with self._http_client.stream("GET", url) as response:
@@ -135,7 +136,7 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
                    for chunk in response.iter_bytes():
                        f.write(chunk)

            logger.debug(f"Successfully cached image: {hash_id}")
            logger.debug(f"Successfully cached image: {selection_title}")

        except Exception as e:
            logger.error(f"Failed to download image {url}: {e}")
@@ -144,7 +145,7 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
    def _generate_info_text(self, media_item: MediaItem, config: AppConfig) -> str:
        """Generate formatted info text for a media item."""
        # Import here to avoid circular imports
        info_script = TEMPLATE_INFO_SCRIPT
        info_script = TEMPLATE_MEDIA_INFO_SCRIPT
        description = formatter.clean_html(
            media_item.description or "No description available."
        )
@@ -159,11 +160,13 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
                media_item.format.value if media_item.format else "UNKNOWN"
            ),
            "NEXT_EPISODE": formatter.shell_safe(
                f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X)')}"
                f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X')}"
                if media_item.next_airing
                else "N/A"
            ),
            "EPISODES": formatter.shell_safe(str(media_item.episodes)),
            "EPISODES": formatter.shell_safe(
                str(media_item.episodes) if media_item.episodes else "??"
            ),
            "DURATION": formatter.shell_safe(
                formatter.format_media_duration(media_item.duration)
            ),
@@ -190,7 +193,12 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
                )
            ),
            "SYNONYMNS": formatter.shell_safe(
                formatter.format_list_with_commas(media_item.synonymns)
                formatter.format_list_with_commas(
                    [media_item.title.romaji] + media_item.synonymns
                    if media_item.title.romaji
                    and media_item.title.romaji not in media_item.synonymns
                    else media_item.synonymns
                )
            ),
            "USER_STATUS": formatter.shell_safe(
                media_item.user_status.status.value
@@ -216,22 +224,22 @@ class PreviewCacheWorker(ManagedBackgroundWorker):

        return info_script

    def _save_info_text(self, info_text: str, hash_id: str) -> None:
    def _save_info_text(self, info_text: str, selection_title: str) -> None:
        """Save info text to cache."""
        try:
            info_path = self.info_cache_dir / hash_id
            info_path = self.info_cache_dir / f"{selection_title}.py"
            with AtomicWriter(info_path) as f:
                f.write(info_text)
            logger.debug(f"Successfully cached info: {hash_id}")
            logger.debug(f"Successfully cached info: {selection_title}")
        except IOError as e:
            logger.error(f"Failed to write info cache for {hash_id}: {e}")
            logger.error(f"Failed to write info cache for {selection_title}: {e}")
            raise

    def _get_cache_hash(self, text: str) -> str:
    def _get_selection_title(self, text: str) -> str:
        """Generate a cache hash for the given text."""
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return f"search-result-{sha256(text.encode('utf-8')).hexdigest()}"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        """Handle task completion with enhanced logging."""
@@ -301,7 +309,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):

        for episode_str in episodes:
            hash_id = self._get_cache_hash(
                f"{media_item.title.english}_Episode_{episode_str}"
                f"{media_item.title.english.replace(formatter.DOUBLE_QUOTE, formatter.SINGLE_QUOTE)}-{episode_str}"
            )

            # Find episode data
@@ -352,7 +360,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):
        replacements = {
            "TITLE": formatter.shell_safe(title),
            "NEXT_EPISODE": formatter.shell_safe(
                f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X)')}"
                f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X')}"
                if media_item.next_airing
                else "N/A"
            ),
@@ -385,7 +393,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):
    def _save_info_text(self, info_text: str, hash_id: str) -> None:
        """Save episode info text to cache."""
        try:
            info_path = self.info_cache_dir / hash_id
            info_path = self.info_cache_dir / (hash_id + ".py")
            with AtomicWriter(info_path) as f:
                f.write(info_text)
            logger.debug(f"Successfully cached episode info: {hash_id}")

@@ -397,7 +405,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):
        """Generate a cache hash for the given text."""
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "episode-" + sha256(text.encode("utf-8")).hexdigest()

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        """Handle task completion with enhanced logging."""
@@ -414,9 +422,12 @@ class ReviewCacheWorker(ManagedBackgroundWorker):
    Specialized background worker for caching fully-rendered media review previews.
    """

    def __init__(self, reviews_cache_dir, max_workers: int = 10):
    def __init__(
        self, images_cache_dir: Path, info_cache_dir: Path, max_workers: int = 10
    ):
        super().__init__(max_workers=max_workers, name="ReviewCacheWorker")
        self.reviews_cache_dir = reviews_cache_dir
        self.images_cache_dir = images_cache_dir
        self.info_cache_dir = info_cache_dir

    def cache_review_previews(
        self, choice_map: Dict[str, MediaReview], config: AppConfig
@@ -464,7 +475,7 @@ class ReviewCacheWorker(ManagedBackgroundWorker):
    def _save_preview_content(self, content: str, hash_id: str) -> None:
        """Saves the final preview content to the cache."""
        try:
            info_path = self.reviews_cache_dir / hash_id
            info_path = self.info_cache_dir / hash_id
            with AtomicWriter(info_path) as f:
                f.write(content)
            logger.debug(f"Successfully cached review preview: {hash_id}")
@@ -475,7 +486,7 @@ class ReviewCacheWorker(ManagedBackgroundWorker):
    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "review-" + sha256(text.encode("utf-8")).hexdigest() + ".py"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
@@ -610,7 +621,7 @@ class CharacterCacheWorker(ManagedBackgroundWorker):
    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "character-" + sha256(text.encode("utf-8")).hexdigest() + ".py"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
@@ -734,7 +745,7 @@ class AiringScheduleCacheWorker(ManagedBackgroundWorker):
    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "airing-schedule-" + sha256(text.encode("utf-8")).hexdigest() + ".py"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
@@ -750,7 +761,7 @@ class PreviewWorkerManager:
    caching workers with automatic lifecycle management.
    """

    def __init__(self, images_cache_dir, info_cache_dir, reviews_cache_dir):
    def __init__(self, images_cache_dir, info_cache_dir):
        """
        Initialize the preview worker manager.

@@ -761,7 +772,6 @@ class PreviewWorkerManager:
        """
        self.images_cache_dir = images_cache_dir
        self.info_cache_dir = info_cache_dir
        self.reviews_cache_dir = reviews_cache_dir
        self._preview_worker: Optional[PreviewCacheWorker] = None
        self._episode_worker: Optional[EpisodeCacheWorker] = None
        self._review_worker: Optional[ReviewCacheWorker] = None
@@ -805,7 +815,9 @@ class PreviewWorkerManager:
            # Clean up old worker
            thread_manager.shutdown_worker("review_cache_worker")

        self._review_worker = ReviewCacheWorker(self.reviews_cache_dir)
        self._review_worker = ReviewCacheWorker(
            self.images_cache_dir, self.info_cache_dir
        )
        self._review_worker.start()
        thread_manager.register_worker("review_cache_worker", self._review_worker)


@@ -8,14 +8,41 @@ import sys

from httpx import get
from rich import print
from rich.console import Console
from rich.markdown import Markdown

from ...core.constants import AUTHOR, GIT_REPO, PROJECT_NAME_LOWER, __version__
from ...core.constants import (
    AUTHOR,
    CLI_NAME_LOWER,
    GIT_REPO,
    PROJECT_NAME,
    __version__,
)

API_URL = f"https://api.{GIT_REPO}/repos/{AUTHOR}/{PROJECT_NAME_LOWER}/releases/latest"
API_URL = f"https://api.{GIT_REPO}/repos/{AUTHOR}/{CLI_NAME_LOWER}/releases/latest"


def print_release_json(release_json):
    version = release_json.get("tag_name", "unknown")
    release_name = release_json.get("name", version)
    release_body = release_json.get("body", "No release notes available.")
    published_at = release_json.get("published_at", "unknown")

    console = Console()

    print(f"[bold cyan]Release: {release_name}[/]")
    print(f"[dim]Version: {version}[/]")
    print(f"[dim]Published: {published_at}[/]")
    print()

    # Display release notes as markdown if available
    if release_body and release_body.strip():
        markdown = Markdown(release_body)
        console.print(markdown)
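
Note: print_release_json only needs the release payload from the endpoint above, so trying it out is a two-liner once the JSON is fetched (a hedged usage sketch, with no error handling shown):

    from httpx import get

    release = get(API_URL, timeout=10).json()  # GitHub "latest release" payload
    print_release_json(release)
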


def check_for_updates():
    USER_AGENT = f"{PROJECT_NAME_LOWER} user"
    USER_AGENT = f"{CLI_NAME_LOWER} user"
    try:
        response = get(
            API_URL,
@@ -96,9 +123,9 @@ def update_app(force=False):
            return False, release_json

        process = subprocess.run(
            [NIX, "profile", "upgrade", PROJECT_NAME_LOWER], check=False
            [NIX, "profile", "upgrade", CLI_NAME_LOWER], check=False
        )
    elif is_git_repo(AUTHOR, PROJECT_NAME_LOWER):
    elif is_git_repo(AUTHOR, CLI_NAME_LOWER):
        GIT_EXECUTABLE = shutil.which("git")
        args = [
            GIT_EXECUTABLE,
@@ -117,11 +144,9 @@ def update_app(force=False):
        )

    elif UV := shutil.which("uv"):
        process = subprocess.run(
            [UV, "tool", "upgrade", PROJECT_NAME_LOWER], check=False
        )
        process = subprocess.run([UV, "tool", "upgrade", PROJECT_NAME], check=False)
    elif PIPX := shutil.which("pipx"):
        process = subprocess.run([PIPX, "upgrade", PROJECT_NAME_LOWER], check=False)
        process = subprocess.run([PIPX, "upgrade", PROJECT_NAME], check=False)
    else:
        PYTHON_EXECUTABLE = sys.executable

@@ -130,7 +155,7 @@ def update_app(force=False):
            "-m",
            "pip",
            "install",
            PROJECT_NAME_LOWER,
            PROJECT_NAME,
            "-U",
            "--no-warn-script-location",
        ]
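
Note: the updater walks through installers in priority order (nix profile, git checkout, uv, pipx, plain pip) and runs whichever is present. A compact sketch of the same dispatch with the package name as a parameter (our simplification, not the project's exact function):

    import shutil
    import subprocess
    import sys

    def upgrade_command(package: str) -> list[str]:
        """Pick an upgrade command based on which installer is available."""
        if uv := shutil.which("uv"):
            return [uv, "tool", "upgrade", package]
        if pipx := shutil.which("pipx"):
            return [pipx, "upgrade", package]
        return [sys.executable, "-m", "pip", "install", package, "-U"]

    # subprocess.run(upgrade_command("viu-media"), check=False)
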

@@ -2,11 +2,12 @@ from ..constants import APP_DATA_DIR, DEFAULTS_DIR, PLATFORM, USER_VIDEOS_DIR
from ..utils import detect

# GeneralConfig
GENERAL_WELCOME_SCREEN = True
GENERAL_PYGMENT_STYLE = "github-dark"
GENERAL_PREFERRED_SPINNER = "smiley"
GENERAL_API_CLIENT = "anilist"
GENERAL_PREFERRED_TRACKER = "local"
GENERAL_DESKTOP_NOTIFICATION_DURATION = 5
GENERAL_DESKTOP_NOTIFICATION_DURATION = 5 * 60
GENERAL_PROVIDER = "allanime"


@@ -32,6 +33,8 @@ def GENERAL_IMAGE_RENDERER():

GENERAL_MANGA_VIEWER = "feh"
GENERAL_CHECK_FOR_UPDATES = True
GENERAL_SHOW_NEW_RELEASE = True
GENERAL_UPDATE_CHECK_INTERVAL = 12
GENERAL_CACHE_REQUESTS = True
GENERAL_MAX_CACHE_LIFETIME = "03:00:00"
GENERAL_NORMALIZE_TITLES = True

@@ -1,5 +1,6 @@
# GeneralConfig

GENERAL_WELCOME_SCREEN = "Whether to enable the welcome screen, that runs once per day"
GENERAL_PYGMENT_STYLE = "The pygment style to use"
GENERAL_PREFERRED_SPINNER = "The spinner to use"
GENERAL_API_CLIENT = "The media database API to use (e.g., 'anilist', 'jikan')."
@@ -24,6 +25,10 @@ GENERAL_IMAGE_RENDERER = (
)
GENERAL_MANGA_VIEWER = "The external application to use for viewing manga pages."
GENERAL_CHECK_FOR_UPDATES = "Automatically check for new versions of Viu on startup."
GENERAL_SHOW_NEW_RELEASE = (
    "Whether to show release notes after every update when running the new version"
)
GENERAL_UPDATE_CHECK_INTERVAL = "The interval in hours to check for updates"
GENERAL_CACHE_REQUESTS = (
    "Enable caching of network requests to speed up subsequent operations."
)

@@ -1,7 +1,7 @@
from pathlib import Path
from typing import Literal

from pydantic import BaseModel, Field, PrivateAttr, computed_field
from pydantic import BaseModel, Field

from ...libs.media_api.types import MediaSort, UserMediaListSort
from ...libs.provider.anime.types import ProviderName, ProviderServer
@@ -156,6 +156,9 @@ class GeneralConfig(BaseModel):
        default=defaults.GENERAL_API_CLIENT,
        description=desc.GENERAL_API_CLIENT,
    )
    welcome_screen: bool = Field(
        default=defaults.GENERAL_WELCOME_SCREEN, description=desc.GENERAL_WELCOME_SCREEN
    )
    provider: ProviderName = Field(
        default=ProviderName.ALLANIME,
        description=desc.GENERAL_PROVIDER,
@@ -178,7 +181,9 @@ class GeneralConfig(BaseModel):
        description=desc.GENERAL_SCALE_PREVIEW,
    )

    image_renderer: Literal["icat", "chafa", "imgcat"] = Field(
    image_renderer: Literal[
        "icat", "chafa", "imgcat", "system-sixels", "system-kitty", "system-default"
    ] = Field(
        default_factory=defaults.GENERAL_IMAGE_RENDERER,
        description=desc.GENERAL_IMAGE_RENDERER,
    )
@@ -190,6 +195,14 @@ class GeneralConfig(BaseModel):
        default=defaults.GENERAL_CHECK_FOR_UPDATES,
        description=desc.GENERAL_CHECK_FOR_UPDATES,
    )
    show_new_release: bool = Field(
        default=defaults.GENERAL_SHOW_NEW_RELEASE,
        description=desc.GENERAL_SHOW_NEW_RELEASE,
    )
    update_check_interval: float = Field(
        default=defaults.GENERAL_UPDATE_CHECK_INTERVAL,
        description=desc.GENERAL_UPDATE_CHECK_INTERVAL,
    )
    cache_requests: bool = Field(
        default=defaults.GENERAL_CACHE_REQUESTS,
        description=desc.GENERAL_CACHE_REQUESTS,
@@ -319,14 +332,16 @@ class SessionsConfig(OtherConfig):
class FzfConfig(OtherConfig):
    """Configuration specific to the FZF selector."""

    _opts: str = PrivateAttr(
        default_factory=lambda: defaults.FZF_OPTS.read_text(encoding="utf-8")
    opts: str = Field(
        default_factory=lambda: defaults.FZF_OPTS.read_text(encoding="utf-8"),
        description=desc.FZF_OPTS,
    )
    header_color: str = Field(
        default=defaults.FZF_HEADER_COLOR, description=desc.FZF_HEADER_COLOR
    )
    _header_ascii_art: str = PrivateAttr(
        default_factory=lambda: APP_ASCII_ART.read_text(encoding="utf-8")
    header_ascii_art: str = Field(
        default_factory=lambda: APP_ASCII_ART.read_text(encoding="utf-8"),
        description=desc.FZF_HEADER_ASCII_ART,
    )
    preview_header_color: str = Field(
        default=defaults.FZF_PREVIEW_HEADER_COLOR,
@@ -337,28 +352,6 @@ class FzfConfig(OtherConfig):
        description=desc.FZF_PREVIEW_SEPARATOR_COLOR,
    )

    def __init__(self, **kwargs):
        opts = kwargs.pop("opts", None)
        header_ascii_art = kwargs.pop("header_ascii_art", None)

        super().__init__(**kwargs)
        if opts:
            self._opts = opts
        if header_ascii_art:
            self._header_ascii_art = header_ascii_art

    @computed_field(description=desc.FZF_OPTS)
    @property
    def opts(self) -> str:
        return "\n" + "\n".join([f"\t{line}" for line in self._opts.split()])

    @computed_field(description=desc.FZF_HEADER_ASCII_ART)
    @property
    def header_ascii_art(self) -> str:
        return "\n" + "\n".join(
            [f"\t{line}" for line in self._header_ascii_art.split()]
        )


class RofiConfig(OtherConfig):
    """Configuration specific to the Rofi selector."""

@@ -4,15 +4,17 @@ from importlib import metadata, resources
from pathlib import Path

PLATFORM = sys.platform
PROJECT_NAME = "VIU"
PROJECT_NAME_LOWER = "viu"
APP_NAME = os.environ.get(f"{PROJECT_NAME}_APP_NAME", PROJECT_NAME_LOWER)
CLI_NAME = "VIU"
CLI_NAME_LOWER = "viu"
PROJECT_NAME = "viu-media"
APP_NAME = os.environ.get(f"{CLI_NAME}_APP_NAME", CLI_NAME_LOWER)

USER_NAME = os.environ.get("USERNAME", os.environ.get("USER", "User"))

USER_NAME = os.environ.get("USERNAME", "User")

__version__ = metadata.version("viu_media")

AUTHOR = "Benexl"
AUTHOR = "viu-media"
GIT_REPO = "github.com"
GIT_PROTOCOL = "https://"
REPO_HOME = f"https://{GIT_REPO}/{AUTHOR}/Viu"
@@ -81,6 +83,7 @@ APP_CACHE_DIR.mkdir(parents=True, exist_ok=True)
LOG_FOLDER.mkdir(parents=True, exist_ok=True)
USER_VIDEOS_DIR.mkdir(parents=True, exist_ok=True)

USER_CONFIG = APP_DATA_DIR / "config.ini"
USER_CONFIG = APP_DATA_DIR / "config.toml"

LOG_FILE = LOG_FOLDER / "app.log"
SUPPORT_PROJECT_URL = "https://buymeacoffee.com/benexl"

@@ -130,10 +130,11 @@ class YtDLPDownloader(BaseDownloader):
            }
        )

        with yt_dlp.YoutubeDL(opts) as ydl:
        # TODO: Confirm this type issues
        with yt_dlp.YoutubeDL(opts) as ydl:  # type: ignore
            info = ydl.extract_info(params.url, download=True)
            if info:
                _video_path = info["requested_downloads"][0]["filepath"]
                _video_path = info["requested_downloads"][0]["filepath"]  # type: ignore
                if _video_path.endswith(".unknown_video"):
                    print("Normalizing path...")
                    _vid_path = _video_path.replace(".unknown_video", ".mp4")

@@ -219,7 +219,7 @@ class BackgroundWorker(ABC):
        else:
            # Wait for tasks to complete with timeout
            try:
                self._executor.shutdown(wait=True, timeout=timeout)
                self._executor.shutdown(wait=True)
            except TimeoutError:
                logger.warning(
                    f"Worker {self.name} shutdown timed out, forcing cancellation"

@@ -5,6 +5,8 @@ from typing import Dict, List, Optional, Union
from ...libs.media_api.types import AiringSchedule

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")
SINGLE_QUOTE = "'"
DOUBLE_QUOTE = '"'


def format_media_duration(total_minutes: Optional[int]) -> str:

@@ -1,22 +0,0 @@
from httpx import get

ANISKIP_ENDPOINT = "https://api.aniskip.com/v1/skip-times"


# TODO: Finish own implementation of aniskip script
class AniSkip:
    @classmethod
    def get_skip_times(
        cls, mal_id: int, episode_number: float | int, types=["op", "ed"]
    ):
        url = f"{ANISKIP_ENDPOINT}/{mal_id}/{episode_number}?types=op&types=ed"
        response = get(url)
        print(response.text)
        return response.json()


if __name__ == "__main__":
    mal_id = input("Mal id: ")
    episode_number = input("episode_number: ")
    skip_times = AniSkip.get_skip_times(int(mal_id), float(episode_number))
    print(skip_times)
@@ -1,3 +0,0 @@
from .api import connect

__all__ = ["connect"]
@@ -1,13 +0,0 @@
import time

from pypresence import Presence


def connect(show, episode, switch):
    presence = Presence(client_id="1292070065583165512")
    presence.connect()
    if not switch.is_set():
        presence.update(details=show, state="Watching episode " + episode)
        time.sleep(10)
    else:
        presence.close()
viu_media/libs/media_api/_media_tags.py (new file, 1360 lines; diff suppressed because it is too large)
@@ -33,6 +33,7 @@ from ..types import (
    Studio,
    UserListItem,
    UserMediaListStatus,
    MediaType,
    UserProfile,
)
from .types import (
@@ -539,7 +540,7 @@ def _to_generic_media_item_from_notification_partial(
        title=_to_generic_media_title(data["title"]),
        cover_image=_to_generic_media_image(data["coverImage"]),
        # Provide default/empty values for fields not in notification payload
        type="ANIME",
        type=MediaType.ANIME,
        status=MediaStatus.RELEASING,  # Assume releasing for airing notifications
        format=None,
        description=None,

@@ -6,6 +6,7 @@ from ..types import (
    MediaImage,
    MediaItem,
    MediaSearchResult,
    MediaStatus,
    MediaTitle,
    PageInfo,
    Studio,
@@ -17,9 +18,9 @@ if TYPE_CHECKING:

# Jikan uses specific strings for status, we can map them to our generic enum.
JIKAN_STATUS_MAP = {
    "Finished Airing": "FINISHED",
    "Currently Airing": "RELEASING",
    "Not yet aired": "NOT_YET_RELEASED",
    "Finished Airing": MediaStatus.FINISHED,
    "Currently Airing": MediaStatus.RELEASING,
    "Not yet aired": MediaStatus.NOT_YET_RELEASED,
}
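
Note: mapping provider strings straight to the MediaStatus enum (instead of to raw strings) keeps downstream code type-safe, and the related change below gives the lookup an UNKNOWN fallback instead of None. The lookup pattern in isolation (a hedged, self-contained sketch):

    from enum import Enum

    class MediaStatus(Enum):
        FINISHED = "FINISHED"
        RELEASING = "RELEASING"
        UNKNOWN = "UNKNOWN"

    STATUS_MAP = {
        "Finished Airing": MediaStatus.FINISHED,
        "Currently Airing": MediaStatus.RELEASING,
    }

    def to_status(raw: str) -> MediaStatus:
        return STATUS_MAP.get(raw, MediaStatus.UNKNOWN)  # never None, never a bare string
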

@@ -42,7 +43,11 @@ def _to_generic_title(jikan_titles: list[dict]) -> MediaTitle:
    elif type_ == "Japanese":
        native = title_

    return MediaTitle(romaji=romaji, english=english, native=native)
    return MediaTitle(
        romaji=romaji,
        english=english or romaji or native or "NOT AVAILABLE",
        native=native,
    )


def _to_generic_image(jikan_images: dict) -> MediaImage:
@@ -69,7 +74,7 @@ def _to_generic_media_item(data: dict) -> MediaItem:
        id_mal=data["mal_id"],
        title=_to_generic_title(data.get("titles", [])),
        cover_image=_to_generic_image(data.get("images", {})),
        status=JIKAN_STATUS_MAP.get(data.get("status", ""), None),
        status=JIKAN_STATUS_MAP.get(data.get("status", ""), MediaStatus.UNKNOWN),
        episodes=data.get("episodes"),
        duration=data.get("duration"),
        average_score=score,
@@ -81,7 +86,7 @@ def _to_generic_media_item(data: dict) -> MediaItem:
            Studio(id=s["mal_id"], name=s["name"]) for s in data.get("studios", [])
        ],
        # Jikan doesn't provide streaming episodes
        streaming_episodes=[],
        streaming_episodes={},
        # Jikan doesn't provide user list status in its search results.
        user_status=None,
    )

@@ -5,6 +5,7 @@ from enum import Enum
from typing import Dict, List, Optional

from pydantic import BaseModel, ConfigDict, Field
from ._media_tags import MediaTag


# ENUMS
@@ -14,6 +15,7 @@ class MediaStatus(Enum):
    NOT_YET_RELEASED = "NOT_YET_RELEASED"
    CANCELLED = "CANCELLED"
    HIATUS = "HIATUS"
    UNKNOWN = "UNKNOWN"


class MediaType(Enum):
@@ -285,472 +287,6 @@ class MediaReview(BaseMediaApiModel):
    user: Reviewer

# ENUMS


class MediaTag(Enum):
    # Cast
    POLYAMOROUS = "Polyamorous"

    # Cast Main Cast
    ANTI_HERO = "Anti-Hero"
    ELDERLY_PROTAGONIST = "Elderly Protagonist"
    ENSEMBLE_CAST = "Ensemble Cast"
    ESTRANGED_FAMILY = "Estranged Family"
    FEMALE_PROTAGONIST = "Female Protagonist"
    MALE_PROTAGONIST = "Male Protagonist"
    PRIMARILY_ADULT_CAST = "Primarily Adult Cast"
    PRIMARILY_ANIMAL_CAST = "Primarily Animal Cast"
    PRIMARILY_CHILD_CAST = "Primarily Child Cast"
    PRIMARILY_FEMALE_CAST = "Primarily Female Cast"
    PRIMARILY_MALE_CAST = "Primarily Male Cast"
    PRIMARILY_TEEN_CAST = "Primarily Teen Cast"

    # Cast Traits
    AGE_REGRESSION = "Age Regression"
    AGENDER = "Agender"
    ALIENS = "Aliens"
    AMNESIA = "Amnesia"
    ANGELS = "Angels"
    ANTHROPOMORPHISM = "Anthropomorphism"
    AROMANTIC = "Aromantic"
    ARRANGED_MARRIAGE = "Arranged Marriage"
    ARTIFICIAL_INTELLIGENCE = "Artificial Intelligence"
    ASEXUAL = "Asexual"
    BISEXUAL = "Bisexual"
    BUTLER = "Butler"
    CENTAUR = "Centaur"
    CHIMERA = "Chimera"
    CHUUNIBYOU = "Chuunibyou"
    CLONE = "Clone"
    COSPLAY = "Cosplay"
    COWBOYS = "Cowboys"
    CROSSDRESSING = "Crossdressing"
    CYBORG = "Cyborg"
    DELINQUENTS = "Delinquents"
    DEMONS = "Demons"
    DETECTIVE = "Detective"
    DINOSAURS = "Dinosaurs"
    DISABILITY = "Disability"
    DISSOCIATIVE_IDENTITIES = "Dissociative Identities"
    DRAGONS = "Dragons"
    DULLAHAN = "Dullahan"
    ELF = "Elf"
    FAIRY = "Fairy"
    FEMBOY = "Femboy"
    GHOST = "Ghost"
    GOBLIN = "Goblin"
    GODS = "Gods"
    GYARU = "Gyaru"
    HIKIKOMORI = "Hikikomori"
    HOMELESS = "Homeless"
    IDOL = "Idol"
    KEMONOMIMI = "Kemonomimi"
    KUUDERE = "Kuudere"
    MAIDS = "Maids"
    MERMAID = "Mermaid"
    MONSTER_BOY = "Monster Boy"
    MONSTER_GIRL = "Monster Girl"
    NEKOMIMI = "Nekomimi"
    NINJA = "Ninja"
    NUDITY = "Nudity"
    NUN = "Nun"
    OFFICE_LADY = "Office Lady"
    OIRAN = "Oiran"
    OJOU_SAMA = "Ojou-sama"
    ORPHAN = "Orphan"
    PIRATES = "Pirates"
    ROBOTS = "Robots"
    SAMURAI = "Samurai"
    SHRINE_MAIDEN = "Shrine Maiden"
    SKELETON = "Skeleton"
    SUCCUBUS = "Succubus"
    TANNED_SKIN = "Tanned Skin"
    TEACHER = "Teacher"
    TOMBOY = "Tomboy"
    TRANSGENDER = "Transgender"
    TSUNDERE = "Tsundere"
    TWINS = "Twins"
    VAMPIRE = "Vampire"
    VETERINARIAN = "Veterinarian"
    VIKINGS = "Vikings"
    VILLAINESS = "Villainess"
    VTUBER = "VTuber"
    WEREWOLF = "Werewolf"
    WITCH = "Witch"
    YANDERE = "Yandere"
    YOUKAI = "Youkai"
    ZOMBIE = "Zombie"

    # Demographic
    JOSEI = "Josei"
    KIDS = "Kids"
    SEINEN = "Seinen"
    SHOUJO = "Shoujo"
    SHOUNEN = "Shounen"

    # Setting
    MATRIARCHY = "Matriarchy"

    # Setting Scene
    BAR = "Bar"
    BOARDING_SCHOOL = "Boarding School"
    CAMPING = "Camping"
    CIRCUS = "Circus"
    COASTAL = "Coastal"
    COLLEGE = "College"
    DESERT = "Desert"
    DUNGEON = "Dungeon"
    FOREIGN = "Foreign"
    INN = "Inn"
    KONBINI = "Konbini"
    NATURAL_DISASTER = "Natural Disaster"
    OFFICE = "Office"
    OUTDOOR_ACTIVITIES = "Outdoor Activities"
    PRISON = "Prison"
    RESTAURANT = "Restaurant"
    RURAL = "Rural"
    SCHOOL = "School"
    SCHOOL_CLUB = "School Club"
    SNOWSCAPE = "Snowscape"
    URBAN = "Urban"
    WILDERNESS = "Wilderness"
    WORK = "Work"

    # Setting Time
    ACHRONOLOGICAL_ORDER = "Achronological Order"
    ANACHRONISM = "Anachronism"
    ANCIENT_CHINA = "Ancient China"
    DYSTOPIAN = "Dystopian"
    HISTORICAL = "Historical"
    MEDIEVAL = "Medieval"
    TIME_SKIP = "Time Skip"

    # Setting Universe
    AFTERLIFE = "Afterlife"
    ALTERNATE_UNIVERSE = "Alternate Universe"
    AUGMENTED_REALITY = "Augmented Reality"
    OMEGAVERSE = "Omegaverse"
    POST_APOCALYPTIC = "Post-Apocalyptic"
    SPACE = "Space"
    URBAN_FANTASY = "Urban Fantasy"
    VIRTUAL_WORLD = "Virtual World"

    # Sexual Content
    AHEGAO = "Ahegao"
    AMPUTATION = "Amputation"
    ANAL_SEX = "Anal Sex"
    ARMPITS = "Armpits"
    ASHIKOKI = "Ashikoki"
    ASPHYXIATION = "Asphyxiation"
    BONDAGE = "Bondage"
    BOOBJOB = "Boobjob"
    CERVIX_PENETRATION = "Cervix Penetration"
    CHEATING = "Cheating"
    CUMFLATION = "Cumflation"
    CUNNILINGUS = "Cunnilingus"
    DEEPTHROAT = "Deepthroat"
    DEFLORATION = "Defloration"
    DILF = "DILF"
    DOUBLE_PENETRATION = "Double Penetration"
    EROTIC_PIERCINGS = "Erotic Piercings"
    EXHIBITIONISM = "Exhibitionism"
    FACIAL = "Facial"
    FEET = "Feet"
    FELLATIO = "Fellatio"
    FEMDOM = "Femdom"
    FISTING = "Fisting"
    FLAT_CHEST = "Flat Chest"
    FUTANARI = "Futanari"
    GROUP_SEX = "Group Sex"
    HAIR_PULLING = "Hair Pulling"
    HANDJOB = "Handjob"
    HUMAN_PET = "Human Pet"
    HYPERSEXUALITY = "Hypersexuality"
    INCEST = "Incest"
    INSEKI = "Inseki"
    IRRUMATIO = "Irrumatio"
    LACTATION = "Lactation"
    LARGE_BREASTS = "Large Breasts"
    MALE_PREGNANCY = "Male Pregnancy"
    MASOCHISM = "Masochism"
    MASTURBATION = "Masturbation"
    MATING_PRESS = "Mating Press"
    MILF = "MILF"
    NAKADASHI = "Nakadashi"
    NETORARE = "Netorare"
    NETORASE = "Netorase"
    NETORI = "Netori"
    PET_PLAY = "Pet Play"
    PROSTITUTION = "Prostitution"
    PUBLIC_SEX = "Public Sex"
    RAPE = "Rape"
    RIMJOB = "Rimjob"
    SADISM = "Sadism"
    SCAT = "Scat"
    SCISSORING = "Scissoring"
    SEX_TOYS = "Sex Toys"
    SHIMAIDON = "Shimaidon"
    SQUIRTING = "Squirting"
    SUMATA = "Sumata"
    SWEAT = "Sweat"
    TENTACLES = "Tentacles"
    THREESOME = "Threesome"
    VIRGINITY = "Virginity"
    VORE = "Vore"
    VOYEUR = "Voyeur"
    WATERSPORTS = "Watersports"
    ZOOPHILIA = "Zoophilia"

    # Technical
    _4_KOMA = "4-koma"
    ACHROMATIC = "Achromatic"
    ADVERTISEMENT = "Advertisement"
    ANTHOLOGY = "Anthology"
    CGI = "CGI"
    EPISODIC = "Episodic"
    FLASH = "Flash"
    FULL_CGI = "Full CGI"
    FULL_COLOR = "Full Color"
    LONG_STRIP = "Long Strip"
    MIXED_MEDIA = "Mixed Media"
    NO_DIALOGUE = "No Dialogue"
    NON_FICTION = "Non-fiction"
    POV = "POV"
    PUPPETRY = "Puppetry"
    ROTOSCOPING = "Rotoscoping"
    STOP_MOTION = "Stop Motion"
    VERTICAL_VIDEO = "Vertical Video"

    # Theme Action
    ARCHERY = "Archery"
    BATTLE_ROYALE = "Battle Royale"
    ESPIONAGE = "Espionage"
    FUGITIVE = "Fugitive"
    GUNS = "Guns"
    MARTIAL_ARTS = "Martial Arts"
    SPEARPLAY = "Spearplay"
    SWORDPLAY = "Swordplay"

    # Theme Arts
    ACTING = "Acting"
    CALLIGRAPHY = "Calligraphy"
    CLASSIC_LITERATURE = "Classic Literature"
    DRAWING = "Drawing"
    FASHION = "Fashion"
    FOOD = "Food"
    MAKEUP = "Makeup"
    PHOTOGRAPHY = "Photography"
    RAKUGO = "Rakugo"
    WRITING = "Writing"

    # Theme Arts-Music
    BAND = "Band"
    CLASSICAL_MUSIC = "Classical Music"
    DANCING = "Dancing"
    HIP_HOP_MUSIC = "Hip-hop Music"
    JAZZ_MUSIC = "Jazz Music"
    METAL_MUSIC = "Metal Music"
    MUSICAL_THEATER = "Musical Theater"
    ROCK_MUSIC = "Rock Music"

    # Theme Comedy
    PARODY = "Parody"
    SATIRE = "Satire"
    SLAPSTICK = "Slapstick"
    SURREAL_COMEDY = "Surreal Comedy"

    # Theme Drama
    BULLYING = "Bullying"
    CLASS_STRUGGLE = "Class Struggle"
    COMING_OF_AGE = "Coming of Age"
    CONSPIRACY = "Conspiracy"
    ECO_HORROR = "Eco-Horror"
    FAKE_RELATIONSHIP = "Fake Relationship"
    KINGDOM_MANAGEMENT = "Kingdom Management"
    REHABILITATION = "Rehabilitation"
    REVENGE = "Revenge"
    SUICIDE = "Suicide"
    TRAGEDY = "Tragedy"

    # Theme Fantasy
    ALCHEMY = "Alchemy"
    BODY_SWAPPING = "Body Swapping"
    CULTIVATION = "Cultivation"
    CURSES = "Curses"
    EXORCISM = "Exorcism"
    FAIRY_TALE = "Fairy Tale"
    HENSHIN = "Henshin"
    ISEKAI = "Isekai"
    KAIJU = "Kaiju"
    MAGIC = "Magic"
    MYTHOLOGY = "Mythology"
    NECROMANCY = "Necromancy"
    SHAPESHIFTING = "Shapeshifting"
    STEAMPUNK = "Steampunk"
    SUPER_POWER = "Super Power"
    SUPERHERO = "Superhero"
    WUXIA = "Wuxia"

    # Theme Game
    BOARD_GAME = "Board Game"
    E_SPORTS = "E-Sports"
    VIDEO_GAMES = "Video Games"

    # Theme Game-Card & Board Game
    CARD_BATTLE = "Card Battle"
    GO = "Go"
    KARUTA = "Karuta"
    MAHJONG = "Mahjong"
    POKER = "Poker"
    SHOGI = "Shogi"

    # Theme Game-Sport
    ACROBATICS = "Acrobatics"
    AIRSOFT = "Airsoft"
    AMERICAN_FOOTBALL = "American Football"
    ATHLETICS = "Athletics"
    BADMINTON = "Badminton"
    BASEBALL = "Baseball"
    BASKETBALL = "Basketball"
    BOWLING = "Bowling"
    BOXING = "Boxing"
    CHEERLEADING = "Cheerleading"
    CYCLING = "Cycling"
    FENCING = "Fencing"
    FISHING = "Fishing"
    FITNESS = "Fitness"
    FOOTBALL = "Football"
    GOLF = "Golf"
    HANDBALL = "Handball"
    ICE_SKATING = "Ice Skating"
    JUDO = "Judo"
    LACROSSE = "Lacrosse"
    PARKOUR = "Parkour"
    RUGBY = "Rugby"
    SCUBA_DIVING = "Scuba Diving"
    SKATEBOARDING = "Skateboarding"
    SUMO = "Sumo"
    SURFING = "Surfing"
    SWIMMING = "Swimming"
    TABLE_TENNIS = "Table Tennis"
    TENNIS = "Tennis"
    VOLLEYBALL = "Volleyball"
    WRESTLING = "Wrestling"

    # Theme Other
    ADOPTION = "Adoption"
    ANIMALS = "Animals"
    ASTRONOMY = "Astronomy"
    AUTOBIOGRAPHICAL = "Autobiographical"
    BIOGRAPHICAL = "Biographical"
    BLACKMAIL = "Blackmail"
    BODY_HORROR = "Body Horror"
    BODY_IMAGE = "Body Image"
    CANNIBALISM = "Cannibalism"
    CHIBI = "Chibi"
    COSMIC_HORROR = "Cosmic Horror"
    CREATURE_TAMING = "Creature Taming"
    CRIME = "Crime"
    CROSSOVER = "Crossover"
    DEATH_GAME = "Death Game"
    DENPA = "Denpa"
    DRUGS = "Drugs"
    ECONOMICS = "Economics"
    EDUCATIONAL = "Educational"
    ENVIRONMENTAL = "Environmental"
    ERO_GURO = "Ero Guro"
    FILMMAKING = "Filmmaking"
    FOUND_FAMILY = "Found Family"
    GAMBLING = "Gambling"
    GENDER_BENDING = "Gender Bending"
    GORE = "Gore"
    INDIGENOUS_CULTURES = "Indigenous Cultures"
    LANGUAGE_BARRIER = "Language Barrier"
    LGBTQ_PLUS_THEMES = "LGBTQ+ Themes"
    LOST_CIVILIZATION = "Lost Civilization"
    MARRIAGE = "Marriage"
    MEDICINE = "Medicine"
    MEMORY_MANIPULATION = "Memory Manipulation"
    META = "Meta"
    MOUNTAINEERING = "Mountaineering"
    NOIR = "Noir"
    OTAKU_CULTURE = "Otaku Culture"
    PANDEMIC = "Pandemic"
    PHILOSOPHY = "Philosophy"
    POLITICS = "Politics"
    PREGNANCY = "Pregnancy"
    PROXY_BATTLE = "Proxy Battle"
    PSYCHOSEXUAL = "Psychosexual"
    REINCARNATION = "Reincarnation"
    RELIGION = "Religion"
    RESCUE = "Rescue"
    ROYAL_AFFAIRS = "Royal Affairs"
    SLAVERY = "Slavery"
    SOFTWARE_DEVELOPMENT = "Software Development"
    SURVIVAL = "Survival"
    TERRORISM = "Terrorism"
    TORTURE = "Torture"
    TRAVEL = "Travel"
    VOCAL_SYNTH = "Vocal Synth"
    WAR = "War"

    # Theme Other-Organisations
    ASSASSINS = "Assassins"
    CRIMINAL_ORGANIZATION = "Criminal Organization"
    CULT = "Cult"
    FIREFIGHTERS = "Firefighters"
    GANGS = "Gangs"
    MAFIA = "Mafia"
    MILITARY = "Military"
    POLICE = "Police"
    TRIADS = "Triads"
    YAKUZA = "Yakuza"

    # Theme Other-Vehicle
    AVIATION = "Aviation"
    CARS = "Cars"
    MOPEDS = "Mopeds"
    MOTORCYCLES = "Motorcycles"
    SHIPS = "Ships"
    TANKS = "Tanks"
    TRAINS = "Trains"

    # Theme Romance
    AGE_GAP = "Age Gap"
    BOYS_LOVE = "Boys' Love"
    COHABITATION = "Cohabitation"
    FEMALE_HAREM = "Female Harem"
    HETEROSEXUAL = "Heterosexual"
    LOVE_TRIANGLE = "Love Triangle"
    MALE_HAREM = "Male Harem"
    MATCHMAKING = "Matchmaking"
    MIXED_GENDER_HAREM = "Mixed Gender Harem"
    TEENS_LOVE = "Teens' Love"
    UNREQUITED_LOVE = "Unrequited Love"
    YURI = "Yuri"

    # Theme Sci-Fi
    CYBERPUNK = "Cyberpunk"
    SPACE_OPERA = "Space Opera"
    TIME_LOOP = "Time Loop"
    TIME_MANIPULATION = "Time Manipulation"
    TOKUSATSU = "Tokusatsu"

    # Theme Sci-Fi-Mecha
    REAL_ROBOT = "Real Robot"
    SUPER_ROBOT = "Super Robot"

    # Theme Slice of Life
    AGRICULTURE = "Agriculture"
    CUTE_BOYS_DOING_CUTE_THINGS = "Cute Boys Doing Cute Things"
    CUTE_GIRLS_DOING_CUTE_THINGS = "Cute Girls Doing Cute Things"
    FAMILY_LIFE = "Family Life"
    HORTICULTURE = "Horticulture"
    IYASHIKEI = "Iyashikei"
    PARENTHOOD = "Parenthood"


class MediaSort(Enum):
    ID = "ID"
    ID_DESC = "ID_DESC"

@@ -30,8 +30,6 @@ def test_media_api(api_client: BaseApiClient):
    """
    from ....core.constants import APP_ASCII_ART
    from ..params import (
        MediaAiringScheduleParams,
        MediaCharactersParams,
        MediaRecommendationParams,
        MediaRelationsParams,
        MediaSearchParams,
@@ -125,47 +123,10 @@ def test_media_api(api_client: BaseApiClient):
    print()

    # Test 5: Get Characters
    print("5. Testing Character Information...")
    try:
        characters = api_client.get_characters_of(
            MediaCharactersParams(id=selected_anime.id)
        )
        if characters and characters.get("data"):
            char_data = characters["data"]["Page"]["media"][0]["characters"]["nodes"]
            if char_data:
                print(f" Found {len(char_data)} characters:")
                for char in char_data[:3]:  # Show first 3
                    name = char["name"]["full"] or char["name"]["first"]
                    print(f" - {name}")
            else:
                print(" No character data found")
        else:
            print(" No characters found")
    except Exception as e:
        print(f" Error: {e}")
    print()
    # TODO: Recreate this test

    # Test 6: Get Airing Schedule
    print("6. Testing Airing Schedule...")
    try:
        schedule = api_client.get_airing_schedule_for(
            MediaAiringScheduleParams(id=selected_anime.id)
        )
        if schedule and schedule.get("data"):
            schedule_data = schedule["data"]["Page"]["media"][0]["airingSchedule"][
                "nodes"
            ]
            if schedule_data:
                print(f" Found {len(schedule_data)} upcoming episodes:")
                for ep in schedule_data[:3]:  # Show first 3
                    print(f" - Episode {ep['episode']}")
            else:
                print(" No upcoming episodes")
        else:
            print(" No airing schedule found")
    except Exception as e:
        print(f" Error: {e}")
    print()
    # TODO: Recreate this test

    # Test 7: User Media List (if authenticated)
    if api_client.is_authenticated():

@@ -1,65 +0,0 @@
"""
Syncplay integration for Viu.

This module provides a procedural function to launch Syncplay with the given media and options.
"""

import shutil
import subprocess

from .tools import exit_app


def SyncPlayer(url: str, anime_title=None, headers={}, subtitles=[], *args):
    """
    Launch Syncplay for synchronized playback with friends.

    Args:
        url: The media URL to play.
        anime_title: Optional title to display in the player.
        headers: Optional HTTP headers to pass to the player.
        subtitles: Optional list of subtitle dicts with 'url' keys.
        *args: Additional arguments (unused).

    Returns:
        Tuple of ("0", "0") for compatibility.
    """
    # TODO: handle m3u8 multi quality streams
    #
    # check for SyncPlay
    SYNCPLAY_EXECUTABLE = shutil.which("syncplay")
    if not SYNCPLAY_EXECUTABLE:
        print("Syncplay not found")
        exit_app(1)
        return "0", "0"
    # start SyncPlayer
    mpv_args = []
    if headers:
        mpv_headers = "--http-header-fields="
        for header_name, header_value in headers.items():
            mpv_headers += f"{header_name}:{header_value},"
        mpv_args.append(mpv_headers)
    for subtitle in subtitles:
        mpv_args.append(f"--sub-file={subtitle['url']}")
    if not anime_title:
        subprocess.run(
            [
                SYNCPLAY_EXECUTABLE,
                url,
            ],
            check=False,
        )
    else:
        subprocess.run(
            [
                SYNCPLAY_EXECUTABLE,
                url,
                "--",
                f"--force-media-title={anime_title}",
                *mpv_args,
            ],
            check=False,
        )

    # for compatability
    return "0", "0"
@@ -88,4 +88,5 @@ def decode_hex_string(hex_string):
    # Decode each hex pair
    decoded_chars = [hex_to_char.get(pair.lower(), pair) for pair in hex_pairs]

    return "".join(decoded_chars)
    # TODO: Better type handling
    return "".join(decoded_chars)  # type: ignore

@@ -1,6 +1,6 @@
import re

ANIMEPAHE = "animepahe.ru"
ANIMEPAHE = "animepahe.si"
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api"

@@ -19,13 +19,13 @@ REQUEST_HEADERS = {
    "TE": "trailers",
}
SERVER_HEADERS = {
    "Host": "kwik.si",
    "Host": "kwik.cx",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
    "Accept-Language": "en-US,en;q=0.5",
    "Accept-Encoding": "Utf-8",
    "DNT": "1",
    "Connection": "keep-alive",
    "Referer": "https://animepahe.ru/",
    "Referer": "https://animepahe.si/",
    "Upgrade-Insecure-Requests": "1",
    "Sec-Fetch-Dest": "iframe",
    "Sec-Fetch-Mode": "navigate",

@@ -1,5 +1,3 @@
from typing import Any

from ..types import (
    Anime,
    AnimeEpisodeInfo,
@@ -87,13 +85,16 @@ def map_to_anime_result(


def map_to_server(
    episode: AnimeEpisodeInfo, translation_type: Any, quality: Any, stream_link: Any
    episode: AnimeEpisodeInfo,
    translation_type: str,
    stream_links: list[tuple[str, str]],
) -> Server:
    links = [
        EpisodeStream(
            link=stream_link,
            quality=quality,
            link=link[1],
            quality=link[0] if link[0] in ["360", "480", "720", "1080"] else "1080",  # type:ignore
            translation_type=translation_type_map[translation_type],
        )
        for link in stream_links
    ]
    return Server(name="kwik", links=links, episode_title=episode.title)

@@ -131,15 +131,17 @@ class AnimePahe(BaseAnimeProvider):
        res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
        quality = None
        translation_type = None
        stream_link = None
        stream_links = []

        # TODO: better document the scraping process
        for res_dict in res_dicts:
            # the actual attributes are data attributes in the original html 'prefixed with data-'
            embed_url = res_dict["src"]
            logger.debug(f"Found embed url: {embed_url}")
            data_audio = "dub" if res_dict["audio"] == "eng" else "sub"

            if data_audio != params.translation_type:
                logger.debug(f"Found {data_audio} but wanted {params.translation_type}")
                continue

            if not embed_url:
@@ -155,22 +157,26 @@ class AnimePahe(BaseAnimeProvider):
            )
            embed_response.raise_for_status()
            embed_page = embed_response.text
            logger.debug("Processing embed page for JS decoding")

            decoded_js = process_animepahe_embed_page(embed_page)
            if not decoded_js:
                logger.error("failed to decode embed page")
                continue
            logger.debug(f"Decoded JS: {decoded_js[:100]}...")
            juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
            if not juicy_stream:
                logger.error("failed to find juicy stream")
                continue
            logger.debug(f"Found juicy stream: {juicy_stream.group(1)}")
            juicy_stream = juicy_stream.group(1)
            quality = res_dict["resolution"]
            logger.debug(f"Found quality: {quality}")
            translation_type = data_audio
            stream_link = juicy_stream
            stream_links.append((quality, juicy_stream))

        if translation_type and quality and stream_link:
            yield map_to_server(episode, translation_type, quality, stream_link)
        if translation_type and stream_links:
            yield map_to_server(episode, translation_type, stream_links)

    @lru_cache()
    def _get_episode_info(

viu_media/libs/provider/anime/animeunity/constants.py (new file, 14 lines)
@@ -0,0 +1,14 @@
+import re
+
+ANIMEUNITY = "animeunity.so"
+ANIMEUNITY_BASE = f"https://www.{ANIMEUNITY}"
+
+MAX_TIMEOUT = 10
+TOKEN_REGEX = re.compile(r'<meta.*?name="csrf-token".*?content="([^"]*)".*?>')
+
+REPLACEMENT_WORDS = {"Season ": "", "Cour": "Part"}
+
+# Server Specific
+AVAILABLE_VIDEO_QUALITY = ["1080", "720", "480"]
+VIDEO_INFO_REGEX = re.compile(r"window.video\s*=\s*(\{[^\}]*\})")
+DOWNLOAD_URL_REGEX = re.compile(r"window.downloadUrl\s*=\s*'([^']*)'")
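A quick illustration of what the two scraping regexes above are meant to capture; the HTML and JS fragments are invented stand-ins for the real AnimeUnity markup, which may differ.

```python
import re

TOKEN_REGEX = re.compile(r'<meta.*?name="csrf-token".*?content="([^"]*)".*?>')
VIDEO_INFO_REGEX = re.compile(r"window.video\s*=\s*(\{[^\}]*\})")

page = '<meta name="csrf-token" content="abc123">'    # fabricated sample
embed = "window.video = {'id': 1, 'quality': 1080};"  # fabricated sample

print(TOKEN_REGEX.search(page).group(1))        # abc123
print(VIDEO_INFO_REGEX.search(embed).group(1))  # {'id': 1, 'quality': 1080}
```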
viu_media/libs/provider/anime/animeunity/mappers.py (new file, 129 lines)
@@ -0,0 +1,129 @@
+from typing import Literal
+
+from ..types import (
+    Anime,
+    AnimeEpisodeInfo,
+    AnimeEpisodes,
+    EpisodeStream,
+    MediaTranslationType,
+    PageInfo,
+    SearchResult,
+    SearchResults,
+    Server,
+)
+from .constants import AVAILABLE_VIDEO_QUALITY
+
+
+def map_to_search_results(
+    data: dict, translation_type: Literal["sub", "dub"]
+) -> SearchResults:
+    results = []
+    for result in data:
+        mapped_result = map_to_search_result(result, translation_type)
+        if mapped_result:
+            results.append(mapped_result)
+
+    return SearchResults(
+        page_info=PageInfo(),
+        results=results,
+    )
+
+
+def map_to_search_result(
+    data: dict, translation_type: Literal["sub", "dub"] | None
+) -> SearchResult | None:
if translation_type and data["dub"] != 1 if translation_type == "dub" else 0:
|
||||
+        return None
+    return SearchResult(
+        id=str(data["id"]),
+        title=get_titles(data)[0] if get_titles(data) else "Unknown",
+        episodes=AnimeEpisodes(
+            sub=(
+                list(map(str, range(1, get_episodes_count(data) + 1)))
+                if data["dub"] == 0
+                else []
+            ),
+            dub=(
+                list(map(str, range(1, get_episodes_count(data) + 1)))
+                if data["dub"] == 1
+                else []
+            ),
+        ),
+        other_titles=get_titles(data),
+        score=data["score"],
+        poster=data["imageurl"],
+        year=data["date"],
+    )
+
+
+def map_to_anime_result(data: list, search_result: SearchResult) -> Anime:
+    return Anime(
+        id=search_result.id,
+        title=search_result.title,
+        episodes=AnimeEpisodes(
+            sub=[
+                episode["number"]
+                for episode in data
+                if len(search_result.episodes.sub) > 0
+            ],
+            dub=[
+                episode["number"]
+                for episode in data
+                if len(search_result.episodes.dub) > 0
+            ],
+        ),
+        episodes_info=[
+            AnimeEpisodeInfo(
+                id=str(episode["id"]),
+                episode=episode["number"],
+                title=f"{search_result.title} - Ep {episode['number']}",
+            )
+            for episode in data
+        ],
+        type=search_result.media_type,
+        poster=search_result.poster,
+        year=search_result.year,
+    )
+
+
+def map_to_server(
+    episode: AnimeEpisodeInfo, info: dict, translation_type: Literal["sub", "dub"]
+) -> Server:
+    return Server(
+        name="vixcloud",
+        links=[
+            EpisodeStream(
+                link=info["link"].replace(str(info["quality"]), quality),
+                title=info["name"],
+                quality=quality,  # type: ignore
+                translation_type=MediaTranslationType(translation_type),
+                mp4=True,
+            )
+            for quality in AVAILABLE_VIDEO_QUALITY
+            if int(quality) <= info["quality"]
+        ],
+        episode_title=episode.title,
+    )
+
+
+def get_titles(data: dict) -> list[str]:
+    """
+    Return the known titles for the record, most appropriate first.
+    """
+    titles = []
+    if data.get("title_eng"):
+        titles.append(data["title_eng"])
+    if data.get("title"):
+        titles.append(data["title"])
+    if data.get("title_it"):
+        titles.append(data["title_it"])
+    return titles
+
+
+def get_episodes_count(record: dict) -> int:
+    """
+    Return the number of episodes from the record.
+    """
+    if (count := record.get("real_episodes_count", 0)) > 0:
+        return count
+    return record.get("episodes_count", 0)
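For reference, a rough sketch of the search-record shape these helpers assume. Field names are taken from the mappers above; all values are invented.

```python
record = {
    "id": 42,
    "title_eng": "Example Anime",  # preferred, returned first
    "title": "Esempio Anime",
    "title_it": None,              # falsy values are skipped
    "dub": 0,
    "real_episodes_count": 0,      # 0 here forces the fallback below
    "episodes_count": 12,
}

print(get_titles(record))          # ['Example Anime', 'Esempio Anime']
print(get_episodes_count(record))  # 12, via the episodes_count fallback
```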
viu_media/libs/provider/anime/animeunity/provider.py (new file, 175 lines)
@@ -0,0 +1,175 @@
+import logging
+from functools import lru_cache
+
+from ...scraping.user_agents import UserAgentGenerator
+from ..base import BaseAnimeProvider
+from ..params import AnimeParams, EpisodeStreamsParams, SearchParams
+from ..types import Anime, AnimeEpisodeInfo, SearchResult, SearchResults
+from ..utils.debug import debug_provider
+from .constants import (
+    ANIMEUNITY_BASE,
+    DOWNLOAD_URL_REGEX,
+    MAX_TIMEOUT,
+    REPLACEMENT_WORDS,
+    TOKEN_REGEX,
+    VIDEO_INFO_REGEX,
+)
+from .mappers import (
+    map_to_anime_result,
+    map_to_search_result,
+    map_to_search_results,
+    map_to_server,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class AnimeUnity(BaseAnimeProvider):
+    HEADERS = {
+        "User-Agent": UserAgentGenerator().random(),
+    }
+    _cache = dict[str, SearchResult]()
+
+    @lru_cache
+    def _get_token(self) -> None:
+        response = self.client.get(
+            ANIMEUNITY_BASE,
+            headers=self.HEADERS,
+            timeout=MAX_TIMEOUT,
+            follow_redirects=True,
+        )
+        response.raise_for_status()
+        token_match = TOKEN_REGEX.search(response.text)
+        if token_match:
+            self.HEADERS["x-csrf-token"] = token_match.group(1)
+            self.client.cookies = {
+                "animeunity_session": response.cookies.get("animeunity_session") or ""
+            }
+            self.client.headers = self.HEADERS
+
+    @debug_provider
+    def search(self, params: SearchParams) -> SearchResults | None:
+        if not (res := self._search(params)):
+            return None
+
+        for result in res.results:
+            self._cache[result.id] = result
+
+        return res
+
+    @lru_cache
+    def _search(self, params: SearchParams) -> SearchResults | None:
+        self._get_token()
+        # Normalize the query to match AnimeUnity titles ("Season " is dropped, "Cour" becomes "Part")
+        query = params.query
+        for old, new in REPLACEMENT_WORDS.items():
+            query = query.replace(old, new)
+
+        response = self.client.post(
+            url=f"{ANIMEUNITY_BASE}/livesearch",
+            data={"title": query},
+            timeout=MAX_TIMEOUT,
+        )
+
+        response.raise_for_status()
+        return map_to_search_results(
+            response.json().get("records", []), params.translation_type
+        )
+
+    @debug_provider
+    def get(self, params: AnimeParams) -> Anime | None:
+        return self._get_anime(params)
+
+    @lru_cache()
+    def _get_search_result(self, params: AnimeParams) -> SearchResult | None:
+        if cached := self._cache.get(params.id):
+            return cached
+
+        response = self.client.get(
+            url=f"{ANIMEUNITY_BASE}/info_api/{params.id}/",
+            timeout=MAX_TIMEOUT,
+        )
+        response.raise_for_status()
+        data = response.json()
+
+        if res := map_to_search_result(data, None):
+            self._cache[params.id] = res
+            return res
+
+    @lru_cache
+    def _get_anime(self, params: AnimeParams) -> Anime | None:
+        if (search_result := self._get_search_result(params)) is None:
+            logger.error(f"No search result found for ID {params.id}")
+            return None
+
+        # Fetch episodes in chunks
+        data = []
+        start_range = 1
+        episode_count = max(
+            len(search_result.episodes.sub), len(search_result.episodes.dub)
+        )
+        while start_range <= episode_count:
+            end_range = min(start_range + 119, episode_count)
+            response = self.client.get(
+                url=f"{ANIMEUNITY_BASE}/info_api/{params.id}/1",
+                params={
+                    "start_range": start_range,
+                    "end_range": end_range,
+                },
+                timeout=MAX_TIMEOUT,
+            )
+            response.raise_for_status()
+            data.extend(response.json().get("episodes", []))
+            start_range = end_range + 1
+
+        return map_to_anime_result(data, search_result)
+
+    @lru_cache()
+    def _get_episode_info(
+        self, params: EpisodeStreamsParams
+    ) -> AnimeEpisodeInfo | None:
+        anime_info = self._get_anime(
+            AnimeParams(id=params.anime_id, query=params.query)
+        )
+        if not anime_info:
+            logger.error(f"No anime info for {params.anime_id}")
+            return
+        if not anime_info.episodes_info:
+            logger.error(f"No episodes info for {params.anime_id}")
+            return
+        for episode in anime_info.episodes_info:
+            if episode.episode == params.episode:
+                return episode
+
+    @debug_provider
+    def episode_streams(self, params: EpisodeStreamsParams):
+        if not (episode := self._get_episode_info(params)):
+            logger.error(
+                f"Episode {params.episode} doesn't exist for anime {params.anime_id}"
+            )
+            return
+        # Get the server url
+        response = self.client.get(
+            url=f"{ANIMEUNITY_BASE}/embed-url/{episode.id}", timeout=MAX_TIMEOUT
+        )
+        response.raise_for_status()
+
+        # Fetch the server page
+        video_response = self.client.get(url=response.text.strip(), timeout=MAX_TIMEOUT)
+        video_response.raise_for_status()
+
+        video_info = VIDEO_INFO_REGEX.search(video_response.text)
+        download_url_match = DOWNLOAD_URL_REGEX.search(video_response.text)
+        if not (download_url_match and video_info):
+            logger.error(f"Failed to extract video info for episode {episode.id}")
+            return None
+
+        info = eval(video_info.group(1).replace("null", "None"))
+        info["link"] = download_url_match.group(1)
+        yield map_to_server(episode, info, params.translation_type)
+
+
+if __name__ == "__main__":
+    from ..utils.debug import test_anime_provider
+
+    test_anime_provider(AnimeUnity)
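One note on `episode_streams` above: the `window.video` object literal is parsed with `eval` after swapping `null` for `None`. If the blob is valid JSON — an assumption; the real page may use single quotes or bare keys — `json.loads` would be a safer drop-in:

```python
import json
import re

VIDEO_INFO_REGEX = re.compile(r"window.video\s*=\s*(\{[^\}]*\})")

# Fabricated embed page fragment, assumed to be JSON-shaped.
text = 'window.video = {"id": 1, "quality": 1080, "name": null}'

if match := VIDEO_INFO_REGEX.search(text):
    info = json.loads(match.group(1))  # "null" becomes None without eval
    print(info["quality"])             # 1080
```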
@@ -14,6 +14,7 @@ PROVIDERS_AVAILABLE = {
     "hianime": "provider.HiAnime",
     "nyaa": "provider.Nyaa",
     "yugen": "provider.Yugen",
+    "animeunity": "provider.AnimeUnity",
 }

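The dotted values in this registry ("module.ClassName") are resolved lazily, the same split-and-import pattern the removed MangaProvider further down uses. A minimal sketch of such a loader — illustrative only, not the project's exact code:

```python
import importlib

PROVIDERS_AVAILABLE = {"animeunity": "provider.AnimeUnity"}


def load_provider(name: str, package: str = "viu_media.libs.provider.anime"):
    # "provider.AnimeUnity" -> module "provider", class "AnimeUnity"
    module_name, cls_name = PROVIDERS_AVAILABLE[name].split(".", 1)
    module = importlib.import_module(f"{package}.{name}.{module_name}")
    return getattr(module, cls_name)
```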
@@ -11,6 +11,7 @@ from pydantic import BaseModel, ConfigDict
 class ProviderName(Enum):
     ALLANIME = "allanime"
     ANIMEPAHE = "animepahe"
+    ANIMEUNITY = "animeunity"


 class ProviderServer(Enum):

@@ -28,6 +29,9 @@ class ProviderServer(Enum):
     # AnimePaheServer values
     KWIK = "kwik"

+    # AnimeUnityServer values
+    VIXCLOUD = "vixcloud"


 class MediaTranslationType(Enum):
     SUB = "sub"
@@ -69,6 +69,9 @@ def test_anime_provider(AnimeProvider: Type[BaseAnimeProvider]):
     for i, stream in enumerate(episode_streams):
         print(f"{i + 1}: {stream.name}")
     stream = episode_streams[int(input("Select your preferred server: ")) - 1]
+    for i, link in enumerate(stream.links):
+        print(f"{i + 1}: {link.quality}")
+    link = stream.links[int(input("Select your preferred quality: ")) - 1]
     if executable := shutil.which("mpv"):
         cmd = executable
     elif executable := shutil.which("xdg-open"):

@@ -84,4 +87,4 @@ def test_anime_provider(AnimeProvider: Type[BaseAnimeProvider]):
         "Episode: ",
         stream.episode_title if stream.episode_title else episode_number,
     )
-    subprocess.run([cmd, stream.links[0].link])
+    subprocess.run([cmd, link.link])
@@ -1,105 +0,0 @@
-"""An abstraction over all providers offering added features with a simple and well typed api
-
-[TODO:description]
-"""
-
-import importlib
-import logging
-from typing import TYPE_CHECKING
-
-from .libs.manga_provider import manga_sources
-
-if TYPE_CHECKING:
-    pass
-
-
-logger = logging.getLogger(__name__)
-
-
-class MangaProvider:
-    """Class that manages all anime sources adding some extra functionality to them.
-
-    Attributes:
-        PROVIDERS: [TODO:attribute]
-        provider: [TODO:attribute]
-        provider: [TODO:attribute]
-        dynamic: [TODO:attribute]
-        retries: [TODO:attribute]
-        manga_provider: [TODO:attribute]
-    """
-
-    PROVIDERS = list(manga_sources.keys())
-    provider = PROVIDERS[0]
-
-    def __init__(self, provider="mangadex", dynamic=False, retries=0) -> None:
-        self.provider = provider
-        self.dynamic = dynamic
-        self.retries = retries
-        self.lazyload_provider(self.provider)
-
-    def lazyload_provider(self, provider):
-        """updates the current provider being used"""
-        _, anime_provider_cls_name = manga_sources[provider].split(".", 1)
-        package = f"viu_media.libs.manga_provider.{provider}"
-        provider_api = importlib.import_module(".api", package)
-        manga_provider = getattr(provider_api, anime_provider_cls_name)
-        self.manga_provider = manga_provider()
-
-    def search_for_manga(
-        self,
-        user_query,
-        nsfw=True,
-        unknown=True,
-    ):
-        """core abstraction over all providers search functionality
-
-        Args:
-            user_query ([TODO:parameter]): [TODO:description]
-            translation_type ([TODO:parameter]): [TODO:description]
-            nsfw ([TODO:parameter]): [TODO:description]
-            manga_provider ([TODO:parameter]): [TODO:description]
-            anilist_obj: [TODO:description]
-
-        Returns:
-            [TODO:return]
-        """
-        manga_provider = self.manga_provider
-        try:
-            results = manga_provider.search_for_manga(user_query, nsfw, unknown)
-        except Exception as e:
-            logger.error(e)
-            results = None
-        return results
-
-    def get_manga(
-        self,
-        anime_id: str,
-    ):
-        """core abstraction over getting info of an anime from all providers
-
-        Args:
-            anime_id: [TODO:description]
-            anilist_obj: [TODO:description]
-
-        Returns:
-            [TODO:return]
-        """
-        manga_provider = self.manga_provider
-        try:
-            results = manga_provider.get_manga(anime_id)
-        except Exception as e:
-            logger.error(e)
-            results = None
-        return results
-
-    def get_chapter_thumbnails(
-        self,
-        manga_id: str,
-        chapter: str,
-    ):
-        manga_provider = self.manga_provider
-        try:
-            results = manga_provider.get_chapter_thumbnails(manga_id, chapter)
-        except Exception as e:
-            logger.error(e)
-            results = None
-        return results  # pyright:ignore
@@ -1 +0,0 @@
-manga_sources = {"mangadex": "api.MangaDexApi"}
@@ -1,18 +0,0 @@
-from httpx import Client
-from ....core.utils.networking import random_user_agent
-
-
-class MangaProvider:
-    session: Client
-
-    USER_AGENT = random_user_agent()
-    HEADERS = {}
-
-    def __init__(self) -> None:
-        self.session = Client(
-            headers={
-                "User-Agent": self.USER_AGENT,
-                **self.HEADERS,
-            },
-            timeout=10,
-        )
@@ -1,15 +0,0 @@
-import logging
-
-from httpx import get
-
-logger = logging.getLogger(__name__)
-
-
-def fetch_manga_info_from_bal(anilist_id):
-    try:
-        url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/manga/{anilist_id}.json"
-        response = get(url, timeout=11)
-        if response.ok:
-            return response.json()
-    except Exception as e:
-        logger.error(e)
@@ -1,51 +0,0 @@
-import logging
-
-from ...common.mini_anilist import search_for_manga_with_anilist
-from ..base_provider import MangaProvider
-from ..common import fetch_manga_info_from_bal
-
-logger = logging.getLogger(__name__)
-
-
-class MangaDexApi(MangaProvider):
-    def search_for_manga(self, title: str, *args):
-        try:
-            search_results = search_for_manga_with_anilist(title)
-            return search_results
-        except Exception as e:
-            logger.error(f"[MANGADEX-ERROR]: {e}")
-
-    def get_manga(self, anilist_manga_id: str):
-        bal_data = fetch_manga_info_from_bal(anilist_manga_id)
-        if not bal_data:
-            return
-        manga_id, MangaDexManga = next(iter(bal_data["Sites"]["Mangadex"].items()))
-        return {
-            "id": manga_id,
-            "title": MangaDexManga["title"],
-            "poster": MangaDexManga["image"],
-            "availableChapters": [],
-        }
-
-    def get_chapter_thumbnails(self, manga_id, chapter):
-        chapter_info_url = f"https://api.mangadex.org/chapter?manga={manga_id}&translatedLanguage[]=en&chapter={chapter}&includeEmptyPages=0"
-        chapter_info_response = self.session.get(chapter_info_url)
-        if not chapter_info_response.ok:
-            return
-        chapter_info = next(iter(chapter_info_response.json()["data"]))
-        chapters_thumbnails_url = (
-            f"https://api.mangadex.org/at-home/server/{chapter_info['id']}"
-        )
-        chapter_thumbnails_response = self.session.get(chapters_thumbnails_url)
-        if not chapter_thumbnails_response.ok:
-            return
-        chapter_thumbnails_info = chapter_thumbnails_response.json()
-        base_url = chapter_thumbnails_info["baseUrl"]
-        hash = chapter_thumbnails_info["chapter"]["hash"]
-        return {
-            "thumbnails": [
-                f"{base_url}/data/{hash}/{chapter_thumbnail}"
-                for chapter_thumbnail in chapter_thumbnails_info["chapter"]["data"]
-            ],
-            "title": chapter_info["attributes"]["title"],
-        }
@@ -1,3 +1,4 @@
+# pyright: reportAttributeAccessIssue=false, reportPossiblyUnboundVariable=false
 """
 HTML parsing utilities with optional lxml support.

@@ -48,6 +48,7 @@ class FzfSelector(BaseSelector):
             input=fzf_input,
             stdout=subprocess.PIPE,
             text=True,
+            encoding="utf-8",
         )
         if result.returncode != 0:
             return None

@@ -74,6 +75,7 @@ class FzfSelector(BaseSelector):
             input=fzf_input,
             stdout=subprocess.PIPE,
             text=True,
+            encoding="utf-8",
         )
         if result.returncode != 0:
             return []

@@ -137,6 +139,7 @@ class FzfSelector(BaseSelector):
             input="",
             stdout=subprocess.PIPE,
             text=True,
+            encoding="utf-8",
         )
         if result.returncode != 0:
             return None
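These three hunks pin the fzf pipes to UTF-8. With `text=True` alone, subprocess decodes with `locale.getpreferredencoding(False)`, which is not UTF-8 on many Windows setups, so non-ASCII anime titles can raise `UnicodeEncodeError`; that is presumably what the change guards against. A minimal illustration, with POSIX `echo` standing in for fzf:

```python
import subprocess

result = subprocess.run(
    ["echo", "Re:ゼロから始める異世界生活"],  # non-ASCII title
    input="",
    stdout=subprocess.PIPE,
    text=True,
    encoding="utf-8",  # explicit, instead of the locale default
)
print(result.stdout)
```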
Some files were not shown because too many files have changed in this diff.