mirror of
https://github.com/Benexl/FastAnime.git
synced 2025-12-09 22:30:38 -08:00
Compare commits
187 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5f7e10a510 | ||
|
|
95586eb36f | ||
|
|
c01c08c03b | ||
|
|
14e1f44696 | ||
|
|
36b71c0751 | ||
|
|
6a5d7a0116 | ||
|
|
91efee9065 | ||
|
|
69d3d2e032 | ||
|
|
29ba77f795 | ||
|
|
a4950efa02 | ||
|
|
bbd7931790 | ||
|
|
c3ae5f9053 | ||
|
|
bf06d7ee2c | ||
|
|
41aaf92bae | ||
|
|
d38dc3194f | ||
|
|
54233aca79 | ||
|
|
6b8dfba57e | ||
|
|
3b008696d5 | ||
|
|
ece1f77e99 | ||
|
|
7b9de8620b | ||
|
|
725754ea1a | ||
|
|
80771f65ea | ||
|
|
c8c4e1b2c0 | ||
|
|
f4958cc0cc | ||
|
|
1f72e0a579 | ||
|
|
803c8316a7 | ||
|
|
26bc84e2eb | ||
|
|
901d1e87c5 | ||
|
|
523766868c | ||
|
|
bd9bf24e1c | ||
|
|
f27c0b8548 | ||
|
|
76c1dcd5ac | ||
|
|
25a46bd242 | ||
|
|
a70db611f7 | ||
|
|
091edb3a9b | ||
|
|
9050dd7787 | ||
|
|
393b9e6ed6 | ||
|
|
5193df2197 | ||
|
|
6ccd96d252 | ||
|
|
e8387f3db9 | ||
|
|
23ebff3f42 | ||
|
|
8e803e8ecb | ||
|
|
61fcd39188 | ||
|
|
313f8369d7 | ||
|
|
bee73b3f9a | ||
|
|
f647b7419a | ||
|
|
901c4422b5 | ||
|
|
08ae8786c3 | ||
|
|
64093204ad | ||
|
|
8440ffb5e5 | ||
|
|
6e287d320d | ||
|
|
a7b0f21deb | ||
|
|
71b668894b | ||
|
|
8b3a57ed07 | ||
|
|
b2f9c8349a | ||
|
|
25fe1e5e01 | ||
|
|
45ff463f7a | ||
|
|
29ce664e4c | ||
|
|
2217f011af | ||
|
|
5960a7c502 | ||
|
|
bd0309ee85 | ||
|
|
3724f06e33 | ||
|
|
d20af89fc8 | ||
|
|
3872b4c8a8 | ||
|
|
9545b893e1 | ||
|
|
1519c8be17 | ||
|
|
9a619b41f4 | ||
|
|
0c3a963cc4 | ||
|
|
192818362b | ||
|
|
2d8c1d3569 | ||
|
|
e37f9213f6 | ||
|
|
097db713bc | ||
|
|
106278e386 | ||
|
|
44b3663644 | ||
|
|
925c30c06e | ||
|
|
7401a1ad8f | ||
|
|
9a0bb65e52 | ||
|
|
1d129a5771 | ||
|
|
515660b0f6 | ||
|
|
9f5c895bf5 | ||
|
|
5634214fb8 | ||
|
|
66c0ada29d | ||
|
|
02465b4ddb | ||
|
|
5ffd94ac24 | ||
|
|
d2864df6d0 | ||
|
|
2a28e3b9a3 | ||
|
|
7b8027a8b3 | ||
|
|
2a36152c38 | ||
|
|
2048c7b743 | ||
|
|
133fd4c1c8 | ||
|
|
e22120fe99 | ||
|
|
44e6220662 | ||
|
|
1fea1335c6 | ||
|
|
8b664fae36 | ||
|
|
19a85511b4 | ||
|
|
205299108b | ||
|
|
7670bdd2f3 | ||
|
|
cd3f7f7fb8 | ||
|
|
5be03ed5b8 | ||
|
|
6581179336 | ||
|
|
2bb674f4a0 | ||
|
|
642e77f601 | ||
|
|
a5e99122f5 | ||
|
|
39bd7bed61 | ||
|
|
869072633b | ||
|
|
cbd788a573 | ||
|
|
11fe54b146 | ||
|
|
a13bdb1aa0 | ||
|
|
627b09a723 | ||
|
|
aecec5c75b | ||
|
|
49b298ed52 | ||
|
|
9a90fa196b | ||
|
|
4ac059e873 | ||
|
|
8b39a28e32 | ||
|
|
066cc89b74 | ||
|
|
db16758d9f | ||
|
|
78e17b2ba0 | ||
|
|
c5326eb8d9 | ||
|
|
4a2d95e75e | ||
|
|
3a92ba69df | ||
|
|
cf59f4822e | ||
|
|
1cea6d0179 | ||
|
|
4bc1edcc4e | ||
|
|
0c546af99c | ||
|
|
1b49e186c8 | ||
|
|
fe831f9658 | ||
|
|
72f0e2e5b9 | ||
|
|
8530da23ef | ||
|
|
1e01b6e54a | ||
|
|
aa6ba9018d | ||
|
|
354ba6256a | ||
|
|
eae31420f9 | ||
|
|
01432a0fec | ||
|
|
c158d3fb99 | ||
|
|
877bc043a0 | ||
|
|
4968f8030a | ||
|
|
c5c7644d0d | ||
|
|
ff2a5d635a | ||
|
|
8626d1991c | ||
|
|
75d15a100d | ||
|
|
25d9895c52 | ||
|
|
f1b796d72b | ||
|
|
3f63198563 | ||
|
|
8d61463156 | ||
|
|
2daa51d384 | ||
|
|
43a0d77e1b | ||
|
|
eaedf3268d | ||
|
|
ade0465ea4 | ||
|
|
5e82db4ea8 | ||
|
|
a10e56cb6f | ||
|
|
fbd95e1966 | ||
|
|
d37a441ccf | ||
|
|
cbc1ceccbb | ||
|
|
249a207cad | ||
|
|
c8a42c4920 | ||
|
|
de8b6b7f2f | ||
|
|
54e0942233 | ||
|
|
8ea0c121c2 | ||
|
|
eddaad64e7 | ||
|
|
43be7a52cf | ||
|
|
b689760a25 | ||
|
|
e53246b79b | ||
|
|
b0fc94cdc5 | ||
|
|
449f6c1e59 | ||
|
|
ab4734b79d | ||
|
|
93d0f6a1a5 | ||
|
|
19c75c48b2 | ||
|
|
5341b0a844 | ||
|
|
24e7e6a16b | ||
|
|
4b310e60b8 | ||
|
|
4d50cffd86 | ||
|
|
f6fedf0500 | ||
|
|
7b431450fe | ||
|
|
66b247330b | ||
|
|
c6b8cfc294 | ||
|
|
6895426d67 | ||
|
|
cc69dc35f6 | ||
|
|
ed81f37ae4 | ||
|
|
c6858b00c4 | ||
|
|
a44034a5d4 | ||
|
|
f768518721 | ||
|
|
97f5bb9cb3 | ||
|
|
b09fdbf69b | ||
|
|
071c46cad9 | ||
|
|
5d32503ff9 | ||
|
|
e67532c496 | ||
|
|
819012897d |
7
.envrc
7
.envrc
@@ -1 +1,6 @@
|
||||
use flake
|
||||
VIU_APP_NAME="viu-dev"
|
||||
PATH="$PWD/.venv/bin:$PATH"
|
||||
export PATH VIU_APP_NAME
|
||||
if command -v nix >/dev/null; then
|
||||
use flake
|
||||
fi
|
||||
|
||||
15
.github/FUNDING.yml
vendored
15
.github/FUNDING.yml
vendored
@@ -1,15 +0,0 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: benexl # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
|
||||
patreon: # Replace with a single Patreon username
|
||||
open_collective: # Replace with a single Open Collective username
|
||||
ko_fi: benexl # Replace with a single Ko-fi username
|
||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||
liberapay: # Replace with a single Liberapay username
|
||||
issuehunt: # Replace with a single IssueHunt username
|
||||
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||
polar: # Replace with a single Polar username
|
||||
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
|
||||
thanks_dev: # Replace with a single thanks.dev username
|
||||
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||
6
.github/workflows/publish.yml
vendored
6
.github/workflows/publish.yml
vendored
@@ -42,6 +42,12 @@ jobs:
|
||||
# IMPORTANT: this permission is mandatory for trusted publishing
|
||||
id-token: write
|
||||
|
||||
# Dedicated environments with protections for publishing are strongly recommended.
|
||||
environment:
|
||||
name: pypi
|
||||
# OPTIONAL: uncomment and update to include your PyPI project URL in the deployment status:
|
||||
# url: https://pypi.org/p/YOURPROJECT
|
||||
|
||||
steps:
|
||||
- name: Retrieve release distributions
|
||||
uses: actions/download-artifact@v4
|
||||
|
||||
57
.github/workflows/stale.yml
vendored
Normal file
57
.github/workflows/stale.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
name: Mark Stale Issues and Pull Requests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Runs every day at 6:30 UTC
|
||||
- cron: "30 6 * * *"
|
||||
# Allows you to run this workflow manually from the Actions tab for testing
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@v5
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
stale-issue-message: |
|
||||
Greetings @{{author}},
|
||||
|
||||
This bug report is like an ancient scroll detailing a legendary beast. Our small guild of developers is often on many quests at once, so our response times can be slower than a tortoise in a time-stop spell. We deeply appreciate your patience!
|
||||
|
||||
**Seeking Immediate Help or Discussion?**
|
||||
Our **[Discord Tavern](https://discord.gg/HBEmAwvbHV)** is the best place to get a quick response from the community for general questions or setup help!
|
||||
|
||||
**Want to Be the Hero?**
|
||||
You could try to tame this beast yourself! With modern grimoires (like AI coding assistants) and our **[Contribution Guide](https://github.com/viu-media/Viu/blob/master/CONTRIBUTIONS.md)**, you might just be the hero we're waiting for. We would be thrilled to review your solution!
|
||||
|
||||
---
|
||||
To keep our quest board tidy, we need to know if this creature is still roaming the lands in the latest version of `viu`. If we don't get an update within **7 days**, we'll assume it has vanished and archive the scroll.
|
||||
|
||||
Thanks for being our trusted scout!
|
||||
|
||||
stale-pr-message: |
|
||||
Hello @{{author}}, it looks like this powerful contribution has been left in the middle of its training arc! 💪
|
||||
|
||||
Our review dojo is managed by just a few senseis who are sometimes away on long missions, so thank you for your patience as we work through the queue.
|
||||
|
||||
We were excited to see this new technique being developed. Are you still planning to complete its training, or have you embarked on a different quest? If you need a sparring partner (reviewer) or some guidance from a senpai, just let us know!
|
||||
|
||||
To keep our dojo tidy, we'll be archiving unfinished techniques. If we don't hear back within **7 days**, we'll assume it's time to close this PR for now. You can always resume your training and reopen it when you're ready.
|
||||
|
||||
Thank you for your incredible effort!
|
||||
|
||||
# --- Labels and Timing ---
|
||||
stale-issue-label: "stale"
|
||||
stale-pr-label: "stale"
|
||||
|
||||
# How many days of inactivity before an issue/PR is marked as stale.
|
||||
days-before-stale: 14
|
||||
|
||||
# How many days of inactivity to wait before closing a stale issue/PR.
|
||||
days-before-close: 7
|
||||
13
.github/workflows/test.yml
vendored
13
.github/workflows/test.yml
vendored
@@ -13,7 +13,7 @@ jobs:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.10", "3.11"] # List the Python versions you want to test
|
||||
python-version: ["3.11", "3.12"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
@@ -22,6 +22,11 @@ jobs:
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
|
||||
- name: Install dbus-python build dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get -y install libdbus-1-dev libglib2.0-dev
|
||||
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v3
|
||||
with:
|
||||
@@ -36,5 +41,7 @@ jobs:
|
||||
- name: Run type checking
|
||||
run: uv run pyright
|
||||
|
||||
- name: Run tests
|
||||
run: uv run pytest tests
|
||||
# TODO: write tests
|
||||
|
||||
# - name: Run tests
|
||||
# run: uv run pytest tests
|
||||
|
||||
@@ -1,33 +1,10 @@
|
||||
default_language_version:
|
||||
python: python3.12
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.12.0
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# Ruff version.
|
||||
rev: v0.14.2
|
||||
hooks:
|
||||
- id: isort
|
||||
name: isort (python)
|
||||
args: ["--profile", "black"]
|
||||
|
||||
- repo: https://github.com/PyCQA/autoflake
|
||||
rev: v2.2.1
|
||||
hooks:
|
||||
- id: autoflake
|
||||
args:
|
||||
[
|
||||
"--in-place",
|
||||
"--remove-unused-variables",
|
||||
"--remove-all-unused-imports",
|
||||
]
|
||||
# - repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
# rev: v0.4.10
|
||||
# hooks:
|
||||
# - id: ruff
|
||||
# args: [--fix]
|
||||
|
||||
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||
rev: 24.4.2
|
||||
hooks:
|
||||
- id: black
|
||||
name: black
|
||||
#language_version: python3.10
|
||||
# Run the linter.
|
||||
- id: ruff-check
|
||||
args: [--fix]
|
||||
# Run the formatter.
|
||||
- id: ruff-format
|
||||
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.11
|
||||
1
.repomixignore
Normal file
1
.repomixignore
Normal file
@@ -0,0 +1 @@
|
||||
**/generated/**/*
|
||||
@@ -6,7 +6,7 @@ First off, thank you for considering contributing to Viu! We welcome any help, w
|
||||
|
||||
There are many ways to contribute to the Viu project:
|
||||
|
||||
* **Reporting Bugs:** If you find a bug, please create an issue in our [issue tracker](https://github.com/Benexl/Viu/issues).
|
||||
* **Reporting Bugs:** If you find a bug, please create an issue in our [issue tracker](https://github.com/viu-media/Viu/issues).
|
||||
* **Suggesting Enhancements:** Have an idea for a new feature or an improvement to an existing one? We'd love to hear it.
|
||||
* **Writing Code:** Help us fix bugs or implement new features.
|
||||
* **Improving Documentation:** Enhance our README, add examples, or clarify our contribution guidelines.
|
||||
@@ -16,7 +16,7 @@ There are many ways to contribute to the Viu project:
|
||||
|
||||
We follow the standard GitHub Fork & Pull Request workflow.
|
||||
|
||||
1. **Create an Issue:** Before starting work on a new feature or a significant bug fix, please [create an issue](https://github.com/Benexl/Viu/issues/new/choose) to discuss your idea. This allows us to give feedback and prevent duplicate work. For small bugs or documentation typos, you can skip this step.
|
||||
1. **Create an Issue:** Before starting work on a new feature or a significant bug fix, please [create an issue](https://github.com/viu-media/Viu/issues/new/choose) to discuss your idea. This allows us to give feedback and prevent duplicate work. For small bugs or documentation typos, you can skip this step.
|
||||
|
||||
2. **Fork the Repository:** Create your own fork of the Viu repository.
|
||||
|
||||
|
||||
133
README.md
133
README.md
@@ -8,12 +8,12 @@
|
||||
</p>
|
||||
<div align="center">
|
||||
|
||||
[](https://pypi.org/project/viu_cli/)
|
||||
[](https://pypi.org/project/viu_cli/)
|
||||
[](https://github.com/Benexl/Viu/actions)
|
||||
[](https://pypi.org/project/viu-media/)
|
||||
[](https://pypi.org/project/viu-media/)
|
||||
[](https://github.com/viu-media/Viu/actions)
|
||||
[](https://discord.gg/HBEmAwvbHV)
|
||||
[](https://github.com/Benexl/Viu/issues)
|
||||
[](https://github.com/Benexl/Viu/blob/master/LICENSE)
|
||||
[](https://github.com/viu-media/Viu/issues)
|
||||
[](https://github.com/viu-media/Viu/blob/master/LICENSE)
|
||||
|
||||
</div>
|
||||
|
||||
@@ -23,48 +23,20 @@
|
||||
</a>
|
||||
</p>
|
||||
|
||||

|
||||
[viu-showcase.webm](https://github.com/user-attachments/assets/5da0ec87-7780-4310-9ca2-33fae7cadd5f)
|
||||
|
||||
<details>
|
||||
<summary>
|
||||
<b>Screenshots</b>
|
||||
</summary>
|
||||
<b>Fzf:</b>
|
||||
<img width="1346" height="710" alt="250815_13h29m15s_screenshot" src="https://github.com/user-attachments/assets/d8fb8473-a0fe-47b1-b112-5cd8bec51937" />
|
||||
<img width="1346" height="710" alt="250815_13h29m43s_screenshot" src="https://github.com/user-attachments/assets/16a2555d-f81e-4044-9e65-e61205dfe899" />
|
||||
<img width="1346" height="710" alt="250815_13h30m09s_screenshot" src="https://github.com/user-attachments/assets/f521670a-c04f-4f5e-a62a-6c849fbf49bd" />
|
||||
<img width="1346" height="710" alt="250815_13h30m33s_screenshot" src="https://github.com/user-attachments/assets/27fd2ef9-ec1f-4677-b816-038eaaca1391" />
|
||||
<img width="1346" height="710" alt="250815_13h31m07s_screenshot" src="https://github.com/user-attachments/assets/6a64aa99-507e-449a-9e4a-9daa4fe496a3" />
|
||||
<img width="1346" height="710" alt="250815_13h31m44s_screenshot" src="https://github.com/user-attachments/assets/a2896d1f-0e23-4ff3-b0c6-121d21a9f99a" />
|
||||
|
||||
<b>Rofi:</b>
|
||||
<img width="1366" height="729" alt="250815_13h23m12s_screenshot" src="https://github.com/user-attachments/assets/6d18d950-11e5-41fc-a7fe-1f9eaa481e46" />
|
||||
<img width="1366" height="765" alt="250815_13h24m09s_screenshot" src="https://github.com/user-attachments/assets/af852fee-17bf-4f24-ada9-7cf0e6f3451c" />
|
||||
<img width="1366" height="768" alt="250815_13h24m57s_screenshot" src="https://github.com/user-attachments/assets/d3b4e2ab-10bd-40ae-88ed-0720b57957c1" />
|
||||
<img width="1366" height="735" alt="250815_13h26m47s_screenshot" src="https://github.com/user-attachments/assets/64682b09-c88e-4d4c-ae26-a3aa34dd08a1" />
|
||||
<img width="1366" height="768" alt="250815_13h28m05s_screenshot" src="https://github.com/user-attachments/assets/d6cd6931-0113-462c-86bb-abe6f3e12d68" />
|
||||
|
||||
</details>
|
||||
<summary>Rofi</summary>
|
||||
|
||||
<details>
|
||||
<summary>
|
||||
<b>Riced Preview Examples</b>
|
||||
</summary>
|
||||
|
||||
**Anilist Results Menu (FZF):**
|
||||

|
||||
|
||||
**Episodes Menu with Preview (FZF):**
|
||||

|
||||
|
||||
**No Image Preview Mode:**
|
||||

|
||||
|
||||
**Desktop Notifications + Episodes Menu:**
|
||||

|
||||
[viu-showcase-rofi.webm](https://github.com/user-attachments/assets/01f197d9-5ac9-45e6-a00b-8e8cd5ab459c)
|
||||
|
||||
</details>
|
||||
|
||||
> [!IMPORTANT]
|
||||
> This project scrapes public-facing websites for its streaming / downloading capabilities and primarily acts as an anilist, jikan and many other media apis tui client. The developer(s) of this application have no affiliation with these content providers. This application hosts zero content and is intended for educational and personal use only. Use at your own risk.
|
||||
>
|
||||
> [**Read the Full Disclaimer**](DISCLAIMER.md)
|
||||
|
||||
## Core Features
|
||||
|
||||
* 📺 **Interactive TUI:** Browse, search, and manage your AniList library in a rich terminal interface powered by `fzf`, `rofi`, or a built-in selector.
|
||||
@@ -77,7 +49,7 @@
|
||||
|
||||
## Installation
|
||||
|
||||
Viu runs on any platform with Python 3.10+, including Windows, macOS, Linux, and Android (via Termux).
|
||||
Viu runs on any platform with Python 3.10+, including Windows, macOS, Linux, and Android (via Termux, see other installation methods).
|
||||
|
||||
### Prerequisites
|
||||
|
||||
@@ -98,13 +70,13 @@ The best way to install Viu is with [**uv**](https://github.com/astral-sh/uv), a
|
||||
|
||||
```bash
|
||||
# Install with all optional features for the full experience
|
||||
uv tool install "viu_cli[standard]"
|
||||
uv tool install "viu-media[standard]"
|
||||
|
||||
# Or, pick and choose the extras you need:
|
||||
uv tool install viu_cli # Core functionality only
|
||||
uv tool install "viu_cli[download]" # For advanced downloading with yt-dlp
|
||||
uv tool install "viu_cli[discord]" # For Discord Rich Presence
|
||||
uv tool install "viu_cli[notifications]" # For desktop notifications
|
||||
uv tool install viu-media # Core functionality only
|
||||
uv tool install "viu-media[download]" # For advanced downloading with yt-dlp
|
||||
uv tool install "viu-media[discord]" # For Discord Rich Presence
|
||||
uv tool install "viu-media[notifications]" # For desktop notifications
|
||||
```
|
||||
|
||||
### Other Installation Methods
|
||||
@@ -113,28 +85,76 @@ uv tool install "viu_cli[notifications]" # For desktop notifications
|
||||
<summary><b>Platform-Specific and Alternative Installers</b></summary>
|
||||
|
||||
#### Nix / NixOS
|
||||
##### Ephemeral / One-Off Run (No Installation)
|
||||
```bash
|
||||
nix profile install github:Benexl/viu
|
||||
nix run github:viu-media/viu
|
||||
```
|
||||
##### Imperative Installation
|
||||
```bash
|
||||
nix profile install github:viu-media/viu
|
||||
```
|
||||
##### Declarative Installation
|
||||
###### in your flake.nix
|
||||
```nix
|
||||
viu.url = "github:viu-media/viu";
|
||||
```
|
||||
###### in your system or home-manager packages
|
||||
```nix
|
||||
inputs.viu.packages.${pkgs.system}.default
|
||||
```
|
||||
|
||||
#### Arch Linux (AUR)
|
||||
Use an AUR helper like `yay` or `paru`.
|
||||
```bash
|
||||
# Stable version (recommended)
|
||||
yay -S viu
|
||||
yay -S viu-media
|
||||
|
||||
# Git version (latest commit)
|
||||
yay -S viu-git
|
||||
yay -S viu-media-git
|
||||
```
|
||||
#### Termux
|
||||
You may have to have rust installed see this issue: https://github.com/pydantic/pydantic-core/issues/1012#issuecomment-2511269688.
|
||||
```bash
|
||||
pkg install python # though uv will probably install python for you, but doesn't hurt to have it :)
|
||||
pkg install rust # maybe required cause of pydantic
|
||||
|
||||
|
||||
# Recommended (with pip due to more control)
|
||||
pip install viu-media
|
||||
|
||||
# you may need to install pydantic manually
|
||||
python -m pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/ # may also be necessary incase the above fails
|
||||
|
||||
# add yt-dlp by
|
||||
pip install yt-dlp[default,curl-cffi]
|
||||
|
||||
# prefer without standard and manually install the things you need lxml, yt-dlp and
|
||||
pip install viu-media[standard]
|
||||
|
||||
# you may need to manually install lxml and plyer manually eg
|
||||
python -m pip install lxml --extra-index-url https://termux-user-repository.github.io/pypi/ # may also be necessary incase the above fails
|
||||
|
||||
# Alternative With Uv may work, no promises
|
||||
pkg install uv
|
||||
|
||||
uv tool install viu-media
|
||||
|
||||
# and to add yt-dlp only you can do
|
||||
uv tool install viu-media --with yt-dlp[default,curl-cffi]
|
||||
|
||||
# or though may fail, cause of lxml and plyer, in that case try to install manually
|
||||
uv tool install viu-media[standard]
|
||||
|
||||
```
|
||||
|
||||
#### Using pipx (for isolated environments)
|
||||
```bash
|
||||
pipx install "viu_cli[standard]"
|
||||
pipx install "viu-media[standard]"
|
||||
```
|
||||
|
||||
#### Using pip
|
||||
```bash
|
||||
pip install "viu_cli[standard]"
|
||||
pip install "viu-media[standard]"
|
||||
```
|
||||
</details>
|
||||
|
||||
@@ -143,7 +163,7 @@ uv tool install "viu_cli[notifications]" # For desktop notifications
|
||||
|
||||
Requires [Git](https://git-scm.com/), [Python 3.10+](https://www.python.org/), and [uv](https://astral.sh/blog/uv).
|
||||
```bash
|
||||
git clone https://github.com/Benexl/Viu.git --depth 1
|
||||
git clone https://github.com/viu-media/Viu.git --depth 1
|
||||
cd Viu
|
||||
uv tool install .
|
||||
viu --version
|
||||
@@ -346,10 +366,3 @@ You can run the background worker as a systemd service for persistence.
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome! Whether it's reporting a bug, proposing a feature, or writing code, your help is appreciated. Please read our [**Contributing Guidelines**](CONTRIBUTIONS.md) to get started.
|
||||
|
||||
## Disclaimer
|
||||
|
||||
> [!IMPORTANT]
|
||||
> This project scrapes public-facing websites. The developer(s) of this application have no affiliation with these content providers. This application hosts zero content and is intended for educational and personal use only. Use at your own risk.
|
||||
>
|
||||
> [**Read the Full Disclaimer**](DISCLAIMER.md)
|
||||
|
||||
66
dev/generate_anilist_media_tags.py
Executable file
66
dev/generate_anilist_media_tags.py
Executable file
@@ -0,0 +1,66 @@
|
||||
#!/usr/bin/env -S uv run --script
|
||||
import json
|
||||
from collections import defaultdict
|
||||
from pathlib import Path
|
||||
|
||||
import httpx
|
||||
from viu_media.core.utils.graphql import execute_graphql
|
||||
|
||||
DEV_DIR = Path(__file__).resolve().parent
|
||||
media_tags_type_py = (
|
||||
DEV_DIR.parent / "viu_media" / "libs" / "media_api" / "_media_tags.py"
|
||||
)
|
||||
media_tags_gql = DEV_DIR / "graphql" / "anilist" / "media_tags.gql"
|
||||
generated_tags_json = DEV_DIR / "generated" / "anilist" / "tags.json"
|
||||
|
||||
media_tags_response = execute_graphql(
|
||||
"https://graphql.anilist.co", httpx.Client(), media_tags_gql, {}
|
||||
)
|
||||
media_tags_response.raise_for_status()
|
||||
|
||||
template = """\
|
||||
# DO NOT EDIT THIS FILE !!! ( 。 •̀ ᴖ •́ 。)
|
||||
# ITS AUTOMATICALLY GENERATED BY RUNNING ./dev/generate_anilist_media_tags.py
|
||||
# FROM THE PROJECT ROOT
|
||||
# SO RUN THAT INSTEAD TO UPDATE THE FILE WITH THE LATEST MEDIA TAGS :)
|
||||
|
||||
|
||||
from enum import Enum
|
||||
|
||||
|
||||
class MediaTag(Enum):\
|
||||
"""
|
||||
|
||||
# 4 spaces
|
||||
tab = " "
|
||||
tags = defaultdict(list)
|
||||
for tag in media_tags_response.json()["data"]["MediaTagCollection"]:
|
||||
tags[tag["category"]].append(
|
||||
{
|
||||
"name": tag["name"],
|
||||
"description": tag["description"],
|
||||
"is_adult": tag["isAdult"],
|
||||
}
|
||||
)
|
||||
# save copy of data used to generate the class
|
||||
json.dump(tags, generated_tags_json.open("w", encoding="utf-8"), indent=2)
|
||||
|
||||
for key, value in tags.items():
|
||||
template = f"{template}\n{tab}#\n{tab}# {key.upper()}\n{tab}#\n"
|
||||
for tag in value:
|
||||
name = tag["name"]
|
||||
_tag_name = name.replace("-", "_").replace(" ", "_").upper()
|
||||
if _tag_name.startswith(("0", "1", "2", "3", "4", "5", "6", "7", "8", "9")):
|
||||
_tag_name = f"_{_tag_name}"
|
||||
|
||||
tag_name = ""
|
||||
# sanitize invalid characters for attribute names
|
||||
for char in _tag_name:
|
||||
if char.isidentifier() or char.isdigit():
|
||||
tag_name += char
|
||||
|
||||
desc = tag["description"].replace("\n", "")
|
||||
is_adult = tag["is_adult"]
|
||||
template = f'{template}\n{tab}# {desc} (is_adult: {is_adult})\n{tab}{tag_name} = "{name}"\n'
|
||||
|
||||
media_tags_type_py.write_text(template, "utf-8")
|
||||
File diff suppressed because it is too large
Load Diff
8
dev/graphql/anilist/media_tags.gql
Normal file
8
dev/graphql/anilist/media_tags.gql
Normal file
@@ -0,0 +1,8 @@
|
||||
query {
|
||||
MediaTagCollection {
|
||||
name
|
||||
description
|
||||
category
|
||||
isAdult
|
||||
}
|
||||
}
|
||||
0
dev/make_release
Normal file → Executable file
0
dev/make_release
Normal file → Executable file
8
flake.lock
generated
8
flake.lock
generated
@@ -20,17 +20,17 @@
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1753345091,
|
||||
"narHash": "sha256-CdX2Rtvp5I8HGu9swBmYuq+ILwRxpXdJwlpg8jvN4tU=",
|
||||
"lastModified": 1756386758,
|
||||
"narHash": "sha256-1wxxznpW2CKvI9VdniaUnTT2Os6rdRJcRUf65ZK9OtE=",
|
||||
"owner": "nixos",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "3ff0e34b1383648053bba8ed03f201d3466f90c9",
|
||||
"rev": "dfb2f12e899db4876308eba6d93455ab7da304cd",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nixos",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "3ff0e34b1383648053bba8ed03f201d3466f90c9",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
|
||||
17
flake.nix
17
flake.nix
@@ -2,8 +2,7 @@
|
||||
description = "Viu Project Flake";
|
||||
|
||||
inputs = {
|
||||
# The nixpkgs unstable latest commit breaks the plyer python package
|
||||
nixpkgs.url = "github:nixos/nixpkgs/3ff0e34b1383648053bba8ed03f201d3466f90c9";
|
||||
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
};
|
||||
|
||||
@@ -17,21 +16,21 @@
|
||||
system:
|
||||
let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
inherit (pkgs) lib python3Packages;
|
||||
inherit (pkgs) lib python312Packages;
|
||||
|
||||
version = "3.1.0";
|
||||
in
|
||||
{
|
||||
packages.default = python3Packages.buildPythonApplication {
|
||||
packages.default = python312Packages.buildPythonApplication {
|
||||
pname = "viu";
|
||||
inherit version;
|
||||
pyproject = true;
|
||||
|
||||
src = self;
|
||||
|
||||
build-system = with python3Packages; [ hatchling ];
|
||||
build-system = with python312Packages; [ hatchling ];
|
||||
|
||||
dependencies = with python3Packages; [
|
||||
dependencies = with python312Packages; [
|
||||
click
|
||||
inquirerpy
|
||||
requests
|
||||
@@ -67,12 +66,10 @@
|
||||
# Needs to be adapted for the nix derivation build
|
||||
doCheck = false;
|
||||
|
||||
pythonImportsCheck = [ "viu" ];
|
||||
|
||||
meta = {
|
||||
description = "Your browser anime experience from the terminal";
|
||||
homepage = "https://github.com/Benexl/Viu";
|
||||
changelog = "https://github.com/Benexl/Viu/releases/tag/v${version}";
|
||||
homepage = "https://github.com/viu-media/Viu";
|
||||
changelog = "https://github.com/viu-media/Viu/releases/tag/v${version}";
|
||||
mainProgram = "viu";
|
||||
license = lib.licenses.unlicense;
|
||||
maintainers = with lib.maintainers; [ theobori ];
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
[project]
|
||||
name = "viu_cli"
|
||||
version = "3.1.0"
|
||||
name = "viu-media"
|
||||
version = "3.3.2"
|
||||
description = "A browser anime site experience from the terminal"
|
||||
license = "UNLICENSE"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"click>=8.1.7",
|
||||
"httpx>=0.28.1",
|
||||
@@ -14,14 +14,16 @@ dependencies = [
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
viu = 'viu_cli:Cli'
|
||||
viu = 'viu_media:Cli'
|
||||
|
||||
[project.optional-dependencies]
|
||||
standard = [
|
||||
"thefuzz>=0.22.1",
|
||||
"yt-dlp>=2025.7.21",
|
||||
"pycryptodomex>=3.23.0",
|
||||
"dbus-python>=1.4.0",
|
||||
"pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality
|
||||
"pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality
|
||||
"dbus-python; sys_platform == 'linux'", # For Linux-specific functionality (e.g., notifications),
|
||||
"plyer>=2.1.0",
|
||||
"lxml>=6.0.0"
|
||||
]
|
||||
@@ -47,8 +49,8 @@ torrents = [
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.uv]
|
||||
dev-dependencies = [
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"pre-commit>=4.0.1",
|
||||
"pyinstaller>=6.11.1",
|
||||
"pyright>=1.1.384",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"venvPath": ".",
|
||||
"venv": ".venv",
|
||||
"pythonVersion": "3.10"
|
||||
"pythonVersion": "3.12"
|
||||
}
|
||||
|
||||
2
tox.ini
2
tox.ini
@@ -1,7 +1,7 @@
|
||||
[tox]
|
||||
requires =
|
||||
tox>=4
|
||||
env_list = lint, pyright, py{310,311}
|
||||
env_list = lint, pyright, py{311,312}
|
||||
|
||||
[testenv]
|
||||
description = run unit tests
|
||||
|
||||
2
fa → viu
Normal file → Executable file
2
fa → viu
Normal file → Executable file
@@ -3,4 +3,4 @@ provider_type=$1
|
||||
provider_name=$2
|
||||
[ -z "$provider_type" ] && echo "Please specify provider type" && exit
|
||||
[ -z "$provider_name" ] && echo "Please specify provider type" && exit
|
||||
uv run python -m viu_cli.libs.provider.${provider_type}.${provider_name}.provider
|
||||
uv run python -m viu_media.libs.provider.${provider_type}.${provider_name}.provider
|
||||
@@ -1,22 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Viu Airing Schedule Info Script Template
|
||||
# This script formats and displays airing schedule details in the FZF preview pane.
|
||||
# Python injects the actual data values into the placeholders.
|
||||
|
||||
draw_rule
|
||||
|
||||
print_kv "Anime Title" "{ANIME_TITLE}"
|
||||
|
||||
draw_rule
|
||||
|
||||
print_kv "Total Episodes" "{TOTAL_EPISODES}"
|
||||
print_kv "Upcoming Episodes" "{UPCOMING_EPISODES}"
|
||||
|
||||
draw_rule
|
||||
|
||||
echo "{C_KEY}Next Episodes:{RESET}"
|
||||
echo
|
||||
echo "{SCHEDULE_TABLE}" | fold -s -w "$WIDTH"
|
||||
|
||||
draw_rule
|
||||
@@ -1,75 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# FZF Airing Schedule Preview Script Template
|
||||
#
|
||||
# This script is a template. The placeholders in curly braces, like {NAME}
|
||||
# are dynamically filled by python using .replace()
|
||||
|
||||
WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
|
||||
IMAGE_RENDERER="{IMAGE_RENDERER}"
|
||||
|
||||
generate_sha256() {
|
||||
local input
|
||||
|
||||
# Check if input is passed as an argument or piped
|
||||
if [ -n "$1" ]; then
|
||||
input="$1"
|
||||
else
|
||||
input=$(cat)
|
||||
fi
|
||||
|
||||
if command -v sha256sum &>/dev/null; then
|
||||
echo -n "$input" | sha256sum | awk '{print $1}'
|
||||
elif command -v shasum &>/dev/null; then
|
||||
echo -n "$input" | shasum -a 256 | awk '{print $1}'
|
||||
elif command -v sha256 &>/dev/null; then
|
||||
echo -n "$input" | sha256 | awk '{print $1}'
|
||||
elif command -v openssl &>/dev/null; then
|
||||
echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
|
||||
else
|
||||
echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
print_kv() {
|
||||
local key="$1"
|
||||
local value="$2"
|
||||
local key_len=${#key}
|
||||
local value_len=${#value}
|
||||
local multiplier="${3:-1}"
|
||||
|
||||
# Correctly calculate padding by accounting for the key, the ": ", and the value.
|
||||
local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))
|
||||
|
||||
# If the text is too long to fit, just add a single space for separation.
|
||||
if [ "$padding_len" -lt 1 ]; then
|
||||
padding_len=1
|
||||
value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
|
||||
printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
|
||||
else
|
||||
printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
draw_rule(){
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{C_RULE}─{RESET}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
}
|
||||
|
||||
title={}
|
||||
hash=$(generate_sha256 "$title")
|
||||
|
||||
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
|
||||
info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
|
||||
if [ -f "$info_file" ]; then
|
||||
source "$info_file"
|
||||
else
|
||||
echo "📅 Loading airing schedule..."
|
||||
fi
|
||||
fi
|
||||
@@ -1,41 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Viu Character Info Script Template
|
||||
# This script formats and displays character details in the FZF preview pane.
|
||||
# Python injects the actual data values into the placeholders.
|
||||
|
||||
# Render the character details card: name header, then attribute rows,
# then the wrapped description.  Helpers (draw_rule, print_kv, WIDTH)
# come from the preview template this snippet is sourced into.
draw_rule

print_kv "Character Name" "{CHARACTER_NAME}"

# Each optional field is skipped when the injected value is the
# provider's "no data" sentinel ("N/A", "Unknown" or "0").
if [ -n "{CHARACTER_NATIVE_NAME}" ] && [ "{CHARACTER_NATIVE_NAME}" != "N/A" ]; then
    print_kv "Native Name" "{CHARACTER_NATIVE_NAME}"
fi

draw_rule

if [ -n "{CHARACTER_GENDER}" ] && [ "{CHARACTER_GENDER}" != "Unknown" ]; then
    print_kv "Gender" "{CHARACTER_GENDER}"
fi

if [ -n "{CHARACTER_AGE}" ] && [ "{CHARACTER_AGE}" != "Unknown" ]; then
    print_kv "Age" "{CHARACTER_AGE}"
fi

if [ -n "{CHARACTER_BLOOD_TYPE}" ] && [ "{CHARACTER_BLOOD_TYPE}" != "N/A" ]; then
    print_kv "Blood Type" "{CHARACTER_BLOOD_TYPE}"
fi

if [ -n "{CHARACTER_BIRTHDAY}" ] && [ "{CHARACTER_BIRTHDAY}" != "N/A" ]; then
    print_kv "Birthday" "{CHARACTER_BIRTHDAY}"
fi

if [ -n "{CHARACTER_FAVOURITES}" ] && [ "{CHARACTER_FAVOURITES}" != "0" ]; then
    print_kv "Favorites" "{CHARACTER_FAVOURITES}"
fi

draw_rule

# Wrap the description to the preview width.
echo "{CHARACTER_DESCRIPTION}" | fold -s -w "$WIDTH"

draw_rule
|
||||
@@ -1,130 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# FZF Character Preview Script Template
|
||||
#
|
||||
# This script is a template. The placeholders in curly braces, like {NAME}
|
||||
# are dynamically filled by python using .replace()
|
||||
|
||||
WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
|
||||
IMAGE_RENDERER="{IMAGE_RENDERER}"
|
||||
|
||||
# generate_sha256 [INPUT]
# Print the SHA-256 hex digest of INPUT (first argument) or, when no
# argument is given, of the data read from stdin.  Tries hashing tools in
# order of likelihood and falls back to a filename-safe base64 encoding,
# so the result is always usable as a cache key.
generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    # NOTE: "&>/dev/null" is a bashism; this file is #!/bin/sh, where it
    # backgrounds the command (making the test always succeed).
    if command -v sha256sum >/dev/null 2>&1; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 >/dev/null 2>&1; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        # openssl prints "SHA2-256(stdin)= <hash>"; take the hash field.
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        # Last resort: deterministic, filesystem-safe key (not a hash).
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}
|
||||
|
||||
# fzf_preview FILE
# Render image FILE inside the fzf preview pane, picking the best
# available renderer: the kitty graphics protocol (kitten/icat/kitty),
# chafa, or imgcat.
fzf_preview() {
    file=$1

    # Preview pane size as COLSxLINES; fall back to the tty size when
    # fzf did not export the preview dimensions.
    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    # When the preview pane touches the bottom of the screen (and we are
    # not using kitty), drop one line so rendering cannot scroll the pane.
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    # icat renderer outside Ghostty: try kitten, standalone icat, then the
    # kitty wrapper.  The sed pipeline strips the last line and appends a
    # terminal attribute reset so fzf's layout is not disturbed.
    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi

    # Ghostty also speaks the kitty graphics protocol; chafa is the
    # in-branch fallback.
    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    # chafa: sixel output on Windows terminals, symbols elsewhere.
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo

    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}
|
||||
# print_kv KEY VALUE [MULTIPLIER]
# Print "KEY: VALUE" with VALUE right-aligned to the preview width.
# MULTIPLIER (default 1) scales the value's display width, e.g. 2 when
# the value contains emoji that occupy two terminal cells.
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, wrap the value and keep a single
    # space for separation.  ("$value" is quoted so spaces and glob
    # characters in titles survive the echo — was unquoted before.)
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo "$value" | fold -s -w "$((WIDTH - key_len - 3))")
    fi
    printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
}
|
||||
|
||||
|
||||
# draw_rule
# Draw a horizontal rule across the preview pane, one cell short of the
# full width so the line never wraps.
draw_rule() {
    # Fall back to 80 columns when fzf does not export the preview size
    # (previously "[ $ll -le $FZF_PREVIEW_COLUMNS ]" errored when unset).
    local cols="${FZF_PREVIEW_COLUMNS:-80}"
    local i=2
    while [ "$i" -le "$cols" ]; do
        # printf is POSIX; "echo -n -e" and "((i++))" are not.
        printf '{C_RULE}─{RESET}'
        i=$((i + 1))
    done
    echo
}
|
||||
|
||||
# Cache key for the selected character; "{}" is fzf's placeholder for the
# current selection (quoted by fzf itself).
title={}
hash=$(generate_sha256 "$title")


# FIXME: Disabled since they cover the text perhaps its aspect ratio related or image format not sure
# if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
#     image_file="{IMAGE_CACHE_DIR}{PATH_SEP}$hash.png"
#     if [ -f "$image_file" ]; then
#         fzf_preview "$image_file"
#         echo # Add a newline for spacing
#     fi
# fi

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        # POSIX "." instead of bash-only "source" (this file is #!/bin/sh);
        # the cached info file is a shell snippet that prints the details.
        . "$info_file"
    else
        echo "👤 Loading character details..."
    fi
fi
|
||||
|
||||
|
||||
@@ -1,315 +0,0 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# FZF Dynamic Preview Script Template
|
||||
#
|
||||
# This script handles previews for dynamic search results by parsing the JSON
|
||||
# search results file and extracting info for the selected item.
|
||||
# The placeholders in curly braces are dynamically filled by Python using .replace()
|
||||
|
||||
WIDTH=${FZF_PREVIEW_COLUMNS:-80}
|
||||
IMAGE_RENDERER="{IMAGE_RENDERER}"
|
||||
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
|
||||
IMAGE_CACHE_PATH="{IMAGE_CACHE_PATH}"
|
||||
INFO_CACHE_PATH="{INFO_CACHE_PATH}"
|
||||
PATH_SEP="{PATH_SEP}"
|
||||
|
||||
# Color codes injected by Python
|
||||
C_TITLE="{C_TITLE}"
|
||||
C_KEY="{C_KEY}"
|
||||
C_VALUE="{C_VALUE}"
|
||||
C_RULE="{C_RULE}"
|
||||
RESET="{RESET}"
|
||||
|
||||
# Selected item from fzf
|
||||
SELECTED_ITEM={}
|
||||
|
||||
# Return the SHA-256 hex digest of the first argument, using whichever
# hashing tool is installed; degrades to a filesystem-safe base64 tag
# when no hashing tool exists at all.
generate_sha256() {
    local data="$1"
    if command -v sha256sum >/dev/null 2>&1; then
        echo -n "$data" | sha256sum | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        echo -n "$data" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 >/dev/null 2>&1; then
        echo -n "$data" | sha256 | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        # openssl prints "SHA2-256(stdin)= <hash>"; keep the hash field.
        echo -n "$data" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$data" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}
|
||||
|
||||
# fzf_preview FILE
# Render image FILE inside the fzf preview pane, picking the best
# available renderer: the kitty graphics protocol (kitten/icat/kitty),
# chafa, or imgcat.
fzf_preview() {
    file=$1
    # Preview pane size as COLSxLINES; fall back to the tty size when
    # fzf did not export the preview dimensions.
    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    # When the preview pane touches the bottom of the screen (and we are
    # not using kitty), drop one line so rendering cannot scroll the pane.
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    # icat renderer outside Ghostty: try kitten, standalone icat, then the
    # kitty wrapper.  The sed pipeline strips the last line and appends a
    # terminal attribute reset so fzf's layout is not disturbed.
    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi
    # Ghostty also speaks the kitty graphics protocol; chafa is the
    # in-branch fallback.
    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    # chafa: sixel output on Windows terminals, symbols elsewhere.
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo
    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}
|
||||
|
||||
# print_kv KEY VALUE [MULTIPLIER]
# Print "KEY: VALUE" with VALUE right-aligned to the preview width.
# MULTIPLIER (default 1) scales the value's display width, e.g. 2 when
# the value contains emoji that occupy two terminal cells.
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Padding = width minus the key, the ": " separator, and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, wrap the value and keep a single
    # space for separation.  ("$value" is quoted so spaces and glob
    # characters in titles survive the echo — was unquoted before.)
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo "$value" | fold -s -w "$((WIDTH - key_len - 3))")
    fi
    printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
}
|
||||
|
||||
# draw_rule
# Draw a horizontal rule across the preview pane, one cell short of the
# full width so the line never wraps.
draw_rule() {
    # Fall back to 80 columns when fzf does not export the preview size
    # (previously "[ $ll -le $FZF_PREVIEW_COLUMNS ]" errored when unset).
    local cols="${FZF_PREVIEW_COLUMNS:-80}"
    local i=2
    while [ "$i" -le "$cols" ]; do
        printf '{C_RULE}─{RESET}'
        i=$((i + 1))
    done
    echo
}
|
||||
|
||||
# clean_html HTML
# Strip HTML tags from AniList descriptions and decode the common HTML
# entities so the text renders cleanly in the terminal.
# NOTE(review): the entity patterns had been mangled into no-ops
# (e.g. "s/</</g"); restored to the standard &lt;/&gt;/&amp;/&quot;/&#039;
# forms — confirm against the original template.
clean_html() {
    echo "$1" | sed 's/<[^>]*>//g' | sed 's/&lt;/</g' | sed 's/&gt;/>/g' | sed 's/&amp;/\&/g' | sed 's/&quot;/"/g' | sed "s/&#039;/'/g"
}
|
||||
|
||||
# format_date DATE_JSON
# Convert an AniList fuzzy-date JSON object ({"year":..,"month":..,"day":..},
# any field possibly null) into a human string:
#   day+month+year -> "D/M/Y", month+year -> "M/Y", year only -> "Y",
#   missing/null   -> "N/A".
format_date() {
    local date_obj="$1"
    if [ "$date_obj" = "null" ] || [ -z "$date_obj" ]; then
        echo "N/A"
        return
    fi

    # Extract year, month, day from the date object — jq when available,
    # python3 otherwise; every failure degrades to the empty/N-A default.
    if command -v jq >/dev/null 2>&1; then
        year=$(echo "$date_obj" | jq -r '.year // "N/A"' 2>/dev/null || echo "N/A")
        month=$(echo "$date_obj" | jq -r '.month // ""' 2>/dev/null || echo "")
        day=$(echo "$date_obj" | jq -r '.day // ""' 2>/dev/null || echo "")
    else
        year=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('year', 'N/A'))" 2>/dev/null || echo "N/A")
        month=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('month', ''))" 2>/dev/null || echo "")
        day=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('day', ''))" 2>/dev/null || echo "")
    fi

    # Assemble from the most specific date available.
    if [ "$year" = "N/A" ] || [ "$year" = "null" ]; then
        echo "N/A"
    elif [ -n "$month" ] && [ "$month" != "null" ] && [ -n "$day" ] && [ "$day" != "null" ]; then
        echo "$day/$month/$year"
    elif [ -n "$month" ] && [ "$month" != "null" ]; then
        echo "$month/$year"
    else
        echo "$year"
    fi
}
|
||||
|
||||
# If no selection or search results file doesn't exist, show placeholder
# NOTE(review): the DEBUG section is user-visible in the preview pane —
# consider removing it for release builds.
if [ -z "$SELECTED_ITEM" ] || [ ! -f "$SEARCH_RESULTS_FILE" ]; then
    echo "${C_TITLE}Dynamic Search Preview${RESET}"
    draw_rule
    echo "Type to search for anime..."
    echo "Results will appear here as you type."
    echo
    echo "DEBUG:"
    echo "SELECTED_ITEM='$SELECTED_ITEM'"
    echo "SEARCH_RESULTS_FILE='$SEARCH_RESULTS_FILE'"
    if [ -f "$SEARCH_RESULTS_FILE" ]; then
        echo "Search results file exists"
    else
        echo "Search results file missing"
    fi
    exit 0
fi
|
||||
# Parse the search results JSON and find the matching item.
# NOTE(review): the jq branch matches on the bare title while the python
# fallback matches on the "title (year) [status] - genres" display line —
# verify which format fzf actually passes in $SELECTED_ITEM; the two
# branches cannot both be right.
if command -v jq >/dev/null 2>&1; then
    MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | jq --arg anime_title "$SELECTED_ITEM" '
    .data.Page.media[]? |
    select((.title.english // .title.romaji // .title.native // "Unknown") == $anime_title )
    ' )
else
    # Fallback to Python for JSON parsing
    # NOTE(review): the unescaped double quotes in the f-string DEBUG
    # prints and the '''$SELECTED_ITEM''' interpolation look fragile
    # inside this double-quoted -c string — confirm against the
    # generator template before relying on this branch.
    MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)
    selected_item = '''$SELECTED_ITEM'''

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        sys.exit(1)

    media_list = data['data']['Page']['media']

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'
        display_format = f'{english_title} ({year}) [{status}] - {genres}'
        # Debug output for matching
        print(f\"DEBUG: selected_item='{selected_item.strip()}' display_format='{display_format.strip()}'\", file=sys.stderr)
        if selected_item.strip() == display_format.strip():
            json.dump(media, sys.stdout, indent=2)
            sys.exit(0)
    print(f\"DEBUG: No match found for selected_item='{selected_item.strip()}'\", file=sys.stderr)
    sys.exit(1)
except Exception as e:
    print(f'Error: {e}', file=sys.stderr)
    sys.exit(1)
" 2>/dev/null)
fi
|
||||
|
||||
# If we couldn't find the media data, show error.
# NOTE(review): "$?" here reflects the last command of the preceding
# if/else body (the MEDIA_DATA assignment); the -z "$MEDIA_DATA" test is
# the robust guard if commands are ever added in between.
if [ $? -ne 0 ] || [ -z "$MEDIA_DATA" ]; then
    echo "${C_TITLE}Preview Error${RESET}"
    draw_rule
    echo "Could not load preview data for:"
    echo "$SELECTED_ITEM"
    echo
    echo "DEBUG INFO:"
    echo "Search results file: $SEARCH_RESULTS_FILE"
    if [ -f "$SEARCH_RESULTS_FILE" ]; then
        echo "File exists, size: $(wc -c < "$SEARCH_RESULTS_FILE") bytes"
        echo "First few lines of search results:"
        head -3 "$SEARCH_RESULTS_FILE" 2>/dev/null || echo "Cannot read file"
    else
        echo "Search results file does not exist"
    fi
    exit 0
fi
|
||||
|
||||
# Extract information from the media data into the display variables.
# Every extraction is defensive: parser failure degrades to a sensible
# placeholder instead of aborting the preview.
if command -v jq >/dev/null 2>&1; then
    # Use jq for faster extraction
    TITLE=$(echo "$MEDIA_DATA" | jq -r '.title.english // .title.romaji // .title.native // "Unknown"' 2>/dev/null || echo "Unknown")
    STATUS=$(echo "$MEDIA_DATA" | jq -r '.status // "Unknown"' 2>/dev/null || echo "Unknown")
    FORMAT=$(echo "$MEDIA_DATA" | jq -r '.format // "Unknown"' 2>/dev/null || echo "Unknown")
    EPISODES=$(echo "$MEDIA_DATA" | jq -r '.episodes // "Unknown"' 2>/dev/null || echo "Unknown")
    DURATION=$(echo "$MEDIA_DATA" | jq -r 'if .duration then "\(.duration) min" else "Unknown" end' 2>/dev/null || echo "Unknown")
    SCORE=$(echo "$MEDIA_DATA" | jq -r 'if .averageScore then "\(.averageScore)/100" else "N/A" end' 2>/dev/null || echo "N/A")
    # NOTE(review): the thousands-separator sed uses GNU extensions
    # (\B and \>); on BSD sed the number passes through unformatted.
    FAVOURITES=$(echo "$MEDIA_DATA" | jq -r '.favourites // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
    POPULARITY=$(echo "$MEDIA_DATA" | jq -r '.popularity // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
    GENRES=$(echo "$MEDIA_DATA" | jq -r '(.genres[:5] // []) | join(", ") | if . == "" then "Unknown" else . end' 2>/dev/null || echo "Unknown")
    DESCRIPTION=$(echo "$MEDIA_DATA" | jq -r '.description // "No description available."' 2>/dev/null || echo "No description available.")

    # Get start and end dates as JSON objects (formatted later by format_date)
    START_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.startDate' 2>/dev/null || echo "null")
    END_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.endDate' 2>/dev/null || echo "null")

    # Get cover image URL
    COVER_IMAGE=$(echo "$MEDIA_DATA" | jq -r '.coverImage.large // ""' 2>/dev/null || echo "")
else
    # Fallback to Python for extraction (one interpreter start per field —
    # slower than jq but dependency-free)
    TITLE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); title=data.get('title',{}); print(title.get('english') or title.get('romaji') or title.get('native', 'Unknown'))" 2>/dev/null || echo "Unknown")
    STATUS=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('status', 'Unknown'))" 2>/dev/null || echo "Unknown")
    FORMAT=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('format', 'Unknown'))" 2>/dev/null || echo "Unknown")
    EPISODES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('episodes', 'Unknown'))" 2>/dev/null || echo "Unknown")
    DURATION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); duration=data.get('duration'); print(f'{duration} min' if duration else 'Unknown')" 2>/dev/null || echo "Unknown")
    SCORE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); score=data.get('averageScore'); print(f'{score}/100' if score else 'N/A')" 2>/dev/null || echo "N/A")
    FAVOURITES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('favourites', 0):,}\")" 2>/dev/null || echo "0")
    POPULARITY=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('popularity', 0):,}\")" 2>/dev/null || echo "0")
    GENRES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(', '.join(data.get('genres', [])[:5]))" 2>/dev/null || echo "Unknown")
    DESCRIPTION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('description', 'No description available.'))" 2>/dev/null || echo "No description available.")

    # Get start and end dates
    START_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('startDate'), sys.stdout)" 2>/dev/null || echo "null")
    END_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('endDate'), sys.stdout)" 2>/dev/null || echo "null")

    # Get cover image URL
    COVER_IMAGE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); cover=data.get('coverImage',{}); print(cover.get('large', ''))" 2>/dev/null || echo "")
fi
|
||||
|
||||
# Format the dates
START_DATE=$(format_date "$START_DATE_OBJ")
END_DATE=$(format_date "$END_DATE_OBJ")

# Generate the same cache key that the Python worker uses (keyed on the
# raw fzf selection, like the regular preview)
CACHE_HASH=$(generate_sha256 "$SELECTED_ITEM")

# Try to show image if available
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
    image_file="{IMAGE_CACHE_PATH}{PATH_SEP}${CACHE_HASH}.png"

    # If image not cached and we have a URL, try to download it quickly.
    # The 3-second cap keeps the preview pane responsive; a failed
    # download is deleted so it gets retried on the next render.
    if [ ! -f "$image_file" ] && [ -n "$COVER_IMAGE" ]; then
        if command -v curl >/dev/null 2>&1; then
            curl -s -m 3 -L "$COVER_IMAGE" -o "$image_file" 2>/dev/null || rm -f "$image_file" 2>/dev/null
        fi
    fi

    if [ -f "$image_file" ]; then
        fzf_preview "$image_file"
    else
        echo "🖼️ Loading image..."
    fi
    echo
fi
|
||||
|
||||
# Display text info if configured: title header, then grouped stat rows
# separated by rules, then the tag-stripped synopsis.
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    draw_rule
    print_kv "Title" "$TITLE"
    draw_rule

    print_kv "Score" "$SCORE"
    print_kv "Favourites" "$FAVOURITES"
    print_kv "Popularity" "$POPULARITY"
    print_kv "Status" "$STATUS"

    draw_rule

    print_kv "Episodes" "$EPISODES"
    print_kv "Duration" "$DURATION"
    print_kv "Format" "$FORMAT"

    draw_rule

    print_kv "Genres" "$GENRES"
    print_kv "Start Date" "$START_DATE"
    print_kv "End Date" "$END_DATE"

    draw_rule

    # Clean and display description, wrapped to the preview width
    CLEAN_DESCRIPTION=$(clean_html "$DESCRIPTION")
    echo "$CLEAN_DESCRIPTION" | fold -s -w "$WIDTH"
fi
|
||||
@@ -1,31 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Episode Preview Info Script Template
|
||||
# This script formats and displays episode information in the FZF preview pane.
|
||||
# Some values are injected by python those with '{name}' syntax using .replace()
|
||||
|
||||
# Episode info card: wrapped title, then paired stat rows separated by
# rules.  Helpers (draw_rule, print_kv, WIDTH) come from the preview
# template this snippet is sourced into.
draw_rule

echo "{TITLE}" | fold -s -w "$WIDTH"

draw_rule

print_kv "Duration" "{DURATION}"
print_kv "Status" "{STATUS}"

draw_rule

print_kv "Total Episodes" "{EPISODES}"
print_kv "Next Episode" "{NEXT_EPISODE}"

draw_rule

print_kv "Progress" "{USER_PROGRESS}"
print_kv "List Status" "{USER_STATUS}"

draw_rule

print_kv "Start Date" "{START_DATE}"
print_kv "End Date" "{END_DATE}"

draw_rule
|
||||
@@ -1,54 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Viu Preview Info Script Template
|
||||
# This script formats and displays the textual information in the FZF preview pane.
|
||||
# Some values are injected by python those with '{name}' syntax using .replace()
|
||||
|
||||
# Full anime info card: title, ratings, schedule, genres, list status,
# dates, credits, then the synopsis.  Helpers (draw_rule, print_kv,
# WIDTH) come from the preview template this snippet is sourced into.
draw_rule

print_kv "Title" "{TITLE}"

draw_rule

# Emojis take up double the space, so the score's printable width is
# doubled for alignment — except for the plain "N/A" placeholder.
score_multiplier=1
if ! [ "{SCORE}" = "N/A" ]; then
    score_multiplier=2
fi
print_kv "Score" "{SCORE}" $score_multiplier

print_kv "Favourites" "{FAVOURITES}"
print_kv "Popularity" "{POPULARITY}"
print_kv "Status" "{STATUS}"

draw_rule

print_kv "Episodes" "{EPISODES}"
print_kv "Next Episode" "{NEXT_EPISODE}"
print_kv "Duration" "{DURATION}"

draw_rule

print_kv "Genres" "{GENRES}"
print_kv "Format" "{FORMAT}"

draw_rule

print_kv "List Status" "{USER_STATUS}"
print_kv "Progress" "{USER_PROGRESS}"

draw_rule

print_kv "Start Date" "{START_DATE}"
print_kv "End Date" "{END_DATE}"

draw_rule

print_kv "Studios" "{STUDIOS}"
print_kv "Synonymns" "{SYNONYMNS}"
print_kv "Tags" "{TAGS}"

draw_rule

# Synopsis, wrapped to the preview width
echo "{SYNOPSIS}" | fold -s -w "$WIDTH"
|
||||
@@ -1,147 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# FZF Preview Script Template
|
||||
#
|
||||
# This script is a template. The placeholders in curly braces, like {NAME}
|
||||
# are dynamically filled by python using .replace()
|
||||
|
||||
WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
|
||||
IMAGE_RENDERER="{IMAGE_RENDERER}"
|
||||
|
||||
# generate_sha256 [INPUT]
# Print the SHA-256 hex digest of INPUT (first argument) or, when no
# argument is given, of the data read from stdin.  Tries hashing tools in
# order of likelihood and falls back to a filename-safe base64 encoding,
# so the result is always usable as a cache key.
generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    # NOTE: "&>/dev/null" is a bashism; this file is #!/bin/sh, where it
    # backgrounds the command (making the test always succeed).
    if command -v sha256sum >/dev/null 2>&1; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 >/dev/null 2>&1; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        # openssl prints "SHA2-256(stdin)= <hash>"; take the hash field.
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        # Last resort: deterministic, filesystem-safe key (not a hash).
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}
|
||||
|
||||
# fzf_preview FILE
# Render image FILE inside the fzf preview pane, picking the best
# available renderer.  {SCALE_UP} is replaced by python with the icat
# --scale-up flag (or nothing) depending on user config.
fzf_preview() {
    file=$1

    # Preview pane size as COLSxLINES; fall back to the tty size when
    # fzf did not export the preview dimensions.
    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    # When the preview pane touches the bottom of the screen (and we are
    # not using kitty), drop one line so rendering cannot scroll the pane.
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    # icat renderer outside Ghostty: try kitten, standalone icat, then the
    # kitty wrapper.  The sed pipeline strips the last line and appends a
    # terminal attribute reset so fzf's layout is not disturbed.
    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi

    # Ghostty also speaks the kitty graphics protocol; the width is
    # reduced by one column here (unlike the other branches).
    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        dim=$((FZF_PREVIEW_COLUMNS - 1))x${FZF_PREVIEW_LINES}
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    # chafa: sixel output on Windows terminals, symbols elsewhere.
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo

    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}
|
||||
|
||||
|
||||
# --- Helper function for printing a key-value pair, aligning the value to the right ---
# print_kv KEY VALUE [MULTIPLIER]
# MULTIPLIER (default 1) scales the value's display width, e.g. 2 when
# the value contains emoji that occupy two terminal cells.
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, wrap the value and keep a single
    # space for separation.  (The if/else previously duplicated the same
    # printf; a single call after the adjustment is equivalent.)
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo "$value" | fold -s -w "$((WIDTH - key_len - 3))")
    fi
    printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
}
|
||||
|
||||
# --- Draw a rule across the screen ---
# TODO: figure out why this method does not work in fzf
draw_rule() {
    local rule
    # Generate the line of '─' characters, removing the trailing newline `tr` adds.
    rule=$(printf '%*s' "$WIDTH" | tr ' ' '─' | tr -d '\n')
    # Print the rule with colors and a single, clean newline.
    printf "{C_RULE}%s{RESET}\\n" "$rule"
}


# NOTE(review): this second definition overrides the printf-based one
# above (the last definition wins in shell), so the loop variant is the
# one actually in effect until the TODO above is resolved.
draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}
|
||||
|
||||
# Generate the same cache key that the Python worker uses
# {PREFIX} is used only on episode previews to make sure they are unique;
# "{}" is fzf's placeholder for the current selection (quoted by fzf).
title={}
hash=$(generate_sha256 "{PREFIX}$title")

#
# --- Display image if configured and the cached file exists ---
#
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
    image_file="{IMAGE_CACHE_PATH}{PATH_SEP}$hash.png"
    if [ -f "$image_file" ]; then
        fzf_preview "$image_file"
    else
        echo "🖼️ Loading image..."
    fi
    echo # Add a newline for spacing
fi

# Display text info if configured and the cached file exists
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_PATH}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        # POSIX "." instead of bash-only "source" (this file is #!/bin/sh);
        # the cached info file is a shell snippet that prints the details.
        . "$info_file"
    else
        echo "📝 Loading details..."
    fi
fi
|
||||
@@ -1,19 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# Viu Review Info Script Template
|
||||
# This script formats and displays review details in the FZF preview pane.
|
||||
# Python injects the actual data values into the placeholders.
|
||||
|
||||
# Review card: reviewer, one-line summary, then the wrapped review body.
# Helpers (draw_rule, print_kv, WIDTH) come from the preview template
# this snippet is sourced into.
draw_rule

print_kv "Review By" "{REVIEWER_NAME}"

draw_rule

print_kv "Summary" "{REVIEW_SUMMARY}"

draw_rule

echo "{REVIEW_BODY}" | fold -s -w "$WIDTH"

draw_rule
|
||||
@@ -1,75 +0,0 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# FZF Preview Script Template
|
||||
#
|
||||
# This script is a template. The placeholders in curly braces, like {NAME}
|
||||
# are dynamically filled by python using .replace()
|
||||
|
||||
WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
|
||||
IMAGE_RENDERER="{IMAGE_RENDERER}"
|
||||
|
||||
# generate_sha256 [INPUT]
# Print the SHA-256 hex digest of INPUT (first argument) or, when no
# argument is given, of the data read from stdin.  Tries hashing tools in
# order of likelihood and falls back to a filename-safe base64 encoding,
# so the result is always usable as a cache key.
generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    # NOTE: "&>/dev/null" is a bashism; this file is #!/bin/sh, where it
    # backgrounds the command (making the test always succeed).
    if command -v sha256sum >/dev/null 2>&1; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum >/dev/null 2>&1; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 >/dev/null 2>&1; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl >/dev/null 2>&1; then
        # openssl prints "SHA2-256(stdin)= <hash>"; take the hash field.
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        # Last resort: deterministic, filesystem-safe key (not a hash).
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}
|
||||
|
||||
|
||||
# print_kv KEY VALUE [MULTIPLIER]
# Print "KEY: VALUE" with VALUE right-aligned to the preview width.
# MULTIPLIER (default 1) scales the value's display width, e.g. 2 when
# the value contains emoji that occupy two terminal cells.
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, wrap the value and keep a single
    # space for separation.  ("$value" is quoted so spaces and glob
    # characters in titles survive the echo — was unquoted before.)
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo "$value" | fold -s -w "$((WIDTH - key_len - 3))")
    fi
    printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
}
|
||||
|
||||
|
||||
# draw_rule
# Draw a horizontal rule across the preview pane, one cell short of the
# full width so the line never wraps.
draw_rule() {
    # Fall back to 80 columns when fzf does not export the preview size
    # (previously "[ $ll -le $FZF_PREVIEW_COLUMNS ]" errored when unset).
    local cols="${FZF_PREVIEW_COLUMNS:-80}"
    local i=2
    while [ "$i" -le "$cols" ]; do
        # printf is POSIX; "echo -n -e" and "((i++))" are not.
        printf '{C_RULE}─{RESET}'
        i=$((i + 1))
    done
    echo
}
|
||||
|
||||
# Cache key for the selected row; "{}" is fzf's placeholder for the
# current selection (quoted by fzf itself).
title={}
hash=$(generate_sha256 "$title")

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        # POSIX "." instead of bash-only "source" (this file is #!/bin/sh);
        # the cached info file is a shell snippet that prints the details.
        . "$info_file"
    else
        echo "📝 Loading details..."
    fi
fi
|
||||
@@ -1,118 +0,0 @@
|
||||
#!/bin/bash
#
# FZF Dynamic Search Script Template
#
# This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {QUERY} are dynamically filled by Python using .replace()

# Configuration variables (injected by Python)
GRAPHQL_ENDPOINT="{GRAPHQL_ENDPOINT}"
CACHE_DIR="{CACHE_DIR}"
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
AUTH_HEADER="{AUTH_HEADER}"

# Get the current query from fzf
QUERY="{{q}}"

# If query is empty, exit with empty results
if [ -z "$QUERY" ]; then
    echo ""
    exit 0
fi

# FIX: escape backslashes and double quotes so a query containing either
# cannot break — or inject fields into — the JSON document built below.
QUERY_JSON=$(printf '%s' "$QUERY" | sed -e 's/\\/\\\\/g' -e 's/"/\\"/g')

# Create GraphQL variables
VARIABLES=$(cat <<EOF
{
    "query": "$QUERY_JSON",
    "type": "ANIME",
    "per_page": 50,
    "genre_not_in": ["Hentai"]
}
EOF
)

# The GraphQL query is injected here as a properly escaped string
GRAPHQL_QUERY='{GRAPHQL_QUERY}'

# Create the GraphQL request payload
PAYLOAD=$(cat <<EOF
{
    "query": $GRAPHQL_QUERY,
    "variables": $VARIABLES
}
EOF
)

# Make the GraphQL request and save raw results
if [ -n "$AUTH_HEADER" ]; then
    RESPONSE=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -H "Authorization: $AUTH_HEADER" \
        -d "$PAYLOAD" \
        "$GRAPHQL_ENDPOINT")
else
    RESPONSE=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$PAYLOAD" \
        "$GRAPHQL_ENDPOINT")
fi

# Check if the request was successful ($? here is curl's exit status)
if [ $? -ne 0 ] || [ -z "$RESPONSE" ]; then
    echo "❌ Search failed"
    exit 1
fi

# Save the raw response for later processing
echo "$RESPONSE" > "$SEARCH_RESULTS_FILE"

# Parse and display results
if command -v jq >/dev/null 2>&1; then
    # Use jq for faster and more reliable JSON parsing
    echo "$RESPONSE" | jq -r '
    if .errors then
        "❌ Search error: " + (.errors | tostring)
    elif (.data.Page.media // []) | length == 0 then
        "❌ No results found"
    else
        .data.Page.media[] | (.title.english // .title.romaji // .title.native // "Unknown")
    end
    ' 2>/dev/null || echo "❌ Parse error"
else
    # Fallback to Python for JSON parsing
    echo "$RESPONSE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)

    if 'errors' in data:
        print('❌ Search error: ' + str(data['errors']))
        sys.exit(1)

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        print('❌ No results found')
        sys.exit(0)

    media_list = data['data']['Page']['media']

    if not media_list:
        print('❌ No results found')
        sys.exit(0)

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'

        # Format: Title (Year) [Status] - Genres
        print(f'{english_title} ({year}) [{status}] - {genres}')

except Exception as e:
    print(f'❌ Parse error: {str(e)}')
    sys.exit(1)
"
fi
|
||||
@@ -1,114 +0,0 @@
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
from click.core import ParameterSource
|
||||
|
||||
from ..core.config import AppConfig
|
||||
from ..core.constants import PROJECT_NAME, USER_CONFIG, __version__
|
||||
from .config import ConfigLoader
|
||||
from .options import options_from_model
|
||||
from .utils.exception import setup_exceptions_handler
|
||||
from .utils.lazyloader import LazyGroup
|
||||
from .utils.logging import setup_logging
|
||||
|
||||
if TYPE_CHECKING:
    from typing import TypedDict

    from typing_extensions import Unpack

    class Options(TypedDict):
        """Static shape of the global CLI flags received by ``cli``."""

        no_config: bool | None
        trace: bool | None
        dev: bool | None
        log: bool | None
        rich_traceback: bool | None
        rich_traceback_theme: str
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)

# Maps sub-command name -> "module.attribute" path (relative to the LazyGroup
# root package) so sub-commands are imported lazily, keeping startup fast.
commands = {
    "config": "config.config",
    "search": "search.search",
    "anilist": "anilist.anilist",
    "download": "download.download",
    "update": "update.update",
    "registry": "registry.registry",
    "worker": "worker.worker",
    "queue": "queue.queue",
    "completions": "completions.completions",
}
|
||||
|
||||
|
||||
@click.group(
    cls=LazyGroup,
    root="viu_cli.cli.commands",
    invoke_without_command=True,
    lazy_subcommands=commands,
    context_settings=dict(auto_envvar_prefix=PROJECT_NAME),
)
@click.version_option(__version__, "--version")
@click.option("--no-config", is_flag=True, help="Don't load the user config file.")
@click.option(
    "--trace", is_flag=True, help="Controls Whether to display tracebacks or not"
)
@click.option("--dev", is_flag=True, help="Controls Whether the app is in dev mode")
@click.option("--log", is_flag=True, help="Controls Whether to log")
@click.option(
    "--rich-traceback",
    is_flag=True,
    help="Controls Whether to display a rich traceback",
)
@click.option(
    "--rich-traceback-theme",
    default="github-dark",
    # FIX: help text was a copy-paste of --rich-traceback's description.
    help="The syntax highlighting theme used for the rich traceback",
)
@options_from_model(AppConfig)
@click.pass_context
def cli(ctx: click.Context, **options: "Unpack[Options]"):
    """
    The main entry point for the Viu CLI.

    Sets up logging and exception handling, collects config overrides the
    user explicitly provided (CLI flags or environment variables), builds
    the effective AppConfig, and falls through to the default ``anilist``
    sub-command when none was given.
    """
    setup_logging(options["log"])
    setup_exceptions_handler(
        options["trace"],
        options["dev"],
        options["rich_traceback"],
        options["rich_traceback_theme"],
    )

    logger.info(f"Current Command: {' '.join(sys.argv)}")

    # Only options the user explicitly set may override the config file;
    # click defaults are deliberately excluded.
    cli_overrides = {}
    param_lookup = {p.name: p for p in ctx.command.params}

    for param_name, param_value in ctx.params.items():
        source = ctx.get_parameter_source(param_name)
        if source in (ParameterSource.ENVIRONMENT, ParameterSource.COMMANDLINE):
            parameter = param_lookup.get(param_name)

            # Options generated by options_from_model() carry the config
            # section (model_name) and the field they map back onto.
            if (
                parameter
                and hasattr(parameter, "model_name")
                and hasattr(parameter, "field_name")
            ):
                model_name = getattr(parameter, "model_name")
                field_name = getattr(parameter, "field_name")
                # setdefault replaces the manual "if not in dict" dance.
                cli_overrides.setdefault(model_name, {})[field_name] = param_value

    loader = ConfigLoader(config_path=USER_CONFIG)
    config = (
        # --no-config: build config purely from defaults + explicit overrides.
        AppConfig.model_validate(cli_overrides)
        if options["no_config"]
        else loader.load(cli_overrides)
    )
    ctx.obj = config

    # No sub-command given: launch the interactive anilist UI.
    if ctx.invoked_subcommand is None:
        from .commands.anilist import cmd

        ctx.invoke(cmd.anilist)
|
||||
@@ -1,160 +0,0 @@
|
||||
"""Update command for Viu CLI."""
|
||||
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
from rich import print
|
||||
from rich.console import Console
|
||||
from rich.markdown import Markdown
|
||||
|
||||
from ..utils.update import check_for_updates, update_app
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...core.config import AppConfig
|
||||
|
||||
|
||||
@click.command(
    help="Update Viu to the latest version",
    short_help="Update Viu",
    epilog="""
\b
\b\bExamples:
# Check for updates and update if available
viu update
\b
# Force update even if already up to date
viu update --force
\b
# Only check for updates without updating
viu update --check-only
\b
# Show release notes for the latest version
viu update --release-notes
""",
)
@click.option(
    "--force",
    "-f",
    is_flag=True,
    help="Force update even if already up to date",
)
@click.option(
    "--check-only",
    "-c",
    is_flag=True,
    help="Only check for updates without updating",
)
@click.option(
    "--release-notes",
    "-r",
    is_flag=True,
    help="Show release notes for the latest version",
)
# NOTE: decorator order matters here — @click.pass_obj is applied innermost,
# so ctx.obj (the AppConfig) is prepended first and the Context second,
# matching the (config, ctx, ...) parameter order below.
@click.pass_context
@click.pass_obj
def update(
    config: "AppConfig",
    ctx: click.Context,
    force: bool,
    check_only: bool,
    release_notes: bool,
) -> None:
    """
    Update Viu to the latest version.

    This command checks for available updates and optionally updates
    the application to the latest version from the configured sources
    (pip, uv, pipx, git, or nix depending on installation method).

    Args:
        config: The application configuration object
        ctx: The click context containing CLI options
        force: Whether to force update even if already up to date
        check_only: Whether to only check for updates without updating
        release_notes: Whether to show release notes for the latest version
    """
    try:
        if release_notes:
            # --release-notes: show the latest release's notes, then return.
            print("[cyan]Fetching latest release notes...[/]")
            is_latest, release_json = check_for_updates()

            if not release_json:
                print(
                    "[yellow]Could not fetch release information. Please check your internet connection.[/]"
                )
                sys.exit(1)

            version = release_json.get("tag_name", "unknown")
            release_name = release_json.get("name", version)
            release_body = release_json.get("body", "No release notes available.")
            published_at = release_json.get("published_at", "unknown")

            console = Console()

            print(f"[bold cyan]Release: {release_name}[/]")
            print(f"[dim]Version: {version}[/]")
            print(f"[dim]Published: {published_at}[/]")
            print()

            # Display release notes as markdown if available
            if release_body.strip():
                markdown = Markdown(release_body)
                console.print(markdown)
            else:
                print("[dim]No release notes available for this version.[/]")

            return

        elif check_only:
            # --check-only: report status; exit code 1 signals that an
            # update is available (usable from scripts).
            print("[cyan]Checking for updates...[/]")
            is_latest, release_json = check_for_updates()

            if not release_json:
                print(
                    "[yellow]Could not check for updates. Please check your internet connection.[/]"
                )
                sys.exit(1)

            if is_latest:
                print("[green]Viu is up to date![/]")
                print(
                    f"[dim]Current version: {release_json.get('tag_name', 'unknown')}[/]"
                )
            else:
                latest_version = release_json.get("tag_name", "unknown")
                print(f"[yellow]Update available: {latest_version}[/]")
                print("[dim]Run 'viu update' to update[/]")
                sys.exit(1)
        else:
            # Default path: perform the update (re-install when --force).
            print("[cyan]Checking for updates and updating if necessary...[/]")
            success, release_json = update_app(force=force)

            if not release_json:
                print(
                    "[red]Could not check for updates. Please check your internet connection.[/]"
                )
                sys.exit(1)

            if success:
                latest_version = release_json.get("tag_name", "unknown")
                print(f"[green]Successfully updated to version {latest_version}![/]")
            else:
                if force:
                    print(
                        "[red]Update failed. Please check the error messages above.[/]"
                    )
                    sys.exit(1)
                # If not forced and update failed, it might be because already up to date
                # The update_app function already prints appropriate messages

    except KeyboardInterrupt:
        print("\n[yellow]Update cancelled by user.[/]")
        sys.exit(1)
    except Exception as e:
        print(f"[red]An error occurred during update: {e}[/]")
        # Get trace option from parent context
        trace = ctx.parent.params.get("trace", False) if ctx.parent else False
        if trace:
            raise
        sys.exit(1)
|
||||
@@ -1,4 +0,0 @@
|
||||
from .generate import generate_config_ini_from_app_model
|
||||
from .loader import ConfigLoader
|
||||
|
||||
__all__ = ["ConfigLoader", "generate_config_ini_from_app_model"]
|
||||
@@ -1,85 +0,0 @@
|
||||
from enum import Enum
|
||||
from typing import Dict, Optional, Union
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
from ...libs.media_api.params import MediaSearchParams, UserMediaListSearchParams
|
||||
from ...libs.media_api.types import MediaItem, PageInfo
|
||||
from ...libs.provider.anime.types import Anime, SearchResults, Server
|
||||
|
||||
|
||||
# TODO: is internal directive a good name
|
||||
class InternalDirective(Enum):
    """Navigation signals a menu handler can return instead of a new state."""

    MAIN = "MAIN"

    BACK = "BACK"

    # BACKXn: go back n menus at once.
    BACKX2 = "BACKX2"

    BACKX3 = "BACKX3"

    BACKX4 = "BACKX4"

    EXIT = "EXIT"

    CONFIG_EDIT = "CONFIG_EDIT"

    RELOAD = "RELOAD"
|
||||
|
||||
|
||||
class MenuName(Enum):
    """Identifier for every menu/screen the interactive session can show."""

    MAIN = "MAIN"
    AUTH = "AUTH"
    EPISODES = "EPISODES"
    RESULTS = "RESULTS"
    SERVERS = "SERVERS"
    WATCH_HISTORY = "WATCH_HISTORY"
    PROVIDER_SEARCH = "PROVIDER_SEARCH"
    PLAYER_CONTROLS = "PLAYER_CONTROLS"
    USER_MEDIA_LIST = "USER_MEDIA_LIST"
    SESSION_MANAGEMENT = "SESSION_MANAGEMENT"
    MEDIA_ACTIONS = "MEDIA_ACTIONS"
    DOWNLOADS = "DOWNLOADS"
    DYNAMIC_SEARCH = "DYNAMIC_SEARCH"
    MEDIA_REVIEW = "MEDIA_REVIEW"
    MEDIA_CHARACTERS = "MEDIA_CHARACTERS"
    MEDIA_AIRING_SCHEDULE = "MEDIA_AIRING_SCHEDULE"
    PLAY_DOWNLOADS = "PLAY_DOWNLOADS"
    DOWNLOADS_PLAYER_CONTROLS = "DOWNLOADS_PLAYER_CONTROLS"
    DOWNLOAD_EPISODES = "DOWNLOAD_EPISODES"
|
||||
|
||||
|
||||
class StateModel(BaseModel):
    """Base for all state models; frozen so every state is an immutable snapshot."""

    model_config = ConfigDict(frozen=True)
|
||||
|
||||
|
||||
class MediaApiState(StateModel):
    """Immutable snapshot of the media-API side of a menu state."""

    # Mapping of media id -> item for the current page of results.
    search_result: Optional[Dict[int, MediaItem]] = None
    search_params: Optional[Union[MediaSearchParams, UserMediaListSearchParams]] = None
    page_info: Optional[PageInfo] = None
    media_id: Optional[int] = None

    @property
    def media_item(self) -> Optional[MediaItem]:
        """The currently selected media item, or ``None`` when unset.

        Uses ``dict.get`` so a ``media_id`` that is missing from
        ``search_result`` yields ``None`` instead of raising ``KeyError``
        (the previous direct indexing could crash on a stale selection).
        """
        # Explicit None checks: a media id of 0 is falsy but still valid.
        if self.search_result is not None and self.media_id is not None:
            return self.search_result.get(self.media_id)
        return None
|
||||
|
||||
|
||||
class ProviderState(StateModel):
    """Immutable snapshot of the anime-provider side of a menu state."""

    search_results: Optional[SearchResults] = None
    anime: Optional[Anime] = None
    episode: Optional[str] = None
    # Mapping of server name -> server for the current episode.
    servers: Optional[Dict[str, Server]] = None
    server_name: Optional[str] = None
    start_time: Optional[str] = None

    @property
    def server(self) -> Optional[Server]:
        """The currently selected server, or ``None`` when unset.

        Uses ``dict.get`` so a ``server_name`` missing from ``servers``
        yields ``None`` instead of raising ``KeyError`` (the previous
        direct indexing could crash on a stale selection).
        """
        if self.servers and self.server_name:
            return self.servers.get(self.server_name)
        return None
|
||||
|
||||
|
||||
class State(StateModel):
    """One entry on the interactive session's navigation stack."""

    # Which menu this state renders.
    menu_name: MenuName
    # Provider-side context (search results, selected anime/episode, servers).
    provider: ProviderState = Field(default_factory=ProviderState)
    # Media-API-side context (search results, selected media id, paging).
    media_api: MediaApiState = Field(default_factory=MediaApiState)
|
||||
@@ -1,22 +0,0 @@
|
||||
from httpx import get
|
||||
|
||||
ANISKIP_ENDPOINT = "https://api.aniskip.com/v1/skip-times"
|
||||
|
||||
|
||||
# TODO: Finish own implementation of aniskip script
|
||||
# TODO: Finish own implementation of aniskip script
class AniSkip:
    """Minimal client for the AniSkip skip-times API (v1)."""

    @classmethod
    def get_skip_times(
        cls, mal_id: int, episode_number: float | int, types=("op", "ed")
    ):
        """Fetch skip timestamps (openings/endings) for one episode.

        Args:
            mal_id: MyAnimeList id of the show.
            episode_number: Episode to look up.
            types: Skip-time categories to request.  FIX: was a mutable
                list default and, worse, was ignored — the URL hard-coded
                "types=op&types=ed" regardless of the argument.

        Returns:
            The decoded JSON response from the AniSkip API.
        """
        # Build one repeated `types=` query parameter per requested category.
        query = "&".join(f"types={t}" for t in types)
        url = f"{ANISKIP_ENDPOINT}/{mal_id}/{episode_number}?{query}"
        response = get(url)
        # (Removed leftover debug print of the raw response body.)
        return response.json()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Tiny manual smoke test: look up skip times for user-supplied ids.
    mal_id = input("Mal id: ")
    episode_number = input("episode_number: ")
    print(AniSkip.get_skip_times(int(mal_id), float(episode_number)))
|
||||
@@ -1,3 +0,0 @@
|
||||
from .api import connect
|
||||
|
||||
__all__ = ["connect"]
|
||||
@@ -1,13 +0,0 @@
|
||||
import time
|
||||
|
||||
from pypresence import Presence
|
||||
|
||||
|
||||
def connect(show, episode, switch):
    """Publish a Discord Rich Presence "watching" status for *show*.

    Connects to Discord, and unless *switch* is already set (presumably a
    threading.Event used as a stop signal — confirm with the caller),
    publishes the episode being watched and holds it for ten seconds;
    otherwise the connection is closed immediately.
    """
    rpc = Presence(client_id="1292070065583165512")
    rpc.connect()
    if switch.is_set():
        rpc.close()
    else:
        rpc.update(details=show, state="Watching episode " + episode)
        time.sleep(10)
|
||||
@@ -1,873 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
|
||||
|
||||
# ENUMS
|
||||
class MediaStatus(Enum):
|
||||
FINISHED = "FINISHED"
|
||||
RELEASING = "RELEASING"
|
||||
NOT_YET_RELEASED = "NOT_YET_RELEASED"
|
||||
CANCELLED = "CANCELLED"
|
||||
HIATUS = "HIATUS"
|
||||
|
||||
|
||||
class MediaType(Enum):
|
||||
ANIME = "ANIME"
|
||||
MANGA = "MANGA"
|
||||
|
||||
|
||||
class UserMediaListStatus(Enum):
|
||||
PLANNING = "planning"
|
||||
WATCHING = "watching"
|
||||
COMPLETED = "completed"
|
||||
DROPPED = "dropped"
|
||||
PAUSED = "paused"
|
||||
REPEATING = "repeating"
|
||||
|
||||
|
||||
class MediaGenre(Enum):
|
||||
ACTION = "Action"
|
||||
ADVENTURE = "Adventure"
|
||||
COMEDY = "Comedy"
|
||||
DRAMA = "Drama"
|
||||
ECCHI = "Ecchi"
|
||||
FANTASY = "Fantasy"
|
||||
HORROR = "Horror"
|
||||
MAHOU_SHOUJO = "Mahou Shoujo"
|
||||
MECHA = "Mecha"
|
||||
MUSIC = "Music"
|
||||
MYSTERY = "Mystery"
|
||||
PSYCHOLOGICAL = "Psychological"
|
||||
ROMANCE = "Romance"
|
||||
SCI_FI = "Sci-Fi"
|
||||
SLICE_OF_LIFE = "Slice of Life"
|
||||
SPORTS = "Sports"
|
||||
SUPERNATURAL = "Supernatural"
|
||||
THRILLER = "Thriller"
|
||||
HENTAI = "Hentai"
|
||||
|
||||
|
||||
class MediaFormat(Enum):
|
||||
TV = "TV"
|
||||
TV_SHORT = "TV_SHORT"
|
||||
MOVIE = "MOVIE"
|
||||
MANGA = "MANGA"
|
||||
SPECIAL = "SPECIAL"
|
||||
OVA = "OVA"
|
||||
ONA = "ONA"
|
||||
MUSIC = "MUSIC"
|
||||
NOVEL = "NOVEL"
|
||||
ONE_SHOT = "ONE_SHOT"
|
||||
|
||||
|
||||
class NotificationType(Enum):
|
||||
AIRING = "AIRING"
|
||||
RELATED_MEDIA_ADDITION = "RELATED_MEDIA_ADDITION"
|
||||
MEDIA_DATA_CHANGE = "MEDIA_DATA_CHANGE"
|
||||
# ... add other types as needed
|
||||
|
||||
|
||||
# MODELS
|
||||
class BaseMediaApiModel(BaseModel):
|
||||
model_config = ConfigDict(frozen=True)
|
||||
|
||||
|
||||
class MediaImage(BaseMediaApiModel):
|
||||
"""A generic representation of media imagery URLs."""
|
||||
|
||||
large: str
|
||||
medium: Optional[str] = None
|
||||
extra_large: Optional[str] = None
|
||||
|
||||
|
||||
class MediaTitle(BaseMediaApiModel):
    """A generic representation of media titles."""

    # NOTE(review): `english` is required while romaji/native are optional;
    # upstream APIs can return a null English title — confirm the mapper
    # always backfills this field before validation.
    english: str
    romaji: Optional[str] = None
    native: Optional[str] = None
|
||||
|
||||
|
||||
class MediaTrailer(BaseMediaApiModel):
|
||||
"""A generic representation of a media trailer."""
|
||||
|
||||
id: str
|
||||
site: str # e.g., "youtube"
|
||||
thumbnail_url: Optional[str] = None
|
||||
|
||||
|
||||
class AiringSchedule(BaseMediaApiModel):
|
||||
"""A generic representation of the next airing episode."""
|
||||
|
||||
episode: int
|
||||
airing_at: Optional[datetime] = None
|
||||
|
||||
|
||||
class CharacterName(BaseMediaApiModel):
|
||||
"""A generic representation of a character's name."""
|
||||
|
||||
first: Optional[str] = None
|
||||
middle: Optional[str] = None
|
||||
last: Optional[str] = None
|
||||
full: Optional[str] = None
|
||||
native: Optional[str] = None
|
||||
|
||||
|
||||
class CharacterImage(BaseMediaApiModel):
|
||||
"""A generic representation of a character's image."""
|
||||
|
||||
medium: Optional[str] = None
|
||||
large: Optional[str] = None
|
||||
|
||||
|
||||
class Character(BaseMediaApiModel):
|
||||
"""A generic representation of an anime character."""
|
||||
|
||||
id: Optional[int] = None
|
||||
name: CharacterName
|
||||
image: Optional[CharacterImage] = None
|
||||
description: Optional[str] = None
|
||||
gender: Optional[str] = None
|
||||
age: Optional[str] = None
|
||||
blood_type: Optional[str] = None
|
||||
favourites: Optional[int] = None
|
||||
date_of_birth: Optional[datetime] = None
|
||||
|
||||
|
||||
class AiringScheduleItem(BaseMediaApiModel):
|
||||
"""A generic representation of an airing schedule item."""
|
||||
|
||||
episode: int
|
||||
airing_at: Optional[datetime] = None
|
||||
time_until_airing: Optional[int] = None # In seconds
|
||||
|
||||
|
||||
class CharacterSearchResult(BaseMediaApiModel):
|
||||
"""A generic representation of character search results."""
|
||||
|
||||
characters: List[Character] = Field(default_factory=list)
|
||||
page_info: Optional[PageInfo] = None
|
||||
|
||||
|
||||
class AiringScheduleResult(BaseMediaApiModel):
|
||||
"""A generic representation of airing schedule results."""
|
||||
|
||||
schedule_items: List[AiringScheduleItem] = Field(default_factory=list)
|
||||
page_info: Optional[PageInfo] = None
|
||||
|
||||
|
||||
class Studio(BaseMediaApiModel):
|
||||
"""A generic representation of an animation studio."""
|
||||
|
||||
id: Optional[int] = None
|
||||
name: Optional[str] = None
|
||||
favourites: Optional[int] = None
|
||||
is_animation_studio: Optional[bool] = None
|
||||
|
||||
|
||||
class MediaTagItem(BaseMediaApiModel):
|
||||
"""A generic representation of a descriptive tag."""
|
||||
|
||||
name: MediaTag
|
||||
rank: Optional[int] = None # Percentage relevance from 0-100
|
||||
|
||||
|
||||
class StreamingEpisode(BaseMediaApiModel):
|
||||
"""A generic representation of a streaming episode."""
|
||||
|
||||
title: str
|
||||
thumbnail: Optional[str] = None
|
||||
|
||||
|
||||
class UserListItem(BaseMediaApiModel):
|
||||
"""Generic representation of a user's list status for a media item."""
|
||||
|
||||
id: Optional[int] = None
|
||||
status: Optional[UserMediaListStatus] = None
|
||||
progress: Optional[int] = None
|
||||
score: Optional[float] = None
|
||||
repeat: Optional[int] = None
|
||||
notes: Optional[str] = None
|
||||
start_date: Optional[datetime] = None
|
||||
completed_at: Optional[datetime] = None
|
||||
created_at: Optional[str] = None
|
||||
|
||||
|
||||
class MediaItem(BaseMediaApiModel):
    """Provider-agnostic representation of a single anime/manga entry."""

    id: int
    title: MediaTitle
    id_mal: Optional[int] = None  # MyAnimeList id, when known
    type: MediaType = MediaType.ANIME
    status: MediaStatus = MediaStatus.FINISHED
    format: Optional[MediaFormat] = MediaFormat.TV

    cover_image: Optional[MediaImage] = None
    banner_image: Optional[str] = None
    trailer: Optional[MediaTrailer] = None

    description: Optional[str] = None
    episodes: Optional[int] = None
    duration: Optional[int] = None  # In minutes
    genres: List[MediaGenre] = Field(default_factory=list)
    tags: List[MediaTagItem] = Field(default_factory=list)
    studios: List[Studio] = Field(default_factory=list)
    # NOTE(review): field name is misspelled ("synonymns" -> "synonyms"), but
    # renaming it would break callers and serialized data; keep as-is.
    synonymns: List[str] = Field(default_factory=list)

    average_score: Optional[float] = None
    popularity: Optional[int] = None
    favourites: Optional[int] = None

    start_date: Optional[datetime] = None
    end_date: Optional[datetime] = None

    next_airing: Optional[AiringSchedule] = None

    # streaming episodes (dict key semantics not shown here — presumably the
    # episode title/number; confirm against the provider mapping code)
    streaming_episodes: Dict[str, StreamingEpisode] = Field(default_factory=dict)

    # user related
    user_status: Optional[UserListItem] = None
|
||||
|
||||
|
||||
class Notification(BaseMediaApiModel):
|
||||
"""A generic representation of a user notification."""
|
||||
|
||||
id: int
|
||||
type: NotificationType
|
||||
episode: Optional[int] = None
|
||||
contexts: List[str] = Field(default_factory=list)
|
||||
created_at: datetime
|
||||
media: MediaItem
|
||||
|
||||
|
||||
class PageInfo(BaseMediaApiModel):
|
||||
"""Generic pagination information."""
|
||||
|
||||
total: int = 1
|
||||
current_page: int = 1
|
||||
has_next_page: bool = False
|
||||
per_page: int = 15
|
||||
|
||||
|
||||
class MediaSearchResult(BaseMediaApiModel):
|
||||
"""A generic representation of a page of media search results."""
|
||||
|
||||
page_info: PageInfo
|
||||
media: List[MediaItem] = Field(default_factory=list)
|
||||
|
||||
|
||||
class UserProfile(BaseMediaApiModel):
|
||||
"""A generic representation of a user's profile."""
|
||||
|
||||
id: int
|
||||
name: str
|
||||
avatar_url: Optional[str] = None
|
||||
banner_url: Optional[str] = None
|
||||
|
||||
|
||||
class Reviewer(BaseMediaApiModel):
|
||||
"""A generic representation of a user who wrote a review."""
|
||||
|
||||
name: str
|
||||
avatar_url: Optional[str] = None
|
||||
|
||||
|
||||
class MediaReview(BaseMediaApiModel):
|
||||
"""A generic representation of a media review."""
|
||||
|
||||
summary: Optional[str] = None
|
||||
body: str
|
||||
user: Reviewer
|
||||
|
||||
|
||||
# ENUMS
|
||||
|
||||
|
||||
class MediaTag(Enum):
|
||||
# Cast
|
||||
POLYAMOROUS = "Polyamorous"
|
||||
|
||||
# Cast Main Cast
|
||||
ANTI_HERO = "Anti-Hero"
|
||||
ELDERLY_PROTAGONIST = "Elderly Protagonist"
|
||||
ENSEMBLE_CAST = "Ensemble Cast"
|
||||
ESTRANGED_FAMILY = "Estranged Family"
|
||||
FEMALE_PROTAGONIST = "Female Protagonist"
|
||||
MALE_PROTAGONIST = "Male Protagonist"
|
||||
PRIMARILY_ADULT_CAST = "Primarily Adult Cast"
|
||||
PRIMARILY_ANIMAL_CAST = "Primarily Animal Cast"
|
||||
PRIMARILY_CHILD_CAST = "Primarily Child Cast"
|
||||
PRIMARILY_FEMALE_CAST = "Primarily Female Cast"
|
||||
PRIMARILY_MALE_CAST = "Primarily Male Cast"
|
||||
PRIMARILY_TEEN_CAST = "Primarily Teen Cast"
|
||||
|
||||
# Cast Traits
|
||||
AGE_REGRESSION = "Age Regression"
|
||||
AGENDER = "Agender"
|
||||
ALIENS = "Aliens"
|
||||
AMNESIA = "Amnesia"
|
||||
ANGELS = "Angels"
|
||||
ANTHROPOMORPHISM = "Anthropomorphism"
|
||||
AROMANTIC = "Aromantic"
|
||||
ARRANGED_MARRIAGE = "Arranged Marriage"
|
||||
ARTIFICIAL_INTELLIGENCE = "Artificial Intelligence"
|
||||
ASEXUAL = "Asexual"
|
||||
BISEXUAL = "Bisexual"
|
||||
BUTLER = "Butler"
|
||||
CENTAUR = "Centaur"
|
||||
CHIMERA = "Chimera"
|
||||
CHUUNIBYOU = "Chuunibyou"
|
||||
CLONE = "Clone"
|
||||
COSPLAY = "Cosplay"
|
||||
COWBOYS = "Cowboys"
|
||||
CROSSDRESSING = "Crossdressing"
|
||||
CYBORG = "Cyborg"
|
||||
DELINQUENTS = "Delinquents"
|
||||
DEMONS = "Demons"
|
||||
DETECTIVE = "Detective"
|
||||
DINOSAURS = "Dinosaurs"
|
||||
DISABILITY = "Disability"
|
||||
DISSOCIATIVE_IDENTITIES = "Dissociative Identities"
|
||||
DRAGONS = "Dragons"
|
||||
DULLAHAN = "Dullahan"
|
||||
ELF = "Elf"
|
||||
FAIRY = "Fairy"
|
||||
FEMBOY = "Femboy"
|
||||
GHOST = "Ghost"
|
||||
GOBLIN = "Goblin"
|
||||
GODS = "Gods"
|
||||
GYARU = "Gyaru"
|
||||
HIKIKOMORI = "Hikikomori"
|
||||
HOMELESS = "Homeless"
|
||||
IDOL = "Idol"
|
||||
KEMONOMIMI = "Kemonomimi"
|
||||
KUUDERE = "Kuudere"
|
||||
MAIDS = "Maids"
|
||||
MERMAID = "Mermaid"
|
||||
MONSTER_BOY = "Monster Boy"
|
||||
MONSTER_GIRL = "Monster Girl"
|
||||
NEKOMIMI = "Nekomimi"
|
||||
NINJA = "Ninja"
|
||||
NUDITY = "Nudity"
|
||||
NUN = "Nun"
|
||||
OFFICE_LADY = "Office Lady"
|
||||
OIRAN = "Oiran"
|
||||
OJOU_SAMA = "Ojou-sama"
|
||||
ORPHAN = "Orphan"
|
||||
PIRATES = "Pirates"
|
||||
ROBOTS = "Robots"
|
||||
SAMURAI = "Samurai"
|
||||
SHRINE_MAIDEN = "Shrine Maiden"
|
||||
SKELETON = "Skeleton"
|
||||
SUCCUBUS = "Succubus"
|
||||
TANNED_SKIN = "Tanned Skin"
|
||||
TEACHER = "Teacher"
|
||||
TOMBOY = "Tomboy"
|
||||
TRANSGENDER = "Transgender"
|
||||
TSUNDERE = "Tsundere"
|
||||
TWINS = "Twins"
|
||||
VAMPIRE = "Vampire"
|
||||
VETERINARIAN = "Veterinarian"
|
||||
VIKINGS = "Vikings"
|
||||
VILLAINESS = "Villainess"
|
||||
VTUBER = "VTuber"
|
||||
WEREWOLF = "Werewolf"
|
||||
WITCH = "Witch"
|
||||
YANDERE = "Yandere"
|
||||
YOUKAI = "Youkai"
|
||||
ZOMBIE = "Zombie"
|
||||
|
||||
# Demographic
|
||||
JOSEI = "Josei"
|
||||
KIDS = "Kids"
|
||||
SEINEN = "Seinen"
|
||||
SHOUJO = "Shoujo"
|
||||
SHOUNEN = "Shounen"
|
||||
|
||||
# Setting
|
||||
MATRIARCHY = "Matriarchy"
|
||||
|
||||
# Setting Scene
|
||||
BAR = "Bar"
|
||||
BOARDING_SCHOOL = "Boarding School"
|
||||
CAMPING = "Camping"
|
||||
CIRCUS = "Circus"
|
||||
COASTAL = "Coastal"
|
||||
COLLEGE = "College"
|
||||
DESERT = "Desert"
|
||||
DUNGEON = "Dungeon"
|
||||
FOREIGN = "Foreign"
|
||||
INN = "Inn"
|
||||
KONBINI = "Konbini"
|
||||
NATURAL_DISASTER = "Natural Disaster"
|
||||
OFFICE = "Office"
|
||||
OUTDOOR_ACTIVITIES = "Outdoor Activities"
|
||||
PRISON = "Prison"
|
||||
RESTAURANT = "Restaurant"
|
||||
RURAL = "Rural"
|
||||
SCHOOL = "School"
|
||||
SCHOOL_CLUB = "School Club"
|
||||
SNOWSCAPE = "Snowscape"
|
||||
URBAN = "Urban"
|
||||
WILDERNESS = "Wilderness"
|
||||
WORK = "Work"
|
||||
|
||||
# Setting Time
|
||||
ACHRONOLOGICAL_ORDER = "Achronological Order"
|
||||
ANACHRONISM = "Anachronism"
|
||||
ANCIENT_CHINA = "Ancient China"
|
||||
DYSTOPIAN = "Dystopian"
|
||||
HISTORICAL = "Historical"
|
||||
MEDIEVAL = "Medieval"
|
||||
TIME_SKIP = "Time Skip"
|
||||
|
||||
# Setting Universe
|
||||
AFTERLIFE = "Afterlife"
|
||||
ALTERNATE_UNIVERSE = "Alternate Universe"
|
||||
AUGMENTED_REALITY = "Augmented Reality"
|
||||
OMEGAVERSE = "Omegaverse"
|
||||
POST_APOCALYPTIC = "Post-Apocalyptic"
|
||||
SPACE = "Space"
|
||||
URBAN_FANTASY = "Urban Fantasy"
|
||||
VIRTUAL_WORLD = "Virtual World"
|
||||
|
||||
# Sexual Content
|
||||
AHEGAO = "Ahegao"
|
||||
AMPUTATION = "Amputation"
|
||||
ANAL_SEX = "Anal Sex"
|
||||
ARMPITS = "Armpits"
|
||||
ASHIKOKI = "Ashikoki"
|
||||
ASPHYXIATION = "Asphyxiation"
|
||||
BONDAGE = "Bondage"
|
||||
BOOBJOB = "Boobjob"
|
||||
CERVIX_PENETRATION = "Cervix Penetration"
|
||||
CHEATING = "Cheating"
|
||||
CUMFLATION = "Cumflation"
|
||||
CUNNILINGUS = "Cunnilingus"
|
||||
DEEPTHROAT = "Deepthroat"
|
||||
DEFLORATION = "Defloration"
|
||||
DILF = "DILF"
|
||||
DOUBLE_PENETRATION = "Double Penetration"
|
||||
EROTIC_PIERCINGS = "Erotic Piercings"
|
||||
EXHIBITIONISM = "Exhibitionism"
|
||||
FACIAL = "Facial"
|
||||
FEET = "Feet"
|
||||
FELLATIO = "Fellatio"
|
||||
FEMDOM = "Femdom"
|
||||
FISTING = "Fisting"
|
||||
FLAT_CHEST = "Flat Chest"
|
||||
FUTANARI = "Futanari"
|
||||
GROUP_SEX = "Group Sex"
|
||||
HAIR_PULLING = "Hair Pulling"
|
||||
HANDJOB = "Handjob"
|
||||
HUMAN_PET = "Human Pet"
|
||||
HYPERSEXUALITY = "Hypersexuality"
|
||||
INCEST = "Incest"
|
||||
INSEKI = "Inseki"
|
||||
IRRUMATIO = "Irrumatio"
|
||||
LACTATION = "Lactation"
|
||||
LARGE_BREASTS = "Large Breasts"
|
||||
MALE_PREGNANCY = "Male Pregnancy"
|
||||
MASOCHISM = "Masochism"
|
||||
MASTURBATION = "Masturbation"
|
||||
MATING_PRESS = "Mating Press"
|
||||
MILF = "MILF"
|
||||
NAKADASHI = "Nakadashi"
|
||||
NETORARE = "Netorare"
|
||||
NETORASE = "Netorase"
|
||||
NETORI = "Netori"
|
||||
PET_PLAY = "Pet Play"
|
||||
PROSTITUTION = "Prostitution"
|
||||
PUBLIC_SEX = "Public Sex"
|
||||
RAPE = "Rape"
|
||||
RIMJOB = "Rimjob"
|
||||
SADISM = "Sadism"
|
||||
SCAT = "Scat"
|
||||
SCISSORING = "Scissoring"
|
||||
SEX_TOYS = "Sex Toys"
|
||||
SHIMAIDON = "Shimaidon"
|
||||
SQUIRTING = "Squirting"
|
||||
SUMATA = "Sumata"
|
||||
SWEAT = "Sweat"
|
||||
TENTACLES = "Tentacles"
|
||||
THREESOME = "Threesome"
|
||||
VIRGINITY = "Virginity"
|
||||
VORE = "Vore"
|
||||
VOYEUR = "Voyeur"
|
||||
WATERSPORTS = "Watersports"
|
||||
ZOOPHILIA = "Zoophilia"
|
||||
|
||||
# Technical
|
||||
_4_KOMA = "4-koma"
|
||||
ACHROMATIC = "Achromatic"
|
||||
ADVERTISEMENT = "Advertisement"
|
||||
ANTHOLOGY = "Anthology"
|
||||
CGI = "CGI"
|
||||
EPISODIC = "Episodic"
|
||||
FLASH = "Flash"
|
||||
FULL_CGI = "Full CGI"
|
||||
FULL_COLOR = "Full Color"
|
||||
LONG_STRIP = "Long Strip"
|
||||
MIXED_MEDIA = "Mixed Media"
|
||||
NO_DIALOGUE = "No Dialogue"
|
||||
NON_FICTION = "Non-fiction"
|
||||
POV = "POV"
|
||||
PUPPETRY = "Puppetry"
|
||||
ROTOSCOPING = "Rotoscoping"
|
||||
STOP_MOTION = "Stop Motion"
|
||||
VERTICAL_VIDEO = "Vertical Video"
|
||||
|
||||
# Theme Action
|
||||
ARCHERY = "Archery"
|
||||
BATTLE_ROYALE = "Battle Royale"
|
||||
ESPIONAGE = "Espionage"
|
||||
FUGITIVE = "Fugitive"
|
||||
GUNS = "Guns"
|
||||
MARTIAL_ARTS = "Martial Arts"
|
||||
SPEARPLAY = "Spearplay"
|
||||
SWORDPLAY = "Swordplay"
|
||||
|
||||
# Theme Arts
|
||||
ACTING = "Acting"
|
||||
CALLIGRAPHY = "Calligraphy"
|
||||
CLASSIC_LITERATURE = "Classic Literature"
|
||||
DRAWING = "Drawing"
|
||||
FASHION = "Fashion"
|
||||
FOOD = "Food"
|
||||
MAKEUP = "Makeup"
|
||||
PHOTOGRAPHY = "Photography"
|
||||
RAKUGO = "Rakugo"
|
||||
WRITING = "Writing"
|
||||
|
||||
# Theme Arts-Music
|
||||
BAND = "Band"
|
||||
CLASSICAL_MUSIC = "Classical Music"
|
||||
DANCING = "Dancing"
|
||||
HIP_HOP_MUSIC = "Hip-hop Music"
|
||||
JAZZ_MUSIC = "Jazz Music"
|
||||
METAL_MUSIC = "Metal Music"
|
||||
MUSICAL_THEATER = "Musical Theater"
|
||||
ROCK_MUSIC = "Rock Music"
|
||||
|
||||
# Theme Comedy
|
||||
PARODY = "Parody"
|
||||
SATIRE = "Satire"
|
||||
SLAPSTICK = "Slapstick"
|
||||
SURREAL_COMEDY = "Surreal Comedy"
|
||||
|
||||
# Theme Drama
|
||||
BULLYING = "Bullying"
|
||||
CLASS_STRUGGLE = "Class Struggle"
|
||||
COMING_OF_AGE = "Coming of Age"
|
||||
CONSPIRACY = "Conspiracy"
|
||||
ECO_HORROR = "Eco-Horror"
|
||||
FAKE_RELATIONSHIP = "Fake Relationship"
|
||||
KINGDOM_MANAGEMENT = "Kingdom Management"
|
||||
REHABILITATION = "Rehabilitation"
|
||||
REVENGE = "Revenge"
|
||||
SUICIDE = "Suicide"
|
||||
TRAGEDY = "Tragedy"
|
||||
|
||||
# Theme Fantasy
|
||||
ALCHEMY = "Alchemy"
|
||||
BODY_SWAPPING = "Body Swapping"
|
||||
CULTIVATION = "Cultivation"
|
||||
CURSES = "Curses"
|
||||
EXORCISM = "Exorcism"
|
||||
FAIRY_TALE = "Fairy Tale"
|
||||
HENSHIN = "Henshin"
|
||||
ISEKAI = "Isekai"
|
||||
KAIJU = "Kaiju"
|
||||
MAGIC = "Magic"
|
||||
MYTHOLOGY = "Mythology"
|
||||
NECROMANCY = "Necromancy"
|
||||
SHAPESHIFTING = "Shapeshifting"
|
||||
STEAMPUNK = "Steampunk"
|
||||
SUPER_POWER = "Super Power"
|
||||
SUPERHERO = "Superhero"
|
||||
WUXIA = "Wuxia"
|
||||
|
||||
# Theme Game
|
||||
BOARD_GAME = "Board Game"
|
||||
E_SPORTS = "E-Sports"
|
||||
VIDEO_GAMES = "Video Games"
|
||||
|
||||
# Theme Game-Card & Board Game
|
||||
CARD_BATTLE = "Card Battle"
|
||||
GO = "Go"
|
||||
KARUTA = "Karuta"
|
||||
MAHJONG = "Mahjong"
|
||||
POKER = "Poker"
|
||||
SHOGI = "Shogi"
|
||||
|
||||
# Theme Game-Sport
|
||||
ACROBATICS = "Acrobatics"
|
||||
AIRSOFT = "Airsoft"
|
||||
AMERICAN_FOOTBALL = "American Football"
|
||||
ATHLETICS = "Athletics"
|
||||
BADMINTON = "Badminton"
|
||||
BASEBALL = "Baseball"
|
||||
BASKETBALL = "Basketball"
|
||||
BOWLING = "Bowling"
|
||||
BOXING = "Boxing"
|
||||
CHEERLEADING = "Cheerleading"
|
||||
CYCLING = "Cycling"
|
||||
FENCING = "Fencing"
|
||||
FISHING = "Fishing"
|
||||
FITNESS = "Fitness"
|
||||
FOOTBALL = "Football"
|
||||
GOLF = "Golf"
|
||||
HANDBALL = "Handball"
|
||||
ICE_SKATING = "Ice Skating"
|
||||
JUDO = "Judo"
|
||||
LACROSSE = "Lacrosse"
|
||||
PARKOUR = "Parkour"
|
||||
RUGBY = "Rugby"
|
||||
SCUBA_DIVING = "Scuba Diving"
|
||||
SKATEBOARDING = "Skateboarding"
|
||||
SUMO = "Sumo"
|
||||
SURFING = "Surfing"
|
||||
SWIMMING = "Swimming"
|
||||
TABLE_TENNIS = "Table Tennis"
|
||||
TENNIS = "Tennis"
|
||||
VOLLEYBALL = "Volleyball"
|
||||
WRESTLING = "Wrestling"
|
||||
|
||||
# Theme Other
|
||||
ADOPTION = "Adoption"
|
||||
ANIMALS = "Animals"
|
||||
ASTRONOMY = "Astronomy"
|
||||
AUTOBIOGRAPHICAL = "Autobiographical"
|
||||
BIOGRAPHICAL = "Biographical"
|
||||
BLACKMAIL = "Blackmail"
|
||||
BODY_HORROR = "Body Horror"
|
||||
BODY_IMAGE = "Body Image"
|
||||
CANNIBALISM = "Cannibalism"
|
||||
CHIBI = "Chibi"
|
||||
COSMIC_HORROR = "Cosmic Horror"
|
||||
CREATURE_TAMING = "Creature Taming"
|
||||
CRIME = "Crime"
|
||||
CROSSOVER = "Crossover"
|
||||
DEATH_GAME = "Death Game"
|
||||
DENPA = "Denpa"
|
||||
DRUGS = "Drugs"
|
||||
ECONOMICS = "Economics"
|
||||
EDUCATIONAL = "Educational"
|
||||
ENVIRONMENTAL = "Environmental"
|
||||
ERO_GURO = "Ero Guro"
|
||||
FILMMAKING = "Filmmaking"
|
||||
FOUND_FAMILY = "Found Family"
|
||||
GAMBLING = "Gambling"
|
||||
GENDER_BENDING = "Gender Bending"
|
||||
GORE = "Gore"
|
||||
INDIGENOUS_CULTURES = "Indigenous Cultures"
|
||||
LANGUAGE_BARRIER = "Language Barrier"
|
||||
LGBTQ_PLUS_THEMES = "LGBTQ+ Themes"
|
||||
LOST_CIVILIZATION = "Lost Civilization"
|
||||
MARRIAGE = "Marriage"
|
||||
MEDICINE = "Medicine"
|
||||
MEMORY_MANIPULATION = "Memory Manipulation"
|
||||
META = "Meta"
|
||||
MOUNTAINEERING = "Mountaineering"
|
||||
NOIR = "Noir"
|
||||
OTAKU_CULTURE = "Otaku Culture"
|
||||
PANDEMIC = "Pandemic"
|
||||
PHILOSOPHY = "Philosophy"
|
||||
POLITICS = "Politics"
|
||||
PREGNANCY = "Pregnancy"
|
||||
PROXY_BATTLE = "Proxy Battle"
|
||||
PSYCHOSEXUAL = "Psychosexual"
|
||||
REINCARNATION = "Reincarnation"
|
||||
RELIGION = "Religion"
|
||||
RESCUE = "Rescue"
|
||||
ROYAL_AFFAIRS = "Royal Affairs"
|
||||
SLAVERY = "Slavery"
|
||||
SOFTWARE_DEVELOPMENT = "Software Development"
|
||||
SURVIVAL = "Survival"
|
||||
TERRORISM = "Terrorism"
|
||||
TORTURE = "Torture"
|
||||
TRAVEL = "Travel"
|
||||
VOCAL_SYNTH = "Vocal Synth"
|
||||
WAR = "War"
|
||||
|
||||
# Theme Other-Organisations
|
||||
ASSASSINS = "Assassins"
|
||||
CRIMINAL_ORGANIZATION = "Criminal Organization"
|
||||
CULT = "Cult"
|
||||
FIREFIGHTERS = "Firefighters"
|
||||
GANGS = "Gangs"
|
||||
MAFIA = "Mafia"
|
||||
MILITARY = "Military"
|
||||
POLICE = "Police"
|
||||
TRIADS = "Triads"
|
||||
YAKUZA = "Yakuza"
|
||||
|
||||
# Theme Other-Vehicle
|
||||
AVIATION = "Aviation"
|
||||
CARS = "Cars"
|
||||
MOPEDS = "Mopeds"
|
||||
MOTORCYCLES = "Motorcycles"
|
||||
SHIPS = "Ships"
|
||||
TANKS = "Tanks"
|
||||
TRAINS = "Trains"
|
||||
|
||||
# Theme Romance
|
||||
AGE_GAP = "Age Gap"
|
||||
BOYS_LOVE = "Boys' Love"
|
||||
COHABITATION = "Cohabitation"
|
||||
FEMALE_HAREM = "Female Harem"
|
||||
HETEROSEXUAL = "Heterosexual"
|
||||
LOVE_TRIANGLE = "Love Triangle"
|
||||
MALE_HAREM = "Male Harem"
|
||||
MATCHMAKING = "Matchmaking"
|
||||
MIXED_GENDER_HAREM = "Mixed Gender Harem"
|
||||
TEENS_LOVE = "Teens' Love"
|
||||
UNREQUITED_LOVE = "Unrequited Love"
|
||||
YURI = "Yuri"
|
||||
|
||||
# Theme Sci-Fi
|
||||
CYBERPUNK = "Cyberpunk"
|
||||
SPACE_OPERA = "Space Opera"
|
||||
TIME_LOOP = "Time Loop"
|
||||
TIME_MANIPULATION = "Time Manipulation"
|
||||
TOKUSATSU = "Tokusatsu"
|
||||
|
||||
# Theme Sci-Fi-Mecha
|
||||
REAL_ROBOT = "Real Robot"
|
||||
SUPER_ROBOT = "Super Robot"
|
||||
|
||||
# Theme Slice of Life
|
||||
AGRICULTURE = "Agriculture"
|
||||
CUTE_BOYS_DOING_CUTE_THINGS = "Cute Boys Doing Cute Things"
|
||||
CUTE_GIRLS_DOING_CUTE_THINGS = "Cute Girls Doing Cute Things"
|
||||
FAMILY_LIFE = "Family Life"
|
||||
HORTICULTURE = "Horticulture"
|
||||
IYASHIKEI = "Iyashikei"
|
||||
PARENTHOOD = "Parenthood"
|
||||
|
||||
|
||||
class MediaSort(Enum):
|
||||
ID = "ID"
|
||||
ID_DESC = "ID_DESC"
|
||||
TITLE_ROMAJI = "TITLE_ROMAJI"
|
||||
TITLE_ROMAJI_DESC = "TITLE_ROMAJI_DESC"
|
||||
TITLE_ENGLISH = "TITLE_ENGLISH"
|
||||
TITLE_ENGLISH_DESC = "TITLE_ENGLISH_DESC"
|
||||
TITLE_NATIVE = "TITLE_NATIVE"
|
||||
TITLE_NATIVE_DESC = "TITLE_NATIVE_DESC"
|
||||
TYPE = "TYPE"
|
||||
TYPE_DESC = "TYPE_DESC"
|
||||
FORMAT = "FORMAT"
|
||||
FORMAT_DESC = "FORMAT_DESC"
|
||||
START_DATE = "START_DATE"
|
||||
START_DATE_DESC = "START_DATE_DESC"
|
||||
END_DATE = "END_DATE"
|
||||
END_DATE_DESC = "END_DATE_DESC"
|
||||
SCORE = "SCORE"
|
||||
SCORE_DESC = "SCORE_DESC"
|
||||
POPULARITY = "POPULARITY"
|
||||
POPULARITY_DESC = "POPULARITY_DESC"
|
||||
TRENDING = "TRENDING"
|
||||
TRENDING_DESC = "TRENDING_DESC"
|
||||
EPISODES = "EPISODES"
|
||||
EPISODES_DESC = "EPISODES_DESC"
|
||||
DURATION = "DURATION"
|
||||
DURATION_DESC = "DURATION_DESC"
|
||||
STATUS = "STATUS"
|
||||
STATUS_DESC = "STATUS_DESC"
|
||||
CHAPTERS = "CHAPTERS"
|
||||
CHAPTERS_DESC = "CHAPTERS_DESC"
|
||||
VOLUMES = "VOLUMES"
|
||||
VOLUMES_DESC = "VOLUMES_DESC"
|
||||
UPDATED_AT = "UPDATED_AT"
|
||||
UPDATED_AT_DESC = "UPDATED_AT_DESC"
|
||||
SEARCH_MATCH = "SEARCH_MATCH"
|
||||
FAVOURITES = "FAVOURITES"
|
||||
FAVOURITES_DESC = "FAVOURITES_DESC"
|
||||
|
||||
|
||||
class UserMediaListSort(Enum):
|
||||
MEDIA_ID = "MEDIA_ID"
|
||||
MEDIA_ID_DESC = "MEDIA_ID_DESC"
|
||||
SCORE = "SCORE"
|
||||
SCORE_DESC = "SCORE_DESC"
|
||||
STATUS = "STATUS"
|
||||
STATUS_DESC = "STATUS_DESC"
|
||||
PROGRESS = "PROGRESS"
|
||||
PROGRESS_DESC = "PROGRESS_DESC"
|
||||
PROGRESS_VOLUMES = "PROGRESS_VOLUMES"
|
||||
PROGRESS_VOLUMES_DESC = "PROGRESS_VOLUMES_DESC"
|
||||
REPEAT = "REPEAT"
|
||||
REPEAT_DESC = "REPEAT_DESC"
|
||||
PRIORITY = "PRIORITY"
|
||||
PRIORITY_DESC = "PRIORITY_DESC"
|
||||
STARTED_ON = "STARTED_ON"
|
||||
STARTED_ON_DESC = "STARTED_ON_DESC"
|
||||
FINISHED_ON = "FINISHED_ON"
|
||||
FINISHED_ON_DESC = "FINISHED_ON_DESC"
|
||||
ADDED_TIME = "ADDED_TIME"
|
||||
ADDED_TIME_DESC = "ADDED_TIME_DESC"
|
||||
UPDATED_TIME = "UPDATED_TIME"
|
||||
UPDATED_TIME_DESC = "UPDATED_TIME_DESC"
|
||||
MEDIA_TITLE_ROMAJI = "MEDIA_TITLE_ROMAJI"
|
||||
MEDIA_TITLE_ROMAJI_DESC = "MEDIA_TITLE_ROMAJI_DESC"
|
||||
MEDIA_TITLE_ENGLISH = "MEDIA_TITLE_ENGLISH"
|
||||
MEDIA_TITLE_ENGLISH_DESC = "MEDIA_TITLE_ENGLISH_DESC"
|
||||
MEDIA_TITLE_NATIVE = "MEDIA_TITLE_NATIVE"
|
||||
MEDIA_TITLE_NATIVE_DESC = "MEDIA_TITLE_NATIVE_DESC"
|
||||
MEDIA_POPULARITY = "MEDIA_POPULARITY"
|
||||
MEDIA_POPULARITY_DESC = "MEDIA_POPULARITY_DESC"
|
||||
MEDIA_SCORE = "MEDIA_SCORE"
|
||||
MEDIA_SCORE_DESC = "MEDIA_SCORE_DESC"
|
||||
MEDIA_START_DATE = "MEDIA_START_DATE"
|
||||
MEDIA_START_DATE_DESC = "MEDIA_START_DATE_DESC"
|
||||
MEDIA_RATING = "MEDIA_RATING"
|
||||
MEDIA_RATING_DESC = "MEDIA_RATING_DESC"
|
||||
|
||||
|
||||
class MediaSeason(Enum):
|
||||
WINTER = "WINTER"
|
||||
SPRING = "SPRING"
|
||||
SUMMER = "SUMMER"
|
||||
FALL = "FALL"
|
||||
|
||||
|
||||
class MediaYear(Enum):
|
||||
_1900 = "1900"
|
||||
_1910 = "1910"
|
||||
_1920 = "1920"
|
||||
_1930 = "1930"
|
||||
_1940 = "1940"
|
||||
_1950 = "1950"
|
||||
_1960 = "1960"
|
||||
_1970 = "1970"
|
||||
_1980 = "1980"
|
||||
_1990 = "1990"
|
||||
_2000 = "2000"
|
||||
_2004 = "2004"
|
||||
_2005 = "2005"
|
||||
_2006 = "2006"
|
||||
_2007 = "2007"
|
||||
_2008 = "2008"
|
||||
_2009 = "2009"
|
||||
_2010 = "2010"
|
||||
_2011 = "2011"
|
||||
_2012 = "2012"
|
||||
_2013 = "2013"
|
||||
_2014 = "2014"
|
||||
_2015 = "2015"
|
||||
_2016 = "2016"
|
||||
_2017 = "2017"
|
||||
_2018 = "2018"
|
||||
_2019 = "2019"
|
||||
_2020 = "2020"
|
||||
_2021 = "2021"
|
||||
_2022 = "2022"
|
||||
_2023 = "2023"
|
||||
_2024 = "2024"
|
||||
_2025 = "2025"
|
||||
@@ -1,65 +0,0 @@
|
||||
"""
|
||||
Syncplay integration for Viu.
|
||||
|
||||
This module provides a procedural function to launch Syncplay with the given media and options.
|
||||
"""
|
||||
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from .tools import exit_app
|
||||
|
||||
|
||||
def SyncPlayer(url: str, anime_title=None, headers={}, subtitles=[], *args):
|
||||
"""
|
||||
Launch Syncplay for synchronized playback with friends.
|
||||
|
||||
Args:
|
||||
url: The media URL to play.
|
||||
anime_title: Optional title to display in the player.
|
||||
headers: Optional HTTP headers to pass to the player.
|
||||
subtitles: Optional list of subtitle dicts with 'url' keys.
|
||||
*args: Additional arguments (unused).
|
||||
|
||||
Returns:
|
||||
Tuple of ("0", "0") for compatibility.
|
||||
"""
|
||||
# TODO: handle m3u8 multi quality streams
|
||||
#
|
||||
# check for SyncPlay
|
||||
SYNCPLAY_EXECUTABLE = shutil.which("syncplay")
|
||||
if not SYNCPLAY_EXECUTABLE:
|
||||
print("Syncplay not found")
|
||||
exit_app(1)
|
||||
return "0", "0"
|
||||
# start SyncPlayer
|
||||
mpv_args = []
|
||||
if headers:
|
||||
mpv_headers = "--http-header-fields="
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_args.append(mpv_headers)
|
||||
for subtitle in subtitles:
|
||||
mpv_args.append(f"--sub-file={subtitle['url']}")
|
||||
if not anime_title:
|
||||
subprocess.run(
|
||||
[
|
||||
SYNCPLAY_EXECUTABLE,
|
||||
url,
|
||||
],
|
||||
check=False,
|
||||
)
|
||||
else:
|
||||
subprocess.run(
|
||||
[
|
||||
SYNCPLAY_EXECUTABLE,
|
||||
url,
|
||||
"--",
|
||||
f"--force-media-title={anime_title}",
|
||||
*mpv_args,
|
||||
],
|
||||
check=False,
|
||||
)
|
||||
|
||||
# for compatability
|
||||
return "0", "0"
|
||||
@@ -1,105 +0,0 @@
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api
|
||||
|
||||
[TODO:description]
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .libs.manga_provider import manga_sources
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MangaProvider:
|
||||
"""Class that manages all anime sources adding some extra functionality to them.
|
||||
Attributes:
|
||||
PROVIDERS: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
dynamic: [TODO:attribute]
|
||||
retries: [TODO:attribute]
|
||||
manga_provider: [TODO:attribute]
|
||||
"""
|
||||
|
||||
PROVIDERS = list(manga_sources.keys())
|
||||
provider = PROVIDERS[0]
|
||||
|
||||
def __init__(self, provider="mangadex", dynamic=False, retries=0) -> None:
|
||||
self.provider = provider
|
||||
self.dynamic = dynamic
|
||||
self.retries = retries
|
||||
self.lazyload_provider(self.provider)
|
||||
|
||||
def lazyload_provider(self, provider):
|
||||
"""updates the current provider being used"""
|
||||
_, anime_provider_cls_name = manga_sources[provider].split(".", 1)
|
||||
package = f"viu_cli.libs.manga_provider.{provider}"
|
||||
provider_api = importlib.import_module(".api", package)
|
||||
manga_provider = getattr(provider_api, anime_provider_cls_name)
|
||||
self.manga_provider = manga_provider()
|
||||
|
||||
def search_for_manga(
|
||||
self,
|
||||
user_query,
|
||||
nsfw=True,
|
||||
unknown=True,
|
||||
):
|
||||
"""core abstraction over all providers search functionality
|
||||
|
||||
Args:
|
||||
user_query ([TODO:parameter]): [TODO:description]
|
||||
translation_type ([TODO:parameter]): [TODO:description]
|
||||
nsfw ([TODO:parameter]): [TODO:description]
|
||||
manga_provider ([TODO:parameter]): [TODO:description]
|
||||
anilist_obj: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.search_for_manga(user_query, nsfw, unknown)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results
|
||||
|
||||
def get_manga(
|
||||
self,
|
||||
anime_id: str,
|
||||
):
|
||||
"""core abstraction over getting info of an anime from all providers
|
||||
|
||||
Args:
|
||||
anime_id: [TODO:description]
|
||||
anilist_obj: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.get_manga(anime_id)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results
|
||||
|
||||
def get_chapter_thumbnails(
|
||||
self,
|
||||
manga_id: str,
|
||||
chapter: str,
|
||||
):
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.get_chapter_thumbnails(manga_id, chapter)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results # pyright:ignore
|
||||
@@ -1 +0,0 @@
|
||||
manga_sources = {"mangadex": "api.MangaDexApi"}
|
||||
@@ -1,18 +0,0 @@
|
||||
from httpx import Client
|
||||
from ....core.utils.networking import random_user_agent
|
||||
|
||||
|
||||
class MangaProvider:
|
||||
session: Client
|
||||
|
||||
USER_AGENT = random_user_agent()
|
||||
HEADERS = {}
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.session = Client(
|
||||
headers={
|
||||
"User-Agent": self.USER_AGENT,
|
||||
**self.HEADERS,
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
@@ -1,15 +0,0 @@
|
||||
import logging
|
||||
|
||||
from httpx import get
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def fetch_manga_info_from_bal(anilist_id):
|
||||
try:
|
||||
url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/manga/{anilist_id}.json"
|
||||
response = get(url, timeout=11)
|
||||
if response.ok:
|
||||
return response.json()
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
@@ -1,51 +0,0 @@
|
||||
import logging
|
||||
|
||||
from ...common.mini_anilist import search_for_manga_with_anilist
|
||||
from ..base_provider import MangaProvider
|
||||
from ..common import fetch_manga_info_from_bal
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MangaDexApi(MangaProvider):
|
||||
def search_for_manga(self, title: str, *args):
|
||||
try:
|
||||
search_results = search_for_manga_with_anilist(title)
|
||||
return search_results
|
||||
except Exception as e:
|
||||
logger.error(f"[MANGADEX-ERROR]: {e}")
|
||||
|
||||
def get_manga(self, anilist_manga_id: str):
|
||||
bal_data = fetch_manga_info_from_bal(anilist_manga_id)
|
||||
if not bal_data:
|
||||
return
|
||||
manga_id, MangaDexManga = next(iter(bal_data["Sites"]["Mangadex"].items()))
|
||||
return {
|
||||
"id": manga_id,
|
||||
"title": MangaDexManga["title"],
|
||||
"poster": MangaDexManga["image"],
|
||||
"availableChapters": [],
|
||||
}
|
||||
|
||||
def get_chapter_thumbnails(self, manga_id, chapter):
|
||||
chapter_info_url = f"https://api.mangadex.org/chapter?manga={manga_id}&translatedLanguage[]=en&chapter={chapter}&includeEmptyPages=0"
|
||||
chapter_info_response = self.session.get(chapter_info_url)
|
||||
if not chapter_info_response.ok:
|
||||
return
|
||||
chapter_info = next(iter(chapter_info_response.json()["data"]))
|
||||
chapters_thumbnails_url = (
|
||||
f"https://api.mangadex.org/at-home/server/{chapter_info['id']}"
|
||||
)
|
||||
chapter_thumbnails_response = self.session.get(chapters_thumbnails_url)
|
||||
if not chapter_thumbnails_response.ok:
|
||||
return
|
||||
chapter_thumbnails_info = chapter_thumbnails_response.json()
|
||||
base_url = chapter_thumbnails_info["baseUrl"]
|
||||
hash = chapter_thumbnails_info["chapter"]["hash"]
|
||||
return {
|
||||
"thumbnails": [
|
||||
f"{base_url}/data/{hash}/{chapter_thumbnail}"
|
||||
for chapter_thumbnail in chapter_thumbnails_info["chapter"]["data"]
|
||||
],
|
||||
"title": chapter_info["attributes"]["title"],
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import sys
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
if sys.version_info < (3, 11):
|
||||
raise ImportError(
|
||||
"You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by Viu"
|
||||
) # noqa: F541
|
||||
@@ -1,4 +1,3 @@
|
||||
|
||||
██╗░░░██╗██╗██╗░░░██╗
|
||||
██║░░░██║██║██║░░░██║
|
||||
╚██╗░██╔╝██║██║░░░██║
|
||||
|
Before Width: | Height: | Size: 3.7 KiB After Width: | Height: | Size: 3.7 KiB |
|
Before Width: | Height: | Size: 276 KiB After Width: | Height: | Size: 276 KiB |
@@ -4,7 +4,8 @@
|
||||
"Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
|
||||
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
|
||||
"Hazurewaku no \"Joutai Ijou Skill\" de Saikyou ni Natta Ore ga Subete wo Juurin suru made": "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
|
||||
"Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season"
|
||||
"Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
|
||||
"Hanka×Hanka (2011)": "Hunter × Hunter (2011)"
|
||||
},
|
||||
"hianime": {
|
||||
"My Star": "Oshi no Ko"
|
||||
@@ -13,5 +14,12 @@
|
||||
"Azumanga Daiou The Animation": "Azumanga Daioh",
|
||||
"Mairimashita! Iruma-kun 2nd Season": "Mairimashita! Iruma-kun 2",
|
||||
"Mairimashita! Iruma-kun 3rd Season": "Mairimashita! Iruma-kun 3"
|
||||
},
|
||||
"animeunity": {
|
||||
"Kaiju No. 8": "Kaiju No.8",
|
||||
"Naruto Shippuden": "Naruto: Shippuden",
|
||||
"Psycho-Pass: Sinners of the System Case.1 - Crime and Punishment": "PSYCHO-PASS Sinners of the System: Case.1 Crime and Punishment",
|
||||
"Psycho-Pass: Sinners of the System Case.2 - First Guardian": "PSYCHO-PASS Sinners of the System: Case.2 First Guardian",
|
||||
"Psycho-Pass: Sinners of the System Case.3 - On the Other Side of Love and Hate": "PSYCHO-PASS Sinners of the System: Case.3 Beyond the Pale of Vengeance"
|
||||
}
|
||||
}
|
||||
202
viu_media/assets/scripts/fzf/_ansi_utils.py
Normal file
202
viu_media/assets/scripts/fzf/_ansi_utils.py
Normal file
@@ -0,0 +1,202 @@
|
||||
"""
|
||||
ANSI utilities for FZF preview scripts.
|
||||
|
||||
Lightweight stdlib-only utilities to replace Rich dependency in preview scripts.
|
||||
Provides RGB color formatting, table rendering, and markdown stripping.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import textwrap
|
||||
import unicodedata
|
||||
|
||||
|
||||
def get_terminal_width() -> int:
|
||||
"""
|
||||
Get terminal width, prioritizing FZF preview environment variables.
|
||||
|
||||
Returns:
|
||||
Terminal width in columns
|
||||
"""
|
||||
fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
|
||||
if fzf_cols:
|
||||
return int(fzf_cols)
|
||||
return shutil.get_terminal_size((80, 24)).columns
|
||||
|
||||
|
||||
def display_width(text: str) -> int:
|
||||
"""
|
||||
Calculate the actual display width of text, accounting for wide characters.
|
||||
|
||||
Args:
|
||||
text: Text to measure
|
||||
|
||||
Returns:
|
||||
Display width in terminal columns
|
||||
"""
|
||||
width = 0
|
||||
for char in text:
|
||||
# East Asian Width property: 'F' (Fullwidth) and 'W' (Wide) take 2 columns
|
||||
if unicodedata.east_asian_width(char) in ("F", "W"):
|
||||
width += 2
|
||||
else:
|
||||
width += 1
|
||||
return width
|
||||
|
||||
|
||||
def rgb_color(r: int, g: int, b: int, text: str, bold: bool = False) -> str:
|
||||
"""
|
||||
Format text with RGB color using ANSI escape codes.
|
||||
|
||||
Args:
|
||||
r: Red component (0-255)
|
||||
g: Green component (0-255)
|
||||
b: Blue component (0-255)
|
||||
text: Text to colorize
|
||||
bold: Whether to make text bold
|
||||
|
||||
Returns:
|
||||
ANSI-escaped colored text
|
||||
"""
|
||||
color_code = f"\x1b[38;2;{r};{g};{b}m"
|
||||
bold_code = "\x1b[1m" if bold else ""
|
||||
reset = "\x1b[0m"
|
||||
return f"{color_code}{bold_code}{text}{reset}"
|
||||
|
||||
|
||||
def parse_color(color_csv: str) -> tuple[int, int, int]:
|
||||
"""
|
||||
Parse RGB color from comma-separated string.
|
||||
|
||||
Args:
|
||||
color_csv: Color as 'R,G,B' string
|
||||
|
||||
Returns:
|
||||
Tuple of (r, g, b) integers
|
||||
"""
|
||||
parts = color_csv.split(",")
|
||||
return int(parts[0]), int(parts[1]), int(parts[2])
|
||||
|
||||
|
||||
def print_rule(sep_color: str) -> None:
|
||||
"""
|
||||
Print a horizontal rule line.
|
||||
|
||||
Args:
|
||||
sep_color: Color as 'R,G,B' string
|
||||
"""
|
||||
width = get_terminal_width()
|
||||
r, g, b = parse_color(sep_color)
|
||||
print(rgb_color(r, g, b, "─" * width))
|
||||
|
||||
|
||||
def print_table_row(
|
||||
key: str, value: str, header_color: str, key_width: int, value_width: int
|
||||
) -> None:
|
||||
"""
|
||||
Print a two-column table row with left-aligned key and right-aligned value.
|
||||
|
||||
Args:
|
||||
key: Left column text (header/key)
|
||||
value: Right column text (value)
|
||||
header_color: Color for key as 'R,G,B' string
|
||||
key_width: Width for key column
|
||||
value_width: Width for value column
|
||||
"""
|
||||
r, g, b = parse_color(header_color)
|
||||
key_styled = rgb_color(r, g, b, key, bold=True)
|
||||
|
||||
# Get actual terminal width
|
||||
term_width = get_terminal_width()
|
||||
|
||||
# Calculate display widths accounting for wide characters
|
||||
key_display_width = display_width(key)
|
||||
|
||||
# Calculate actual value width based on terminal and key display width
|
||||
actual_value_width = max(20, term_width - key_display_width - 2)
|
||||
|
||||
# Wrap value if it's too long (use character count, not display width for wrapping)
|
||||
value_lines = textwrap.wrap(str(value), width=actual_value_width) if value else [""]
|
||||
|
||||
if not value_lines:
|
||||
value_lines = [""]
|
||||
|
||||
# Print first line with properly aligned value
|
||||
first_line = value_lines[0]
|
||||
first_line_display_width = display_width(first_line)
|
||||
|
||||
# Use manual spacing to right-align based on display width
|
||||
spacing = term_width - key_display_width - first_line_display_width - 2
|
||||
if spacing > 0:
|
||||
print(f"{key_styled} {' ' * spacing}{first_line}")
|
||||
else:
|
||||
print(f"{key_styled} {first_line}")
|
||||
|
||||
# Print remaining wrapped lines (left-aligned, indented)
|
||||
for line in value_lines[1:]:
|
||||
print(f"{' ' * (key_display_width + 2)}{line}")
|
||||
|
||||
|
||||
def strip_markdown(text: str) -> str:
|
||||
"""
|
||||
Strip markdown formatting from text.
|
||||
|
||||
Removes:
|
||||
- Headers (# ## ###)
|
||||
- Bold (**text** or __text__)
|
||||
- Italic (*text* or _text_)
|
||||
- Links ([text](url))
|
||||
- Code blocks (```code```)
|
||||
- Inline code (`code`)
|
||||
|
||||
Args:
|
||||
text: Markdown-formatted text
|
||||
|
||||
Returns:
|
||||
Plain text with markdown removed
|
||||
"""
|
||||
if not text:
|
||||
return ""
|
||||
|
||||
# Remove code blocks first
|
||||
text = re.sub(r"```[\s\S]*?```", "", text)
|
||||
|
||||
# Remove inline code
|
||||
text = re.sub(r"`([^`]+)`", r"\1", text)
|
||||
|
||||
# Remove headers
|
||||
text = re.sub(r"^#{1,6}\s+", "", text, flags=re.MULTILINE)
|
||||
|
||||
# Remove bold (** or __)
|
||||
text = re.sub(r"\*\*(.+?)\*\*", r"\1", text)
|
||||
text = re.sub(r"__(.+?)__", r"\1", text)
|
||||
|
||||
# Remove italic (* or _)
|
||||
text = re.sub(r"\*(.+?)\*", r"\1", text)
|
||||
text = re.sub(r"_(.+?)_", r"\1", text)
|
||||
|
||||
# Remove links, keep text
|
||||
text = re.sub(r"\[(.+?)\]\(.+?\)", r"\1", text)
|
||||
|
||||
# Remove images
|
||||
text = re.sub(r"!\[.*?\]\(.+?\)", "", text)
|
||||
|
||||
return text.strip()
|
||||
|
||||
|
||||
def wrap_text(text: str, width: int | None = None) -> str:
|
||||
"""
|
||||
Wrap text to terminal width.
|
||||
|
||||
Args:
|
||||
text: Text to wrap
|
||||
width: Width to wrap to (defaults to terminal width)
|
||||
|
||||
Returns:
|
||||
Wrapped text
|
||||
"""
|
||||
if width is None:
|
||||
width = get_terminal_width()
|
||||
|
||||
return textwrap.fill(text, width=width)
|
||||
36
viu_media/assets/scripts/fzf/airing_schedule_info.py
Normal file
36
viu_media/assets/scripts/fzf/airing_schedule_info.py
Normal file
@@ -0,0 +1,36 @@
|
||||
import sys
|
||||
from _ansi_utils import (
|
||||
print_rule,
|
||||
print_table_row,
|
||||
strip_markdown,
|
||||
wrap_text,
|
||||
get_terminal_width,
|
||||
)
|
||||
|
||||
HEADER_COLOR = sys.argv[1]
|
||||
SEPARATOR_COLOR = sys.argv[2]
|
||||
|
||||
# Get terminal dimensions
|
||||
term_width = get_terminal_width()
|
||||
|
||||
# Print title centered
|
||||
print("{ANIME_TITLE}".center(term_width))
|
||||
|
||||
rows = [
|
||||
("Total Episodes", "{TOTAL_EPISODES}"),
|
||||
]
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
for key, value in rows:
|
||||
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
|
||||
|
||||
rows = [
|
||||
("Upcoming Episodes", "{UPCOMING_EPISODES}"),
|
||||
]
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
for key, value in rows:
|
||||
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
print(wrap_text(strip_markdown("""{SCHEDULE_TABLE}"""), term_width))
|
||||
47
viu_media/assets/scripts/fzf/character_info.py
Normal file
47
viu_media/assets/scripts/fzf/character_info.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import sys
|
||||
from _ansi_utils import (
|
||||
print_rule,
|
||||
print_table_row,
|
||||
strip_markdown,
|
||||
wrap_text,
|
||||
get_terminal_width,
|
||||
)
|
||||
|
||||
HEADER_COLOR = sys.argv[1]
|
||||
SEPARATOR_COLOR = sys.argv[2]
|
||||
|
||||
# Get terminal dimensions
|
||||
term_width = get_terminal_width()
|
||||
|
||||
# Print title centered
|
||||
print("{CHARACTER_NAME}".center(term_width))
|
||||
|
||||
rows = [
|
||||
("Native Name", "{CHARACTER_NATIVE_NAME}"),
|
||||
("Gender", "{CHARACTER_GENDER}"),
|
||||
]
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
for key, value in rows:
|
||||
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
|
||||
|
||||
rows = [
|
||||
("Age", "{CHARACTER_AGE}"),
|
||||
("Blood Type", "{CHARACTER_BLOOD_TYPE}"),
|
||||
]
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
for key, value in rows:
|
||||
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
|
||||
|
||||
rows = [
|
||||
("Birthday", "{CHARACTER_BIRTHDAY}"),
|
||||
("Favourites", "{CHARACTER_FAVOURITES}"),
|
||||
]
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
for key, value in rows:
|
||||
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
|
||||
|
||||
print_rule(SEPARATOR_COLOR)
|
||||
print(wrap_text(strip_markdown("""{CHARACTER_DESCRIPTION}"""), term_width))
|
||||
434
viu_media/assets/scripts/fzf/dynamic_preview.py
Executable file
434
viu_media/assets/scripts/fzf/dynamic_preview.py
Executable file
@@ -0,0 +1,434 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# FZF Dynamic Preview Script for Search Results
|
||||
#
|
||||
# This script handles previews for dynamic search by reading from the cached
|
||||
# search results JSON and generating preview content on-the-fly.
|
||||
# Template variables are injected by Python using .replace()
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from hashlib import sha256
|
||||
from pathlib import Path
|
||||
|
||||
# Import the utility functions
|
||||
from _ansi_utils import (
|
||||
get_terminal_width,
|
||||
print_rule,
|
||||
print_table_row,
|
||||
strip_markdown,
|
||||
wrap_text,
|
||||
)
|
||||
|
||||
|
||||
# --- Template Variables (Injected by Python) ---
|
||||
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
|
||||
IMAGE_CACHE_DIR = Path("{IMAGE_CACHE_DIR}")
|
||||
PREVIEW_MODE = "{PREVIEW_MODE}"
|
||||
IMAGE_RENDERER = "{IMAGE_RENDERER}"
|
||||
HEADER_COLOR = "{HEADER_COLOR}"
|
||||
SEPARATOR_COLOR = "{SEPARATOR_COLOR}"
|
||||
SCALE_UP = "{SCALE_UP}" == "True"
|
||||
|
||||
# --- Arguments ---
|
||||
# sys.argv[1] is the selected anime title from fzf
|
||||
SELECTED_TITLE = sys.argv[1] if len(sys.argv) > 1 else ""
|
||||
|
||||
|
||||
def format_number(num):
|
||||
"""Format number with thousand separators."""
|
||||
if num is None:
|
||||
return "N/A"
|
||||
return f"{num:,}"
|
||||
|
||||
|
||||
def format_date(date_obj):
|
||||
"""Format date object to string."""
|
||||
if not date_obj or date_obj == "null":
|
||||
return "N/A"
|
||||
|
||||
year = date_obj.get("year")
|
||||
month = date_obj.get("month")
|
||||
day = date_obj.get("day")
|
||||
|
||||
if not year:
|
||||
return "N/A"
|
||||
if month and day:
|
||||
return f"{day}/{month}/{year}"
|
||||
if month:
|
||||
return f"{month}/{year}"
|
||||
return str(year)
|
||||
|
||||
|
||||
def get_media_from_results(title):
    """Look up a media entry in the cached search results by any of its titles.

    Returns the matching media dict, or None when the cache file is absent,
    unreadable, or contains no entry whose english/romaji/native title
    equals *title*.
    """
    if not SEARCH_RESULTS_FILE.exists():
        return None

    try:
        data = json.loads(SEARCH_RESULTS_FILE.read_text(encoding="utf-8"))
        for media in data.get("data", {}).get("Page", {}).get("media", []):
            names = media.get("title", {})
            candidates = (
                names.get("english"),
                names.get("romaji"),
                names.get("native"),
            )
            if title in candidates:
                return media
        return None
    except Exception as e:
        # Best-effort: a corrupt/partial cache must not crash the preview.
        print(f"Error reading search results: {e}", file=sys.stderr)
        return None
|
||||
|
||||
|
||||
def download_image(url: str, output_path: Path) -> bool:
    """Download *url* and save it to *output_path*.

    Best-effort: returns False on any failure instead of raising, so the
    preview simply renders without an image.

    Fix: the original wrote directly to *output_path*, so a failed or
    interrupted download could leave a truncated file behind; callers that
    only check ``output_path.exists()`` would then treat the broken file as
    a valid cache hit forever.  We now write to a temporary sibling file and
    atomically rename it into place on success, removing it on failure.
    """
    tmp_path = output_path.with_suffix(output_path.suffix + ".part")
    try:
        # urllib is stdlib; imported lazily to keep module import cheap.
        from urllib import request

        req = request.Request(url, headers={"User-Agent": "viu/1.0"})
        with request.urlopen(req, timeout=5) as response:
            data = response.read()
        tmp_path.write_bytes(data)
        tmp_path.replace(output_path)  # atomic rename on POSIX
        return True
    except Exception:
        # Silently fail - preview will just not show image
        try:
            tmp_path.unlink(missing_ok=True)
        except OSError:
            pass
        return False
|
||||
|
||||
|
||||
def which(cmd):
    """Thin alias for shutil.which, kept so call sites read tersely."""
    return shutil.which(cmd)
|
||||
|
||||
|
||||
def get_terminal_dimensions():
    """Return (columns, lines) available to the preview.

    Prefers fzf's FZF_PREVIEW_COLUMNS / FZF_PREVIEW_LINES environment
    variables, falls back to ``stty size``, and finally to 80x24.
    """
    env = os.environ
    cols = env.get("FZF_PREVIEW_COLUMNS")
    lines = env.get("FZF_PREVIEW_LINES")
    if cols and lines:
        return int(cols), int(lines)

    try:
        out = subprocess.check_output(
            ["stty", "size"], text=True, stderr=subprocess.DEVNULL
        )
        rows, columns = out.split()
        return int(columns), int(rows)
    except Exception:
        return 80, 24
|
||||
|
||||
|
||||
def render_kitty(file_path, width, height, scale_up):
    """Render via the Kitty Graphics Protocol.

    Tries ``kitten icat`` (modern), then ``icat`` (alias), then
    ``kitty +kitten icat`` (legacy).  Returns False when none is installed.
    """
    for candidate in (["kitten", "icat"], ["icat"], ["kitty", "+kitten", "icat"]):
        if shutil.which(candidate[0]):
            cmd = candidate
            break
    else:
        return False

    args = [
        "--clear",
        "--transfer-mode=memory",
        "--unicode-placeholder",
        "--stdin=no",
        f"--place={width}x{height}@0x0",
    ]
    if scale_up:
        args.append("--scale-up")
    args.append(file_path)

    subprocess.run(cmd + args, stdout=sys.stdout, stderr=sys.stderr)
    return True
|
||||
|
||||
|
||||
def render_sixel(file_path, width, height):
    """Render as Sixel, preferring chafa (better cell sizing) over img2sixel."""
    if shutil.which("chafa"):
        subprocess.run(
            ["chafa", "-f", "sixel", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    if shutil.which("img2sixel"):
        # img2sixel sizes in pixels, not cells; assume ~10x20 px per cell.
        px_w, px_h = width * 10, height * 20
        subprocess.run(
            ["img2sixel", f"--width={px_w}", f"--height={px_h}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    return False
|
||||
|
||||
|
||||
def render_iterm(file_path, width, height):
    """Render via the iTerm2 inline-image protocol (imgcat, else chafa)."""
    if shutil.which("imgcat"):
        cmd = ["imgcat", "-W", str(width), "-H", str(height), file_path]
    elif shutil.which("chafa"):
        cmd = ["chafa", "-f", "iterm", "-s", f"{width}x{height}", file_path]
    else:
        return False

    subprocess.run(cmd, stdout=sys.stdout, stderr=sys.stderr)
    return True
|
||||
|
||||
|
||||
def render_timg(file_path, width, height):
    """Render with timg (auto-negotiates kitty/sixel/unicode blocks)."""
    if not shutil.which("timg"):
        return False
    subprocess.run(
        ["timg", f"-g{width}x{height}", "--upscale", file_path],
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
    return True
|
||||
|
||||
|
||||
def render_chafa_auto(file_path, width, height):
    """Render with chafa letting it auto-pick the best output format."""
    if not shutil.which("chafa"):
        return False
    subprocess.run(
        ["chafa", "-s", f"{width}x{height}", file_path],
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
    return True
|
||||
|
||||
|
||||
def fzf_image_preview(file_path: str):
    """Dispatch to the best available image renderer for this terminal.

    Order: the explicitly configured renderer, then environment-based
    auto-detection (Kitty/Ghostty, iTerm), then generic fallbacks by
    quality; prints a warning when nothing is available.
    """
    width, height = get_terminal_dimensions()

    # 1. Honour an explicitly configured renderer first.
    explicit = {
        "icat": lambda: render_kitty(file_path, width, height, SCALE_UP),
        "system-kitty": lambda: render_kitty(file_path, width, height, SCALE_UP),
        "sixel": lambda: render_sixel(file_path, width, height),
        "system-sixels": lambda: render_sixel(file_path, width, height),
        "imgcat": lambda: render_iterm(file_path, width, height),
        "timg": lambda: render_timg(file_path, width, height),
        "chafa": lambda: render_chafa_auto(file_path, width, height),
    }
    configured = explicit.get(IMAGE_RENDERER)
    if configured and configured():
        return

    # 2. Environment-based auto-detection.
    in_kitty_like = os.environ.get("KITTY_WINDOW_ID") or os.environ.get(
        "GHOSTTY_BIN_DIR"
    )
    if in_kitty_like and render_kitty(file_path, width, height, SCALE_UP):
        return
    if os.environ.get("TERM_PROGRAM") == "iTerm.app" and render_iterm(
        file_path, width, height
    ):
        return

    # 3. Generic fallbacks, best quality first.
    fallbacks = (
        lambda: render_kitty(file_path, width, height, SCALE_UP),
        lambda: render_sixel(file_path, width, height),
        lambda: render_timg(file_path, width, height),
        lambda: render_chafa_auto(file_path, width, height),
    )
    for attempt in fallbacks:
        if attempt():
            return

    print("⚠️ No suitable image renderer found (icat, chafa, timg, img2sixel).")
|
||||
|
||||
|
||||
def main():
    """Render the image and/or text preview for the fzf selection."""
    if not SELECTED_TITLE:
        print("No selection")
        return

    media = get_media_from_results(SELECTED_TITLE)
    if not media:
        # Results cache not written yet (search request still in flight).
        print("Loading preview...")
        return

    term_width = get_terminal_width()

    titles = media.get("title", {})
    title = (
        titles.get("english")
        or titles.get("romaji")
        or titles.get("native")
        or "Unknown"
    )

    # --- Image section ---------------------------------------------------
    if PREVIEW_MODE in ("image", "full"):
        cover_url = media.get("coverImage", {}).get("large", "")
        if cover_url:
            IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)

            # "anime-" + sha256(title) must match the preview worker's
            # naming scheme (no KEY prefix for dynamic search).
            hash_id = f"anime-{sha256(SELECTED_TITLE.encode('utf-8')).hexdigest()}"
            image_file = IMAGE_CACHE_DIR / f"{hash_id}.png"

            if not image_file.exists():
                download_image(cover_url, image_file)

            if image_file.exists():
                fzf_image_preview(str(image_file))
                print()  # Spacer
            else:
                print("🖼️ Loading image...")
                print()

    # --- Text section ----------------------------------------------------
    if PREVIEW_MODE in ("text", "full"):
        r, g, b = map(int, SEPARATOR_COLOR.split(","))
        print(f"\x1b[38;2;{r};{g};{b}m" + "─" * term_width + "\x1b[0m", flush=True)
        print(title.center(term_width))

        duration = media.get("duration")
        score = media.get("averageScore")
        studio_nodes = media.get("studios", {}).get("nodes", [])

        # Sections mirror media_info.py's layout; each is preceded by a rule.
        sections = [
            [
                ("Score", f"{score}/100" if score else "N/A"),
                ("Favorites", format_number(media.get("favourites", 0))),
                ("Popularity", format_number(media.get("popularity", 0))),
                ("Status", media.get("status", "Unknown")),
            ],
            [
                ("Episodes", str(media.get("episodes", "?"))),
                ("Duration", f"{duration} min" if duration else "Unknown"),
            ],
            [
                ("Genres", ", ".join(media.get("genres", [])[:5]) or "Unknown"),
                ("Format", media.get("format", "Unknown")),
            ],
            [
                ("Start Date", format_date(media.get("startDate"))),
                ("End Date", format_date(media.get("endDate"))),
            ],
            [
                (
                    "Studios",
                    ", ".join(s.get("name", "") for s in studio_nodes[:3])
                    or "Unknown",
                )
            ],
            [("Synonyms", ", ".join(media.get("synonyms", [])[:3]) or "N/A")],
        ]

        for section in sections:
            print_rule(SEPARATOR_COLOR)
            for key, value in section:
                print_table_row(key, value, HEADER_COLOR, 0, 0)

        print_rule(SEPARATOR_COLOR)
        description = strip_markdown(
            media.get("description", "No description available.")
        )
        print(wrap_text(description, term_width))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C while fzf tears the preview down is routine; exit quietly.
        pass
    except Exception as e:
        # Never let a preview crash bubble up into fzf; report on stderr so
        # the preview pane itself stays clean.
        print(f"Preview Error: {e}", file=sys.stderr)
|
||||
49
viu_media/assets/scripts/fzf/episode_info.py
Normal file
49
viu_media/assets/scripts/fzf/episode_info.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import sys

from _ansi_utils import get_terminal_width, print_rule, print_table_row

# Colors are passed in by the preview wrapper as argv: header, separator.
HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

term_width = get_terminal_width()

# {TITLE} and friends are placeholders substituted via str.replace() before
# this script runs; they must remain verbatim.
print("{TITLE}".center(term_width))

_SECTIONS = [
    [("Duration", "{DURATION}"), ("Status", "{STATUS}")],
    [("Total Episodes", "{EPISODES}"), ("Next Episode", "{NEXT_EPISODE}")],
    [("Progress", "{USER_PROGRESS}"), ("List Status", "{USER_STATUS}")],
    [("Start Date", "{START_DATE}"), ("End Date", "{END_DATE}")],
]

for _section in _SECTIONS:
    print_rule(SEPARATOR_COLOR)
    for _key, _value in _section:
        print_table_row(_key, _value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
|
||||
93
viu_media/assets/scripts/fzf/media_info.py
Normal file
93
viu_media/assets/scripts/fzf/media_info.py
Normal file
@@ -0,0 +1,93 @@
|
||||
import sys

from _ansi_utils import (
    get_terminal_width,
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
)

# Colors are passed in by the preview wrapper as argv: header, separator.
HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

term_width = get_terminal_width()

# {TITLE} and friends are placeholders substituted via str.replace() before
# this script runs; they must remain verbatim — including the {SYNONYMNS}
# spelling, which matches the injector's key.
print("{TITLE}".center(term_width))

_SECTIONS = [
    [
        ("Score", "{SCORE}"),
        ("Favorites", "{FAVOURITES}"),
        ("Popularity", "{POPULARITY}"),
        ("Status", "{STATUS}"),
    ],
    [
        ("Episodes", "{EPISODES}"),
        ("Duration", "{DURATION}"),
        ("Next Episode", "{NEXT_EPISODE}"),
    ],
    [("Genres", "{GENRES}"), ("Format", "{FORMAT}")],
    [("List Status", "{USER_STATUS}"), ("Progress", "{USER_PROGRESS}")],
    [("Start Date", "{START_DATE}"), ("End Date", "{END_DATE}")],
    [("Studios", "{STUDIOS}")],
    [("Synonyms", "{SYNONYMNS}")],
    [("Tags", "{TAGS}")],
]

for _section in _SECTIONS:
    print_rule(SEPARATOR_COLOR)
    for _key, _value in _section:
        print_table_row(_key, _value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{SYNOPSIS}"""), term_width))
|
||||
288
viu_media/assets/scripts/fzf/preview.py
Normal file
288
viu_media/assets/scripts/fzf/preview.py
Normal file
@@ -0,0 +1,288 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# FZF Preview Script Template
|
||||
#
|
||||
# This script is a template. The placeholders in curly braces, like {NAME}
|
||||
# are dynamically filled by python using .replace() during runtime.
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from hashlib import sha256
|
||||
from pathlib import Path
|
||||
|
||||
# --- Template Variables (Injected by Python) ---
|
||||
PREVIEW_MODE = "{PREVIEW_MODE}"
|
||||
IMAGE_CACHE_DIR = Path("{IMAGE_CACHE_DIR}")
|
||||
INFO_CACHE_DIR = Path("{INFO_CACHE_DIR}")
|
||||
IMAGE_RENDERER = "{IMAGE_RENDERER}"
|
||||
HEADER_COLOR = "{HEADER_COLOR}"
|
||||
SEPARATOR_COLOR = "{SEPARATOR_COLOR}"
|
||||
PREFIX = "{PREFIX}"
|
||||
SCALE_UP = "{SCALE_UP}" == "True"
|
||||
|
||||
# --- Arguments ---
|
||||
# sys.argv[1] is usually the raw line from FZF (the anime title/key)
|
||||
TITLE = sys.argv[1] if len(sys.argv) > 1 else ""
|
||||
KEY = """{KEY}"""
|
||||
KEY = KEY + "-" if KEY else KEY
|
||||
|
||||
# Generate the hash to find the cached files
|
||||
hash_id = f"{PREFIX}-{sha256((KEY + TITLE).encode('utf-8')).hexdigest()}"
|
||||
|
||||
|
||||
def get_terminal_dimensions():
    """Return (columns, lines) available to the preview window.

    Prefers fzf's FZF_PREVIEW_COLUMNS / FZF_PREVIEW_LINES environment
    variables, falls back to ``stty size``, and finally to 80x24.
    """
    env = os.environ
    cols = env.get("FZF_PREVIEW_COLUMNS")
    lines = env.get("FZF_PREVIEW_LINES")
    if cols and lines:
        return int(cols), int(lines)

    # Fallback to stty (unlikely inside an fzf preview, but harmless).
    try:
        out = subprocess.check_output(
            ["stty", "size"], text=True, stderr=subprocess.DEVNULL
        )
        rows, columns = out.split()
        return int(columns), int(rows)
    except Exception:
        return 80, 24
|
||||
|
||||
|
||||
def which(cmd):
    """Thin alias for shutil.which, kept so call sites read tersely."""
    return shutil.which(cmd)
|
||||
|
||||
|
||||
def render_kitty(file_path, width, height, scale_up):
    """Render via the Kitty Graphics Protocol.

    Tries ``kitten icat`` (modern), then ``icat`` (alias), then
    ``kitty +kitten icat`` (legacy).  Returns False when none is installed.
    """
    for candidate in (["kitten", "icat"], ["icat"], ["kitty", "+kitten", "icat"]):
        if shutil.which(candidate[0]):
            cmd = candidate
            break
    else:
        return False

    args = [
        "--clear",
        "--transfer-mode=memory",
        "--unicode-placeholder",
        "--stdin=no",
        f"--place={width}x{height}@0x0",
    ]
    if scale_up:
        args.append("--scale-up")
    args.append(file_path)

    subprocess.run(cmd + args, stdout=sys.stdout, stderr=sys.stderr)
    return True
|
||||
|
||||
|
||||
def render_sixel(file_path, width, height):
    """Render as Sixel.

    Prefers chafa, which sizes in terminal cells, over img2sixel, which
    wants pixel dimensions.
    """
    if shutil.which("chafa"):
        subprocess.run(
            ["chafa", "-f", "sixel", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    if shutil.which("img2sixel"):
        # Estimate 1 cell ~= 10 px wide, 20 px tall.
        px_w, px_h = width * 10, height * 20
        subprocess.run(
            ["img2sixel", f"--width={px_w}", f"--height={px_h}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    return False
|
||||
|
||||
|
||||
def render_iterm(file_path, width, height):
    """Render via the iTerm2 inline-image protocol (imgcat, else chafa)."""
    if shutil.which("imgcat"):
        cmd = ["imgcat", "-W", str(width), "-H", str(height), file_path]
    elif shutil.which("chafa"):
        cmd = ["chafa", "-f", "iterm", "-s", f"{width}x{height}", file_path]
    else:
        return False

    subprocess.run(cmd, stdout=sys.stdout, stderr=sys.stderr)
    return True
|
||||
|
||||
|
||||
def render_timg(file_path, width, height):
    """Render with timg (auto-negotiates kitty/sixel/unicode blocks)."""
    if not shutil.which("timg"):
        return False
    subprocess.run(
        ["timg", f"-g{width}x{height}", "--upscale", file_path],
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
    return True
|
||||
|
||||
|
||||
def render_chafa_auto(file_path, width, height):
    """Render with chafa, letting it auto-pick the best supported format."""
    if not shutil.which("chafa"):
        return False
    subprocess.run(
        ["chafa", "-s", f"{width}x{height}", file_path],
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
    return True
|
||||
|
||||
|
||||
def fzf_image_preview(file_path: str):
    """Dispatch to the best available image renderer for this terminal.

    Order: the explicitly configured renderer, then environment-based
    auto-detection (Kitty/Ghostty, iTerm), then generic fallbacks by
    quality; prints a warning when nothing is available.
    """
    width, height = get_terminal_dimensions()

    # 1. Honour an explicitly configured renderer first.
    explicit = {
        "icat": lambda: render_kitty(file_path, width, height, SCALE_UP),
        "system-kitty": lambda: render_kitty(file_path, width, height, SCALE_UP),
        "sixel": lambda: render_sixel(file_path, width, height),
        "system-sixels": lambda: render_sixel(file_path, width, height),
        "imgcat": lambda: render_iterm(file_path, width, height),
        "timg": lambda: render_timg(file_path, width, height),
        "chafa": lambda: render_chafa_auto(file_path, width, height),
    }
    configured = explicit.get(IMAGE_RENDERER)
    if configured and configured():
        return

    # 2. Auto-detection for 'auto'/'system-default' or if the explicit
    #    renderer was unavailable.
    in_kitty_like = os.environ.get("KITTY_WINDOW_ID") or os.environ.get(
        "GHOSTTY_BIN_DIR"
    )
    if in_kitty_like and render_kitty(file_path, width, height, SCALE_UP):
        return
    if os.environ.get("TERM_PROGRAM") == "iTerm.app" and render_iterm(
        file_path, width, height
    ):
        return

    # 3. Generic fallbacks, best quality first.
    fallbacks = (
        lambda: render_kitty(file_path, width, height, SCALE_UP),
        lambda: render_sixel(file_path, width, height),
        lambda: render_timg(file_path, width, height),
        lambda: render_chafa_auto(file_path, width, height),
    )
    for attempt in fallbacks:
        if attempt():
            return

    print("⚠️ No suitable image renderer found (icat, chafa, timg, img2sixel).")
|
||||
|
||||
|
||||
def fzf_text_info_render():
    """Print a separator, then the cached per-item info script's output."""
    cols, _ = get_terminal_dimensions()

    # Full-width separator line in the configured truecolor.
    r, g, b = map(int, SEPARATOR_COLOR.split(","))
    print(f"\x1b[38;2;{r};{g};{b}m" + "─" * cols + "\x1b[0m", flush=True)

    if PREVIEW_MODE in ("text", "full"):
        info_script = INFO_CACHE_DIR / f"{hash_id}.py"
        if info_script.exists():
            subprocess.run(
                [sys.executable, str(info_script), HEADER_COLOR, SEPARATOR_COLOR]
            )
        else:
            # Dim placeholder while the background worker generates the info.
            print("\x1b[2m📝 Loading details...\x1b[0m")
|
||||
|
||||
|
||||
def main():
    """Render the image (if applicable) and the text info preview."""
    # Character/review/airing-schedule entries have no cached cover image.
    wants_image = PREVIEW_MODE in ("image", "full") and PREFIX not in (
        "character",
        "review",
        "airing-schedule",
    )

    if wants_image:
        image_path = IMAGE_CACHE_DIR / f"{hash_id}.png"
        if image_path.exists():
            fzf_image_preview(str(image_path))
            print()  # Spacer
        else:
            print("🖼️ Loading image...")

    fzf_text_info_render()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C while fzf tears the preview down is routine; exit quietly.
        pass
    except Exception as e:
        # Never let a preview crash bubble up into fzf.
        # NOTE(review): this prints to stdout (into the preview pane) while
        # dynamic_preview.py sends the same message to stderr — confirm which
        # behavior is intended and unify.
        print(f"Preview Error: {e}")
|
||||
28
viu_media/assets/scripts/fzf/review_info.py
Normal file
28
viu_media/assets/scripts/fzf/review_info.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import sys

from _ansi_utils import (
    get_terminal_width,
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
)

# Colors are passed in by the preview wrapper as argv: header, separator.
HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

term_width = get_terminal_width()

# {REVIEWER_NAME} and friends are placeholders substituted via
# str.replace() before this script runs; they must remain verbatim.
print("{REVIEWER_NAME}".center(term_width))

print_rule(SEPARATOR_COLOR)
print_table_row("Summary", "{REVIEW_SUMMARY}", HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{REVIEW_BODY}"""), term_width))
|
||||
145
viu_media/assets/scripts/fzf/search.py
Executable file
145
viu_media/assets/scripts/fzf/search.py
Executable file
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python3
|
||||
#
|
||||
# FZF Dynamic Search Script Template
|
||||
#
|
||||
# This script is a template for dynamic search functionality in fzf.
|
||||
# The placeholders in curly braces, like {GRAPHQL_ENDPOINT} are dynamically
|
||||
# filled by Python using .replace() during runtime.
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from urllib import request
|
||||
from urllib.error import URLError
|
||||
|
||||
# --- Template Variables (Injected by Python) ---
|
||||
GRAPHQL_ENDPOINT = "{GRAPHQL_ENDPOINT}"
|
||||
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
|
||||
AUTH_HEADER = "{AUTH_HEADER}"
|
||||
|
||||
# The GraphQL query is injected as a properly escaped JSON string
|
||||
GRAPHQL_QUERY = "{GRAPHQL_QUERY}"
|
||||
|
||||
# --- Get Query from fzf ---
|
||||
# fzf passes the current query as the first argument when using --bind change:reload
|
||||
QUERY = sys.argv[1] if len(sys.argv) > 1 else ""
|
||||
|
||||
# If query is empty, exit with empty results
|
||||
if not QUERY.strip():
|
||||
print("")
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def make_graphql_request(
|
||||
endpoint: str, query: str, variables: dict, auth_token: str = ""
|
||||
) -> dict | None:
|
||||
"""
|
||||
Make a GraphQL request to the specified endpoint.
|
||||
|
||||
Args:
|
||||
endpoint: GraphQL API endpoint URL
|
||||
query: GraphQL query string
|
||||
variables: Query variables as a dictionary
|
||||
auth_token: Optional authorization token (Bearer token)
|
||||
|
||||
Returns:
|
||||
Response JSON as a dictionary, or None if request fails
|
||||
"""
|
||||
payload = {"query": query, "variables": variables}
|
||||
|
||||
headers = {"Content-Type": "application/json", "User-Agent": "viu/1.0"}
|
||||
|
||||
if auth_token:
|
||||
headers["Authorization"] = auth_token
|
||||
|
||||
try:
|
||||
req = request.Request(
|
||||
endpoint,
|
||||
data=json.dumps(payload).encode("utf-8"),
|
||||
headers=headers,
|
||||
method="POST",
|
||||
)
|
||||
|
||||
with request.urlopen(req, timeout=10) as response:
|
||||
return json.loads(response.read().decode("utf-8"))
|
||||
except (URLError, json.JSONDecodeError, Exception) as e:
|
||||
print(f"❌ Request failed: {e}", file=sys.stderr)
|
||||
return None
|
||||
|
||||
|
||||
def extract_title(media_item: dict) -> str:
    """
    Extract the best available title from a media item.

    Args:
        media_item: Media object from GraphQL response

    Returns:
        Title string (english > romaji > native > "Unknown")
    """
    titles = media_item.get("title", {})
    for lang in ("english", "romaji", "native"):
        if titles.get(lang):
            return titles[lang]
    return "Unknown"
|
||||
|
||||
|
||||
def main():
    """Run one search round-trip: query the API, cache raw JSON, print titles."""
    SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)

    variables = {
        "query": QUERY,
        "type": "ANIME",
        "per_page": 50,
        "genre_not_in": ["Hentai"],
    }

    response = make_graphql_request(
        GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER
    )
    if response is None:
        print("❌ Search failed")
        sys.exit(1)

    # Persist the raw response; dynamic_search/preview read it back later.
    try:
        with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f:
            json.dump(response, f, ensure_ascii=False, indent=2)
    except IOError as e:
        print(f"❌ Failed to save results: {e}", file=sys.stderr)
        sys.exit(1)

    if "errors" in response:
        print(f"❌ Search error: {response['errors']}")
        sys.exit(1)

    media_list = response.get("data", {}).get("Page", {}).get("media", [])
    if not media_list:
        print("❌ No results found")
        sys.exit(0)

    # One title per line is exactly what fzf's reload binding expects.
    for media in media_list:
        print(extract_title(media))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # fzf cancelling an in-flight reload is normal; exit cleanly.
        sys.exit(0)
    except Exception as e:
        # Report on stderr so fzf's result list stays clean, and signal
        # failure to the caller.
        print(f"❌ Unexpected error: {e}", file=sys.stderr)
        sys.exit(1)
|
||||
247
viu_media/cli/cli.py
Normal file
247
viu_media/cli/cli.py
Normal file
@@ -0,0 +1,247 @@
|
||||
import logging
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
from click.core import ParameterSource
|
||||
|
||||
from ..core.config import AppConfig
|
||||
from ..core.constants import CLI_NAME, USER_CONFIG, __version__
|
||||
from .config import ConfigLoader
|
||||
from .options import options_from_model
|
||||
from .utils.exception import setup_exceptions_handler
|
||||
from .utils.lazyloader import LazyGroup
|
||||
from .utils.logging import setup_logging
|
||||
|
||||
if TYPE_CHECKING:
    from typing import TypedDict

    from typing_extensions import Unpack

    # Static-only shape of the eager CLI flags consumed directly in cli();
    # exists purely for type checking (guarded by TYPE_CHECKING) and has no
    # runtime cost.
    class Options(TypedDict):
        no_config: bool | None
        trace: bool | None
        dev: bool | None
        log: bool | None
        rich_traceback: bool | None
        rich_traceback_theme: str
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Lazy command registry: maps a CLI subcommand name to the dotted
# "module.attribute" path (relative to the LazyGroup root package) that is
# imported only when the subcommand is actually invoked, keeping startup fast.
commands = {
    "config": "config.config",
    "search": "search.search",
    "anilist": "anilist.anilist",
    "download": "download.download",
    "update": "update.update",
    "registry": "registry.registry",
    "worker": "worker.worker",
    "queue": "queue.queue",
    "completions": "completions.completions",
}
|
||||
|
||||
|
||||
@click.group(
|
||||
cls=LazyGroup,
|
||||
root="viu_media.cli.commands",
|
||||
invoke_without_command=True,
|
||||
lazy_subcommands=commands,
|
||||
context_settings=dict(auto_envvar_prefix=CLI_NAME),
|
||||
)
|
||||
@click.version_option(__version__, "--version")
|
||||
@click.option("--no-config", is_flag=True, help="Don't load the user config file.")
|
||||
@click.option(
|
||||
"--trace", is_flag=True, help="Controls Whether to display tracebacks or not"
|
||||
)
|
||||
@click.option("--dev", is_flag=True, help="Controls Whether the app is in dev mode")
|
||||
@click.option("--log", is_flag=True, help="Controls Whether to log")
|
||||
@click.option(
|
||||
"--rich-traceback",
|
||||
is_flag=True,
|
||||
help="Controls Whether to display a rich traceback",
|
||||
)
|
||||
@click.option(
|
||||
"--rich-traceback-theme",
|
||||
default="github-dark",
|
||||
help="Controls Whether to display a rich traceback",
|
||||
)
|
||||
@options_from_model(AppConfig)
|
||||
@click.pass_context
|
||||
def cli(ctx: click.Context, **options: "Unpack[Options]"):
|
||||
"""
|
||||
The main entry point for the Viu CLI.
|
||||
"""
|
||||
setup_logging(options["log"])
|
||||
setup_exceptions_handler(
|
||||
options["trace"],
|
||||
options["dev"],
|
||||
options["rich_traceback"],
|
||||
options["rich_traceback_theme"],
|
||||
)
|
||||
|
||||
logger.info(f"Current Command: {' '.join(sys.argv)}")
|
||||
cli_overrides = {}
|
||||
param_lookup = {p.name: p for p in ctx.command.params}
|
||||
|
||||
for param_name, param_value in ctx.params.items():
|
||||
source = ctx.get_parameter_source(param_name)
|
||||
if source in (ParameterSource.ENVIRONMENT, ParameterSource.COMMANDLINE):
|
||||
parameter = param_lookup.get(param_name)
|
||||
|
||||
if (
|
||||
parameter
|
||||
and hasattr(parameter, "model_name")
|
||||
and hasattr(parameter, "field_name")
|
||||
):
|
||||
model_name = getattr(parameter, "model_name")
|
||||
field_name = getattr(parameter, "field_name")
|
||||
|
||||
if model_name not in cli_overrides:
|
||||
cli_overrides[model_name] = {}
|
||||
cli_overrides[model_name][field_name] = param_value
|
||||
|
||||
loader = ConfigLoader(config_path=USER_CONFIG)
|
||||
config = (
|
||||
AppConfig.model_validate(cli_overrides)
|
||||
if options["no_config"]
|
||||
else loader.load(cli_overrides)
|
||||
)
|
||||
ctx.obj = config
|
||||
|
||||
if config.general.welcome_screen:
|
||||
import time
|
||||
|
||||
from ..core.constants import APP_CACHE_DIR, USER_NAME, SUPPORT_PROJECT_URL
|
||||
|
||||
last_welcomed_at_file = APP_CACHE_DIR / ".last_welcome"
|
||||
should_welcome = False
|
||||
if last_welcomed_at_file.exists():
|
||||
try:
|
||||
last_welcomed_at = float(
|
||||
last_welcomed_at_file.read_text(encoding="utf-8")
|
||||
)
|
||||
# runs once a month
|
||||
if (time.time() - last_welcomed_at) > 30 * 24 * 3600:
|
||||
should_welcome = True
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to read welcome screen timestamp: {e}")
|
||||
|
||||
else:
|
||||
should_welcome = True
|
||||
if should_welcome:
|
||||
last_welcomed_at_file.write_text(str(time.time()), encoding="utf-8")
|
||||
|
||||
from rich.prompt import Confirm
|
||||
|
||||
if Confirm.ask(f"""\
|
||||
[green]How are you, {USER_NAME} 🙂?
|
||||
If you enjoy the project and would like to support it, you can buy me a coffee at {SUPPORT_PROJECT_URL}.
|
||||
Would you like to open the support page? Select yes to continue — otherwise, enjoy your terminal-anime browsing experience 😁.[/]
|
||||
You can disable this message by turning off the welcome_screen option in the config. It only appears once a month.
|
||||
"""):
|
||||
from webbrowser import open
|
||||
|
||||
open(SUPPORT_PROJECT_URL)
|
||||
|
||||
if config.general.show_new_release:
|
||||
import time
|
||||
|
||||
from ..core.constants import APP_CACHE_DIR
|
||||
|
||||
last_release_file = APP_CACHE_DIR / ".last_release"
|
||||
should_print_release_notes = False
|
||||
if last_release_file.exists():
|
||||
last_release = last_release_file.read_text(encoding="utf-8")
|
||||
current_version = list(map(int, __version__.replace("v", "").split(".")))
|
||||
last_saved_version = list(
|
||||
map(int, last_release.replace("v", "").split("."))
|
||||
)
|
||||
if (
|
||||
(current_version[0] > last_saved_version[0])
|
||||
or (
|
||||
current_version[1] > last_saved_version[1]
|
||||
and current_version[0] == last_saved_version[0]
|
||||
)
|
||||
or (
|
||||
current_version[2] > last_saved_version[2]
|
||||
and current_version[0] == last_saved_version[0]
|
||||
and current_version[1] == last_saved_version[1]
|
||||
)
|
||||
):
|
||||
should_print_release_notes = True
|
||||
|
||||
else:
|
||||
should_print_release_notes = True
|
||||
if should_print_release_notes:
|
||||
last_release_file.write_text(__version__, encoding="utf-8")
|
||||
from .service.feedback import FeedbackService
|
||||
from .utils.update import check_for_updates, print_release_json, update_app
|
||||
from rich.prompt import Confirm
|
||||
|
||||
feedback = FeedbackService(config)
|
||||
feedback.info("Getting release notes...")
|
||||
is_latest, release_json = check_for_updates()
|
||||
if Confirm.ask(
|
||||
"Would you also like to update your config with the latest options and config notes"
|
||||
):
|
||||
import subprocess
|
||||
|
||||
cmd = ["viu", "config", "--update"]
|
||||
print(f"running '{' '.join(cmd)}'...")
|
||||
subprocess.run(cmd)
|
||||
|
||||
if is_latest:
|
||||
print_release_json(release_json)
|
||||
else:
|
||||
print_release_json(release_json)
|
||||
print("It seems theres another update waiting for you as well 😁")
|
||||
click.pause("Press Any Key To Proceed...")
|
||||
|
||||
if config.general.check_for_updates:
|
||||
import time
|
||||
|
||||
from ..core.constants import APP_CACHE_DIR
|
||||
|
||||
last_updated_at_file = APP_CACHE_DIR / ".last_update"
|
||||
should_check_for_update = False
|
||||
if last_updated_at_file.exists():
|
||||
try:
|
||||
last_updated_at_time = float(
|
||||
last_updated_at_file.read_text(encoding="utf-8")
|
||||
)
|
||||
if (
|
||||
time.time() - last_updated_at_time
|
||||
) > config.general.update_check_interval * 3600:
|
||||
should_check_for_update = True
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to check for update: {e}")
|
||||
|
||||
else:
|
||||
should_check_for_update = True
|
||||
if should_check_for_update:
|
||||
last_updated_at_file.write_text(str(time.time()), encoding="utf-8")
|
||||
from .service.feedback import FeedbackService
|
||||
from .utils.update import check_for_updates, print_release_json, update_app
|
||||
|
||||
feedback = FeedbackService(config)
|
||||
feedback.info("Checking for updates...")
|
||||
is_latest, release_json = check_for_updates()
|
||||
if not is_latest:
|
||||
from ..libs.selectors.selector import create_selector
|
||||
|
||||
selector = create_selector(config)
|
||||
if release_json and selector.confirm(
|
||||
"Theres an update available would you like to see the release notes before deciding to update?"
|
||||
):
|
||||
print_release_json(release_json)
|
||||
selector.ask("Enter to continue...")
|
||||
if selector.confirm("Would you like to update?"):
|
||||
update_app()
|
||||
|
||||
if ctx.invoked_subcommand is None:
|
||||
from .commands.anilist import cmd
|
||||
|
||||
ctx.invoke(cmd.anilist)
|
||||
@@ -18,7 +18,7 @@ commands = {
|
||||
@click.group(
|
||||
cls=LazyGroup,
|
||||
name="anilist",
|
||||
root="viu_cli.cli.commands.anilist.commands",
|
||||
root="viu_media.cli.commands.anilist.commands",
|
||||
invoke_without_command=True,
|
||||
help="A beautiful interface that gives you access to a commplete streaming experience",
|
||||
short_help="Access all streaming options",
|
||||
@@ -45,7 +45,9 @@ def auth(config: AppConfig, status: bool, logout: bool):
|
||||
open_success = webbrowser.open(ANILIST_AUTH, new=2)
|
||||
if open_success:
|
||||
feedback.info("Your browser has been opened to obtain an AniList token.")
|
||||
feedback.info(f"or you can visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta].")
|
||||
feedback.info(
|
||||
f"or you can visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
|
||||
)
|
||||
else:
|
||||
feedback.warning(
|
||||
f"Failed to open the browser. Please visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
|
||||
@@ -1,10 +1,10 @@
|
||||
from typing import TYPE_CHECKING, Dict, List
|
||||
|
||||
import click
|
||||
from viu_cli.cli.utils.completion import anime_titles_shell_complete
|
||||
from viu_cli.core.config import AppConfig
|
||||
from viu_cli.core.exceptions import ViuError
|
||||
from viu_cli.libs.media_api.types import (
|
||||
from viu_media.cli.utils.completion import anime_titles_shell_complete
|
||||
from viu_media.core.config import AppConfig
|
||||
from viu_media.core.exceptions import ViuError
|
||||
from viu_media.libs.media_api.types import (
|
||||
MediaFormat,
|
||||
MediaGenre,
|
||||
MediaItem,
|
||||
@@ -112,15 +112,15 @@ if TYPE_CHECKING:
|
||||
)
|
||||
@click.pass_obj
|
||||
def download(config: AppConfig, **options: "Unpack[DownloadOptions]"):
|
||||
from viu_cli.cli.service.download.service import DownloadService
|
||||
from viu_cli.cli.service.feedback import FeedbackService
|
||||
from viu_cli.cli.service.registry import MediaRegistryService
|
||||
from viu_cli.cli.service.watch_history import WatchHistoryService
|
||||
from viu_cli.cli.utils.parser import parse_episode_range
|
||||
from viu_cli.libs.media_api.api import create_api_client
|
||||
from viu_cli.libs.media_api.params import MediaSearchParams
|
||||
from viu_cli.libs.provider.anime.provider import create_provider
|
||||
from viu_cli.libs.selectors import create_selector
|
||||
from viu_media.cli.service.download.service import DownloadService
|
||||
from viu_media.cli.service.feedback import FeedbackService
|
||||
from viu_media.cli.service.registry import MediaRegistryService
|
||||
from viu_media.cli.service.watch_history import WatchHistoryService
|
||||
from viu_media.cli.utils.parser import parse_episode_range
|
||||
from viu_media.libs.media_api.api import create_api_client
|
||||
from viu_media.libs.media_api.params import MediaSearchParams
|
||||
from viu_media.libs.provider.anime.provider import create_provider
|
||||
from viu_media.libs.selectors import create_selector
|
||||
from rich.progress import Progress
|
||||
|
||||
feedback = FeedbackService(config)
|
||||
@@ -1,5 +1,4 @@
|
||||
import json
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import click
|
||||
from viu_cli.core.config import AppConfig
|
||||
from viu_media.core.config import AppConfig
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
|
||||
@@ -11,8 +11,8 @@ def notifications(config: AppConfig):
|
||||
Displays unread notifications from AniList.
|
||||
Running this command will also mark the notifications as read on the AniList website.
|
||||
"""
|
||||
from viu_cli.cli.service.feedback import FeedbackService
|
||||
from viu_cli.libs.media_api.api import create_api_client
|
||||
from viu_media.cli.service.feedback import FeedbackService
|
||||
from viu_media.libs.media_api.api import create_api_client
|
||||
|
||||
from ....service.auth import AuthService
|
||||
|
||||
@@ -251,18 +251,14 @@ def search(config: AppConfig, **options: "Unpack[SearchOptions]"):
|
||||
and start_date_lesser is not None
|
||||
and start_date_greater > start_date_lesser
|
||||
):
|
||||
raise ViuError(
|
||||
"Start date greater cannot be later than start date lesser"
|
||||
)
|
||||
raise ViuError("Start date greater cannot be later than start date lesser")
|
||||
|
||||
if (
|
||||
end_date_greater is not None
|
||||
and end_date_lesser is not None
|
||||
and end_date_greater > end_date_lesser
|
||||
):
|
||||
raise ViuError(
|
||||
"End date greater cannot be later than end date lesser"
|
||||
)
|
||||
raise ViuError("End date greater cannot be later than end date lesser")
|
||||
|
||||
# Build search parameters
|
||||
search_params = MediaSearchParams(
|
||||
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING
|
||||
import click
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from viu_cli.core.config import AppConfig
|
||||
from viu_media.core.config import AppConfig
|
||||
|
||||
|
||||
@click.command(help="Print out your anilist stats")
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user