Mirror of https://github.com/Benexl/FastAnime.git (synced 2026-01-25 18:54:41 -08:00)

Compare commits: 1519c8be17...master (132 commits)
Commits in the compared range (abbreviated SHAs, newest first):

148436e27c, eba9ed4bee, 343fe977ad, 4caafda123, 9ef834c94c, 5e9255b3d5, fd535ad3e3, 121e02a7e2,
2bb62fd0af, a752a9efdd, ac490d9a4b, 1ce2d2740d, ce6294a17b, b550956a3e, e382e4c046, efa1340e41,
ac7e90acdf, 8c5b066019, a826f391c1, 6a31f4191f, b8f77d80e9, 6192252d10, efed80f4dc, e49baed46f,
6e26ac500d, 5db33d2fa0, 0524af6e26, a2fc9e442d, f9ca8bbd79, dd9d9695e7, c9d948ae4b, b9766af11a,
9d72a50916, acb14d025c, ba9b170ba8, ecc4de6ae6, e065c8e8fc, 32df0503d0, 11449378e9, 8837c542f2,
eb8c443775, b052ee8300, f684f561df, 7ed45ce07e, 10d1211388, efa6f4d142, 0ca63dd765, b62d878a0e,
bcc5e7df8e, df8e925eec, 9d9fa55b69, 42f7e1d4e2, 7f4a1f265a, 12ef447eaf, 75b1b8fab4, 6f4155dd65,
20ce2f6ca3, dbbfe0331b, 04ae196d5f, fe92ff8716, c047377289, fcbaa7fb0d, 87c87ebca7, e1272ddf35,
5fe59e1ddb, 83ad67a4a8, 94866b68f3, 5f7e10a510, 95586eb36f, c01c08c03b, 14e1f44696, 36b71c0751,
6a5d7a0116, 91efee9065, 69d3d2e032, 29ba77f795, a4950efa02, bbd7931790, c3ae5f9053, bf06d7ee2c,
41aaf92bae, d38dc3194f, 54233aca79, 6b8dfba57e, 3b008696d5, ece1f77e99, 7b9de8620b, 725754ea1a,
80771f65ea, c8c4e1b2c0, f4958cc0cc, 1f72e0a579, 803c8316a7, 26bc84e2eb, 901d1e87c5, 523766868c,
bd9bf24e1c, f27c0b8548, 76c1dcd5ac, 25a46bd242, a70db611f7, 091edb3a9b, 9050dd7787, 393b9e6ed6,
5193df2197, 6ccd96d252, e8387f3db9, 23ebff3f42, 8e803e8ecb, 61fcd39188, 313f8369d7, bee73b3f9a,
f647b7419a, 901c4422b5, 08ae8786c3, 64093204ad, 8440ffb5e5, 6e287d320d, a7b0f21deb, 71b668894b,
8b3a57ed07, b2f9c8349a, 25fe1e5e01, 45ff463f7a, 29ce664e4c, 2217f011af, 5960a7c502, bd0309ee85,
3724f06e33, d20af89fc8, 3872b4c8a8, 9545b893e1
.envrc (7 changed lines):

```diff
@@ -1,5 +1,6 @@
 VIU_APP_NAME="viu-dev"
-export VIU_APP_NAME
-if command -v nix >/dev/null;then
-  use flake
+PATH="$PWD/.venv/bin:$PATH"
+export PATH VIU_APP_NAME
+if command -v nix >/dev/null; then
+  use flake
 fi
```
.github/workflows/release-binaries.yml (vendored, new file, 152 lines):

```yaml
name: Build Release Binaries

on:
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      tag:
        description: "Tag/version to build (leave empty for latest)"
        required: false
        type: string

permissions:
  contents: write

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-22.04
            target: linux
            asset_name: viu-linux-x86_64
            executable: viu
          - os: windows-latest
            target: windows
            asset_name: viu-windows-x86_64.exe
            executable: viu.exe
          - os: macos-latest
            target: macos
            asset_name: viu-macos-x86_64
            executable: viu

    runs-on: ${{ matrix.os }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.tag || github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true

      - name: Install system dependencies (Linux)
        if: runner.os == 'Linux'
        run: |
          sudo apt-get update
          sudo apt-get install -y libdbus-1-dev libglib2.0-dev

      - name: Install dependencies
        run: uv sync --all-extras --all-groups

      - name: Build executable with PyInstaller
        run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean

      - name: Rename executable
        shell: bash
        run: mv dist/${{ matrix.executable }} dist/${{ matrix.asset_name }}

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.asset_name }}
          path: dist/${{ matrix.asset_name }}
          if-no-files-found: error

      - name: Upload to Release
        if: github.event_name == 'release'
        uses: softprops/action-gh-release@v2
        with:
          files: dist/${{ matrix.asset_name }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Build for macOS ARM (Apple Silicon)
  build-macos-arm:
    runs-on: macos-14

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.tag || github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true

      - name: Install dependencies
        run: uv sync --all-extras --all-groups

      - name: Build executable with PyInstaller
        run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean

      - name: Rename executable
        run: mv dist/viu dist/viu-macos-arm64

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: viu-macos-arm64
          path: dist/viu-macos-arm64
          if-no-files-found: error

      - name: Upload to Release
        if: github.event_name == 'release'
        uses: softprops/action-gh-release@v2
        with:
          files: dist/viu-macos-arm64
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Create checksums after all builds complete
  checksums:
    needs: [build, build-macos-arm]
    runs-on: ubuntu-latest
    if: github.event_name == 'release'

    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts
          merge-multiple: true

      - name: Generate checksums
        run: |
          cd artifacts
          sha256sum * > SHA256SUMS.txt
          cat SHA256SUMS.txt

      - name: Upload checksums to Release
        uses: softprops/action-gh-release@v2
        with:
          files: artifacts/SHA256SUMS.txt
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
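Since the `checksums` job publishes a `SHA256SUMS.txt` next to the binaries, a download can be verified before installing it. A minimal stdlib-only sketch (the asset and sums file names come from the workflow above; running it from the download directory is an assumption):

```python
# Verify a downloaded release binary against the published SHA256SUMS.txt.
import hashlib
from pathlib import Path

def verify(binary: str, sums_file: str = "SHA256SUMS.txt") -> bool:
    digest = hashlib.sha256(Path(binary).read_bytes()).hexdigest()
    for line in Path(sums_file).read_text().splitlines():
        # sha256sum lines look like "<hex digest>  <file name>"
        expected, _, name = line.strip().partition("  ")
        if name == binary:
            return expected == digest
    raise KeyError(f"{binary} not listed in {sums_file}")

print(verify("viu-linux-x86_64"))  # True if the download is intact
```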
.github/workflows/stale.yml (vendored, 6 changed lines; the daily schedule is commented out, leaving only the manual trigger):

```diff
@@ -1,9 +1,9 @@
 name: Mark Stale Issues and Pull Requests
 
 on:
-  schedule:
-    # Runs every day at 6:30 UTC
-    - cron: "30 6 * * *"
+  # schedule:
+  # Runs every day at 6:30 UTC
+  # - cron: "30 6 * * *"
   # Allows you to run this workflow manually from the Actions tab for testing
   workflow_dispatch:
```
README.md (126 changed lines):

````diff
@@ -32,6 +32,11 @@
 
 </details>
 
+> [!IMPORTANT]
+> This project scrapes public-facing websites for its streaming/downloading capabilities and primarily acts as a TUI client for AniList, Jikan, and many other media APIs. The developer(s) of this application have no affiliation with these content providers. This application hosts zero content and is intended for educational and personal use only. Use at your own risk.
+>
+> [**Read the Full Disclaimer**](DISCLAIMER.md)
+
 ## Core Features
 
 * 📺 **Interactive TUI:** Browse, search, and manage your AniList library in a rich terminal interface powered by `fzf`, `rofi`, or a built-in selector.
@@ -44,7 +49,7 @@
 
 ## Installation
 
-Viu runs on any platform with Python 3.10+, including Windows, macOS, Linux, and Android (via Termux).
+Viu runs on Windows, macOS, Linux, and Android (via Termux). Pre-built binaries are available for quick installation without Python, or you can install via Python 3.10+ package managers.
 
 ### Prerequisites
@@ -59,6 +64,39 @@ For the best experience, please install these external tools:
 * [**ffmpeg**](https://www.ffmpeg.org/) - Required for downloading HLS streams and merging subtitles.
 * [**webtorrent-cli**](https://github.com/webtorrent/webtorrent-cli) - For streaming torrents directly.
 
+### Pre-built Binaries (Recommended for Quick Start)
+
+The easiest way to get started is to download a pre-built, self-contained binary from the [**releases page**](https://github.com/viu-media/viu/releases/latest). These binaries include all dependencies and **do not require Python** to be installed.
+
+**Available for:**
+* **Linux** (x86_64): `viu-linux-x86_64`
+* **Windows** (x86_64): `viu-windows-x86_64.exe`
+* **macOS** (Intel x86_64): `viu-macos-x86_64`
+* **macOS** (Apple Silicon ARM64): `viu-macos-arm64`
+
+**Installation Steps:**
+1. Download the appropriate binary for your platform from the [**releases page**](https://github.com/viu-media/viu/releases/latest).
+2. **Linux/macOS:** Make it executable:
+   ```bash
+   # Replace with the actual binary name you downloaded
+   chmod +x viu-linux-x86_64
+   ```
+   Then move it to a directory in your PATH:
+   ```bash
+   # Option 1: System-wide installation (requires sudo)
+   sudo mv viu-linux-x86_64 /usr/local/bin/viu
+
+   # Option 2: User directory installation
+   mkdir -p ~/.local/bin
+   mv viu-linux-x86_64 ~/.local/bin/viu
+   # Make sure ~/.local/bin is in your PATH
+   ```
+   **Windows:** Simply rename `viu-windows-x86_64.exe` to `viu.exe` and place it in a directory in your PATH, or run it directly.
+3. Verify the installation:
+   ```bash
+   viu --version
+   ```
+
 ### Recommended Installation (uv)
 
 The best way to install Viu is with [**uv**](https://github.com/astral-sh/uv), a lightning-fast Python package manager.
@@ -107,6 +145,80 @@ uv tool install "viu-media[notifications]" # For desktop notifications
 # Git version (latest commit)
 yay -S viu-media-git
 ```
+
+#### Termux
+You may need to have Rust installed; see this issue: https://github.com/pydantic/pydantic-core/issues/1012#issuecomment-2511269688.
+
+```bash
+# Recommended (with pip, for more control)
+pkg install python
+pkg install rust # required because of pydantic
+
+# NOTE: order matters
+
+# get pydantic from the termux user repository
+pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/
+
+# the above will take a while; add -v if you want more output and to see that something is happening
+pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/ -v
+
+# now you can install viu
+pip install viu-media
+
+# === optional deps ===
+
+# yt-dlp for downloading m3u8 and hls streams
+pip install yt-dlp[default,curl-cffi]
+
+# you may also need ffmpeg for processing the videos
+pkg install ffmpeg
+
+# tip: if you also want YouTube functionality
+pip install yt-dlp-ejs
+
+# it requires a js runtime,
+# e.g. the recommended one:
+pkg install deno
+
+# for faster fuzzy search
+pip install thefuzz
+
+# for faster scraping, though the difference is barely noticeable
+pip install lxml --extra-index-url https://termux-user-repository.github.io/pypi/
+
+# if compilation fails you need:
+pkg install libxml2 libxslt
+
+# == ui setup ==
+pkg install fzf
+
+# then enable fzf in the config
+viu --selector fzf config --update
+
+# if you want previews as well, set the preview option
+# (images aren't that pretty, so you can stick with text over full)
+viu --preview text config --update
+
+# if you set preview to full you need a terminal image renderer
+pkg install chafa
+
+# == player setup ==
+# for this you must install the player from the Play Store;
+# search for mpv or vlc (vlc recommended, since it has a nicer ui).
+# the only limitation is that it's currently not possible to pass headers
+# to the android players through android intents,
+# so use servers like sharepoint and wixmp
+# (this is not an issue when downloading ;))
+# if you previously installed a player with 'pkg', uninstall it
+
+# okay, now you are all set; I promise the hassle is worth it :)
+# a video of it working is linked below for motivation
+# (recorded on waydroid, which is Android for Linux, sort of like an emulator such as BlueStacks)
+```
+
+https://github.com/user-attachments/assets/0c628421-a439-4dea-91bb-7153e8f20ccf
+
 #### Using pipx (for isolated environments)
 ```bash
@@ -142,7 +254,7 @@ Get up and running in three simple steps:
    ```bash
    viu anilist auth
    ```
-   This will open your browser. Authorize the app and paste the obtained token back into the terminal.
+   This will open your browser. Authorize the app and paste the obtained token back into the terminal. Alternatively, you can pass the token directly as an argument, or provide a path to a text file containing the token.
 
 2. **Launch the Interactive TUI:**
    ```bash
@@ -323,14 +435,10 @@ You can run the background worker as a systemd service for persistence.
 systemctl --user daemon-reload
 systemctl --user enable --now viu-worker.service
 ```
 
-## Project using it
-**[Inazuma](https://github.com/viu-media/Inazuma)** - official GUI wrapper over viu, built in KivyMD
-
 ## Contributing
 
 Contributions are welcome! Whether it's reporting a bug, proposing a feature, or writing code, your help is appreciated. Please read our [**Contributing Guidelines**](CONTRIBUTIONS.md) to get started.
 
 ## Disclaimer
 
 > [!IMPORTANT]
 > This project scrapes public-facing websites. The developer(s) of this application have no affiliation with these content providers. This application hosts zero content and is intended for educational and personal use only. Use at your own risk.
 >
 > [**Read the Full Disclaimer**](DISCLAIMER.md)
````
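The reworded auth step says the token can be passed directly or as a path to a text file; the new test suite later in this diff pins down that behavior, including the "Token file is empty" error. A rough sketch of that argument handling (not the project's actual implementation; `resolve_token` is a hypothetical name):

```python
from pathlib import Path

def resolve_token(arg: str) -> str:
    """Treat `arg` as a token file path if one exists, else as the raw token."""
    path = Path(arg)
    if path.is_file():
        token = path.read_text().strip()
        if not token:
            # Mirrors the error message asserted in test_auth_with_empty_token_file
            raise ValueError(f"Token file is empty: {path}")
        return token
    return arg
```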
bundle/pyinstaller.spec:

```diff
@@ -1,28 +1,56 @@
 # -*- mode: python ; coding: utf-8 -*-
 import sys
 from PyInstaller.utils.hooks import collect_data_files, collect_submodules
 
 block_cipher = None
 
+# Platform-specific settings
+is_windows = sys.platform == 'win32'
+is_macos = sys.platform == 'darwin'
+
 # Collect all required data files
 datas = [
-    ('viu/assets/*', 'viu/assets'),
+    ('../viu_media/assets', 'viu_media/assets'),
 ]
 
 # Collect all required hidden imports
+# Include viu_media and all its submodules to ensure menu modules are bundled
 hiddenimports = [
     'click',
     'rich',
     'requests',
     'yt_dlp',
     'python_mpv',
     'fuzzywuzzy',
-    'viu',
-] + collect_submodules('viu')
+    'viu_media',
+    'viu_media.cli.interactive.menu',
+    'viu_media.cli.interactive.menu.media',
+    # Explicit menu modules (PyInstaller doesn't always pick these up)
+    'viu_media.cli.interactive.menu.media.downloads',
+    'viu_media.cli.interactive.menu.media.download_episodes',
+    'viu_media.cli.interactive.menu.media.dynamic_search',
+    'viu_media.cli.interactive.menu.media.episodes',
+    'viu_media.cli.interactive.menu.media.main',
+    'viu_media.cli.interactive.menu.media.media_actions',
+    'viu_media.cli.interactive.menu.media.media_airing_schedule',
+    'viu_media.cli.interactive.menu.media.media_characters',
+    'viu_media.cli.interactive.menu.media.media_review',
+    'viu_media.cli.interactive.menu.media.player_controls',
+    'viu_media.cli.interactive.menu.media.play_downloads',
+    'viu_media.cli.interactive.menu.media.provider_search',
+    'viu_media.cli.interactive.menu.media.results',
+    'viu_media.cli.interactive.menu.media.servers',
+] + collect_submodules('viu_media')
+
+
+# Exclude OpenSSL libraries on Linux to avoid version conflicts
+import sys
+binaries = []
+if sys.platform == 'linux':
+    # Remove any bundled libssl or libcrypto
+    binaries = [b for b in binaries if not any(lib in b[0] for lib in ['libssl', 'libcrypto'])]
 
 a = Analysis(
-    ['./viu/viu.py'],  # Changed entry point
+    ['../viu_media/viu.py'],
     pathex=[],
-    binaries=[],
+    binaries=binaries,
     datas=datas,
     hiddenimports=hiddenimports,
     hookspath=[],
@@ -32,16 +60,18 @@ a = Analysis(
     win_no_prefer_redirects=False,
     win_private_assemblies=False,
     cipher=block_cipher,
-    strip=True,  # Strip debug information
-    optimize=2,  # Optimize bytecode
-    noarchive=False
+    noarchive=False,
 )
 
 pyz = PYZ(
     a.pure,
     a.zipped_data,
-    optimize=2,  # Optimize bytecode
-    cipher=block_cipher
+    cipher=block_cipher,
 )
 
+# Icon path - only use .ico on Windows
+icon_path = '../viu_media/assets/icons/logo.ico' if is_windows else None
+
 exe = EXE(
     pyz,
     a.scripts,
@@ -52,7 +82,7 @@ exe = EXE(
     name='viu',
     debug=False,
     bootloader_ignore_signals=False,
-    strip=True,
+    strip=not is_windows,  # strip doesn't work well on Windows without proper tools
     upx=True,
     upx_exclude=[],
     runtime_tmpdir=None,
@@ -61,5 +91,5 @@ exe = EXE(
     target_arch=None,
     codesign_identity=None,
     entitlements_file=None,
-    icon='viu/assets/logo.ico'
+    icon=icon_path,
 )
```
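The long explicit `hiddenimports` list exists because PyInstaller's static analysis only follows literal `import` statements; modules loaded dynamically are invisible to it and get left out of the bundle. A toy illustration of the failure mode, with the module path built at runtime so the analyzer cannot see it:

```python
# PyInstaller finds no static reference to this module at analysis time, so
# without a hiddenimports entry it is omitted from the bundle and this line
# raises ModuleNotFoundError inside the frozen executable.
import importlib

menu_name = "viu_media.cli.interactive.menu.media." + "downloads"  # assembled at runtime
menu = importlib.import_module(menu_name)
```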
pyproject.toml:

```diff
@@ -1,16 +1,16 @@
 [project]
 name = "viu-media"
-version = "3.2.8"
+version = "3.3.7"
 description = "A browser anime site experience from the terminal"
 license = "UNLICENSE"
 readme = "README.md"
 requires-python = ">=3.11"
 dependencies = [
-  "click>=8.1.7",
-  "httpx>=0.28.1",
-  "inquirerpy>=0.3.4",
-  "pydantic>=2.11.7",
-  "rich>=13.9.2",
+    "click>=8.1.7",
+    "httpx>=0.28.1",
+    "inquirerpy>=0.3.4",
+    "pydantic>=2.11.7",
+    "rich>=13.9.2",
 ]
 
 [project.scripts]
@@ -18,32 +18,27 @@ viu = 'viu_media:Cli'
 
 [project.optional-dependencies]
 standard = [
-  "thefuzz>=0.22.1",
-  "yt-dlp>=2025.7.21",
-  "pycryptodomex>=3.23.0",
-  "pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality
-  "pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality
-  "dbus-python; sys_platform == 'linux'", # For Linux-specific functionality (e.g., notifications)
-  "plyer>=2.1.0",
-  "lxml>=6.0.0"
+    "thefuzz>=0.22.1",
+    "yt-dlp>=2025.7.21",
+    "pycryptodomex>=3.23.0",
+    "pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality
+    "pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality
+    "dbus-python; sys_platform == 'linux'", # For Linux-specific functionality (e.g., notifications)
+    "plyer>=2.1.0",
+    "lxml>=6.0.0",
 ]
 notifications = [
-  "dbus-python>=1.4.0",
+    "pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality
+    "pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality
+    "dbus-python>=1.4.0; sys_platform == 'linux'",
+    "plyer>=2.1.0",
 ]
-mpv = [
-  "mpv>=1.0.7",
-]
+mpv = ["mpv>=1.0.7"]
 torrent = ["libtorrent>=2.0.11"]
 lxml = ["lxml>=6.0.0"]
 discord = ["pypresence>=4.3.0"]
-download = [
-  "pycryptodomex>=3.23.0",
-  "yt-dlp>=2025.7.21",
-]
-torrents = [
-  "libtorrent>=2.0.11",
-]
+download = ["pycryptodomex>=3.23.0", "yt-dlp>=2025.7.21"]
+torrents = ["libtorrent>=2.0.11"]
 
 [build-system]
 requires = ["hatchling"]
@@ -51,12 +46,12 @@ build-backend = "hatchling.build"
 
 [dependency-groups]
 dev = [
-  "pre-commit>=4.0.1",
-  "pyinstaller>=6.11.1",
-  "pyright>=1.1.384",
-  "pytest>=8.3.3",
-  "pytest-httpx>=0.35.0",
-  "ruff>=0.6.9",
+    "pre-commit>=4.0.1",
+    "pyinstaller>=6.11.1",
+    "pyright>=1.1.384",
+    "pytest>=8.3.3",
+    "pytest-httpx>=0.35.0",
+    "ruff>=0.6.9",
 ]
 
 [tool.pytest.ini_options]
```
tests/cli/commands/anilist/commands/test_auth.py (new file, 284 lines):

```python
from unittest.mock import MagicMock, patch

import pytest
from click.testing import CliRunner

from viu_media.cli.commands.anilist.commands.auth import auth


@pytest.fixture
def runner():
    return CliRunner()


@pytest.fixture
def mock_config():
    config = MagicMock()
    config.user.interactive = True
    return config


@pytest.fixture
def mock_auth_service():
    with patch("viu_media.cli.service.auth.AuthService") as mock:
        yield mock


@pytest.fixture
def mock_feedback_service():
    with patch("viu_media.cli.service.feedback.FeedbackService") as mock:
        yield mock


@pytest.fixture
def mock_selector():
    with patch("viu_media.libs.selectors.selector.create_selector") as mock:
        yield mock


@pytest.fixture
def mock_api_client():
    with patch("viu_media.libs.media_api.api.create_api_client") as mock:
        yield mock


@pytest.fixture
def mock_webbrowser():
    with patch("viu_media.cli.commands.anilist.commands.auth.webbrowser") as mock:
        yield mock


def test_auth_with_token_argument(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
):
    """Test 'viu anilist auth <token>'."""
    api_client_instance = mock_api_client.return_value
    profile_mock = MagicMock()
    profile_mock.name = "testuser"
    api_client_instance.authenticate.return_value = profile_mock

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, ["test_token"], obj=mock_config)

    assert result.exit_code == 0
    mock_api_client.assert_called_with("anilist", mock_config)
    api_client_instance.authenticate.assert_called_with("test_token")
    auth_service_instance.save_user_profile.assert_called_with(
        profile_mock, "test_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as testuser! ✨")


def test_auth_with_token_file(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
    tmp_path,
):
    """Test 'viu anilist auth <path/to/token.txt>'."""
    token_file = tmp_path / "token.txt"
    token_file.write_text("file_token")

    api_client_instance = mock_api_client.return_value
    profile_mock = MagicMock()
    profile_mock.name = "testuser"
    api_client_instance.authenticate.return_value = profile_mock

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, [str(token_file)], obj=mock_config)

    assert result.exit_code == 0
    mock_api_client.assert_called_with("anilist", mock_config)
    api_client_instance.authenticate.assert_called_with("file_token")
    auth_service_instance.save_user_profile.assert_called_with(
        profile_mock, "file_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as testuser! ✨")


def test_auth_with_empty_token_file(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
    tmp_path,
):
    """Test 'viu anilist auth' with an empty token file."""
    token_file = tmp_path / "token.txt"
    token_file.write_text("")

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, [str(token_file)], obj=mock_config)

    assert result.exit_code == 0
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.error.assert_called_with(f"Token file is empty: {token_file}")


def test_auth_interactive(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
    mock_webbrowser,
):
    """Test 'viu anilist auth' interactive mode."""
    mock_webbrowser.open.return_value = True

    selector_instance = mock_selector.return_value
    selector_instance.ask.return_value = "interactive_token"

    api_client_instance = mock_api_client.return_value
    profile_mock = MagicMock()
    profile_mock.name = "testuser"
    api_client_instance.authenticate.return_value = profile_mock

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, [], obj=mock_config)

    assert result.exit_code == 0
    selector_instance.ask.assert_called_with("Enter your AniList Access Token")
    api_client_instance.authenticate.assert_called_with("interactive_token")
    auth_service_instance.save_user_profile.assert_called_with(
        profile_mock, "interactive_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as testuser! ✨")


def test_auth_status_logged_in(
    runner, mock_config, mock_auth_service, mock_feedback_service
):
    """Test 'viu anilist auth --status' when logged in."""
    auth_service_instance = mock_auth_service.return_value
    user_data_mock = MagicMock()
    user_data_mock.user_profile = "testuser"
    auth_service_instance.get_auth.return_value = user_data_mock

    result = runner.invoke(auth, ["--status"], obj=mock_config)

    assert result.exit_code == 0
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Logged in as: testuser")


def test_auth_status_logged_out(
    runner, mock_config, mock_auth_service, mock_feedback_service
):
    """Test 'viu anilist auth --status' when logged out."""
    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, ["--status"], obj=mock_config)

    assert result.exit_code == 0
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.error.assert_called_with("Not logged in.")


def test_auth_logout(
    runner, mock_config, mock_auth_service, mock_feedback_service, mock_selector
):
    """Test 'viu anilist auth --logout'."""
    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = True

    result = runner.invoke(auth, ["--logout"], obj=mock_config)

    assert result.exit_code == 0
    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.clear_user_profile.assert_called_once()
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("You have been logged out.")


def test_auth_logout_cancel(
    runner, mock_config, mock_auth_service, mock_feedback_service, mock_selector
):
    """Test 'viu anilist auth --logout' when user cancels."""
    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = False

    result = runner.invoke(auth, ["--logout"], obj=mock_config)

    assert result.exit_code == 0
    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.clear_user_profile.assert_not_called()


def test_auth_already_logged_in_relogin_yes(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
):
    """Test 'viu anilist auth' when already logged in and user chooses to relogin."""
    auth_service_instance = mock_auth_service.return_value
    auth_profile_mock = MagicMock()
    auth_profile_mock.user_profile.name = "testuser"
    auth_service_instance.get_auth.return_value = auth_profile_mock

    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = True
    selector_instance.ask.return_value = "new_token"

    api_client_instance = mock_api_client.return_value
    new_profile_mock = MagicMock()
    new_profile_mock.name = "newuser"
    api_client_instance.authenticate.return_value = new_profile_mock

    result = runner.invoke(auth, [], obj=mock_config)

    assert result.exit_code == 0
    selector_instance.confirm.assert_called_with(
        "You are already logged in as testuser. Would you like to relogin"
    )
    auth_service_instance.save_user_profile.assert_called_with(
        new_profile_mock, "new_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as newuser! ✨")


def test_auth_already_logged_in_relogin_no(
    runner, mock_config, mock_auth_service, mock_feedback_service, mock_selector
):
    """Test 'viu anilist auth' when already logged in and user chooses not to relogin."""
    auth_service_instance = mock_auth_service.return_value
    auth_profile_mock = MagicMock()
    auth_profile_mock.user_profile.name = "testuser"
    auth_service_instance.get_auth.return_value = auth_profile_mock

    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = False

    result = runner.invoke(auth, [], obj=mock_config)

    assert result.exit_code == 0
    auth_service_instance.save_user_profile.assert_not_called()
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_not_called()
```
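These tests depend only on `pytest`, `click`'s `CliRunner`, and `unittest.mock`, so they should run in isolation; assuming the repository layout implied by the imports, something like:

```python
# Run only the new auth-command tests (path assumed from the import layout).
import pytest

raise SystemExit(pytest.main(["tests/cli/commands/anilist/commands/test_auth.py", "-v"]))
```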
New empty package markers:

- tests/libs/__init__.py
- tests/libs/media_api/__init__.py
- tests/libs/media_api/anilist/__init__.py
tests/libs/media_api/anilist/test_mapper.py (new file, 54 lines):

```python
from typing import Any

from viu_media.libs.media_api.anilist.mapper import to_generic_user_profile
from viu_media.libs.media_api.anilist.types import AnilistViewerData
from viu_media.libs.media_api.types import UserProfile


def test_to_generic_user_profile_success():
    data: AnilistViewerData = {
        "data": {
            "Viewer": {
                "id": 123,
                "name": "testuser",
                "avatar": {
                    "large": "https://example.com/avatar.png",
                    "medium": "https://example.com/avatar_medium.png",
                    "extraLarge": "https://example.com/avatar_extraLarge.png",
                    "small": "https://example.com/avatar_small.png",
                },
                "bannerImage": "https://example.com/banner.png",
                "token": "test_token",
            }
        }
    }
    profile = to_generic_user_profile(data)
    assert isinstance(profile, UserProfile)
    assert profile.id == 123
    assert profile.name == "testuser"
    assert profile.avatar_url == "https://example.com/avatar.png"
    assert profile.banner_url == "https://example.com/banner.png"


def test_to_generic_user_profile_data_none():
    data: Any = {"data": None}
    profile = to_generic_user_profile(data)
    assert profile is None


def test_to_generic_user_profile_no_data_key():
    data: Any = {"errors": [{"message": "Invalid token"}]}
    profile = to_generic_user_profile(data)
    assert profile is None


def test_to_generic_user_profile_no_viewer_key():
    data: Any = {"data": {"Page": {}}}
    profile = to_generic_user_profile(data)
    assert profile is None


def test_to_generic_user_profile_viewer_none():
    data: Any = {"data": {"Viewer": None}}
    profile = to_generic_user_profile(data)
    assert profile is None
```
Provider title-alias mappings (JSON; filename not captured):

```diff
@@ -4,7 +4,9 @@
     "Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
     "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
     "Hazurewaku no \"Joutai Ijou Skill\" de Saikyou ni Natta Ore ga Subete wo Juurin suru made": "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
-    "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season"
+    "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
+    "Hanka×Hanka (2011)": "Hunter × Hunter (2011)",
+    "Burichi -": "bleach"
   },
   "hianime": {
     "My Star": "Oshi no Ko"
```
viu_media/assets/scripts/fzf/_ansi_utils.py (new file, 202 lines):

````python
"""
ANSI utilities for FZF preview scripts.

Lightweight stdlib-only utilities to replace the Rich dependency in preview
scripts. Provides RGB color formatting, table rendering, and markdown stripping.
"""

import os
import re
import shutil
import textwrap
import unicodedata


def get_terminal_width() -> int:
    """
    Get terminal width, prioritizing FZF preview environment variables.

    Returns:
        Terminal width in columns
    """
    fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
    if fzf_cols:
        return int(fzf_cols)
    return shutil.get_terminal_size((80, 24)).columns


def display_width(text: str) -> int:
    """
    Calculate the actual display width of text, accounting for wide characters.

    Args:
        text: Text to measure

    Returns:
        Display width in terminal columns
    """
    width = 0
    for char in text:
        # East Asian Width property: 'F' (Fullwidth) and 'W' (Wide) take 2 columns
        if unicodedata.east_asian_width(char) in ("F", "W"):
            width += 2
        else:
            width += 1
    return width


def rgb_color(r: int, g: int, b: int, text: str, bold: bool = False) -> str:
    """
    Format text with RGB color using ANSI escape codes.

    Args:
        r: Red component (0-255)
        g: Green component (0-255)
        b: Blue component (0-255)
        text: Text to colorize
        bold: Whether to make text bold

    Returns:
        ANSI-escaped colored text
    """
    color_code = f"\x1b[38;2;{r};{g};{b}m"
    bold_code = "\x1b[1m" if bold else ""
    reset = "\x1b[0m"
    return f"{color_code}{bold_code}{text}{reset}"


def parse_color(color_csv: str) -> tuple[int, int, int]:
    """
    Parse RGB color from comma-separated string.

    Args:
        color_csv: Color as 'R,G,B' string

    Returns:
        Tuple of (r, g, b) integers
    """
    parts = color_csv.split(",")
    return int(parts[0]), int(parts[1]), int(parts[2])


def print_rule(sep_color: str) -> None:
    """
    Print a horizontal rule line.

    Args:
        sep_color: Color as 'R,G,B' string
    """
    width = get_terminal_width()
    r, g, b = parse_color(sep_color)
    print(rgb_color(r, g, b, "─" * width))


def print_table_row(
    key: str, value: str, header_color: str, key_width: int, value_width: int
) -> None:
    """
    Print a two-column table row with left-aligned key and right-aligned value.

    Args:
        key: Left column text (header/key)
        value: Right column text (value)
        header_color: Color for key as 'R,G,B' string
        key_width: Width for key column
        value_width: Width for value column
    """
    r, g, b = parse_color(header_color)
    key_styled = rgb_color(r, g, b, key, bold=True)

    # Get actual terminal width
    term_width = get_terminal_width()

    # Calculate display widths accounting for wide characters
    key_display_width = display_width(key)

    # Calculate actual value width based on terminal and key display width
    actual_value_width = max(20, term_width - key_display_width - 2)

    # Wrap value if it's too long (use character count, not display width, for wrapping)
    value_lines = textwrap.wrap(str(value), width=actual_value_width) if value else [""]

    if not value_lines:
        value_lines = [""]

    # Print first line with properly aligned value
    first_line = value_lines[0]
    first_line_display_width = display_width(first_line)

    # Use manual spacing to right-align based on display width
    spacing = term_width - key_display_width - first_line_display_width - 2
    if spacing > 0:
        print(f"{key_styled} {' ' * spacing}{first_line}")
    else:
        print(f"{key_styled} {first_line}")

    # Print remaining wrapped lines (left-aligned, indented)
    for line in value_lines[1:]:
        print(f"{' ' * (key_display_width + 2)}{line}")


def strip_markdown(text: str) -> str:
    """
    Strip markdown formatting from text.

    Removes:
    - Headers (# ## ###)
    - Bold (**text** or __text__)
    - Italic (*text* or _text_)
    - Links ([text](url))
    - Code blocks (```code```)
    - Inline code (`code`)

    Args:
        text: Markdown-formatted text

    Returns:
        Plain text with markdown removed
    """
    if not text:
        return ""

    # Remove code blocks first
    text = re.sub(r"```[\s\S]*?```", "", text)

    # Remove inline code
    text = re.sub(r"`([^`]+)`", r"\1", text)

    # Remove headers
    text = re.sub(r"^#{1,6}\s+", "", text, flags=re.MULTILINE)

    # Remove bold (** or __)
    text = re.sub(r"\*\*(.+?)\*\*", r"\1", text)
    text = re.sub(r"__(.+?)__", r"\1", text)

    # Remove italic (* or _)
    text = re.sub(r"\*(.+?)\*", r"\1", text)
    text = re.sub(r"_(.+?)_", r"\1", text)

    # Remove links, keep text
    text = re.sub(r"\[(.+?)\]\(.+?\)", r"\1", text)

    # Remove images
    text = re.sub(r"!\[.*?\]\(.+?\)", "", text)

    return text.strip()


def wrap_text(text: str, width: int | None = None) -> str:
    """
    Wrap text to terminal width.

    Args:
        text: Text to wrap
        width: Width to wrap to (defaults to terminal width)

    Returns:
        Wrapped text
    """
    if width is None:
        width = get_terminal_width()

    return textwrap.fill(text, width=width)
````
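A quick usage sketch of the helpers above. The title, row values, and the two 'R,G,B' color strings are made-up examples; the function signatures come from the file itself:

```python
from _ansi_utils import print_rule, print_table_row, rgb_color, wrap_text

HEADER = "97,175,239"     # key color as an 'R,G,B' string
SEPARATOR = "92,99,112"   # rule color

print(rgb_color(97, 175, 239, "Fullmetal Alchemist: Brotherhood", bold=True))
print_rule(SEPARATOR)
print_table_row("Episodes", "64", HEADER, 15, 60)
print_table_row("Status", "FINISHED", HEADER, 15, 60)
print_rule(SEPARATOR)
print(wrap_text("A long synopsis would be wrapped to the preview pane width here."))
```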
viu_media/assets/scripts/fzf/_filter_parser.py (new file, 323 lines):

```python
#!/usr/bin/env python3
"""
Filter Parser for Dynamic Search

This module provides a parser for the special filter syntax used in dynamic search.
Filter syntax allows users to add filters inline with their search query.

SYNTAX:
    @filter:value           - Apply a filter with the given value
    @filter:value1,value2   - Apply multiple values (for array filters)
    @filter:!value          - Exclude/negate a filter value

SUPPORTED FILTERS:
    @genre:action,comedy    - Filter by genres
    @genre:!hentai          - Exclude genre
    @status:airing          - Filter by status (airing, finished, upcoming, cancelled, hiatus)
    @year:2024              - Filter by season year
    @season:winter          - Filter by season (winter, spring, summer, fall)
    @format:tv,movie        - Filter by format (tv, movie, ova, ona, special, music)
    @sort:score             - Sort by (score, popularity, trending, title, date)
    @score:>80              - Minimum score
    @score:<50              - Maximum score
    @popularity:>10000      - Minimum popularity
    @onlist                 - Only show anime on user's list
    @onlist:false           - Only show anime NOT on user's list

EXAMPLES:
    "naruto @genre:action @status:finished"
    "isekai @year:2024 @season:winter @sort:score"
    "@genre:action,adventure @status:airing"
    "romance @genre:!hentai @format:tv,movie"
"""

import re
from typing import Any, Dict, List, Optional, Tuple

# Mapping of user-friendly filter names to GraphQL variable names
FILTER_ALIASES = {
    # Status aliases
    "airing": "RELEASING",
    "releasing": "RELEASING",
    "finished": "FINISHED",
    "completed": "FINISHED",
    "upcoming": "NOT_YET_RELEASED",
    "not_yet_released": "NOT_YET_RELEASED",
    "unreleased": "NOT_YET_RELEASED",
    "cancelled": "CANCELLED",
    "canceled": "CANCELLED",
    "hiatus": "HIATUS",
    "paused": "HIATUS",
    # Format aliases
    "tv": "TV",
    "tv_short": "TV_SHORT",
    "tvshort": "TV_SHORT",
    "movie": "MOVIE",
    "film": "MOVIE",
    "ova": "OVA",
    "ona": "ONA",
    "special": "SPECIAL",
    "music": "MUSIC",
    # Season aliases
    "winter": "WINTER",
    "spring": "SPRING",
    "summer": "SUMMER",
    "fall": "FALL",
    "autumn": "FALL",
    # Sort aliases
    "score": "SCORE_DESC",
    "score_desc": "SCORE_DESC",
    "score_asc": "SCORE",
    "popularity": "POPULARITY_DESC",
    "popularity_desc": "POPULARITY_DESC",
    "popularity_asc": "POPULARITY",
    "trending": "TRENDING_DESC",
    "trending_desc": "TRENDING_DESC",
    "trending_asc": "TRENDING",
    "title": "TITLE_ROMAJI",
    "title_desc": "TITLE_ROMAJI_DESC",
    "date": "START_DATE_DESC",
    "date_desc": "START_DATE_DESC",
    "date_asc": "START_DATE",
    "newest": "START_DATE_DESC",
    "oldest": "START_DATE",
    "favourites": "FAVOURITES_DESC",
    "favorites": "FAVOURITES_DESC",
    "episodes": "EPISODES_DESC",
}

# Genre name normalization (lowercase -> proper case)
GENRE_NAMES = {
    "action": "Action",
    "adventure": "Adventure",
    "comedy": "Comedy",
    "drama": "Drama",
    "ecchi": "Ecchi",
    "fantasy": "Fantasy",
    "horror": "Horror",
    "mahou_shoujo": "Mahou Shoujo",
    "mahou": "Mahou Shoujo",
    "magical_girl": "Mahou Shoujo",
    "mecha": "Mecha",
    "music": "Music",
    "mystery": "Mystery",
    "psychological": "Psychological",
    "romance": "Romance",
    "sci-fi": "Sci-Fi",
    "scifi": "Sci-Fi",
    "sci_fi": "Sci-Fi",
    "slice_of_life": "Slice of Life",
    "sol": "Slice of Life",
    "sports": "Sports",
    "supernatural": "Supernatural",
    "thriller": "Thriller",
    "hentai": "Hentai",
}

# Filter pattern: @key:value or @key (boolean flags)
FILTER_PATTERN = re.compile(r"@(\w+)(?::([^\s]+))?", re.IGNORECASE)

# Comparison operators for numeric filters
COMPARISON_PATTERN = re.compile(r"^([<>]=?)?(\d+)$")


def normalize_value(value: str, value_type: str) -> str:
    """Normalize a filter value based on its type."""
    value_lower = value.lower().strip()

    if value_type == "genre":
        return GENRE_NAMES.get(value_lower, value.title())
    elif value_type in ("status", "format", "season", "sort"):
        return FILTER_ALIASES.get(value_lower, value.upper())

    return value


def parse_value_list(value_str: str) -> Tuple[List[str], List[str]]:
    """
    Parse a comma-separated value string, separating includes from excludes.

    Returns:
        Tuple of (include_values, exclude_values)
    """
    includes = []
    excludes = []

    for val in value_str.split(","):
        val = val.strip()
        if not val:
            continue
        if val.startswith("!"):
            excludes.append(val[1:])
        else:
            includes.append(val)

    return includes, excludes


def parse_comparison(value: str) -> Tuple[Optional[str], Optional[int]]:
    """
    Parse a comparison value like ">80" or "<50".

    Returns:
        Tuple of (operator, number) or (None, None) if invalid
    """
    match = COMPARISON_PATTERN.match(value)
    if match:
        operator = match.group(1) or ">"  # Default to greater than
        number = int(match.group(2))
        return operator, number
    return None, None


def parse_filters(query: str) -> Tuple[str, Dict[str, Any]]:
    """
    Parse a search query and extract filter directives.

    Args:
        query: The full search query including filter syntax

    Returns:
        Tuple of (clean_query, filters_dict)
        - clean_query: The query with filter syntax removed
        - filters_dict: Dictionary of GraphQL variables to apply
    """
    filters: Dict[str, Any] = {}

    # Find all filter matches
    matches = list(FILTER_PATTERN.finditer(query))

    for match in matches:
        filter_name = match.group(1).lower()
        filter_value = match.group(2)  # May be None for boolean flags

        # Handle different filter types
        if filter_name == "genre":
            if filter_value:
                includes, excludes = parse_value_list(filter_value)
                if includes:
                    normalized = [normalize_value(v, "genre") for v in includes]
                    filters.setdefault("genre_in", []).extend(normalized)
                if excludes:
                    normalized = [normalize_value(v, "genre") for v in excludes]
                    filters.setdefault("genre_not_in", []).extend(normalized)

        elif filter_name == "status":
            if filter_value:
                includes, excludes = parse_value_list(filter_value)
                if includes:
                    normalized = [normalize_value(v, "status") for v in includes]
                    filters.setdefault("status_in", []).extend(normalized)
                if excludes:
                    normalized = [normalize_value(v, "status") for v in excludes]
                    filters.setdefault("status_not_in", []).extend(normalized)

        elif filter_name == "format":
            if filter_value:
                includes, _ = parse_value_list(filter_value)
                if includes:
                    normalized = [normalize_value(v, "format") for v in includes]
                    filters.setdefault("format_in", []).extend(normalized)

        elif filter_name == "year":
            if filter_value:
                try:
                    filters["seasonYear"] = int(filter_value)
                except ValueError:
                    pass  # Invalid year, skip

        elif filter_name == "season":
            if filter_value:
                filters["season"] = normalize_value(filter_value, "season")

        elif filter_name == "sort":
            if filter_value:
                sort_val = normalize_value(filter_value, "sort")
                filters["sort"] = [sort_val]

        elif filter_name == "score":
            if filter_value:
                op, num = parse_comparison(filter_value)
                if num is not None:
                    if op in (">", ">="):
                        filters["averageScore_greater"] = num
                    elif op in ("<", "<="):
                        filters["averageScore_lesser"] = num

        elif filter_name == "popularity":
            if filter_value:
                op, num = parse_comparison(filter_value)
                if num is not None:
                    if op in (">", ">="):
                        filters["popularity_greater"] = num
                    elif op in ("<", "<="):
                        filters["popularity_lesser"] = num

        elif filter_name == "onlist":
            if filter_value is None or filter_value.lower() in ("true", "yes", "1"):
                filters["on_list"] = True
            elif filter_value.lower() in ("false", "no", "0"):
                filters["on_list"] = False

        elif filter_name == "tag":
            if filter_value:
                includes, excludes = parse_value_list(filter_value)
                if includes:
                    # Tags use title case typically
                    normalized = [v.replace("_", " ").title() for v in includes]
                    filters.setdefault("tag_in", []).extend(normalized)
                if excludes:
                    normalized = [v.replace("_", " ").title() for v in excludes]
                    filters.setdefault("tag_not_in", []).extend(normalized)

    # Remove filter syntax from query to get clean search text
    clean_query = FILTER_PATTERN.sub("", query).strip()
    # Clean up multiple spaces
    clean_query = re.sub(r"\s+", " ", clean_query).strip()

    return clean_query, filters


def get_help_text() -> str:
    """Return a help string describing the filter syntax."""
    return """
╭─────────────────── Filter Syntax Help ───────────────────╮
│                                                           │
│ @genre:action,comedy       Filter by genres               │
│ @genre:!hentai             Exclude genre                  │
│ @status:airing             Status: airing, finished,      │
│                            upcoming, cancelled, hiatus    │
│ @year:2024                 Filter by year                 │
│ @season:winter             winter, spring, summer, fall   │
│ @format:tv,movie           tv, movie, ova, ona, special   │
│ @sort:score                score, popularity, trending,   │
│                            date, title, newest, oldest    │
│ @score:>80                 Minimum score                  │
│ @score:<50                 Maximum score                  │
│ @popularity:>10000         Minimum popularity             │
│ @onlist                    Only on your list              │
│ @onlist:false              Not on your list               │
│ @tag:isekai,reincarnation  Filter by tags                 │
│                                                           │
│ Examples:                                                 │
│   naruto @genre:action @status:finished                   │
│   @genre:action,adventure @year:2024 @sort:score          │
│   isekai @season:winter @year:2024                        │
│                                                           │
╰──────────────────────────────────────────────────────────╯
""".strip()


if __name__ == "__main__":
    # Test the parser
    import json
    import sys

    if len(sys.argv) > 1:
        test_query = " ".join(sys.argv[1:])
        clean, filters = parse_filters(test_query)
        print(f"Original: {test_query}")
        print(f"Clean query: {clean}")
        print(f"Filters: {json.dumps(filters, indent=2)}")
    else:
        print(get_help_text())
```
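Given the alias and genre tables above, the parser's behavior is easy to check; for example, this query yields the following, per the normalization rules in the file:

```python
from _filter_parser import parse_filters

clean, filters = parse_filters("naruto @genre:action @status:finished @score:>80")
assert clean == "naruto"
assert filters == {
    "genre_in": ["Action"],           # 'action' normalized via GENRE_NAMES
    "status_in": ["FINISHED"],        # 'finished' normalized via FILTER_ALIASES
    "averageScore_greater": 80,       # '>80' parsed by COMPARISON_PATTERN
}
```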
New fzf preview template (filename not captured; 36 lines; the `{...}` placeholders are substituted by Python before execution):

```python
import sys
from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{ANIME_TITLE}".center(term_width))

rows = [
    ("Total Episodes", "{TOTAL_EPISODES}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Upcoming Episodes", "{UPCOMING_EPISODES}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{SCHEDULE_TABLE}"""), term_width))
```
New fzf character-preview template (filename not captured; 47 lines):

```python
import sys
from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{CHARACTER_NAME}".center(term_width))

rows = [
    ("Native Name", "{CHARACTER_NATIVE_NAME}"),
    ("Gender", "{CHARACTER_GENDER}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Age", "{CHARACTER_AGE}"),
    ("Blood Type", "{CHARACTER_BLOOD_TYPE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Birthday", "{CHARACTER_BIRTHDAY}"),
    ("Favourites", "{CHARACTER_FAVOURITES}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{CHARACTER_DESCRIPTION}"""), term_width))
```
499
viu_media/assets/scripts/fzf/dynamic_preview.py
Executable file
499
viu_media/assets/scripts/fzf/dynamic_preview.py
Executable file
@@ -0,0 +1,499 @@
|
||||
#!/usr/bin/env python3
#
# FZF Dynamic Preview Script for Search Results
#
# This script handles previews for dynamic search by reading from the cached
# search results JSON and generating preview content on-the-fly.
# Template variables are injected by Python using .replace()

import json
import os
import shutil
import subprocess
import sys
from hashlib import sha256
from pathlib import Path

# Import the utility functions
from _ansi_utils import (
    get_terminal_width,
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
)


# --- Template Variables (Injected by Python) ---
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
IMAGE_CACHE_DIR = Path("{IMAGE_CACHE_DIR}")
PREVIEW_MODE = "{PREVIEW_MODE}"
IMAGE_RENDERER = "{IMAGE_RENDERER}"
HEADER_COLOR = "{HEADER_COLOR}"
SEPARATOR_COLOR = "{SEPARATOR_COLOR}"
SCALE_UP = "{SCALE_UP}" == "True"

# --- Arguments ---
# sys.argv[1] is the selected anime title from fzf
SELECTED_TITLE = sys.argv[1] if len(sys.argv) > 1 else ""


def format_number(num):
    """Format number with thousand separators."""
    if num is None:
        return "N/A"
    return f"{num:,}"


def format_score_stars(score):
    """Format score as stars out of 6."""
    if score is None:
        return "N/A"
    # Convert 0-100 score to 0-6 stars, capped at 6 for consistency
    stars = min(round(score * 6 / 100), 6)
    return "⭐" * stars + f" ({score}/100)"


def format_date(date_obj):
    """Format date object to string."""
    if not date_obj or date_obj == "null":
        return "N/A"

    year = date_obj.get("year")
    month = date_obj.get("month")
    day = date_obj.get("day")

    if not year:
        return "N/A"
    if month and day:
        return f"{day}/{month}/{year}"
    if month:
        return f"{month}/{year}"
    return str(year)


def get_media_from_results(title):
    """Find media item in search results by title."""
    if not SEARCH_RESULTS_FILE.exists():
        return None

    try:
        with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
            data = json.load(f)

        media_list = data.get("data", {}).get("Page", {}).get("media", [])

        for media in media_list:
            title_obj = media.get("title", {})
            eng = title_obj.get("english")
            rom = title_obj.get("romaji")
            nat = title_obj.get("native")

            if title in (eng, rom, nat):
                return media

        return None
    except Exception as e:
        print(f"Error reading search results: {e}", file=sys.stderr)
        return None


def download_image(url: str, output_path: Path) -> bool:
    """Download image from URL and save to file."""
    try:
        # Try using urllib (stdlib)
        from urllib import request

        req = request.Request(url, headers={"User-Agent": "viu/1.0"})
        with request.urlopen(req, timeout=5) as response:
            data = response.read()
        output_path.write_bytes(data)
        return True
    except Exception:
        # Silently fail - preview will just not show image
        return False


def which(cmd):
    """Check if command exists."""
    return shutil.which(cmd)


def get_terminal_dimensions():
    """Get terminal dimensions from FZF environment."""
    fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
    fzf_lines = os.environ.get("FZF_PREVIEW_LINES")

    if fzf_cols and fzf_lines:
        return int(fzf_cols), int(fzf_lines)

    try:
        rows, cols = (
            subprocess.check_output(
                ["stty", "size"], text=True, stderr=subprocess.DEVNULL
            )
            .strip()
            .split()
        )
        return int(cols), int(rows)
    except Exception:
        return 80, 24


def render_kitty(file_path, width, height, scale_up):
    """Render using the Kitty Graphics Protocol (kitten/icat)."""
    cmd = []
    if which("kitten"):
        cmd = ["kitten", "icat"]
    elif which("icat"):
        cmd = ["icat"]
    elif which("kitty"):
        cmd = ["kitty", "+kitten", "icat"]

    if not cmd:
        return False

    args = [
        "--clear",
        "--transfer-mode=memory",
        "--unicode-placeholder",
        "--stdin=no",
        f"--place={width}x{height}@0x0",
    ]

    if scale_up:
        args.append("--scale-up")

    args.append(file_path)

    subprocess.run(cmd + args, stdout=sys.stdout, stderr=sys.stderr)
    return True


def render_sixel(file_path, width, height):
    """Render using Sixel."""
    if which("chafa"):
        subprocess.run(
            ["chafa", "-f", "sixel", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    if which("img2sixel"):
        pixel_width = width * 10
        pixel_height = height * 20
        subprocess.run(
            [
                "img2sixel",
                f"--width={pixel_width}",
                f"--height={pixel_height}",
                file_path,
            ],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    return False


def render_iterm(file_path, width, height):
    """Render using iTerm2 Inline Image Protocol."""
    if which("imgcat"):
        subprocess.run(
            ["imgcat", "-W", str(width), "-H", str(height), file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    if which("chafa"):
        subprocess.run(
            ["chafa", "-f", "iterm", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_timg(file_path, width, height):
    """Render using timg."""
    if which("timg"):
        subprocess.run(
            ["timg", f"-g{width}x{height}", "--upscale", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_chafa_auto(file_path, width, height):
    """Render using Chafa in auto mode."""
    if which("chafa"):
        subprocess.run(
            ["chafa", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def fzf_image_preview(file_path: str):
    """Main dispatch function to choose the best renderer."""
    cols, lines = get_terminal_dimensions()
    width = cols
    height = lines

    # Check explicit configuration
    if IMAGE_RENDERER == "icat" or IMAGE_RENDERER == "system-kitty":
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    elif IMAGE_RENDERER == "sixel" or IMAGE_RENDERER == "system-sixels":
        if render_sixel(file_path, width, height):
            return

    elif IMAGE_RENDERER == "imgcat":
        if render_iterm(file_path, width, height):
            return

    elif IMAGE_RENDERER == "timg":
        if render_timg(file_path, width, height):
            return

    elif IMAGE_RENDERER == "chafa":
        if render_chafa_auto(file_path, width, height):
            return

    # Auto-detection / Fallback
    if os.environ.get("KITTY_WINDOW_ID") or os.environ.get("GHOSTTY_BIN_DIR"):
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    if os.environ.get("TERM_PROGRAM") == "iTerm.app":
        if render_iterm(file_path, width, height):
            return

    # Try standard tools in order of quality/preference
    if render_kitty(file_path, width, height, SCALE_UP):
        return
    if render_sixel(file_path, width, height):
        return
    if render_timg(file_path, width, height):
        return
    if render_chafa_auto(file_path, width, height):
        return

    print("⚠️ No suitable image renderer found (icat, chafa, timg, img2sixel).")


def main():
    if not SELECTED_TITLE:
        print("No selection")
        return

    # Get the media data from cached search results
    media = get_media_from_results(SELECTED_TITLE)

    if not media:
        print("Loading preview...")
        return

    term_width = get_terminal_width()

    # Extract media information
    title_obj = media.get("title", {})
    title = (
        title_obj.get("english")
        or title_obj.get("romaji")
        or title_obj.get("native")
        or "Unknown"
    )

    # Show image if in image or full mode
    if PREVIEW_MODE in ("image", "full"):
        cover_image = media.get("coverImage", {}).get("large", "")
        if cover_image:
            # Ensure image cache directory exists
            IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)

            # Generate hash matching the preview worker pattern
            # Use "anime-" prefix and hash of just the title (no KEY prefix for dynamic search)
            hash_id = f"anime-{sha256(SELECTED_TITLE.encode('utf-8')).hexdigest()}"
            image_file = IMAGE_CACHE_DIR / f"{hash_id}.png"

            # Download image if not cached
            if not image_file.exists():
                download_image(cover_image, image_file)

            # Try to render the image
            if image_file.exists():
                fzf_image_preview(str(image_file))
                print()  # Spacer
            else:
                print("🖼️ Loading image...")
                print()

    # Show text info if in text or full mode
    if PREVIEW_MODE in ("text", "full"):
        # Separator line
        r, g, b = map(int, SEPARATOR_COLOR.split(","))
        separator = f"\x1b[38;2;{r};{g};{b}m" + ("─" * term_width) + "\x1b[0m"
        print(separator, flush=True)

        # Title centered
        print(title.center(term_width))

        # Extract data
        status = media.get("status", "Unknown")
        format_type = media.get("format", "Unknown")
        episodes = media.get("episodes", "??")
        duration = media.get("duration")
        duration_str = f"{duration} min/ep" if duration else "Unknown"

        score = media.get("averageScore")
        score_str = format_score_stars(score)

        favourites = format_number(media.get("favourites", 0))
        popularity = format_number(media.get("popularity", 0))

        genres = ", ".join(media.get("genres", [])) or "Unknown"

        start_date = format_date(media.get("startDate"))
        end_date = format_date(media.get("endDate"))

        studios_list = media.get("studios", {}).get("nodes", [])
        # Studios are those with isAnimationStudio=true
        studios = ", ".join([s["name"] for s in studios_list if s.get("name") and s.get("isAnimationStudio")]) or "N/A"
        # Producers are those with isAnimationStudio=false
        producers = ", ".join([s["name"] for s in studios_list if s.get("name") and not s.get("isAnimationStudio")]) or "N/A"

        synonyms_list = media.get("synonyms", [])
        # Include romaji in synonyms if different from title
        romaji = title_obj.get("romaji")
        if romaji and romaji != title and romaji not in synonyms_list:
            synonyms_list = [romaji] + synonyms_list
        synonyms = ", ".join(synonyms_list) or "N/A"

        # Tags
        tags_list = media.get("tags", [])
        tags = ", ".join([t.get("name", "") for t in tags_list if t.get("name")]) or "N/A"

        # Next airing episode
        next_airing = media.get("nextAiringEpisode")
        if next_airing:
            next_ep = next_airing.get("episode", "?")
            airing_at = next_airing.get("airingAt")
            if airing_at:
                from datetime import datetime
                try:
                    dt = datetime.fromtimestamp(airing_at)
                    next_episode_str = f"Episode {next_ep} on {dt.strftime('%A, %d %B %Y at %H:%M')}"
                except (ValueError, OSError):
                    next_episode_str = f"Episode {next_ep}"
            else:
                next_episode_str = f"Episode {next_ep}"
        else:
            next_episode_str = "N/A"

        # User list status
        media_list_entry = media.get("mediaListEntry")
        if media_list_entry:
            user_status = media_list_entry.get("status", "NOT_ON_LIST")
            user_progress = f"Episode {media_list_entry.get('progress', 0)}"
        else:
            user_status = "NOT_ON_LIST"
            user_progress = "0"

        description = media.get("description", "No description available.")
        description = strip_markdown(description)

        # Print sections matching media_info.py structure exactly
        rows = [
            ("Score", score_str),
            ("Favorites", favourites),
            ("Popularity", popularity),
            ("Status", status),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("Episodes", str(episodes)),
            ("Duration", duration_str),
            ("Next Episode", next_episode_str),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("Genres", genres),
            ("Format", format_type),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("List Status", user_status),
            ("Progress", user_progress),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("Start Date", start_date),
            ("End Date", end_date),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("Studios", studios),
            ("Producers", producers),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("Synonyms", synonyms),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        rows = [
            ("Tags", tags),
        ]

        print_rule(SEPARATOR_COLOR)
        for key, value in rows:
            print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

        print_rule(SEPARATOR_COLOR)
        print(wrap_text(description, term_width))


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print(f"Preview Error: {e}", file=sys.stderr)
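To make the formatting helpers above concrete, here is how they behave on sample inputs (round(83 * 6 / 100) is 5, so an 83/100 score renders five stars):

>>> format_score_stars(83)
'⭐⭐⭐⭐⭐ (83/100)'
>>> format_date({"year": 2024, "month": 4, "day": 7})
'7/4/2024'
>>> format_date({"year": 2024})
'2024'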
@@ -0,0 +1,49 @@
import sys
from _ansi_utils import print_rule, print_table_row, get_terminal_width

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{TITLE}".center(term_width))

rows = [
    ("Duration", "{DURATION}"),
    ("Status", "{STATUS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Total Episodes", "{EPISODES}"),
    ("Next Episode", "{NEXT_EPISODE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Progress", "{USER_PROGRESS}"),
    ("List Status", "{USER_STATUS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Start Date", "{START_DATE}"),
    ("End Date", "{END_DATE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
@@ -1,89 +0,0 @@
import sys
from rich.console import Console
from rich.table import Table
from rich.rule import Rule
from rich.markdown import Markdown

console = Console(force_terminal=True, color_system="truecolor")

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]


def rule(title: str | None = None):
    console.print(Rule(style=f"rgb({SEPARATOR_COLOR})"))


console.print("{TITLE}", justify="center")

left = [
    (
        "Score",
        "Favorites",
        "Popularity",
        "Status",
    ),
    (
        "Episodes",
        "Duration",
        "Next Episode",
    ),
    (
        "Genres",
        "Format",
    ),
    (
        "List Status",
        "Progress",
    ),
    (
        "Start Date",
        "End Date",
    ),
    ("Studios",),
    ("Synonymns",),
    ("Tags",),
]
right = [
    (
        "{SCORE}",
        "{FAVOURITES}",
        "{POPULARITY}",
        "{STATUS}",
    ),
    (
        "{EPISODES}",
        "{DURATION}",
        "{NEXT_EPISODE}",
    ),
    (
        "{GENRES}",
        "{FORMAT}",
    ),
    (
        "{USER_STATUS}",
        "{USER_PROGRESS}",
    ),
    (
        "{START_DATE}",
        "{END_DATE}",
    ),
    ("{STUDIOS}",),
    ("{SYNONYMNS}",),
    ("{TAGS}",),
]


for L_grp, R_grp in zip(left, right):
    table = Table.grid(expand=True)
    table.add_column(justify="left", no_wrap=True)
    table.add_column(justify="right", overflow="fold")
    for L, R in zip(L_grp, R_grp):
        table.add_row(f"[bold rgb({HEADER_COLOR})]{L} [/]", f"{R}")

    rule()
    console.print(table)


rule()
console.print(Markdown("""{SYNOPSIS}"""))
94	viu_media/assets/scripts/fzf/media_info.py	Normal file
@@ -0,0 +1,94 @@
import sys
from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{TITLE}".center(term_width))

# Define table data
rows = [
    ("Score", "{SCORE}"),
    ("Favorites", "{FAVOURITES}"),
    ("Popularity", "{POPULARITY}"),
    ("Status", "{STATUS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Episodes", "{EPISODES}"),
    ("Duration", "{DURATION}"),
    ("Next Episode", "{NEXT_EPISODE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Genres", "{GENRES}"),
    ("Format", "{FORMAT}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("List Status", "{USER_STATUS}"),
    ("Progress", "{USER_PROGRESS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Start Date", "{START_DATE}"),
    ("End Date", "{END_DATE}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Studios", "{STUDIOS}"),
    ("Producers", "{PRODUCERS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Synonyms", "{SYNONYMNS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

rows = [
    ("Tags", "{TAGS}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{SYNOPSIS}"""), term_width))
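Every rendered template reads its header and separator colors from sys.argv[1] and sys.argv[2] as "R,G,B" strings. A sketch of how a filled script might be launched — the path and color values below are placeholders, not the project's real configuration:

import subprocess
import sys

subprocess.run(
    [sys.executable, "/tmp/media_info_filled.py", "215,0,95", "88,88,88"],
    check=False,  # a failed preview should not crash the caller
)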
@@ -1,22 +0,0 @@
#!/bin/sh
#
# Viu Airing Schedule Info Script Template
# This script formats and displays airing schedule details in the FZF preview pane.
# Python injects the actual data values into the placeholders.

draw_rule

print_kv "Anime Title" "{ANIME_TITLE}"

draw_rule

print_kv "Total Episodes" "{TOTAL_EPISODES}"
print_kv "Upcoming Episodes" "{UPCOMING_EPISODES}"

draw_rule

echo "{C_KEY}Next Episodes:{RESET}"
echo
echo "{SCHEDULE_TABLE}" | fold -s -w "$WIDTH"

draw_rule
@@ -1,75 +0,0 @@
#!/bin/sh
#
# FZF Airing Schedule Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}


print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

title={}
hash=$(generate_sha256 "$title")

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "📅 Loading airing schedule..."
    fi
fi
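Note that the cache key contract spans two languages: the shell side hashes the raw title bytes with generate_sha256, and the Python worker that populates {INFO_CACHE_DIR} must produce the same digest. The matching Python side (as the final hunk in this diff also shows) is simply:

from hashlib import sha256

def cache_key(title: str) -> str:
    # Equivalent to: echo -n "$title" | sha256sum | awk '{print $1}'
    return sha256(title.encode("utf-8")).hexdigest()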
@@ -1,41 +0,0 @@
#!/bin/sh
#
# Viu Character Info Script Template
# This script formats and displays character details in the FZF preview pane.
# Python injects the actual data values into the placeholders.

draw_rule

print_kv "Character Name" "{CHARACTER_NAME}"

if [ -n "{CHARACTER_NATIVE_NAME}" ] && [ "{CHARACTER_NATIVE_NAME}" != "N/A" ]; then
    print_kv "Native Name" "{CHARACTER_NATIVE_NAME}"
fi

draw_rule

if [ -n "{CHARACTER_GENDER}" ] && [ "{CHARACTER_GENDER}" != "Unknown" ]; then
    print_kv "Gender" "{CHARACTER_GENDER}"
fi

if [ -n "{CHARACTER_AGE}" ] && [ "{CHARACTER_AGE}" != "Unknown" ]; then
    print_kv "Age" "{CHARACTER_AGE}"
fi

if [ -n "{CHARACTER_BLOOD_TYPE}" ] && [ "{CHARACTER_BLOOD_TYPE}" != "N/A" ]; then
    print_kv "Blood Type" "{CHARACTER_BLOOD_TYPE}"
fi

if [ -n "{CHARACTER_BIRTHDAY}" ] && [ "{CHARACTER_BIRTHDAY}" != "N/A" ]; then
    print_kv "Birthday" "{CHARACTER_BIRTHDAY}"
fi

if [ -n "{CHARACTER_FAVOURITES}" ] && [ "{CHARACTER_FAVOURITES}" != "0" ]; then
    print_kv "Favorites" "{CHARACTER_FAVOURITES}"
fi

draw_rule

echo "{CHARACTER_DESCRIPTION}" | fold -s -w "$WIDTH"

draw_rule
@@ -1,130 +0,0 @@
#!/bin/sh
#
# FZF Character Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}

fzf_preview() {
    file=$1

    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi

    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo

    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

title={}
hash=$(generate_sha256 "$title")


# FIXME: Disabled since they cover the text perhaps its aspect ratio related or image format not sure
# if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
#     image_file="{IMAGE_CACHE_DIR}{PATH_SEP}$hash.png"
#     if [ -f "$image_file" ]; then
#         fzf_preview "$image_file"
#         echo # Add a newline for spacing
#     fi
# fi

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "👤 Loading character details..."
    fi
fi
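For context, templates like this one are handed to fzf as its preview command; fzf substitutes the selected line for {} and exports FZF_PREVIEW_COLUMNS/FZF_PREVIEW_LINES to the child process. A hypothetical wiring, not the project's actual launcher:

import subprocess

titles = "\n".join(["Roronoa Zoro", "Nico Robin"])
subprocess.run(
    ["fzf", "--preview", "sh /tmp/character_preview_filled.sh {}"],  # path is illustrative
    input=titles.encode(),
)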
@@ -1,315 +0,0 @@
#!/bin/bash
#
# FZF Dynamic Preview Script Template
#
# This script handles previews for dynamic search results by parsing the JSON
# search results file and extracting info for the selected item.
# The placeholders in curly braces are dynamically filled by Python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80}
IMAGE_RENDERER="{IMAGE_RENDERER}"
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
IMAGE_CACHE_PATH="{IMAGE_CACHE_PATH}"
INFO_CACHE_PATH="{INFO_CACHE_PATH}"
PATH_SEP="{PATH_SEP}"

# Color codes injected by Python
C_TITLE="{C_TITLE}"
C_KEY="{C_KEY}"
C_VALUE="{C_VALUE}"
C_RULE="{C_RULE}"
RESET="{RESET}"

# Selected item from fzf
SELECTED_ITEM={}

generate_sha256() {
    local input="$1"
    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}

fzf_preview() {
    file=$1
    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi
    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo
    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}

print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}

draw_rule() {
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

clean_html() {
    echo "$1" | sed 's/<[^>]*>//g' | sed 's/&lt;/</g' | sed 's/&gt;/>/g' | sed 's/&amp;/\&/g' | sed 's/&quot;/"/g' | sed "s/&#39;/'/g"
}

format_date() {
    local date_obj="$1"
    if [ "$date_obj" = "null" ] || [ -z "$date_obj" ]; then
        echo "N/A"
        return
    fi

    # Extract year, month, day from the date object
    if command -v jq >/dev/null 2>&1; then
        year=$(echo "$date_obj" | jq -r '.year // "N/A"' 2>/dev/null || echo "N/A")
        month=$(echo "$date_obj" | jq -r '.month // ""' 2>/dev/null || echo "")
        day=$(echo "$date_obj" | jq -r '.day // ""' 2>/dev/null || echo "")
    else
        year=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('year', 'N/A'))" 2>/dev/null || echo "N/A")
        month=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('month', ''))" 2>/dev/null || echo "")
        day=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('day', ''))" 2>/dev/null || echo "")
    fi

    if [ "$year" = "N/A" ] || [ "$year" = "null" ]; then
        echo "N/A"
    elif [ -n "$month" ] && [ "$month" != "null" ] && [ -n "$day" ] && [ "$day" != "null" ]; then
        echo "$day/$month/$year"
    elif [ -n "$month" ] && [ "$month" != "null" ]; then
        echo "$month/$year"
    else
        echo "$year"
    fi
}

# If no selection or search results file doesn't exist, show placeholder
if [ -z "$SELECTED_ITEM" ] || [ ! -f "$SEARCH_RESULTS_FILE" ]; then
    echo "${C_TITLE}Dynamic Search Preview${RESET}"
    draw_rule
    echo "Type to search for anime..."
    echo "Results will appear here as you type."
    echo
    echo "DEBUG:"
    echo "SELECTED_ITEM='$SELECTED_ITEM'"
    echo "SEARCH_RESULTS_FILE='$SEARCH_RESULTS_FILE'"
    if [ -f "$SEARCH_RESULTS_FILE" ]; then
        echo "Search results file exists"
    else
        echo "Search results file missing"
    fi
    exit 0
fi
# Parse the search results JSON and find the matching item
if command -v jq >/dev/null 2>&1; then
    MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | jq --arg anime_title "$SELECTED_ITEM" '
        .data.Page.media[]? |
        select((.title.english // .title.romaji // .title.native // "Unknown") == $anime_title )
    ' )
else
    # Fallback to Python for JSON parsing
    MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)
    selected_item = '''$SELECTED_ITEM'''

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        sys.exit(1)

    media_list = data['data']['Page']['media']

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'
        display_format = f'{english_title} ({year}) [{status}] - {genres}'
        # Debug output for matching
        print(f\"DEBUG: selected_item='{selected_item.strip()}' display_format='{display_format.strip()}'\", file=sys.stderr)
        if selected_item.strip() == display_format.strip():
            json.dump(media, sys.stdout, indent=2)
            sys.exit(0)
    print(f\"DEBUG: No match found for selected_item='{selected_item.strip()}'\", file=sys.stderr)
    sys.exit(1)
except Exception as e:
    print(f'Error: {e}', file=sys.stderr)
    sys.exit(1)
" 2>/dev/null)
fi

# If we couldn't find the media data, show error
if [ $? -ne 0 ] || [ -z "$MEDIA_DATA" ]; then
    echo "${C_TITLE}Preview Error${RESET}"
    draw_rule
    echo "Could not load preview data for:"
    echo "$SELECTED_ITEM"
    echo
    echo "DEBUG INFO:"
    echo "Search results file: $SEARCH_RESULTS_FILE"
    if [ -f "$SEARCH_RESULTS_FILE" ]; then
        echo "File exists, size: $(wc -c < "$SEARCH_RESULTS_FILE") bytes"
        echo "First few lines of search results:"
        head -3 "$SEARCH_RESULTS_FILE" 2>/dev/null || echo "Cannot read file"
    else
        echo "Search results file does not exist"
    fi
    exit 0
fi

# Extract information from the media data
if command -v jq >/dev/null 2>&1; then
    # Use jq for faster extraction
    TITLE=$(echo "$MEDIA_DATA" | jq -r '.title.english // .title.romaji // .title.native // "Unknown"' 2>/dev/null || echo "Unknown")
    STATUS=$(echo "$MEDIA_DATA" | jq -r '.status // "Unknown"' 2>/dev/null || echo "Unknown")
    FORMAT=$(echo "$MEDIA_DATA" | jq -r '.format // "Unknown"' 2>/dev/null || echo "Unknown")
    EPISODES=$(echo "$MEDIA_DATA" | jq -r '.episodes // "Unknown"' 2>/dev/null || echo "Unknown")
    DURATION=$(echo "$MEDIA_DATA" | jq -r 'if .duration then "\(.duration) min" else "Unknown" end' 2>/dev/null || echo "Unknown")
    SCORE=$(echo "$MEDIA_DATA" | jq -r 'if .averageScore then "\(.averageScore)/100" else "N/A" end' 2>/dev/null || echo "N/A")
    FAVOURITES=$(echo "$MEDIA_DATA" | jq -r '.favourites // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
    POPULARITY=$(echo "$MEDIA_DATA" | jq -r '.popularity // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
    GENRES=$(echo "$MEDIA_DATA" | jq -r '(.genres[:5] // []) | join(", ") | if . == "" then "Unknown" else . end' 2>/dev/null || echo "Unknown")
    DESCRIPTION=$(echo "$MEDIA_DATA" | jq -r '.description // "No description available."' 2>/dev/null || echo "No description available.")

    # Get start and end dates as JSON objects
    START_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.startDate' 2>/dev/null || echo "null")
    END_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.endDate' 2>/dev/null || echo "null")

    # Get cover image URL
    COVER_IMAGE=$(echo "$MEDIA_DATA" | jq -r '.coverImage.large // ""' 2>/dev/null || echo "")
else
    # Fallback to Python for extraction
    TITLE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); title=data.get('title',{}); print(title.get('english') or title.get('romaji') or title.get('native', 'Unknown'))" 2>/dev/null || echo "Unknown")
    STATUS=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('status', 'Unknown'))" 2>/dev/null || echo "Unknown")
    FORMAT=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('format', 'Unknown'))" 2>/dev/null || echo "Unknown")
    EPISODES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('episodes', 'Unknown'))" 2>/dev/null || echo "Unknown")
    DURATION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); duration=data.get('duration'); print(f'{duration} min' if duration else 'Unknown')" 2>/dev/null || echo "Unknown")
    SCORE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); score=data.get('averageScore'); print(f'{score}/100' if score else 'N/A')" 2>/dev/null || echo "N/A")
    FAVOURITES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('favourites', 0):,}\")" 2>/dev/null || echo "0")
    POPULARITY=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('popularity', 0):,}\")" 2>/dev/null || echo "0")
    GENRES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(', '.join(data.get('genres', [])[:5]))" 2>/dev/null || echo "Unknown")
    DESCRIPTION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('description', 'No description available.'))" 2>/dev/null || echo "No description available.")

    # Get start and end dates
    START_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('startDate'), sys.stdout)" 2>/dev/null || echo "null")
    END_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('endDate'), sys.stdout)" 2>/dev/null || echo "null")

    # Get cover image URL
    COVER_IMAGE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); cover=data.get('coverImage',{}); print(cover.get('large', ''))" 2>/dev/null || echo "")
fi

# Format the dates
START_DATE=$(format_date "$START_DATE_OBJ")
END_DATE=$(format_date "$END_DATE_OBJ")

# Generate cache hash for this item (using selected item like regular preview)
CACHE_HASH=$(generate_sha256 "$SELECTED_ITEM")

# Try to show image if available
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
    image_file="{IMAGE_CACHE_PATH}{PATH_SEP}${CACHE_HASH}.png"

    # If image not cached and we have a URL, try to download it quickly
    if [ ! -f "$image_file" ] && [ -n "$COVER_IMAGE" ]; then
        if command -v curl >/dev/null 2>&1; then
            # Quick download with timeout
            curl -s -m 3 -L "$COVER_IMAGE" -o "$image_file" 2>/dev/null || rm -f "$image_file" 2>/dev/null
        fi
    fi

    if [ -f "$image_file" ]; then
        fzf_preview "$image_file"
    else
        echo "🖼️ Loading image..."
    fi
    echo
fi

# Display text info if configured
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    draw_rule
    print_kv "Title" "$TITLE"
    draw_rule

    print_kv "Score" "$SCORE"
    print_kv "Favourites" "$FAVOURITES"
    print_kv "Popularity" "$POPULARITY"
    print_kv "Status" "$STATUS"

    draw_rule

    print_kv "Episodes" "$EPISODES"
    print_kv "Duration" "$DURATION"
    print_kv "Format" "$FORMAT"

    draw_rule

    print_kv "Genres" "$GENRES"
    print_kv "Start Date" "$START_DATE"
    print_kv "End Date" "$END_DATE"

    draw_rule

    # Clean and display description
    CLEAN_DESCRIPTION=$(clean_html "$DESCRIPTION")
    echo "$CLEAN_DESCRIPTION" | fold -s -w "$WIDTH"
fi
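The clean_html pipeline above (one sed to strip tags, then entity decodes for &lt;, &gt;, &amp;, &quot;, &#39;) has a compact stdlib-Python equivalent, shown here only as a sketch of the same transformation:

import re
from html import unescape

def clean_html(text: str) -> str:
    # Drop tags first, then decode the remaining HTML entities
    return unescape(re.sub(r"<[^>]*>", "", text))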
@@ -1,31 +0,0 @@
#!/bin/sh
#
# Episode Preview Info Script Template
# This script formats and displays episode information in the FZF preview pane.
# Some values are injected by python those with '{name}' syntax using .replace()

draw_rule

echo "{TITLE}" | fold -s -w "$WIDTH"

draw_rule

print_kv "Duration" "{DURATION}"
print_kv "Status" "{STATUS}"

draw_rule

print_kv "Total Episodes" "{EPISODES}"
print_kv "Next Episode" "{NEXT_EPISODE}"

draw_rule

print_kv "Progress" "{USER_PROGRESS}"
print_kv "List Status" "{USER_STATUS}"

draw_rule

print_kv "Start Date" "{START_DATE}"
print_kv "End Date" "{END_DATE}"

draw_rule
@@ -1,54 +0,0 @@
#!/bin/sh
#
# Viu Preview Info Script Template
# This script formats and displays the textual information in the FZF preview pane.
# Some values are injected by python those with '{name}' syntax using .replace()

draw_rule

print_kv "Title" "{TITLE}"

draw_rule

# Emojis take up double the space
score_multiplier=1
if ! [ "{SCORE}" = "N/A" ]; then
    score_multiplier=2
fi
print_kv "Score" "{SCORE}" $score_multiplier

print_kv "Favourites" "{FAVOURITES}"
print_kv "Popularity" "{POPULARITY}"
print_kv "Status" "{STATUS}"

draw_rule

print_kv "Episodes" "{EPISODES}"
print_kv "Next Episode" "{NEXT_EPISODE}"
print_kv "Duration" "{DURATION}"

draw_rule

print_kv "Genres" "{GENRES}"
print_kv "Format" "{FORMAT}"

draw_rule

print_kv "List Status" "{USER_STATUS}"
print_kv "Progress" "{USER_PROGRESS}"

draw_rule

print_kv "Start Date" "{START_DATE}"
print_kv "End Date" "{END_DATE}"

draw_rule

print_kv "Studios" "{STUDIOS}"
print_kv "Synonymns" "{SYNONYMNS}"
print_kv "Tags" "{TAGS}"

draw_rule

# Synopsis
echo "{SYNOPSIS}" | fold -s -w "$WIDTH"
@@ -1,147 +0,0 @@
#!/bin/sh
#
# FZF Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}

fzf_preview() {
    file=$1

    dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
    if [ "$dim" = x ]; then
        dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
    fi
    if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
        dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
    fi

    if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            kitty icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        fi

    elif [ -n "$GHOSTTY_BIN_DIR" ]; then
        dim=$((FZF_PREVIEW_COLUMNS - 1))x${FZF_PREVIEW_LINES}
        if command -v kitten >/dev/null 2>&1; then
            kitten icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        elif command -v icat >/dev/null 2>&1; then
            icat --clear --transfer-mode=memory --unicode-placeholder{SCALE_UP} --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
        else
            chafa -s "$dim" "$file"
        fi
    elif command -v chafa >/dev/null 2>&1; then
        case "$PLATFORM" in
        android) chafa -s "$dim" "$file" ;;
        windows) chafa -f sixel -s "$dim" "$file" ;;
        *) chafa -s "$dim" "$file" ;;
        esac
        echo

    elif command -v imgcat >/dev/null; then
        imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"

    else
        echo please install a terminal image viewer
        echo either icat for kitty terminal and wezterm or imgcat or chafa
    fi
}


# --- Helper function for printing a key-value pair, aligning the value to the right ---
print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo "$value"| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}

# --- Draw a rule across the screen ---
# TODO: figure out why this method does not work in fzf
draw_rule() {
    local rule
    # Generate the line of '─' characters, removing the trailing newline `tr` adds.
    rule=$(printf '%*s' "$WIDTH" | tr ' ' '─' | tr -d '\n')
    # Print the rule with colors and a single, clean newline.
    printf "{C_RULE}%s{RESET}\\n" "$rule"
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

# Generate the same cache key that the Python worker uses
# {PREFIX} is used only on episode previews to make sure they are unique
title={}
hash=$(generate_sha256 "{PREFIX}$title")

#
# --- Display image if configured and the cached file exists ---
#
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
    image_file="{IMAGE_CACHE_PATH}{PATH_SEP}$hash.png"
    if [ -f "$image_file" ]; then
        fzf_preview "$image_file"
    else
        echo "🖼️ Loading image..."
    fi
    echo # Add a newline for spacing
fi
# Display text info if configured and the cached file exists
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_PATH}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "📝 Loading details..."
    fi
fi
@@ -1,19 +0,0 @@
#!/bin/sh
#
# Viu Review Info Script Template
# This script formats and displays review details in the FZF preview pane.
# Python injects the actual data values into the placeholders.

draw_rule

print_kv "Review By" "{REVIEWER_NAME}"

draw_rule

print_kv "Summary" "{REVIEW_SUMMARY}"

draw_rule

echo "{REVIEW_BODY}" | fold -s -w "$WIDTH"

draw_rule
@@ -1,75 +0,0 @@
#!/bin/sh
#
# FZF Preview Script Template
#
# This script is a template. The placeholders in curly braces, like {NAME}
# are dynamically filled by python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80} # Set a fallback width of 80
IMAGE_RENDERER="{IMAGE_RENDERER}"

generate_sha256() {
    local input

    # Check if input is passed as an argument or piped
    if [ -n "$1" ]; then
        input="$1"
    else
        input=$(cat)
    fi

    if command -v sha256sum &>/dev/null; then
        echo -n "$input" | sha256sum | awk '{print $1}'
    elif command -v shasum &>/dev/null; then
        echo -n "$input" | shasum -a 256 | awk '{print $1}'
    elif command -v sha256 &>/dev/null; then
        echo -n "$input" | sha256 | awk '{print $1}'
    elif command -v openssl &>/dev/null; then
        echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
    else
        echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
    fi
}


print_kv() {
    local key="$1"
    local value="$2"
    local key_len=${#key}
    local value_len=${#value}
    local multiplier="${3:-1}"

    # Correctly calculate padding by accounting for the key, the ": ", and the value.
    local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

    # If the text is too long to fit, just add a single space for separation.
    if [ "$padding_len" -lt 1 ]; then
        padding_len=1
        value=$(echo $value| fold -s -w "$((WIDTH - key_len - 3))")
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    else
        printf "{C_KEY}%s:{RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
    fi
}


draw_rule(){
    ll=2
    while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
        echo -n -e "{C_RULE}─{RESET}"
        ((ll++))
    done
    echo
}

title={}
hash=$(generate_sha256 "$title")

if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
    info_file="{INFO_CACHE_DIR}{PATH_SEP}$hash"
    if [ -f "$info_file" ]; then
        source "$info_file"
    else
        echo "📝 Loading details..."
    fi
fi
@@ -1,118 +0,0 @@
#!/bin/bash
#
# FZF Dynamic Search Script Template
#
# This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {QUERY} are dynamically filled by Python using .replace()

# Configuration variables (injected by Python)
GRAPHQL_ENDPOINT="{GRAPHQL_ENDPOINT}"
CACHE_DIR="{CACHE_DIR}"
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
AUTH_HEADER="{AUTH_HEADER}"

# Get the current query from fzf
QUERY="{{q}}"

# If query is empty, exit with empty results
if [ -z "$QUERY" ]; then
    echo ""
    exit 0
fi

# Create GraphQL variables
VARIABLES=$(cat <<EOF
{
    "query": "$QUERY",
    "type": "ANIME",
    "per_page": 50,
    "genre_not_in": ["Hentai"]
}
EOF
)

# The GraphQL query is injected here as a properly escaped string
GRAPHQL_QUERY='{GRAPHQL_QUERY}'

# Create the GraphQL request payload
PAYLOAD=$(cat <<EOF
{
    "query": $GRAPHQL_QUERY,
    "variables": $VARIABLES
}
EOF
)

# Make the GraphQL request and save raw results
if [ -n "$AUTH_HEADER" ]; then
    RESPONSE=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -H "Authorization: $AUTH_HEADER" \
        -d "$PAYLOAD" \
        "$GRAPHQL_ENDPOINT")
else
    RESPONSE=$(curl -s -X POST \
        -H "Content-Type: application/json" \
        -d "$PAYLOAD" \
        "$GRAPHQL_ENDPOINT")
fi

# Check if the request was successful
if [ $? -ne 0 ] || [ -z "$RESPONSE" ]; then
    echo "❌ Search failed"
    exit 1
fi

# Save the raw response for later processing
echo "$RESPONSE" > "$SEARCH_RESULTS_FILE"

# Parse and display results
if command -v jq >/dev/null 2>&1; then
    # Use jq for faster and more reliable JSON parsing
    echo "$RESPONSE" | jq -r '
        if .errors then
            "❌ Search error: " + (.errors | tostring)
        elif (.data.Page.media // []) | length == 0 then
            "❌ No results found"
        else
            .data.Page.media[] | (.title.english // .title.romaji // .title.native // "Unknown")
        end
    ' 2>/dev/null || echo "❌ Parse error"
else
    # Fallback to Python for JSON parsing
    echo "$RESPONSE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)

    if 'errors' in data:
        print('❌ Search error: ' + str(data['errors']))
        sys.exit(1)

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        print('❌ No results found')
        sys.exit(0)

    media_list = data['data']['Page']['media']

    if not media_list:
        print('❌ No results found')
        sys.exit(0)

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'

        # Format: Title (Year) [Status] - Genres
        print(f'{english_title} ({year}) [{status}] - {genres}')

except Exception as e:
    print(f'❌ Parse error: {str(e)}')
    sys.exit(1)
"
fi
@@ -3,18 +3,16 @@
|
||||
# FZF Preview Script Template
|
||||
#
|
||||
# This script is a template. The placeholders in curly braces, like {NAME}
|
||||
# are dynamically filled by python using .replace()
|
||||
# are dynamically filled by python using .replace() during runtime.
|
||||
|
||||
from pathlib import Path
|
||||
from hashlib import sha256
|
||||
import subprocess
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from rich.console import Console
|
||||
from rich.rule import Rule
|
||||
from hashlib import sha256
|
||||
from pathlib import Path
|
||||
|
||||
# dynamically filled variables
|
||||
# --- Template Variables (Injected by Python) ---
|
||||
PREVIEW_MODE = "{PREVIEW_MODE}"
|
||||
IMAGE_CACHE_DIR = Path("{IMAGE_CACHE_DIR}")
|
||||
INFO_CACHE_DIR = Path("{INFO_CACHE_DIR}")
|
||||
@@ -24,189 +22,267 @@ SEPARATOR_COLOR = "{SEPARATOR_COLOR}"
|
||||
PREFIX = "{PREFIX}"
|
||||
SCALE_UP = "{SCALE_UP}" == "True"
|
||||
|
||||
# fzf passes the title with quotes, so we need to trim them
|
||||
TITLE = sys.argv[1]
|
||||
# --- Arguments ---
|
||||
# sys.argv[1] is usually the raw line from FZF (the anime title/key)
|
||||
TITLE = sys.argv[1] if len(sys.argv) > 1 else ""
|
||||
KEY = """{KEY}"""
|
||||
KEY = KEY + "-" if KEY else KEY
|
||||
|
||||
hash = f"{PREFIX}-{sha256(TITLE.encode('utf-8')).hexdigest()}"
|
||||
# Generate the hash to find the cached files
|
||||
hash_id = f"{PREFIX}-{sha256((KEY + TITLE).encode('utf-8')).hexdigest()}"
|
||||
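The prefix-plus-digest scheme above is what ties every preview artifact to its fzf line; a hypothetical key, assuming PREFIX is "search-result" and KEY is empty (the title is made up for illustration):

from hashlib import sha256

PREFIX = "search-result"  # injected by the Python side in this example
TITLE = "Frieren: Beyond Journey's End"
hash_id = f"{PREFIX}-{sha256(TITLE.encode('utf-8')).hexdigest()}"
# The image is then expected at IMAGE_CACHE_DIR / f"{hash_id}.png"
# and the info script at INFO_CACHE_DIR / f"{hash_id}.py".
print(hash_id)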

def get_terminal_dimensions():
    """
    Determine the available dimensions (cols x lines) for the preview window.
    Prioritizes FZF environment variables.
    """
    fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
    fzf_lines = os.environ.get("FZF_PREVIEW_LINES")

    if fzf_cols and fzf_lines:
        return int(fzf_cols), int(fzf_lines)

    # Fallback to stty if FZF vars aren't set (unlikely in preview)
    try:
        rows, cols = (
            subprocess.check_output(
                ["stty", "size"], text=True, stderr=subprocess.DEVNULL
            )
            .strip()
            .split()
        )
        return int(cols), int(rows)
    except Exception:
        return 80, 24


def which(cmd):
    """Alias for shutil.which"""
    return shutil.which(cmd)


def render_kitty(file_path, width, height, scale_up):
    """Render using the Kitty Graphics Protocol (kitten/icat)."""
    # 1. Try 'kitten icat' (Modern)
    # 2. Try 'icat' (Legacy/Alias)
    # 3. Try 'kitty +kitten icat' (Fallback)

    cmd = []
    if which("kitten"):
        cmd = ["kitten", "icat"]
    elif which("icat"):
        cmd = ["icat"]
    elif which("kitty"):
        cmd = ["kitty", "+kitten", "icat"]

    if not cmd:
        return False

    # Build Arguments
    args = [
        "--clear",
        "--transfer-mode=memory",
        "--unicode-placeholder",
        "--stdin=no",
        f"--place={width}x{height}@0x0",
    ]

    if scale_up:
        args.append("--scale-up")

    args.append(file_path)

    subprocess.run(cmd + args, stdout=sys.stdout, stderr=sys.stderr)
    return True


def render_sixel(file_path, width, height):
    """
    Render using Sixel.
    Prioritizes 'chafa' for Sixel as it handles text-cell sizing better than img2sixel.
    """

    # Option A: Chafa (Best for Sixel sizing)
    if which("chafa"):
        # Chafa automatically detects Sixel support if terminal reports it,
        # but we force it here if specifically requested via logic flow.
        subprocess.run(
            ["chafa", "-f", "sixel", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    # Option B: img2sixel (Libsixel)
    # Note: img2sixel uses pixels, not cells. We estimate 1 cell ~= 10px width, 20px height
    if which("img2sixel"):
        pixel_width = width * 10
        pixel_height = height * 20
        subprocess.run(
            [
                "img2sixel",
                f"--width={pixel_width}",
                f"--height={pixel_height}",
                file_path,
            ],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    return False


def render_iterm(file_path, width, height):
    """Render using iTerm2 Inline Image Protocol."""
    if which("imgcat"):
        subprocess.run(
            ["imgcat", "-W", str(width), "-H", str(height), file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True

    # Chafa also supports iTerm
    if which("chafa"):
        subprocess.run(
            ["chafa", "-f", "iterm", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_timg(file_path, width, height):
    """Render using timg (supports half-blocks, quarter-blocks, sixel, kitty, etc)."""
    if which("timg"):
        subprocess.run(
            ["timg", f"-g{width}x{height}", "--upscale", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def render_chafa_auto(file_path, width, height):
    """
    Render using Chafa in auto mode.
    It supports Sixel, Kitty, iTerm, and various unicode block modes.
    """
    if which("chafa"):
        subprocess.run(
            ["chafa", "-s", f"{width}x{height}", file_path],
            stdout=sys.stdout,
            stderr=sys.stderr,
        )
        return True
    return False


def fzf_image_preview(file_path: str):
    # Environment variables from fzf
    FZF_PREVIEW_COLUMNS = os.environ.get("FZF_PREVIEW_COLUMNS")
    FZF_PREVIEW_LINES = os.environ.get("FZF_PREVIEW_LINES")
    FZF_PREVIEW_TOP = os.environ.get("FZF_PREVIEW_TOP")
    KITTY_WINDOW_ID = os.environ.get("KITTY_WINDOW_ID")
    GHOSTTY_BIN_DIR = os.environ.get("GHOSTTY_BIN_DIR")
    PLATFORM = os.environ.get("PLATFORM")
    """
    Main dispatch function to choose the best renderer.
    """
    cols, lines = get_terminal_dimensions()

    # Compute terminal dimensions
    dim = (
        f"{FZF_PREVIEW_COLUMNS}x{FZF_PREVIEW_LINES}"
        if FZF_PREVIEW_COLUMNS and FZF_PREVIEW_LINES
        else "x"
    )
    # Heuristic: Reserve 1 line for prompt/status if needed, though FZF handles this.
    # Some renderers behave better with a tiny bit of padding.
    width = cols
    height = lines

    if dim == "x":
        try:
            rows, cols = (
                subprocess.check_output(
                    ["stty", "size"], text=True, stderr=subprocess.DEVNULL
                )
                .strip()
                .split()
    # --- 1. Check Explicit Configuration ---

    if IMAGE_RENDERER == "icat" or IMAGE_RENDERER == "system-kitty":
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    elif IMAGE_RENDERER == "sixel" or IMAGE_RENDERER == "system-sixels":
        if render_sixel(file_path, width, height):
            return

    elif IMAGE_RENDERER == "imgcat":
        if render_iterm(file_path, width, height):
            return

    elif IMAGE_RENDERER == "timg":
        if render_timg(file_path, width, height):
            return

    elif IMAGE_RENDERER == "chafa":
        if render_chafa_auto(file_path, width, height):
            return

    # --- 2. Auto-Detection / Fallback Strategy ---

    # If explicit failed or set to 'auto'/'system-default', try detecting environment

    # Ghostty / Kitty Environment
    if os.environ.get("KITTY_WINDOW_ID") or os.environ.get("GHOSTTY_BIN_DIR"):
        if render_kitty(file_path, width, height, SCALE_UP):
            return

    # iTerm Environment
    if os.environ.get("TERM_PROGRAM") == "iTerm.app":
        if render_iterm(file_path, width, height):
            return

    # Try standard tools in order of quality/preference
    if render_kitty(file_path, width, height, SCALE_UP):
        return  # Try kitty just in case
    if render_sixel(file_path, width, height):
        return
    if render_timg(file_path, width, height):
        return
    if render_chafa_auto(file_path, width, height):
        return

    print("⚠️ No suitable image renderer found (icat, chafa, timg, img2sixel).")


def fzf_text_info_render():
    """Renders the text-based info via the cached python script."""
    # Get terminal dimensions from FZF environment or fallback
    cols, lines = get_terminal_dimensions()

    # Print simple separator line with proper width
    r, g, b = map(int, SEPARATOR_COLOR.split(","))
    separator = f"\x1b[38;2;{r};{g};{b}m" + ("─" * cols) + "\x1b[0m"
    print(separator, flush=True)

    if PREVIEW_MODE == "text" or PREVIEW_MODE == "full":
        preview_info_path = INFO_CACHE_DIR / f"{hash_id}.py"
        if preview_info_path.exists():
            subprocess.run(
                [sys.executable, str(preview_info_path), HEADER_COLOR, SEPARATOR_COLOR]
            )
            dim = f"{cols}x{rows}"
        except Exception:
            dim = "80x24"
    else:
        # Print dim text
        print("\x1b[2m📝 Loading details...\x1b[0m")

    # Adjust dimension if icat not used and preview area fills bottom of screen
    if (
        IMAGE_RENDERER != "icat"
        and not KITTY_WINDOW_ID
        and FZF_PREVIEW_TOP
        and FZF_PREVIEW_LINES

def main():
    # 1. Image Preview
    if (PREVIEW_MODE == "image" or PREVIEW_MODE == "full") and (
        PREFIX not in ("character", "review", "airing-schedule")
    ):
        try:
            term_rows = int(
                subprocess.check_output(["stty", "size"], text=True).split()[0]
            )
            if int(FZF_PREVIEW_TOP) + int(FZF_PREVIEW_LINES) == term_rows:
                dim = f"{FZF_PREVIEW_COLUMNS}x{int(FZF_PREVIEW_LINES) - 1}"
        except Exception:
            pass

        # Helper to run commands
        def run(cmd):
            subprocess.run(cmd, stdout=sys.stdout, stderr=sys.stderr)

        def command_exists(cmd):
            return shutil.which(cmd) is not None

        # ICAT / KITTY path
        if IMAGE_RENDERER == "icat" and not GHOSTTY_BIN_DIR:
            icat_cmd = None
            if command_exists("kitten"):
                icat_cmd = ["kitten", "icat"]
            elif command_exists("icat"):
                icat_cmd = ["icat"]
            elif command_exists("kitty"):
                icat_cmd = ["kitty", "icat"]

            if icat_cmd:
                run(
                    icat_cmd
                    + [
                        "--clear",
                        "--transfer-mode=memory",
                        "--unicode-placeholder",
                        "--stdin=no",
                        f"--place={dim}@0x0",
                        file_path,
                    ]
                )
        preview_image_path = IMAGE_CACHE_DIR / f"{hash_id}.png"
        if preview_image_path.exists():
            fzf_image_preview(str(preview_image_path))
            print()  # Spacer
            else:
                print("No icat-compatible viewer found (kitten/icat/kitty)")
        else:
            print("🖼️ Loading image...")

        elif GHOSTTY_BIN_DIR:
            try:
                cols = int(FZF_PREVIEW_COLUMNS or "80") - 1
                lines = FZF_PREVIEW_LINES or "24"
                dim = f"{cols}x{lines}"
            except Exception:
                pass

            if command_exists("kitten"):
                run(
                    [
                        "kitten",
                        "icat",
                        "--clear",
                        "--transfer-mode=memory",
                        "--unicode-placeholder",
                        "--stdin=no",
                        f"--place={dim}@0x0",
                        file_path,
                    ]
                )
            elif command_exists("icat"):
                run(
                    [
                        "icat",
                        "--clear",
                        "--transfer-mode=memory",
                        "--unicode-placeholder",
                        "--stdin=no",
                        f"--place={dim}@0x0",
                        file_path,
                    ]
                )
            elif command_exists("chafa"):
                run(["chafa", "-s", dim, file_path])

        elif command_exists("chafa"):
            # Platform specific rendering
            if PLATFORM == "android":
                run(["chafa", "-s", dim, file_path])
            elif PLATFORM == "windows":
                run(["chafa", "-f", "sixel", "-s", dim, file_path])
            else:
                run(["chafa", "-s", dim, file_path])
            print()

        elif command_exists("imgcat"):
            width, height = dim.split("x")
            run(["imgcat", "-W", width, "-H", height, file_path])

        else:
            print(
                "⚠️ Please install a terminal image viewer (icat, kitten, imgcat, or chafa)."
            )
    # 2. Text Info Preview
    fzf_text_info_render()


def fzf_text_preview(file_path: str):
    from base64 import standard_b64encode

    def serialize_gr_command(**cmd):
        payload = cmd.pop("payload", None)
        cmd = ",".join(f"{k}={v}" for k, v in cmd.items())
        ans = []
        w = ans.append
        w(b"\033_G")
        w(cmd.encode("ascii"))
        if payload:
            w(b";")
            w(payload)
        w(b"\033\\")
        return b"".join(ans)

    def write_chunked(**cmd):
        data = standard_b64encode(cmd.pop("data"))
        while data:
            chunk, data = data[:4096], data[4096:]
            m = 1 if data else 0
            sys.stdout.buffer.write(serialize_gr_command(payload=chunk, m=m, **cmd))
            sys.stdout.flush()
            cmd.clear()

    with open(file_path, "rb") as f:
        write_chunked(a="T", f=100, data=f.read())
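write_chunked follows the kitty graphics protocol framing: each escape sequence is ESC _ G key=value,... ; base64-payload ESC \, and payloads over 4096 bytes are split with m=1 on every chunk except the last, so the terminal knows more data follows. A minimal sketch of driving it (the file path here is illustrative):

# Hypothetical call site: transmit and display (a="T") PNG data (f=100) in chunks.
with open("/tmp/poster.png", "rb") as f:
    write_chunked(a="T", f=100, data=f.read())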

console = Console(force_terminal=True, color_system="truecolor")
if PREVIEW_MODE == "image" or PREVIEW_MODE == "full":
    preview_image_path = IMAGE_CACHE_DIR / f"{hash}.png"
    if preview_image_path.exists():
        fzf_image_preview(str(preview_image_path))
        print()
    else:
        print("🖼️ Loading image...")

console.print(Rule(style=f"rgb({SEPARATOR_COLOR})"))
if PREVIEW_MODE == "text" or PREVIEW_MODE == "full":
    preview_info_path = INFO_CACHE_DIR / f"{hash}.py"
    if preview_info_path.exists():
        subprocess.run(
            [sys.executable, str(preview_info_path), HEADER_COLOR, SEPARATOR_COLOR]
        )
    else:
        console.print("📝 Loading details...")
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        print(f"Preview Error: {e}")

@@ -0,0 +1,28 @@
import sys
from _ansi_utils import (
    print_rule,
    print_table_row,
    strip_markdown,
    wrap_text,
    get_terminal_width,
)

HEADER_COLOR = sys.argv[1]
SEPARATOR_COLOR = sys.argv[2]

# Get terminal dimensions
term_width = get_terminal_width()

# Print title centered
print("{REVIEWER_NAME}".center(term_width))

rows = [
    ("Summary", "{REVIEW_SUMMARY}"),
]

print_rule(SEPARATOR_COLOR)
for key, value in rows:
    print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

print_rule(SEPARATOR_COLOR)
print(wrap_text(strip_markdown("""{REVIEW_BODY}"""), term_width))
211
viu_media/assets/scripts/fzf/search.py
Executable file
@@ -0,0 +1,211 @@
#!/usr/bin/env python3
#
# FZF Dynamic Search Script Template
#
# This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {GRAPHQL_ENDPOINT}, are dynamically
# filled by Python using .replace() during runtime.
#
# FILTER SYNTAX:
#   @genre:action,comedy      Filter by genres
#   @genre:!hentai            Exclude a genre
#   @status:airing            Status: airing, finished, upcoming, cancelled, hiatus
#   @year:2024                Filter by year
#   @season:winter            winter, spring, summer, fall
#   @format:tv,movie          tv, movie, ova, ona, special
#   @sort:score               score, popularity, trending, date, title
#   @score:>80 / @score:<50   Min/max score
#   @onlist / @onlist:false   Filter by list status
#   @tag:isekai               Filter by tags
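For instance, a single fzf query line can mix free text with several filters; a hypothetical input and the split that parse_filters is expected to produce (the exact dictionary keys are assumptions about _filter_parser's output, not confirmed by this diff):

raw_query = "frieren @genre:fantasy @status:finished @sort:score"
query, filters = parse_filters(raw_query)
# query   -> "frieren"
# filters -> e.g. {"genre_in": ["Fantasy"], "status": "FINISHED", "sort": "SCORE_DESC"}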

import json
import sys
from pathlib import Path
from urllib import request
from urllib.error import URLError

# Import the filter parser
from _filter_parser import parse_filters

# --- Template Variables (Injected by Python) ---
GRAPHQL_ENDPOINT = "{GRAPHQL_ENDPOINT}"
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
LAST_QUERY_FILE = Path("{LAST_QUERY_FILE}")
AUTH_HEADER = "{AUTH_HEADER}"

# The GraphQL query is injected as a properly escaped JSON string
GRAPHQL_QUERY = "{GRAPHQL_QUERY}"

# --- Get Query from fzf ---
# fzf passes the current query as the first argument when using --bind change:reload
RAW_QUERY = sys.argv[1] if len(sys.argv) > 1 else ""

# Parse the query to extract filters and clean search text
QUERY, PARSED_FILTERS = parse_filters(RAW_QUERY)

# If query is empty and no filters, show help hint
if not RAW_QUERY.strip():
    print("💡 Tip: Use @genre:action @status:airing for filters (type @help for syntax)")
    sys.exit(0)

# Show filter help if requested
if RAW_QUERY.strip().lower() in ("@help", "@?", "@h"):
    from _filter_parser import get_help_text
    print(get_help_text())
    sys.exit(0)

# If we only have filters (no search text), that's valid - we'll search with filters only
# But if we have neither query nor filters, we already showed the help hint above


def make_graphql_request(
    endpoint: str, query: str, variables: dict, auth_token: str = ""
) -> tuple[dict | None, str | None]:
    """
    Make a GraphQL request to the specified endpoint.

    Args:
        endpoint: GraphQL API endpoint URL
        query: GraphQL query string
        variables: Query variables as a dictionary
        auth_token: Optional authorization token (Bearer token)

    Returns:
        Tuple of (Response JSON, error message) - one will be None
    """
    payload = {"query": query, "variables": variables}

    headers = {"Content-Type": "application/json", "User-Agent": "viu/1.0"}

    if auth_token:
        headers["Authorization"] = auth_token

    try:
        req = request.Request(
            endpoint,
            data=json.dumps(payload).encode("utf-8"),
            headers=headers,
            method="POST",
        )

        with request.urlopen(req, timeout=10) as response:
            return json.loads(response.read().decode("utf-8")), None
    except URLError as e:
        return None, f"Network error: {e.reason}"
    except json.JSONDecodeError as e:
        return None, f"Invalid response: {e}"
    except Exception as e:
        return None, f"Request error: {e}"
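A quick usage sketch of the helper above, assuming the AniList endpoint and a deliberately tiny query (the query document here is illustrative, not the template's injected SEARCH_MEDIA query):

response, error = make_graphql_request(
    "https://graphql.anilist.co",
    "query ($search: String) { Page { media(search: $search) { id } } }",
    {"search": "frieren"},
)
if error:
    print(f"❌ {error}")
else:
    print(len(response["data"]["Page"]["media"]), "hits")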
def extract_title(media_item: dict) -> str:
    """
    Extract the best available title from a media item.

    Args:
        media_item: Media object from GraphQL response

    Returns:
        Title string (english > romaji > native > "Unknown")
    """
    title_obj = media_item.get("title", {})
    return (
        title_obj.get("english")
        or title_obj.get("romaji")
        or title_obj.get("native")
        or "Unknown"
    )
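The or-chain means the first non-empty title wins; a small illustration with a made-up media dict:

media = {"title": {"english": None, "romaji": "Sousou no Frieren", "native": "葬送のフリーレン"}}
print(extract_title(media))  # -> "Sousou no Frieren" (english is None, so romaji is used)
print(extract_title({}))     # -> "Unknown" (no title object at all)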
def main():
    # Ensure parent directory exists
    SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)

    # Base GraphQL variables
    variables = {
        "type": "ANIME",
        "per_page": 50,
        "genre_not_in": ["Hentai"],  # Default exclusion
    }

    # Add search query if provided
    if QUERY:
        variables["query"] = QUERY

    # Apply parsed filters from the filter syntax
    for key, value in PARSED_FILTERS.items():
        # Handle array merging for _in and _not_in fields
        if key.endswith("_in") or key.endswith("_not_in"):
            if key in variables:
                # Merge arrays, avoiding duplicates
                existing = set(variables[key])
                existing.update(value)
                variables[key] = list(existing)
            else:
                variables[key] = value
        else:
            variables[key] = value

    # Make the GraphQL request
    response, error = make_graphql_request(
        GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER
    )

    if error:
        print(f"❌ {error}")
        # Also show what we tried to search, for debugging
        print(f" Query: {QUERY or '(none)'}")
        print(f" Filters: {json.dumps(PARSED_FILTERS) if PARSED_FILTERS else '(none)'}")
        sys.exit(1)

    if response is None:
        print("❌ Search failed: No response received")
        sys.exit(1)

    # Check for GraphQL errors first (these come in the response body)
    if "errors" in response:
        errors = response["errors"]
        if errors:
            # Extract error messages
            error_msgs = [e.get("message", str(e)) for e in errors]
            print(f"❌ API Error: {'; '.join(error_msgs)}")
            # Show variables for debugging
            print(f" Filters used: {json.dumps(PARSED_FILTERS, indent=2) if PARSED_FILTERS else '(none)'}")
            sys.exit(1)

    # Save the raw response for later processing by dynamic_search.py
    try:
        with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f:
            json.dump(response, f, ensure_ascii=False, indent=2)
        # Also save the raw query so it can be restored when going back
        with open(LAST_QUERY_FILE, "w", encoding="utf-8") as f:
            f.write(RAW_QUERY)
    except IOError as e:
        print(f"❌ Failed to save results: {e}")
        sys.exit(1)

    # Navigate the response structure
    data = response.get("data", {})
    page = data.get("Page", {})
    media_list = page.get("media", [])

    if not media_list:
        print("🔍 No results found")
        if PARSED_FILTERS:
            print(" Try adjusting your filters")
        sys.exit(0)

    # Output titles for fzf (one per line)
    for media in media_list:
        title = extract_title(media)
        print(title)


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception as e:
        print(f"❌ Unexpected error: {type(e).__name__}: {e}")
        sys.exit(1)
@@ -1,3 +1,9 @@
from .cli import cli as run_cli
import sys
import os

if sys.platform.startswith("win"):
    os.environ.setdefault("PYTHONUTF8", "1")


__all__ = ["run_cli"]

@@ -1,4 +1,5 @@
import logging
import shutil
import sys
from typing import TYPE_CHECKING

@@ -109,12 +110,103 @@ def cli(ctx: click.Context, **options: "Unpack[Options]"):
    )
    ctx.obj = config

    if config.general.welcome_screen:
        import time

        from ..core.constants import APP_CACHE_DIR, USER_NAME, SUPPORT_PROJECT_URL

        last_welcomed_at_file = APP_CACHE_DIR / ".last_welcome"
        should_welcome = False
        if last_welcomed_at_file.exists():
            try:
                last_welcomed_at = float(
                    last_welcomed_at_file.read_text(encoding="utf-8")
                )
                # runs once a month
                if (time.time() - last_welcomed_at) > 30 * 24 * 3600:
                    should_welcome = True

            except Exception as e:
                logger.warning(f"Failed to read welcome screen timestamp: {e}")

        else:
            should_welcome = True
        if should_welcome:
            last_welcomed_at_file.write_text(str(time.time()), encoding="utf-8")

            from rich.prompt import Confirm

            if Confirm.ask(f"""\
[green]How are you, {USER_NAME} 🙂?
If you enjoy the project and would like to support it, you can buy me a coffee at {SUPPORT_PROJECT_URL}.
Would you like to open the support page? Select yes to continue; otherwise, enjoy your terminal-anime browsing experience 😁.[/]
You can disable this message by turning off the welcome_screen option in the config. It only appears once a month.
"""):
                from webbrowser import open

                open(SUPPORT_PROJECT_URL)

    if config.general.show_new_release:
        import time

        from ..core.constants import APP_CACHE_DIR

        last_release_file = APP_CACHE_DIR / ".last_release"
        should_print_release_notes = False
        if last_release_file.exists():
            last_release = last_release_file.read_text(encoding="utf-8")
            current_version = list(map(int, __version__.replace("v", "").split(".")))
            last_saved_version = list(
                map(int, last_release.replace("v", "").split("."))
            )
            if (
                (current_version[0] > last_saved_version[0])
                or (
                    current_version[1] > last_saved_version[1]
                    and current_version[0] == last_saved_version[0]
                )
                or (
                    current_version[2] > last_saved_version[2]
                    and current_version[0] == last_saved_version[0]
                    and current_version[1] == last_saved_version[1]
                )
            ):
                should_print_release_notes = True

        else:
            should_print_release_notes = True
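The nested major/minor/patch comparison above is equivalent to Python's lexicographic tuple ordering, which is the usual way to compare semantic versions; a minimal sketch of the same check (version strings are made up):

current = tuple(map(int, "3.2.1".replace("v", "").split(".")))  # e.g. parsed __version__
saved = tuple(map(int, "3.1.9".replace("v", "").split(".")))    # e.g. the cached .last_release
should_print_release_notes = current > saved  # tuples compare element by element, left to right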
        if should_print_release_notes:
            last_release_file.write_text(__version__, encoding="utf-8")
            from .service.feedback import FeedbackService
            from .utils.update import check_for_updates, print_release_json, update_app
            from rich.prompt import Confirm

            feedback = FeedbackService(config)
            feedback.info("Getting release notes...")
            is_latest, release_json = check_for_updates()
            if Confirm.ask(
                "Would you also like to update your config with the latest options and config notes?"
            ):
                import subprocess

                _cli_cmd_name = "viu" if not shutil.which("viu-media") else "viu-media"
                cmd = [_cli_cmd_name, "config", "--update"]
                print(f"running '{' '.join(cmd)}'...")
                subprocess.run(cmd)

            if is_latest:
                print_release_json(release_json)
            else:
                print_release_json(release_json)
                print("It seems there's another update waiting for you as well 😁")
            click.pause("Press Any Key To Proceed...")

    if config.general.check_for_updates:
        import time

        from ..core.constants import APP_CACHE_DIR

        last_updated_at_file = APP_CACHE_DIR / "last_update"
        last_updated_at_file = APP_CACHE_DIR / ".last_update"
        should_check_for_update = False
        if last_updated_at_file.exists():
            try:
@@ -1,25 +1,72 @@
import click
import webbrowser
from pathlib import Path
import click

from .....core.config.model import AppConfig


def _get_token(feedback, selector, token_input: str | None) -> str | None:
    """
    Retrieves the authentication token from a file path, a direct string, or an interactive prompt.
    """
    if token_input:
        path = Path(token_input)
        if path.is_file():
            try:
                token = path.read_text().strip()
                if not token:
                    feedback.error(f"Token file is empty: {path}")
                    return None
                return token
            except Exception as e:
                feedback.error(f"Error reading token from file: {e}")
                return None
        return token_input

    from .....core.constants import ANILIST_AUTH

    open_success = webbrowser.open(ANILIST_AUTH, new=2)
    if open_success:
        feedback.info("Your browser has been opened to obtain an AniList token.")
        feedback.info(
            f"Or you can visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    else:
        feedback.warning(
            f"Failed to open the browser. Please visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    feedback.info(
        "After authorizing, copy the token from the address bar and paste it below."
    )
    return selector.ask("Enter your AniList Access Token")
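The helper accepts three input shapes; a hedged sketch of how they resolve (feedback and selector stand in for the real service instances):

token = _get_token(feedback, selector, "/path/to/token.txt")  # file path -> file contents, stripped
token = _get_token(feedback, selector, "abc123")              # raw string -> returned as-is
token = _get_token(feedback, selector, None)                  # None -> browser flow plus interactive prompt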

@click.command(help="Login to your AniList account to enable progress tracking.")
@click.option("--status", "-s", is_flag=True, help="Check current login status.")
@click.option("--logout", "-l", is_flag=True, help="Log out and erase credentials.")
@click.argument("token_input", required=False, type=str)
@click.pass_obj
def auth(config: AppConfig, status: bool, logout: bool):
    """Handles user authentication and credential management."""
    from .....core.constants import ANILIST_AUTH
def auth(config: AppConfig, status: bool, logout: bool, token_input: str | None):
    """
    Handles user authentication and credential management.

    This command allows you to log in to your AniList account to enable
    progress tracking and other features.

    You can provide your authentication token in three ways:
    1. Interactively: Run the command without arguments to open a browser
       and be prompted to paste the token.
    2. As an argument: Pass the token string directly to the command.
       $ viu anilist auth "your_token_here"
    3. As a file: Pass the path to a text file containing the token.
       $ viu anilist auth /path/to/token.txt
    """
    from .....libs.media_api.api import create_api_client
    from .....libs.selectors.selector import create_selector
    from ....service.auth import AuthService
    from ....service.feedback import FeedbackService

    auth_service = AuthService("anilist")
    feedback = FeedbackService(config)
    selector = create_selector(config)
    feedback.clear_console()

    if status:
        user_data = auth_service.get_auth()

@@ -29,6 +76,11 @@ def auth(config: AppConfig, status: bool, logout: bool):
        feedback.error("Not logged in.")
        return

    from .....libs.selectors.selector import create_selector

    selector = create_selector(config)
    feedback.clear_console()

    if logout:
        if selector.confirm("Are you sure you want to log out and erase your token?"):
            auth_service.clear_user_profile()

@@ -40,27 +92,14 @@ def auth(config: AppConfig, status: bool, logout: bool):
        f"You are already logged in as {auth_profile.user_profile.name}. Would you like to log in again?"
    ):
        return
    api_client = create_api_client("anilist", config)
    token = _get_token(feedback, selector, token_input)

    open_success = webbrowser.open(ANILIST_AUTH, new=2)
    if open_success:
        feedback.info("Your browser has been opened to obtain an AniList token.")
        feedback.info(
            f"or you can visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    else:
        feedback.warning(
            f"Failed to open the browser. Please visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    feedback.info(
        "After authorizing, copy the token from the address bar and paste it below."
    )

    token = selector.ask("Enter your AniList Access Token")
    if not token:
        feedback.error("Login cancelled.")
        if not token_input:
            feedback.error("Login cancelled.")
        return

    api_client = create_api_client("anilist", config)
    # Use the API client to validate the token and get profile info
    profile = api_client.authenticate(token.strip())
@@ -2,6 +2,7 @@ from typing import TYPE_CHECKING

import click


from ...core.config import AppConfig
from ...core.exceptions import ViuError
from ..utils.completion import anime_titles_shell_complete

@@ -30,7 +31,6 @@ if TYPE_CHECKING:
@click.option(
    "--anime-title",
    "-t",
    required=True,
    shell_complete=anime_titles_shell_complete,
    multiple=True,
    help="Specify which anime to download",

@@ -50,8 +50,13 @@ def search(config: AppConfig, **options: "Unpack[Options]"):
        SearchParams,
    )
    from ...libs.provider.anime.provider import create_provider
    from viu_media.core.utils.normalizer import normalize_title
    from ...libs.selectors.selector import create_selector

    if not options["anime_title"]:
        raw = click.prompt("What are you in the mood for? (comma-separated)")
        options["anime_title"] = [a.strip() for a in raw.split(",") if a.strip()]

    feedback = FeedbackService(config)
    provider = create_provider(config.general.provider)
    selector = create_selector(config)

@@ -64,7 +69,10 @@ def search(config: AppConfig, **options: "Unpack[Options]"):
    with feedback.progress(f"Fetching anime search results for {anime_title}"):
        search_results = provider.search(
            SearchParams(
                query=anime_title, translation_type=config.stream.translation_type
                query=normalize_title(
                    anime_title, config.general.provider.value, True
                ).lower(),
                translation_type=config.stream.translation_type,
            )
        )
    if not search_results:

@@ -173,6 +181,22 @@ def stream_anime(
    if not server_name:
        raise ViuError("Server not selected")
    server = servers[server_name]
    quality = [
        ep_stream.link
        for ep_stream in server.links
        if ep_stream.quality == config.stream.quality
    ]
    if not quality:
        feedback.warning("Preferred quality not found, selecting quality...")
        stream_link = selector.choose(
            "Select Quality", [link.quality for link in server.links]
        )
        if not stream_link:
            raise ViuError("Quality not selected")
        stream_link = next(
            (link.link for link in server.links if link.quality == stream_link), None
        )

    stream_link = server.links[0].link
    if not stream_link:
        raise ViuError(

@@ -10,7 +10,13 @@ from pydantic.fields import ComputedFieldInfo, FieldInfo
from pydantic_core import PydanticUndefined

from ...core.config import AppConfig
from ...core.constants import APP_ASCII_ART, CLI_NAME, DISCORD_INVITE, REPO_HOME
from ...core.constants import (
    APP_ASCII_ART,
    CLI_NAME,
    DISCORD_INVITE,
    REPO_HOME,
    SUPPORT_PROJECT_URL,
)

# The header for the config file.
config_asci = "\n".join(

@@ -38,6 +44,9 @@ CONFIG_FOOTER = f"""
# Also join the discord server
# where the anime tech community lives :)
# {DISCORD_INVITE}
# If you like the project and are able to support it, please consider buying me a coffee at {SUPPORT_PROJECT_URL}.
# If you would like to connect with me, join the discord server; from there you can dm me for hackathons, or even to tell me a joke 😂
# Otherwise, enjoy your terminal anime browser experience 😁
#
# ==============================================================================
""".lstrip()

@@ -71,7 +71,7 @@ class ConfigLoader:

        return app_config

    def load(self, update: Dict = {}) -> AppConfig:
    def load(self, update: Dict = {}, allow_setup=True) -> AppConfig:
        """
        Loads the configuration and returns a populated, validated AppConfig object.

@@ -84,7 +84,7 @@
        Raises:
            ConfigError: If the configuration file contains validation or parsing errors.
        """
        if not self.config_path.exists():
        if not self.config_path.exists() and allow_setup:
            return self._handle_first_run()

        try:
1
viu_media/cli/interactive/menu/__init__.py
Normal file
@@ -0,0 +1 @@
# Menu package for interactive session
18
viu_media/cli/interactive/menu/media/__init__.py
Normal file
@@ -0,0 +1,18 @@
# Media menu modules
# Explicit module list for PyInstaller compatibility
__all__ = [
    "downloads",
    "download_episodes",
    "dynamic_search",
    "episodes",
    "main",
    "media_actions",
    "media_airing_schedule",
    "media_characters",
    "media_review",
    "player_controls",
    "play_downloads",
    "provider_search",
    "results",
    "servers",
]

@@ -1,19 +1,48 @@
import json
import logging
import shutil
from pathlib import Path

from .....core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from .....core.utils.detect import get_python_executable
from .....libs.media_api.params import MediaSearchParams
from ...session import Context, session
from ...state import InternalDirective, MediaApiState, MenuName, State

logger = logging.getLogger(__name__)

SEARCH_CACHE_DIR = APP_CACHE_DIR / "search"
SEARCH_CACHE_DIR = APP_CACHE_DIR / "previews" / "dynamic-search"
SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json"
LAST_QUERY_FILE = SEARCH_CACHE_DIR / "last_query.txt"
RESTORE_MODE_FILE = SEARCH_CACHE_DIR / ".restore_mode"
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.template.sh").read_text(
    encoding="utf-8"
)
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(encoding="utf-8")
FILTER_PARSER_SCRIPT = FZF_SCRIPTS_DIR / "_filter_parser.py"


def _load_cached_titles() -> list[str]:
    """Load titles from cached search results for display in fzf."""
    if not SEARCH_RESULTS_FILE.exists():
        return []

    try:
        with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
            data = json.load(f)

        media_list = data.get("data", {}).get("Page", {}).get("media", [])
        titles = []
        for media in media_list:
            title_obj = media.get("title", {})
            title = (
                title_obj.get("english")
                or title_obj.get("romaji")
                or title_obj.get("native")
                or "Unknown"
            )
            titles.append(title)
        return titles
    except (IOError, json.JSONDecodeError):
        return []


@session.menu
@@ -25,12 +54,18 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
    # Ensure cache directory exists
    SEARCH_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    # Check if we're in restore mode (coming back from media_actions)
    restore_mode = RESTORE_MODE_FILE.exists()
    if restore_mode:
        # Clear the restore flag
        RESTORE_MODE_FILE.unlink(missing_ok=True)

    # Read the GraphQL search query
    from .....libs.media_api.anilist import gql

    search_query = gql.SEARCH_MEDIA.read_text(encoding="utf-8")
    # Properly escape the GraphQL query for JSON
    search_query_escaped = json.dumps(search_query)
    # Escape the GraphQL query as a JSON string literal for Python script
    search_query_json = json.dumps(search_query).replace('"', "")

    # Prepare the search script
    auth_header = ""

@@ -42,15 +77,47 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:

    replacements = {
        "GRAPHQL_ENDPOINT": "https://graphql.anilist.co",
        "GRAPHQL_QUERY": search_query_escaped,
        "CACHE_DIR": str(SEARCH_CACHE_DIR),
        "SEARCH_RESULTS_FILE": str(SEARCH_RESULTS_FILE),
        "GRAPHQL_QUERY": search_query_json,
        "SEARCH_RESULTS_FILE": SEARCH_RESULTS_FILE.as_posix(),
        "LAST_QUERY_FILE": LAST_QUERY_FILE.as_posix(),
        "AUTH_HEADER": auth_header,
    }

    for key, value in replacements.items():
        search_command = search_command.replace(f"{{{key}}}", str(value))

    # Write the filled template to a cache file
    search_script_file = SEARCH_CACHE_DIR / "search.py"
    search_script_file.write_text(search_command, encoding="utf-8")

    # Copy the filter parser module to the cache directory
    # This is required for the search script to import it
    filter_parser_dest = SEARCH_CACHE_DIR / "_filter_parser.py"
    if FILTER_PARSER_SCRIPT.exists():
        shutil.copy2(FILTER_PARSER_SCRIPT, filter_parser_dest)

    # Make the search script executable by calling it with python3
    # fzf will pass the query as {q} which becomes the first argument
    search_command_final = (
        f"{Path(get_python_executable()).as_posix()} {search_script_file.as_posix()} {{q}}"
    )

    # Header hint for filter syntax
    filter_hint = "💡 Filters: @genre:action @status:airing @year:2024 @sort:score (type @help for more)"

    # Only load previous query if we're in restore mode (coming back from media_actions)
    initial_query = None
    cached_results = None
    if restore_mode:
        # Load previous query
        if LAST_QUERY_FILE.exists():
            try:
                initial_query = LAST_QUERY_FILE.read_text(encoding="utf-8").strip()
            except IOError:
                pass
        # Load cached results to display immediately without network request
        cached_results = _load_cached_titles()

    try:
        # Prepare preview functionality
        preview_command = None

@@ -62,13 +129,19 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:

        choice = ctx.selector.search(
            prompt="Search Anime",
            search_command=search_command,
            search_command=search_command_final,
            preview=preview_command,
            header=filter_hint,
            initial_query=initial_query,
            initial_results=cached_results,
        )
        else:
            choice = ctx.selector.search(
                prompt="Search Anime",
                search_command=search_command,
                search_command=search_command_final,
                header=filter_hint,
                initial_query=initial_query,
                initial_results=cached_results,
            )
    except NotImplementedError:
        feedback.error("Dynamic search is not supported by your current selector")

@@ -107,6 +180,9 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
        logger.error(f"Could not find selected media for choice: {choice}")
        return InternalDirective.MAIN

    # Set restore mode flag so we can restore state when user goes back
    RESTORE_MODE_FILE.touch()

    # Navigate to media actions with the selected item
    return State(
        menu_name=MenuName.MEDIA_ACTIONS,

@@ -28,7 +28,9 @@ def provider_search(ctx: Context, state: State) -> State | InternalDirective:

    provider_search_results = provider.search(
        SearchParams(
            query=normalize_title(media_title, config.general.provider.value, True),
            query=normalize_title(
                media_title, config.general.provider.value, True
            ).lower(),
            translation_type=config.stream.translation_type,
        )
    )

@@ -1,6 +1,7 @@
import importlib
import importlib.util
import logging
import os
import pkgutil
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Callable, List, Optional, Union

@@ -309,30 +310,46 @@ class Session:
        return decorator

    def load_menus_from_folder(self, package: str):
        package_path = MENUS_DIR / package
        package_name = package_path.name
        logger.debug(f"Loading menus from '{package_path}'...")
        """Load menu modules from a subfolder.

        Uses pkgutil to discover modules for regular Python, and falls back
        to the package's __all__ list for PyInstaller frozen executables.
        """
        full_package_name = f"viu_media.cli.interactive.menu.{package}"
        logger.debug(f"Loading menus from package '{full_package_name}'...")

        for filename in os.listdir(package_path):
            if filename.endswith(".py") and not filename.startswith("__"):
                module_name = filename[:-3]
                full_module_name = (
                    f"viu_media.cli.interactive.menu.{package_name}.{module_name}"
        try:
            # Import the parent package first
            parent_package = importlib.import_module(full_package_name)
        except ImportError as e:
            logger.error(f"Failed to import menu package '{full_package_name}': {e}")
            return

        # Try pkgutil first (works in regular Python)
        package_path = getattr(parent_package, "__path__", None)
        module_names = []

        if package_path:
            module_names = [
                name for _, name, ispkg in pkgutil.iter_modules(package_path)
                if not ispkg and not name.startswith("_")
            ]

        # Fallback to __all__ for PyInstaller frozen executables
        if not module_names:
            module_names = getattr(parent_package, "__all__", [])
            logger.debug(f"Using __all__ fallback with {len(module_names)} modules")

        for module_name in module_names:
            full_module_name = f"{full_package_name}.{module_name}"
            try:
                # Simply importing the module will execute it,
                # which runs the @session.menu decorators
                importlib.import_module(full_module_name)
            except Exception as e:
                logger.error(
                    f"Failed to load menu module '{full_module_name}': {e}"
                )
                file_path = package_path / filename

                try:
                    spec = importlib.util.spec_from_file_location(
                        full_module_name, file_path
                    )
                    if spec and spec.loader:
                        module = importlib.util.module_from_spec(spec)
                        # The act of executing the module runs the @session.menu decorators
                        spec.loader.exec_module(module)
                except Exception as e:
                    logger.error(
                        f"Failed to load menu module '{full_module_name}': {e}"
                    )
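The discovery path above hinges on pkgutil.iter_modules returning nothing inside a PyInstaller bundle, which is why the explicit __all__ list in the package exists. A minimal sketch of the same two-step lookup, assuming a package that defines __all__:

import importlib
import pkgutil

pkg = importlib.import_module("viu_media.cli.interactive.menu.media")
names = [n for _, n, ispkg in pkgutil.iter_modules(pkg.__path__) if not ispkg]
if not names:  # frozen builds: pkgutil sees nothing, so fall back to __all__
    names = list(getattr(pkg, "__all__", []))
for name in names:
    importlib.import_module(f"{pkg.__name__}.{name}")  # importing runs the @session.menu decorators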
|
||||
|
||||
# Create a single, global instance of the Session to be imported by menu modules.
|
||||
|
||||
@@ -296,8 +296,7 @@ class DownloadService:
|
||||
message=message,
|
||||
app_name="Viu",
|
||||
app_icon=app_icon,
|
||||
timeout=self.app_config.general.desktop_notification_duration
|
||||
* 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
except: # noqa: E722
|
||||
pass
|
||||
@@ -318,7 +317,7 @@ class DownloadService:
|
||||
message=message,
|
||||
app_name="Viu",
|
||||
app_icon=app_icon,
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
except: # noqa: E722
|
||||
pass
|
||||
|
||||
@@ -41,7 +41,7 @@ class FeedbackService:
|
||||
message=message,
|
||||
app_name=CLI_NAME,
|
||||
app_icon=str(ICON_PATH),
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
return
|
||||
except: # noqa: E722
|
||||
@@ -67,7 +67,7 @@ class FeedbackService:
|
||||
message=message,
|
||||
app_name=CLI_NAME,
|
||||
app_icon=str(ICON_PATH),
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
return
|
||||
except: # noqa: E722
|
||||
@@ -94,7 +94,7 @@ class FeedbackService:
|
||||
message=message,
|
||||
app_name=CLI_NAME,
|
||||
app_icon=str(ICON_PATH),
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
return
|
||||
except: # noqa: E722
|
||||
@@ -120,7 +120,7 @@ class FeedbackService:
|
||||
message=message,
|
||||
app_name=CLI_NAME,
|
||||
app_icon=str(ICON_PATH),
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
return
|
||||
except: # noqa: E722
|
||||
@@ -176,7 +176,7 @@ class FeedbackService:
|
||||
message="No current way to display info in rofi, use fzf and the terminal instead",
|
||||
app_name=CLI_NAME,
|
||||
app_icon=str(ICON_PATH),
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
return
|
||||
except: # noqa: E722
|
||||
|
||||
@@ -101,7 +101,7 @@ class NotificationService:
|
||||
message=message,
|
||||
app_name="Viu",
|
||||
app_icon=app_icon, # plyer supports file paths or URLs depending on platform
|
||||
timeout=self.app_config.general.desktop_notification_duration * 60,
|
||||
timeout=self.app_config.general.desktop_notification_duration,
|
||||
)
|
||||
logger.info(f"Displayed notification: {message}")
|
||||
self._mark_seen(
|
||||
|
||||
@@ -57,6 +57,9 @@ class MPVIPCClient:
|
||||
|
||||
def connect(self, timeout: float = 5.0) -> None:
|
||||
"""Connect to MPV IPC socket and start the reader thread."""
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
raise MPVIPCError("Unix domain sockets are unavailable on this platform")
|
||||
|
||||
start_time = time.time()
|
||||
while time.time() - start_time < timeout:
|
||||
try:
|
||||
@@ -299,6 +302,10 @@ class MpvIPCPlayer(BaseIPCPlayer):
|
||||
def _play_with_ipc(self, player: BasePlayer, params: PlayerParams) -> PlayerResult:
|
||||
"""Play media using MPV IPC."""
|
||||
try:
|
||||
if not hasattr(socket, "AF_UNIX"):
|
||||
raise MPVIPCError(
|
||||
"MPV IPC requires Unix domain sockets, which are unavailable on this platform."
|
||||
)
|
||||
self._start_mpv_process(player, params)
|
||||
self._connect_ipc()
|
||||
self._setup_event_handling()
|
||||
|
||||
@@ -9,6 +9,8 @@ import importlib.util
|
||||
import click
|
||||
import httpx
|
||||
|
||||
from viu_media.core.utils import detect
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -138,6 +140,7 @@ def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str
|
||||
[icat_executable, "--align", "left", url],
|
||||
capture_output=capture,
|
||||
text=capture,
|
||||
env=detect.get_clean_env(),
|
||||
)
|
||||
if process.returncode == 0:
|
||||
return process.stdout if capture else None
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import re
|
||||
from hashlib import sha256
|
||||
import sys
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import httpx
|
||||
|
||||
from viu_media.core.utils import formatter
|
||||
|
||||
from ...core.config import AppConfig
|
||||
from ...core.constants import APP_CACHE_DIR, PLATFORM, SCRIPTS_DIR
|
||||
from ...core.constants import APP_CACHE_DIR, SCRIPTS_DIR
|
||||
from ...core.utils.detect import get_python_executable
|
||||
from ...core.utils.file import AtomicWriter
|
||||
from ...libs.media_api.types import (
|
||||
AiringScheduleResult,
|
||||
@@ -15,7 +18,6 @@ from ...libs.media_api.types import (
|
||||
MediaItem,
|
||||
MediaReview,
|
||||
)
|
||||
from . import ansi
|
||||
from .preview_workers import PreviewWorkerManager
|
||||
|
||||
|
||||
@@ -122,16 +124,12 @@ logger = logging.getLogger(__name__)
|
||||
PREVIEWS_CACHE_DIR = APP_CACHE_DIR / "previews"
|
||||
IMAGES_CACHE_DIR = PREVIEWS_CACHE_DIR / "images"
|
||||
INFO_CACHE_DIR = PREVIEWS_CACHE_DIR / "info"
|
||||
REVIEWS_CACHE_DIR = PREVIEWS_CACHE_DIR / "reviews"
|
||||
CHARACTERS_CACHE_DIR = PREVIEWS_CACHE_DIR / "characters"
|
||||
AIRING_SCHEDULE_CACHE_DIR = PREVIEWS_CACHE_DIR / "airing_schedule"
|
||||
|
||||
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
|
||||
TEMPLATE_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "preview.py").read_text(encoding="utf-8")
|
||||
TEMPLATE_REVIEW_PREVIEW_SCRIPT = ""
|
||||
TEMPLATE_CHARACTER_PREVIEW_SCRIPT = ""
|
||||
TEMPLATE_AIRING_SCHEDULE_PREVIEW_SCRIPT = ""
|
||||
DYNAMIC_PREVIEW_SCRIPT = ""
|
||||
DYNAMIC_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "dynamic_preview.py").read_text(
|
||||
encoding="utf-8"
|
||||
)
|
||||
|
||||
EPISODE_PATTERN = re.compile(r"^Episode\s+(\d+)\s-\s.*")

@@ -139,6 +137,23 @@ EPISODE_PATTERN = re.compile(r"^Episode\s+(\d+)\s-\s.*")
_preview_manager: Optional[PreviewWorkerManager] = None


def _ensure_ansi_utils_in_cache():
    """Copy _ansi_utils.py to the info cache directory so cached scripts can import it."""
    source = FZF_SCRIPTS_DIR / "_ansi_utils.py"
    dest = INFO_CACHE_DIR / "_ansi_utils.py"

    if source.exists() and (
        not dest.exists() or source.stat().st_mtime > dest.stat().st_mtime
    ):
        try:
            import shutil

            shutil.copy2(source, dest)
            logger.debug(f"Copied _ansi_utils.py to {INFO_CACHE_DIR}")
        except Exception as e:
            logger.warning(f"Failed to copy _ansi_utils.py to cache: {e}")


def create_preview_context():
    """
    Create a context manager for preview operations.
@@ -274,6 +289,7 @@ def get_anime_preview(
    # Ensure cache directories exist on startup
    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    _ensure_ansi_utils_in_cache()

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
@@ -293,24 +309,26 @@ def get_anime_preview(
    # Format the template with the dynamic values
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_CACHE_DIR": IMAGES_CACHE_DIR.as_posix(),
        "INFO_CACHE_DIR": INFO_CACHE_DIR.as_posix(),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "search-results",
        "PREFIX": "search-result",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    (PREVIEWS_CACHE_DIR / "search-results-preview-script.py").write_text(
        preview_script, encoding="utf-8"
    )
    preview_file = PREVIEWS_CACHE_DIR / "search-result-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = f"{sys.executable} {PREVIEWS_CACHE_DIR / 'search-results-preview-script.py'} {{}}"
    preview_script_final = (
        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
    )
    return preview_script_final
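
The loader templates use literal {PLACEHOLDER} tokens and plain str.replace rather than str.format, presumably so that brace-heavy Python code in the template survives untouched; a minimal sketch of the same substitution (the template string here is invented):

template = 'CACHE = "{INFO_CACHE_DIR}"  # mode: {PREVIEW_MODE}'
replacements = {"INFO_CACHE_DIR": "/tmp/previews/info", "PREVIEW_MODE": "full"}

script = template
for key, value in replacements.items():
    # f"{{{key}}}" renders as "{INFO_CACHE_DIR}" etc., matching the literal token.
    script = script.replace(f"{{{key}}}", value)

print(script)  # CACHE = "/tmp/previews/info"  # mode: full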


@@ -348,30 +366,175 @@ def get_episode_preview(
        logger.error(f"Failed to start episode background caching: {e}")
        # Continue with script generation even if caching fails

    # Prepare values to inject into the template
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Format the template with the dynamic values
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_PATH": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "IMAGE_CACHE_DIR": IMAGES_CACHE_DIR.as_posix(),
        "INFO_CACHE_DIR": INFO_CACHE_DIR.as_posix(),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
        "RESET": ansi.RESET,
        "PREFIX": f"{media_item.title.english}_Episode_",
        "SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "episode",
        "KEY": f"{media_item.title.english.replace(formatter.DOUBLE_QUOTE, formatter.SINGLE_QUOTE)}",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script
    preview_file = PREVIEWS_CACHE_DIR / "episode-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = (
        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
    )
    return preview_script_final


def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -> str:
    """
    Generate the generic loader script for character previews and start background caching.
    """

    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Start managed background caching for episodes
    try:
        preview_manager = _get_preview_manager()
        worker = preview_manager.get_character_worker()
        worker.cache_character_previews(choice_map, config)
        logger.debug("Started background caching for character previews")
    except Exception as e:
        logger.error(f"Failed to start episode background caching: {e}")

    # Use the generic loader script
    preview_script = TEMPLATE_PREVIEW_SCRIPT

    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": IMAGES_CACHE_DIR.as_posix(),
        "INFO_CACHE_DIR": INFO_CACHE_DIR.as_posix(),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "character",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    preview_file = PREVIEWS_CACHE_DIR / "character-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = (
        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
    )
    return preview_script_final


def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) -> str:
    """
    Generate the generic loader script for review previews and start background caching.
    """

    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Start managed background caching for episodes
    try:
        preview_manager = _get_preview_manager()
        worker = preview_manager.get_review_worker()
        worker.cache_review_previews(choice_map, config)
        logger.debug("Started background caching for review previews")
    except Exception as e:
        logger.error(f"Failed to start episode background caching: {e}")

    # Use the generic loader script
    preview_script = TEMPLATE_PREVIEW_SCRIPT

    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": IMAGES_CACHE_DIR.as_posix(),
        "INFO_CACHE_DIR": INFO_CACHE_DIR.as_posix(),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "review",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    preview_file = PREVIEWS_CACHE_DIR / "review-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    preview_script_final = (
        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
    )
    return preview_script_final


def get_airing_schedule_preview(
    schedule_result: AiringScheduleResult, config: AppConfig, anime_title: str = "Anime"
) -> str:
    """
    Generate the generic loader script for airing schedule previews and start background caching.
    """

    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Start managed background caching for episodes
    try:
        preview_manager = _get_preview_manager()
        worker = preview_manager.get_airing_schedule_worker()
        worker.cache_airing_schedule_preview(anime_title, schedule_result, config)
        logger.debug("Started background caching for airing schedule previews")
    except Exception as e:
        logger.error(f"Failed to start episode background caching: {e}")

    # Use the generic loader script
    preview_script = TEMPLATE_PREVIEW_SCRIPT

    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_DIR": IMAGES_CACHE_DIR.as_posix(),
        "INFO_CACHE_DIR": INFO_CACHE_DIR.as_posix(),
        "IMAGE_RENDERER": config.general.image_renderer,
        # Color codes
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "PREFIX": "airing-schedule",
        "KEY": "",
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    preview_file = PREVIEWS_CACHE_DIR / "airing-schedule-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    # preview_script_final = f"{sys.executable} {preview_file} {{}}"
    # NOTE: disabled cause not very useful
    return ""


def get_dynamic_anime_preview(config: AppConfig) -> str:
@@ -381,17 +544,32 @@ def get_dynamic_anime_preview(config: AppConfig) -> str:
    This is different from regular anime preview because:
    1. We don't have media items upfront
    2. The preview needs to work with search results as they come in
    3. Preview is handled entirely in shell by parsing JSON results
    3. Preview script dynamically loads data from search results JSON

    Args:
        config: Application configuration

    Returns:
        Preview script content for fzf dynamic search
        Preview script command for fzf dynamic search
    """
    # Ensure cache directories exist
    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    search_cache_dir = APP_CACHE_DIR / "previews" / "dynamic-search"
    search_cache_dir.mkdir(parents=True, exist_ok=True)
    source = FZF_SCRIPTS_DIR / "_ansi_utils.py"
    dest = search_cache_dir / "_ansi_utils.py"

    if source.exists() and (
        not dest.exists() or source.stat().st_mtime > dest.stat().st_mtime
    ):
        try:
            import shutil

            shutil.copy2(source, dest)
            logger.debug(f"Copied _ansi_utils.py to {INFO_CACHE_DIR}")
        except Exception as e:
            logger.warning(f"Failed to copy _ansi_utils.py to cache: {e}")

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")
@@ -399,42 +577,38 @@ def get_dynamic_anime_preview(config: AppConfig) -> str:
    # Use the dynamic preview script template
    preview_script = DYNAMIC_PREVIEW_SCRIPT

    search_cache_dir = APP_CACHE_DIR / "search"
    search_results_file = search_cache_dir / "current_search_results.json"

    # Prepare values to inject into the template
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Format the template with the dynamic values
    # Prepare replacements for the template
    replacements = {
        "SEARCH_RESULTS_FILE": search_results_file.as_posix(),
        "IMAGE_CACHE_DIR": IMAGES_CACHE_DIR.as_posix(),
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_PATH": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "IMAGE_RENDERER": config.general.image_renderer,
        "SEARCH_RESULTS_FILE": str(search_results_file),
        # Color codes
        "C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
        "RESET": ansi.RESET,
        "SCALE_UP": " --scale-up" if config.general.preview_scale_up else "",
        "HEADER_COLOR": ",".join(HEADER_COLOR),
        "SEPARATOR_COLOR": ",".join(SEPARATOR_COLOR),
        "SCALE_UP": str(config.general.preview_scale_up),
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script
    # Write the preview script to cache
    preview_file = search_cache_dir / "dynamic-search-preview-script.py"
    preview_file.write_text(preview_script, encoding="utf-8")

    # Return the command to execute the preview script
    preview_script_final = (
        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
    )
    return preview_script_final


def _get_preview_manager() -> PreviewWorkerManager:
    """Get or create the global preview worker manager."""
    global _preview_manager
    if _preview_manager is None:
        _preview_manager = PreviewWorkerManager(
            IMAGES_CACHE_DIR, INFO_CACHE_DIR, REVIEWS_CACHE_DIR
        )
        _preview_manager = PreviewWorkerManager(IMAGES_CACHE_DIR, INFO_CACHE_DIR)
    return _preview_manager
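
A minimal sketch of the lazy module-level singleton pattern used by _get_preview_manager (names simplified; note that this simple form is not thread-safe, so a lock would be needed if first use could race):

from typing import Optional

class Manager: ...

_manager: Optional[Manager] = None

def get_manager() -> Manager:
    # Created once on first use; every later call returns the same instance.
    global _manager
    if _manager is None:
        _manager = Manager()
    return _manager

assert get_manager() is get_manager()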


@@ -458,111 +632,3 @@ def get_preview_worker_status() -> dict:
    if _preview_manager:
        return _preview_manager.get_status()
    return {"preview_worker": None, "episode_worker": None}


def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) -> str:
    """
    Generate the generic loader script for review previews and start background caching.
    """

    REVIEWS_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    preview_manager = _get_preview_manager()
    worker = preview_manager.get_review_worker()
    worker.cache_review_previews(choice_map, config)
    logger.debug("Started background caching for review previews")

    # Use the generic loader script
    preview_script = TEMPLATE_REVIEW_PREVIEW_SCRIPT
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Inject the correct cache path and color codes
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "INFO_CACHE_DIR": str(REVIEWS_CACHE_DIR),
        "PATH_SEP": path_sep,
        "C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_RULE": ansi.get_true_fg(
            config.fzf.preview_separator_color.split(","), bold=True
        ),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script


def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -> str:
    """
    Generate the generic loader script for character previews and start background caching.
    """

    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    preview_manager = _get_preview_manager()
    worker = preview_manager.get_character_worker()
    worker.cache_character_previews(choice_map, config)
    logger.debug("Started background caching for character previews")

    # Use the generic loader script
    preview_script = TEMPLATE_CHARACTER_PREVIEW_SCRIPT
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Inject the correct cache path and color codes
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "IMAGE_CACHE_DIR": str(IMAGES_CACHE_DIR),
        "PATH_SEP": path_sep,
        "C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_RULE": ansi.get_true_fg(
            config.fzf.preview_separator_color.split(","), bold=True
        ),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script


def get_airing_schedule_preview(
    schedule_result: AiringScheduleResult, config: AppConfig, anime_title: str = "Anime"
) -> str:
    """
    Generate the generic loader script for airing schedule previews and start background caching.
    """

    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    preview_manager = _get_preview_manager()
    worker = preview_manager.get_airing_schedule_worker()
    worker.cache_airing_schedule_preview(anime_title, schedule_result, config)
    logger.debug("Started background caching for airing schedule previews")

    # Use the generic loader script
    preview_script = TEMPLATE_AIRING_SCHEDULE_PREVIEW_SCRIPT
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Inject the correct cache path and color codes
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "INFO_CACHE_DIR": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "C_TITLE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_KEY": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_VALUE": ansi.get_true_fg(config.fzf.header_color.split(","), bold=True),
        "C_RULE": ansi.get_true_fg(
            config.fzf.preview_separator_color.split(","), bold=True
        ),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script

@@ -6,6 +6,7 @@ including image downloads and info text generation with proper lifecycle managem
"""

import logging
from pathlib import Path
from typing import Dict, List, Optional

import httpx
@@ -31,7 +32,9 @@ logger = logging.getLogger(__name__)


FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
TEMPLATE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "info.py").read_text(encoding="utf-8")
TEMPLATE_MEDIA_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "media_info.py").read_text(
    encoding="utf-8"
)
TEMPLATE_EPISODE_INFO_SCRIPT = (FZF_SCRIPTS_DIR / "episode_info.py").read_text(
    encoding="utf-8"
)
@@ -142,7 +145,7 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
    def _generate_info_text(self, media_item: MediaItem, config: AppConfig) -> str:
        """Generate formatted info text for a media item."""
        # Import here to avoid circular imports
        info_script = TEMPLATE_INFO_SCRIPT
        info_script = TEMPLATE_MEDIA_INFO_SCRIPT
        description = formatter.clean_html(
            media_item.description or "No description available."
        )
@@ -186,7 +189,12 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
            ),
            "STUDIOS": formatter.shell_safe(
                formatter.format_list_with_commas(
                    [t.name for t in media_item.studios if t.name]
                    [t.name for t in media_item.studios if t.name and t.is_animation_studio]
                )
            ),
            "PRODUCERS": formatter.shell_safe(
                formatter.format_list_with_commas(
                    [t.name for t in media_item.studios if t.name and not t.is_animation_studio]
                )
            ),
            "SYNONYMNS": formatter.shell_safe(
@@ -236,7 +244,7 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
        """Generate a cache hash for the given text."""
        from hashlib import sha256

        return f"search-results-{sha256(text.encode('utf-8')).hexdigest()}"
        return f"search-result-{sha256(text.encode('utf-8')).hexdigest()}"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        """Handle task completion with enhanced logging."""
@@ -306,7 +314,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):

        for episode_str in episodes:
            hash_id = self._get_cache_hash(
                f"{media_item.title.english}_Episode_{episode_str}"
                f"{media_item.title.english.replace(formatter.DOUBLE_QUOTE, formatter.SINGLE_QUOTE)}-{episode_str}"
            )

            # Find episode data
@@ -357,7 +365,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):
        replacements = {
            "TITLE": formatter.shell_safe(title),
            "NEXT_EPISODE": formatter.shell_safe(
                f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X)')}"
                f"Episode {media_item.next_airing.episode} on {formatter.format_date(media_item.next_airing.airing_at, '%A, %d %B %Y at %X')}"
                if media_item.next_airing
                else "N/A"
            ),
@@ -390,7 +398,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):
    def _save_info_text(self, info_text: str, hash_id: str) -> None:
        """Save episode info text to cache."""
        try:
            info_path = self.info_cache_dir / hash_id
            info_path = self.info_cache_dir / (hash_id + ".py")
            with AtomicWriter(info_path) as f:
                f.write(info_text)
            logger.debug(f"Successfully cached episode info: {hash_id}")
@@ -402,7 +410,7 @@ class EpisodeCacheWorker(ManagedBackgroundWorker):
        """Generate a cache hash for the given text."""
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "episode-" + sha256(text.encode("utf-8")).hexdigest()

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        """Handle task completion with enhanced logging."""
@@ -419,9 +427,12 @@ class ReviewCacheWorker(ManagedBackgroundWorker):
    Specialized background worker for caching fully-rendered media review previews.
    """

    def __init__(self, reviews_cache_dir, max_workers: int = 10):
    def __init__(
        self, images_cache_dir: Path, info_cache_dir: Path, max_workers: int = 10
    ):
        super().__init__(max_workers=max_workers, name="ReviewCacheWorker")
        self.reviews_cache_dir = reviews_cache_dir
        self.images_cache_dir = images_cache_dir
        self.info_cache_dir = info_cache_dir

    def cache_review_previews(
        self, choice_map: Dict[str, MediaReview], config: AppConfig
@@ -469,7 +480,7 @@ class ReviewCacheWorker(ManagedBackgroundWorker):
    def _save_preview_content(self, content: str, hash_id: str) -> None:
        """Saves the final preview content to the cache."""
        try:
            info_path = self.reviews_cache_dir / hash_id
            info_path = self.info_cache_dir / hash_id
            with AtomicWriter(info_path) as f:
                f.write(content)
            logger.debug(f"Successfully cached review preview: {hash_id}")
@@ -480,7 +491,7 @@ class ReviewCacheWorker(ManagedBackgroundWorker):
    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "review-" + sha256(text.encode("utf-8")).hexdigest() + ".py"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
@@ -615,7 +626,7 @@ class CharacterCacheWorker(ManagedBackgroundWorker):
    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "character-" + sha256(text.encode("utf-8")).hexdigest() + ".py"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
@@ -739,7 +750,7 @@ class AiringScheduleCacheWorker(ManagedBackgroundWorker):
    def _get_cache_hash(self, text: str) -> str:
        from hashlib import sha256

        return sha256(text.encode("utf-8")).hexdigest()
        return "airing-schedule-" + sha256(text.encode("utf-8")).hexdigest() + ".py"

    def _on_task_completed(self, task: WorkerTask, future) -> None:
        super()._on_task_completed(task, future)
@@ -755,7 +766,7 @@ class PreviewWorkerManager:
    caching workers with automatic lifecycle management.
    """

    def __init__(self, images_cache_dir, info_cache_dir, reviews_cache_dir):
    def __init__(self, images_cache_dir, info_cache_dir):
        """
        Initialize the preview worker manager.

@@ -766,7 +777,6 @@ class PreviewWorkerManager:
        """
        self.images_cache_dir = images_cache_dir
        self.info_cache_dir = info_cache_dir
        self.reviews_cache_dir = reviews_cache_dir
        self._preview_worker: Optional[PreviewCacheWorker] = None
        self._episode_worker: Optional[EpisodeCacheWorker] = None
        self._review_worker: Optional[ReviewCacheWorker] = None
@@ -810,7 +820,9 @@ class PreviewWorkerManager:
        # Clean up old worker
        thread_manager.shutdown_worker("review_cache_worker")

        self._review_worker = ReviewCacheWorker(self.reviews_cache_dir)
        self._review_worker = ReviewCacheWorker(
            self.images_cache_dir, self.info_cache_dir
        )
        self._review_worker.start()
        thread_manager.register_worker("review_cache_worker", self._review_worker)


@@ -2,11 +2,12 @@ from ..constants import APP_DATA_DIR, DEFAULTS_DIR, PLATFORM, USER_VIDEOS_DIR
from ..utils import detect

# GeneralConfig
GENERAL_WELCOME_SCREEN = True
GENERAL_PYGMENT_STYLE = "github-dark"
GENERAL_PREFERRED_SPINNER = "smiley"
GENERAL_API_CLIENT = "anilist"
GENERAL_PREFERRED_TRACKER = "local"
GENERAL_DESKTOP_NOTIFICATION_DURATION = 5
GENERAL_DESKTOP_NOTIFICATION_DURATION = 5 * 60
GENERAL_PROVIDER = "allanime"


@@ -32,6 +33,7 @@ def GENERAL_IMAGE_RENDERER():

GENERAL_MANGA_VIEWER = "feh"
GENERAL_CHECK_FOR_UPDATES = True
GENERAL_SHOW_NEW_RELEASE = True
GENERAL_UPDATE_CHECK_INTERVAL = 12
GENERAL_CACHE_REQUESTS = True
GENERAL_MAX_CACHE_LIFETIME = "03:00:00"

@@ -1,5 +1,6 @@
# GeneralConfig

GENERAL_WELCOME_SCREEN = "Whether to enable the welcome screen, that runs once per day"
GENERAL_PYGMENT_STYLE = "The pygment style to use"
GENERAL_PREFERRED_SPINNER = "The spinner to use"
GENERAL_API_CLIENT = "The media database API to use (e.g., 'anilist', 'jikan')."
@@ -24,6 +25,9 @@ GENERAL_IMAGE_RENDERER = (
)
GENERAL_MANGA_VIEWER = "The external application to use for viewing manga pages."
GENERAL_CHECK_FOR_UPDATES = "Automatically check for new versions of Viu on startup."
GENERAL_SHOW_NEW_RELEASE = (
    "Whether to show release notes after every update when running the new version"
)
GENERAL_UPDATE_CHECK_INTERVAL = "The interval in hours to check for updates"
GENERAL_CACHE_REQUESTS = (
    "Enable caching of network requests to speed up subsequent operations."
@@ -128,6 +132,7 @@ APP_SERVICE = "Configuration for the background download service."
APP_FZF = "Settings for the FZF selector interface."
APP_ROFI = "Settings for the Rofi selector interface."
APP_MPV = "Configuration for the MPV media player."
APP_VLC = "Configuration for the VLC media player."
APP_MEDIA_REGISTRY = "Configuration for the media registry."
APP_SESSIONS = "Configuration for sessions."


@@ -156,6 +156,9 @@ class GeneralConfig(BaseModel):
        default=defaults.GENERAL_API_CLIENT,
        description=desc.GENERAL_API_CLIENT,
    )
    welcome_screen: bool = Field(
        default=defaults.GENERAL_WELCOME_SCREEN, description=desc.GENERAL_WELCOME_SCREEN
    )
    provider: ProviderName = Field(
        default=ProviderName.ALLANIME,
        description=desc.GENERAL_PROVIDER,
@@ -178,7 +181,9 @@ class GeneralConfig(BaseModel):
        description=desc.GENERAL_SCALE_PREVIEW,
    )

    image_renderer: Literal["icat", "chafa", "imgcat"] = Field(
    image_renderer: Literal[
        "icat", "chafa", "imgcat", "system-sixels", "system-kitty", "system-default"
    ] = Field(
        default_factory=defaults.GENERAL_IMAGE_RENDERER,
        description=desc.GENERAL_IMAGE_RENDERER,
    )
@@ -190,6 +195,10 @@ class GeneralConfig(BaseModel):
        default=defaults.GENERAL_CHECK_FOR_UPDATES,
        description=desc.GENERAL_CHECK_FOR_UPDATES,
    )
    show_new_release: bool = Field(
        default=defaults.GENERAL_SHOW_NEW_RELEASE,
        description=desc.GENERAL_SHOW_NEW_RELEASE,
    )
    update_check_interval: float = Field(
        default=defaults.GENERAL_UPDATE_CHECK_INTERVAL,
        description=desc.GENERAL_UPDATE_CHECK_INTERVAL,
@@ -525,6 +534,7 @@ class AppConfig(BaseModel):
        description=desc.APP_ROFI,
    )
    mpv: MpvConfig = Field(default_factory=MpvConfig, description=desc.APP_MPV)
    vlc: VlcConfig = Field(default_factory=VlcConfig, description=desc.APP_VLC)
    media_registry: MediaRegistryConfig = Field(
        default_factory=MediaRegistryConfig, description=desc.APP_MEDIA_REGISTRY
    )

@@ -9,7 +9,8 @@ CLI_NAME_LOWER = "viu"
PROJECT_NAME = "viu-media"
APP_NAME = os.environ.get(f"{CLI_NAME}_APP_NAME", CLI_NAME_LOWER)

USER_NAME = os.environ.get("USERNAME", "User")
USER_NAME = os.environ.get("USERNAME", os.environ.get("USER", "User"))


__version__ = metadata.version("viu_media")

@@ -25,7 +26,7 @@ ANILIST_AUTH = (
)

try:
    APP_DIR = Path(str(resources.files(CLI_NAME.lower())))
    APP_DIR = Path(str(resources.files(PROJECT_NAME.lower())))

except ModuleNotFoundError:
    from pathlib import Path
@@ -85,3 +86,4 @@ USER_VIDEOS_DIR.mkdir(parents=True, exist_ok=True)
USER_CONFIG = APP_DATA_DIR / "config.toml"

LOG_FILE = LOG_FOLDER / "app.log"
SUPPORT_PROJECT_URL = "https://buymeacoffee.com/benexl"

@@ -21,7 +21,7 @@ from rich.progress import (
)
from rich.prompt import Confirm
from ..utils.file import sanitize_filename

from ..utils.detect import get_clean_env
from ..exceptions import ViuError
from ..patterns import TORRENT_REGEX
from ..utils.networking import get_remote_filename
@@ -372,6 +372,7 @@ class DefaultDownloader(BaseDownloader):
                capture_output=params.silent,  # Only suppress ffmpeg output if silent
                text=True,
                check=True,
                env=get_clean_env(),
            )

            final_output_path = video_path.parent / merged_filename

@@ -9,6 +9,7 @@ class DownloadParams:
    episode_title: str
    silent: bool
    progress_hooks: list[Callable] = field(default_factory=list)
    logger: object | None = None
    vid_format: str = "best"
    force_unknown_ext: bool = False
    verbose: bool = False

@@ -11,7 +11,7 @@ from rich.prompt import Confirm

import yt_dlp
from yt_dlp.utils import sanitize_filename

from ..utils.detect import get_clean_env
from ..exceptions import ViuError
from ..patterns import TORRENT_REGEX
from ..utils.networking import get_remote_filename
@@ -30,6 +30,9 @@ class YtDLPDownloader(BaseDownloader):
        sub_paths = []
        merged_path = None

        logger.debug(f"Starting download for URL: {params.url}")
        logger.debug(f"Using Headers: {params.headers}")

        if TORRENT_REGEX.match(params.url):
            from .torrents import download_torrent_with_webtorrent_cli

@@ -91,6 +94,7 @@ class YtDLPDownloader(BaseDownloader):
            else tuple(),
            "progress_hooks": params.progress_hooks,
            "nocheckcertificate": params.no_check_certificate,
            "logger": params.logger,
        }
        opts = opts
        if params.force_ffmpeg or params.hls_use_mpegts or params.hls_use_h264:
@@ -220,7 +224,7 @@ class YtDLPDownloader(BaseDownloader):

        # Run the ffmpeg command
        try:
            subprocess.run(args)
            subprocess.run(args, env=get_clean_env())
            final_output_path = video_path.parent / merged_filename

            if final_output_path.exists():

@@ -56,3 +56,48 @@ def is_running_kitty_terminal() -> bool:

def has_fzf() -> bool:
    return True if shutil.which("fzf") else False


def is_frozen() -> bool:
    """Check if running as a PyInstaller frozen executable."""
    return getattr(sys, "frozen", False)


def get_python_executable() -> str:
    """
    Get the Python executable path.

    In frozen (PyInstaller) apps, sys.executable points to the .exe,
    so we need to find the system Python instead.

    Returns:
        Path to a Python executable.
    """
    if is_frozen():
        # We're in a frozen app - find system Python
        for python_name in ["python3", "python", "py"]:
            python_path = shutil.which(python_name)
            if python_path:
                return python_path
        # Fallback - this likely won't work but is the best we can do
        return "python"
    else:
        return sys.executable
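
The preview builders earlier in this diff feed the result into a portable fzf command string; a minimal sketch of that consumption (the script path is hypothetical, and sys.executable stands in for get_python_executable() outside a frozen build):

import sys
from pathlib import Path

# Stand-in for detect.get_python_executable() when not running frozen.
python = Path(sys.executable).as_posix()
script = Path("/tmp/previews/search-result-preview-script.py")  # hypothetical path

# fzf later substitutes the selected line for the literal {} placeholder.
preview_cmd = f"{python} {script.as_posix()} {{}}"
print(preview_cmd)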


def get_clean_env() -> dict[str, str]:
    """
    Returns a copy of the environment with LD_LIBRARY_PATH fixed for system subprocesses
    when running as a PyInstaller frozen application.
    This prevents system binaries (like mpv, ffmpeg) from loading incompatible
    libraries from the PyInstaller bundle.
    """
    env = os.environ.copy()
    if is_frozen():
        # PyInstaller saves the original LD_LIBRARY_PATH in LD_LIBRARY_PATH_ORIG
        if "LD_LIBRARY_PATH_ORIG" in env:
            env["LD_LIBRARY_PATH"] = env["LD_LIBRARY_PATH_ORIG"]
        else:
            # If orig didn't exist, LD_LIBRARY_PATH shouldn't exist for the subprocess
            env.pop("LD_LIBRARY_PATH", None)
    return env
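
The rest of this diff threads get_clean_env() through every subprocess call; a minimal usage sketch (assuming the detect module import path shown elsewhere in this diff):

import subprocess

from viu_media.core.utils import detect

# External binaries get the sanitized environment so a PyInstaller build
# does not leak its bundled LD_LIBRARY_PATH into them.
subprocess.run(["ffmpeg", "-version"], env=detect.get_clean_env(), check=False)
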
@@ -5,6 +5,8 @@ from typing import Dict, List, Optional, Union
from ...libs.media_api.types import AiringSchedule

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")
SINGLE_QUOTE = "'"
DOUBLE_QUOTE = '"'


def format_media_duration(total_minutes: Optional[int]) -> str:
@@ -182,13 +184,22 @@ def format_score(score: Optional[float]) -> str:

def shell_safe(text: Optional[str]) -> str:
    """
    Escapes a string for safe inclusion in a shell script,
    specifically for use within double quotes. It escapes backticks,
    double quotes, and dollar signs.
    Escapes a string for safe inclusion in a Python script string literal.
    This is used when generating Python cache scripts with embedded text content.

    For Python string literals, we need to:
    - Escape backslashes first (so existing backslashes don't interfere)
    - Escape double quotes (to not break double-quoted string literals)
    - Escape single quotes (to not break single-quoted string literals)
    """
    if not text:
        return ""
    return text.replace("`", "\\`").replace('"', '\\"').replace("$", "\\$")
    # Escape backslashes first
    result = text.replace("\\", "\\\\")
    # Escape both quote types for safe inclusion in any string literal
    result = result.replace('"', r"\"")
    result = result.replace("'", r"\'")
    return result
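
A worked example of the new escaping, using the rewritten shell_safe above:

text = 'He said "it\'s C:\\anime" loudly'
escaped = shell_safe(text)
# The single backslash is doubled first, then both quote styles are escaped:
print(escaped)  # He said \"it\'s C:\\anime\" loudly
# The result can be embedded inside either '...' or "..." in a generated script.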


def extract_episode_number(title: str) -> Optional[float]:

@@ -323,7 +323,14 @@ def to_generic_user_list_result(data: AnilistMediaLists) -> Optional[MediaSearch
def to_generic_user_profile(data: AnilistViewerData) -> Optional[UserProfile]:
    """Maps a raw AniList viewer response to a generic UserProfile."""

    viewer_data: Optional[AnilistCurrentlyLoggedInUser] = data["data"]["Viewer"]
    data_node = data.get("data")
    if not data_node:
        return None

    viewer_data: Optional[AnilistCurrentlyLoggedInUser] = data_node.get("Viewer")

    if not viewer_data:
        return None

    return UserProfile(
        id=viewer_data["id"],
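
The rewritten mapper guards each level of the response instead of indexing blindly; a minimal sketch of the difference (the sample payloads are invented):

payload_ok = {"data": {"Viewer": {"id": 1}}}
payload_err = {"errors": [{"message": "Invalid token"}]}  # no "data" key at all

def viewer_id(data: dict):
    data_node = data.get("data")
    if not data_node:
        return None
    viewer = data_node.get("Viewer")
    return viewer["id"] if viewer else None

print(viewer_id(payload_ok))   # 1
print(viewer_id(payload_err))  # None (the old direct indexing raised KeyError)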

@@ -407,3 +407,5 @@ class MediaYear(Enum):
    _2023 = "2023"
    _2024 = "2024"
    _2025 = "2025"
    _2026 = "2026"

@@ -52,7 +52,7 @@ class MpvPlayer(BasePlayer):
        if TORRENT_REGEX.match(params.url) and detect.is_running_in_termux():
            raise ViuError("Unable to play torrents on termux")
        elif params.syncplay and detect.is_running_in_termux():
            raise ViuError("Unable to play torrents on termux")
            raise ViuError("Unable to play with syncplay on termux")
        elif detect.is_running_in_termux():
            return self._play_on_mobile(params)
        else:
@@ -97,7 +97,7 @@ class MpvPlayer(BasePlayer):
            "is.xyz.mpv/.MPVActivity",
        ]

        subprocess.run(args)
        subprocess.run(args, env=detect.get_clean_env())

        return PlayerResult(params.episode)

@@ -146,6 +146,7 @@ class MpvPlayer(BasePlayer):
            text=True,
            encoding="utf-8",
            check=False,
            env=detect.get_clean_env(),
        )
        if proc.stdout:
            for line in reversed(proc.stdout.split("\n")):
@@ -185,7 +186,7 @@ class MpvPlayer(BasePlayer):

        logger.info(f"Starting MPV with IPC socket: {socket_path}")

        process = subprocess.Popen(pre_args + mpv_args)
        process = subprocess.Popen(pre_args + mpv_args, env=detect.get_clean_env())

        return process

@@ -210,7 +211,7 @@ class MpvPlayer(BasePlayer):
            args.append("--player-args")
            args.extend(mpv_args)

        subprocess.run(args)
        subprocess.run(args, env=detect.get_clean_env())
        return PlayerResult(params.episode)

    def _stream_on_desktop_with_syncplay(self, params: PlayerParams) -> PlayerResult:
@@ -232,7 +233,7 @@ class MpvPlayer(BasePlayer):
        if mpv_args := self._create_mpv_cli_options(params):
            args.append("--")
            args.extend(mpv_args)
        subprocess.run(args)
        subprocess.run(args, env=detect.get_clean_env())

        return PlayerResult(params.episode)


@@ -41,6 +41,10 @@ class PlayerFactory:
            from .mpv.player import MpvPlayer

            return MpvPlayer(config.mpv)
        elif player_name == "vlc":
            from .vlc.player import VlcPlayer

            return VlcPlayer(config.vlc)
        raise NotImplementedError(
            f"Configuration logic for player '{player_name}' not implemented in factory."
        )

@@ -46,10 +46,11 @@ class VlcPlayer(BasePlayer):
        Returns:
            PlayerResult: Information about the playback session.
        """
        if not self.executable:
            raise ViuError("VLC executable not found in PATH.")

        if TORRENT_REGEX.match(params.url) and detect.is_running_in_termux():
            raise ViuError("Unable to play torrents on termux")
        elif params.syncplay and detect.is_running_in_termux():
            raise ViuError("Unable to play with syncplay on termux")
        elif detect.is_running_in_termux():
            return self._play_on_mobile(params)
        else:
            return self._play_on_desktop(params)
@@ -102,7 +103,7 @@ class VlcPlayer(BasePlayer):
            params.title,
        ]

        subprocess.run(args)
        subprocess.run(args, env=detect.get_clean_env())

        return PlayerResult(episode=params.episode)

@@ -116,6 +117,9 @@ class VlcPlayer(BasePlayer):
        Returns:
            PlayerResult: Information about the playback session.
        """
        if not self.executable:
            raise ViuError("VLC executable not found in PATH.")

        if TORRENT_REGEX.search(params.url):
            return self._stream_on_desktop_with_webtorrent_cli(params)

@@ -130,7 +134,7 @@ class VlcPlayer(BasePlayer):
        if self.config.args:
            args.extend(self.config.args.split(","))

        subprocess.run(args, encoding="utf-8")
        subprocess.run(args, encoding="utf-8", env=detect.get_clean_env())
        return PlayerResult(episode=params.episode)

    def _stream_on_desktop_with_webtorrent_cli(
@@ -155,7 +159,7 @@ class VlcPlayer(BasePlayer):
            args.append("--player-args")
            args.extend(self.config.args.split(","))

        subprocess.run(args)
        subprocess.run(args, env=detect.get_clean_env())
        return PlayerResult(episode=params.episode)


@@ -3,6 +3,8 @@ import re
ANIMEPAHE = "animepahe.si"
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api"
CDN_PROVIDER = "kwik.cx"
CDN_PROVIDER_BASE = f"https://{CDN_PROVIDER}"

SERVERS_AVAILABLE = ["kwik"]
REQUEST_HEADERS = {
@@ -25,7 +27,7 @@ SERVER_HEADERS = {
    "Accept-Encoding": "Utf-8",
    "DNT": "1",
    "Connection": "keep-alive",
    "Referer": "https://animepahe.si/",
    "Referer": ANIMEPAHE_BASE + "/",
    "Upgrade-Insecure-Requests": "1",
    "Sec-Fetch-Dest": "iframe",
    "Sec-Fetch-Mode": "navigate",
@@ -33,5 +35,22 @@ SERVER_HEADERS = {
    "Priority": "u=4",
    "TE": "trailers",
}

STREAM_HEADERS = {
    # "Host": "vault-16.owocdn.top",  # This will have to be the actual host of the stream (behind Kwik)
    "Accept": "*/*",
    "Accept-Language": "en-US,en;q=0.5",
    "Accept-Encoding": "gzip, deflate, br, zstd",
    "Origin": CDN_PROVIDER_BASE,
    "Sec-GPC": "1",
    "Connection": "keep-alive",
    "Referer": CDN_PROVIDER_BASE + "/",
    "Sec-Fetch-Dest": "empty",
    "Sec-Fetch-Mode": "cors",
    "Sec-Fetch-Site": "cross-site",
    "TE": "trailers",
}


JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
KWIK_RE = re.compile(r"Player\|(.+?)'")

@@ -1,5 +1,3 @@
from typing import Any

from ..types import (
    Anime,
    AnimeEpisodeInfo,
@@ -87,13 +85,19 @@ def map_to_anime_result(


def map_to_server(
    episode: AnimeEpisodeInfo, translation_type: Any, quality: Any, stream_link: Any
    episode: AnimeEpisodeInfo,
    translation_type: str,
    stream_links: list[tuple[str, str]],
    headers: dict[str, str],
) -> Server:
    links = [
        EpisodeStream(
            link=stream_link,
            quality=quality,
            link=link[1],
            quality=link[0] if link[0] in ["360", "480", "720", "1080"] else "1080",  # type:ignore
            translation_type=translation_type_map[translation_type],
        )
        for link in stream_links
    ]
    return Server(name="kwik", links=links, episode_title=episode.title)
    return Server(
        name="kwik", links=links, episode_title=episode.title, headers=headers
    )

@@ -1,6 +1,7 @@
import logging
from functools import lru_cache
from typing import Iterator, Optional
from urllib.parse import urlparse

from ..base import BaseAnimeProvider
from ..params import AnimeParams, EpisodeStreamsParams, SearchParams
@@ -9,9 +10,11 @@ from ..utils.debug import debug_provider
from .constants import (
    ANIMEPAHE_BASE,
    ANIMEPAHE_ENDPOINT,
    CDN_PROVIDER,
    JUICY_STREAM_REGEX,
    REQUEST_HEADERS,
    SERVER_HEADERS,
    STREAM_HEADERS,
)
from .extractor import process_animepahe_embed_page
from .mappers import map_to_anime_result, map_to_search_results, map_to_server
@@ -131,15 +134,18 @@ class AnimePahe(BaseAnimeProvider):
        res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
        quality = None
        translation_type = None
        stream_link = None
        stream_links = []
        stream_host = None

        # TODO: better document the scraping process
        for res_dict in res_dicts:
            # the actual attributes are data attributes in the original html 'prefixed with data-'
            embed_url = res_dict["src"]
            logger.debug(f"Found embed url: {embed_url}")
            data_audio = "dub" if res_dict["audio"] == "eng" else "sub"

            if data_audio != params.translation_type:
                logger.debug(f"Found {data_audio} but wanted {params.translation_type}")
                continue

            if not embed_url:
@@ -155,22 +161,34 @@ class AnimePahe(BaseAnimeProvider):
            )
            embed_response.raise_for_status()
            embed_page = embed_response.text
            logger.debug("Processing embed page for JS decoding")

            decoded_js = process_animepahe_embed_page(embed_page)
            if not decoded_js:
                logger.error("failed to decode embed page")
                continue
            logger.debug(f"Decoded JS: {decoded_js[:100]}...")
            juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
            if not juicy_stream:
                logger.error("failed to find juicy stream")
                continue
            logger.debug(f"Found juicy stream: {juicy_stream.group(1)}")
            juicy_stream = juicy_stream.group(1)
            stream_host = urlparse(juicy_stream).hostname
            quality = res_dict["resolution"]
            logger.debug(f"Found quality: {quality}")
            translation_type = data_audio
            stream_link = juicy_stream
            stream_links.append((quality, juicy_stream))

        if translation_type and quality and stream_link:
            yield map_to_server(episode, translation_type, quality, stream_link)
        if translation_type and stream_links:
            headers = {
                "User-Agent": self.client.headers["User-Agent"],
                "Host": stream_host or CDN_PROVIDER,
                **STREAM_HEADERS,
            }
            yield map_to_server(
                episode, translation_type, stream_links, headers=headers
            )

    @lru_cache()
    def _get_episode_info(

@@ -11,4 +11,7 @@ REPLACEMENT_WORDS = {"Season ": "", "Cour": "Part"}
# Server Specific
AVAILABLE_VIDEO_QUALITY = ["1080", "720", "480"]
VIDEO_INFO_REGEX = re.compile(r"window.video\s*=\s*(\{[^\}]*\})")
VIDEO_INFO_CLEAN_REGEX = re.compile(r'(?<!["\'])(\b\w+\b)(?=\s*:)')
DOWNLOAD_FILENAME_REGEX = re.compile(r"[?&]filename=([^&]+)")
QUALITY_REGEX = re.compile(r"/(\d{3,4}p)")
DOWNLOAD_URL_REGEX = re.compile(r"window.downloadUrl\s*=\s*'([^']*)'")

viu_media/libs/provider/anime/animeunity/extractor.py (new file, 50 lines)
@@ -0,0 +1,50 @@
import logging

from .constants import (
    DOWNLOAD_FILENAME_REGEX,
    DOWNLOAD_URL_REGEX,
    QUALITY_REGEX,
    VIDEO_INFO_CLEAN_REGEX,
    VIDEO_INFO_REGEX,
)

logger = logging.getLogger(__name__)


def extract_server_info(html_content: str, episode_title: str | None) -> dict | None:
    """
    Extracts server information from the VixCloud/AnimeUnity embed page.
    Handles extraction from both window.video object and download URL.
    """
    video_info = VIDEO_INFO_REGEX.search(html_content)
    download_url_match = DOWNLOAD_URL_REGEX.search(html_content)

    if not (download_url_match and video_info):
        return None

    info_str = VIDEO_INFO_CLEAN_REGEX.sub(r'"\1"', video_info.group(1))

    # Use eval context for JS constants
    ctx = {"null": None, "true": True, "false": False}
    try:
        info = eval(info_str, ctx)
    except Exception as e:
        logger.error(f"Failed to parse JS object: {e}")
        return None

    download_url = download_url_match.group(1)
    info["link"] = download_url

    # Extract metadata from download URL if missing in window.video
    if filename_match := DOWNLOAD_FILENAME_REGEX.search(download_url):
        info["name"] = filename_match.group(1)
    else:
        info["name"] = f"{episode_title or 'Unknown'}"

    if quality_match := QUALITY_REGEX.search(download_url):
        # "720p" -> 720
        info["quality"] = int(quality_match.group(1)[:-1])
    else:
        info["quality"] = 0  # Fallback

    return info
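
A hedged usage sketch of extract_server_info (the embed markup below is invented purely to satisfy the regexes defined in constants.py):

html = (
    "window.video = {id: 123, name: null, quality: null};\n"
    "window.downloadUrl = 'https://cdn.example/720p/file.mp4?filename=Ep_01.mp4';"
)
info = extract_server_info(html, "Episode 1")
# The quality is recovered from the "/720p/" path segment, the name from the
# filename query parameter, and the link from window.downloadUrl.
print(info["quality"], info["name"])  # 720 Ep_01.mp4
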
@@ -99,7 +99,11 @@ def map_to_server(
            translation_type=MediaTranslationType(translation_type),
            mp4=True,
        )
        for quality in AVAILABLE_VIDEO_QUALITY
        for quality in sorted(
            list(set(AVAILABLE_VIDEO_QUALITY + [str(info["quality"])])),
            key=lambda x: int(x),
            reverse=True,
        )
        if int(quality) <= info["quality"]
    ],
    episode_title=episode.title,
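
A worked example of the quality list this builds (AVAILABLE_VIDEO_QUALITY as defined earlier; a detected source quality of 720 is assumed):

AVAILABLE_VIDEO_QUALITY = ["1080", "720", "480"]
info = {"quality": 720}

qualities = [
    q
    for q in sorted(
        set(AVAILABLE_VIDEO_QUALITY + [str(info["quality"])]),
        key=int,
        reverse=True,
    )
    if int(q) <= info["quality"]
]
print(qualities)  # ['720', '480'] - never advertises more than the source provides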

@@ -8,12 +8,11 @@ from ..types import Anime, AnimeEpisodeInfo, SearchResult, SearchResults
from ..utils.debug import debug_provider
from .constants import (
    ANIMEUNITY_BASE,
    DOWNLOAD_URL_REGEX,
    MAX_TIMEOUT,
    REPLACEMENT_WORDS,
    TOKEN_REGEX,
    VIDEO_INFO_REGEX,
)
from .extractor import extract_server_info
from .mappers import (
    map_to_anime_result,
    map_to_search_result,
@@ -158,14 +157,10 @@ class AnimeUnity(BaseAnimeProvider):
        video_response = self.client.get(url=response.text.strip(), timeout=MAX_TIMEOUT)
        video_response.raise_for_status()

        video_info = VIDEO_INFO_REGEX.search(video_response.text)
        download_url_match = DOWNLOAD_URL_REGEX.search(video_response.text)
        if not (download_url_match and video_info):
        if not (info := extract_server_info(video_response.text, episode.title)):
            logger.error(f"Failed to extract video info for episode {episode.id}")
            return None

        info = eval(video_info.group(1).replace("null", "None"))
        info["link"] = download_url_match.group(1)
        yield map_to_server(episode, info, params.translation_type)


@@ -69,6 +69,9 @@ def test_anime_provider(AnimeProvider: Type[BaseAnimeProvider]):
    for i, stream in enumerate(episode_streams):
        print(f"{i + 1}: {stream.name}")
    stream = episode_streams[int(input("Select your preferred server: ")) - 1]
    for i, link in enumerate(stream.links):
        print(f"{i + 1}: {link.quality}")
    link = stream.links[int(input("Select your preferred quality: ")) - 1]
    if executable := shutil.which("mpv"):
        cmd = executable
    elif executable := shutil.which("xdg-open"):
@@ -84,4 +87,4 @@ def test_anime_provider(AnimeProvider: Type[BaseAnimeProvider]):
        "Episode: ",
        stream.episode_title if stream.episode_title else episode_number,
    )
    subprocess.run([cmd, stream.links[0].link])
    subprocess.run([cmd, link.link])

@@ -88,6 +88,8 @@ class BaseSelector(ABC):
        *,
        preview: Optional[str] = None,
        header: Optional[str] = None,
        initial_query: Optional[str] = None,
        initial_results: Optional[List[str]] = None,
    ) -> str | None:
        """
        Provides dynamic search functionality that reloads results based on user input.
@@ -97,6 +99,8 @@ class BaseSelector(ABC):
            search_command: The command to execute for searching/reloading results.
            preview: An optional command or string for a preview window.
            header: An optional header to display above the choices.
            initial_query: An optional initial query to pre-populate the search.
            initial_results: Optional list of results to display initially (avoids network request).

        Returns:
            The string of the chosen item.

@@ -5,6 +5,8 @@ import subprocess

from rich.prompt import Prompt

from viu_media.core.utils import detect

from ....core.config import FzfConfig
from ....core.exceptions import ViuError
from ..base import BaseSelector
@@ -49,6 +51,7 @@ class FzfSelector(BaseSelector):
            stdout=subprocess.PIPE,
            text=True,
            encoding="utf-8",
            env=detect.get_clean_env(),
        )
        if result.returncode != 0:
            return None
@@ -76,6 +79,7 @@ class FzfSelector(BaseSelector):
            stdout=subprocess.PIPE,
            text=True,
            encoding="utf-8",
            env=detect.get_clean_env(),
        )
        if result.returncode != 0:
            return []
@@ -117,29 +121,55 @@ class FzfSelector(BaseSelector):
        lines = result.stdout.strip().splitlines()
        return lines[-1] if lines else (default or "")

    def search(self, prompt, search_command, *, preview=None, header=None):
    def search(
        self,
        prompt,
        search_command,
        *,
        preview=None,
        header=None,
        initial_query=None,
        initial_results=None,
    ):
        """Enhanced search using fzf's --reload flag for dynamic search."""
        # Build the header with optional custom header line
        display_header = self.header
        if header:
            display_header = f"{self.header}\n{header}"

        commands = [
            self.executable,
            "--prompt",
            f"{prompt.title()}: ",
            "--header",
            self.header,
            display_header,
            "--header-first",
            "--disabled",  # Disable local filtering - rely on external search command
            "--bind",
            f"change:reload({search_command})",
            "--ansi",
        ]

        # If there's an initial query, set it
        if initial_query:
            commands.extend(["--query", initial_query])
        # Only trigger reload on start if we don't have cached results
        if not initial_results:
            commands.extend(["--bind", f"start:reload({search_command})"])

        if preview:
            commands.extend(["--preview", preview])

        # Use cached results as initial input if provided (avoids network request)
        fzf_input = "\n".join(initial_results) if initial_results else ""

        result = subprocess.run(
            commands,
            input="",
            input=fzf_input,
            stdout=subprocess.PIPE,
            text=True,
            encoding="utf-8",
            env=detect.get_clean_env(),
        )
        if result.returncode != 0:
            return None
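
The same fzf wiring can be exercised standalone; a minimal sketch (the search command is a hypothetical placeholder - fzf substitutes the current query for {q}, and running this requires an interactive terminal):

import subprocess

search_command = "my-search-cli {q}"  # hypothetical external search command
commands = [
    "fzf",
    "--disabled",  # disable local filtering; the external command does the searching
    "--ansi",
    "--bind", f"change:reload({search_command})",  # re-run the search on every keystroke
    "--bind", f"start:reload({search_command})",   # populate the list immediately
]
result = subprocess.run(commands, input="", stdout=subprocess.PIPE, text=True)
print(result.stdout.strip())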

@@ -43,6 +43,7 @@ class RofiSelector(BaseSelector):
            input=rofi_input,
            stdout=subprocess.PIPE,
            text=True,
            env=detect.get_clean_env(),
        )

        if result.returncode == 0:
@@ -106,6 +107,7 @@ class RofiSelector(BaseSelector):
            input=rofi_input,
            stdout=subprocess.PIPE,
            text=True,
            env=detect.get_clean_env(),
        )

        if result.returncode == 0: