Compare commits

...

28 Commits

Author SHA1 Message Date
Benexl
1ce2d2740d feat: implement get_clean_env function to manage environment variables for subprocesses 2025-12-31 21:43:43 +03:00
Benexl
ce6294a17b fix: exclude OpenSSL libraries on Linux to avoid version conflicts 2025-12-31 21:14:08 +03:00
Benexl
b550956a3e fix: update Ubuntu version in release binaries workflow to 22.04 2025-12-31 21:03:29 +03:00
Benexl
e382e4c046 chore: bump version to 3.3.7 in pyproject.toml and uv.lock 2025-12-31 20:51:00 +03:00
Benedict Xavier
efa1340e41 Merge pull request #177 from viu-media/dynamic-search-filters
Implement dynamic search enhancements (eg filters) and media info differentiation
2025-12-31 18:57:04 +03:00
Benedict Xavier
ac7e90acdf Update viu_media/assets/scripts/fzf/dynamic_preview.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-31 18:54:02 +03:00
Benedict Xavier
8c5b066019 Update viu_media/assets/scripts/fzf/dynamic_preview.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-31 18:52:57 +03:00
Benedict Xavier
a826f391c1 Update viu_media/core/utils/formatter.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-12-31 18:51:17 +03:00
benexl
6a31f4191f fix: remove f-string for filter adjustment message in search results 2025-12-31 18:47:40 +03:00
benexl
b8f77d80e9 feat: implement restore mode for dynamic search with last query and cached results 2025-12-31 18:43:59 +03:00
benexl
6192252d10 feat: enhance shell_safe function to support Python string literals and escape triple quotes 2025-12-31 18:31:40 +03:00
benexl
efed80f4dc feat: update score formatting in format_score_stars function to match media_info.py style 2025-12-31 18:25:05 +03:00
benexl
e49baed46f feat: differentiate between studios and producers in media info and dynamic preview 2025-12-31 18:11:10 +03:00
benexl
6e26ac500d feat: enhance consistency with normal media-info menu 2025-12-31 18:04:58 +03:00
benexl
5db33d2fa0 feat: implement dynamic search filter parser and enhance search script with filter syntax 2025-12-31 17:59:04 +03:00
benexl
0524af6e26 fix(ipc): add checks for Unix domain socket availability in MPVIPCClient and MpvIPCPlayer 2025-12-31 15:47:43 +03:00
benexl
a2fc9e442d fix: add libglib2.0-dev installation for Linux system dependencies in GitHub Actions workflow 2025-12-31 15:22:37 +03:00
benexl
f9ca8bbd79 fix: add installation of system dependencies for Linux in GitHub Actions workflow 2025-12-31 15:18:24 +03:00
benexl
dd9d9695e7 fix: remove unused imports for cleaner code 2025-12-31 15:14:14 +03:00
benexl
c9d948ae4b feat: add GitHub Actions workflow for building release binaries across platforms 2025-12-31 15:09:06 +03:00
benexl
b9766af11a fix(pyinstaller): update platform-specific settings and optimize EXE configuration 2025-12-31 15:02:29 +03:00
benexl
9d72a50916 fix: replace sys.executable with get_python_executable for better compatibility 2025-12-31 14:51:50 +03:00
benexl
acb14d025c fix: enhance menu loading to support PyInstaller compatibility with explicit module listing 2025-12-31 14:42:44 +03:00
benexl
ba9b170ba8 fix: update menu loading mechanism to support pkgutil for dynamic imports 2025-12-31 14:31:35 +03:00
benexl
ecc4de6ae6 ci: update paths 2025-12-31 14:21:50 +03:00
benexl
e065c8e8fc fix(normalizer): add anime title mapping for "Burichi -" 2025-12-31 13:10:47 +03:00
benexl
32df0503d0 fix(dependencies): update optional dependencies for platform-specific functionality 2025-12-31 13:05:56 +03:00
Benedict Xavier
11449378e9 docs: Revise Termux installation instructions in README
Updated installation instructions for Termux, including required packages and optional dependencies.
2025-12-30 14:51:36 +03:00
27 changed files with 1040 additions and 153 deletions

152
.github/workflows/release-binaries.yml vendored Normal file
View File

@@ -0,0 +1,152 @@
# Builds standalone PyInstaller binaries for Linux/Windows/macOS (x86_64) plus
# macOS ARM when a release is published (or on demand via workflow_dispatch),
# then attaches the binaries and a SHA256SUMS.txt manifest to the release.
name: Build Release Binaries

on:
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      tag:
        description: "Tag/version to build (leave empty for latest)"
        required: false
        type: string

permissions:
  contents: write

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-22.04
            target: linux
            asset_name: viu-linux-x86_64
            executable: viu
          - os: windows-latest
            target: windows
            asset_name: viu-windows-x86_64.exe
            executable: viu.exe
          - os: macos-latest
            target: macos
            asset_name: viu-macos-x86_64
            executable: viu
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.tag || github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true

      - name: Install system dependencies (Linux)
        if: runner.os == 'Linux'
        run: |
          sudo apt-get update
          sudo apt-get install -y libdbus-1-dev libglib2.0-dev

      - name: Install dependencies
        run: uv sync --all-extras --all-groups

      - name: Build executable with PyInstaller
        run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean

      # Give the artifact a platform-specific name so all jobs can upload
      # side by side without clobbering each other.
      - name: Rename executable
        shell: bash
        run: mv dist/${{ matrix.executable }} dist/${{ matrix.asset_name }}

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.asset_name }}
          path: dist/${{ matrix.asset_name }}
          if-no-files-found: error

      - name: Upload to Release
        if: github.event_name == 'release'
        uses: softprops/action-gh-release@v2
        with:
          files: dist/${{ matrix.asset_name }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Build for macOS ARM (Apple Silicon) — macos-14 runners are arm64.
  build-macos-arm:
    runs-on: macos-14
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.tag || github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true

      - name: Install dependencies
        run: uv sync --all-extras --all-groups

      - name: Build executable with PyInstaller
        run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean

      - name: Rename executable
        run: mv dist/viu dist/viu-macos-arm64

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: viu-macos-arm64
          path: dist/viu-macos-arm64
          if-no-files-found: error

      - name: Upload to Release
        if: github.event_name == 'release'
        uses: softprops/action-gh-release@v2
        with:
          files: dist/viu-macos-arm64
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Create checksums after all builds complete
  checksums:
    needs: [build, build-macos-arm]
    runs-on: ubuntu-latest
    if: github.event_name == 'release'
    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts
          merge-multiple: true

      - name: Generate checksums
        run: |
          cd artifacts
          sha256sum * > SHA256SUMS.txt
          cat SHA256SUMS.txt

      - name: Upload checksums to Release
        uses: softprops/action-gh-release@v2
        with:
          files: artifacts/SHA256SUMS.txt
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -114,38 +114,78 @@ uv tool install "viu-media[notifications]" # For desktop notifications
``` ```
#### Termux #### Termux
You may have to have rust installed see this issue: https://github.com/pydantic/pydantic-core/issues/1012#issuecomment-2511269688. You may have to have rust installed see this issue: https://github.com/pydantic/pydantic-core/issues/1012#issuecomment-2511269688.
```bash ```bash
pkg install python # though uv will probably install python for you, but doesn't hurt to have it :)
pkg install rust # maybe required cause of pydantic
# Recommended (with pip due to more control) # Recommended (with pip due to more control)
pkg install python
pkg install rust # required cause of pydantic
# NOTE: order matters
# get pydantic from the termux user repository
pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/
# the above will take a while; add -v (as below) if you want more output and to feel like something is happening lol
pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/ -v
# now you can install viu
pip install viu-media pip install viu-media
# you may need to install pydantic manually # === optional deps ===
python -m pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/ # may also be necessary incase the above fails # if you have reach here awesome lol :)
# add yt-dlp by # yt-dlp for downloading m3u8 and hls streams
pip install yt-dlp[default,curl-cffi] pip install yt-dlp[default,curl-cffi]
# prefer without standard and manually install the things you need lxml, yt-dlp and # you may also need ffmpeg for processing the videos
pip install viu-media[standard] pkg install ffmpeg
# you may need to manually install lxml and plyer manually eg # tip if you also want yt functionality
python -m pip install lxml --extra-index-url https://termux-user-repository.github.io/pypi/ # may also be necessary incase the above fails pip install yt-dlp-ejs
# Alternative With Uv may work, no promises # you require js runtime
pkg install uv # eg the recommended one
pkg install deno
uv tool install viu-media # for faster fuzzy search
pip install thefuzz
# and to add yt-dlp only you can do # if you want faster scraping, though barely noticeable lol
uv tool install viu-media --with yt-dlp[default,curl-cffi] pip install lxml --extra-index-url https://termux-user-repository.github.io/pypi/
# or though may fail, cause of lxml and plyer, in that case try to install manually # if compilation fails you need to have
uv tool install viu-media[standard] pkg install libxml2 libxslt
# == ui setup ==
pkg install fzf
# then enable fzf in the config
viu --selector fzf config --update
# if you want previews as well specify preview option
# though images arent that pretty lol, so you can stick to text over full
viu --preview text config --update
# if you set preview to full you need a terminal image renderer
pkg install chafa
# == player setup ==
# for this you need to strictly install from playstore
# search for mpv or vlc (recommended, since has nicer ui)
# the only limitation is currently its not possible to pass headers to the android players
# through android intents
# so use servers like sharepoint and wixmp
# though this is not an issue when it comes to downloading ;)
# if you have installed using 'pkg' uninstall it
# okay, now you are all set — I promise the hassle is worth it lol :)
# posted a video of it working to motivate you
# note: I recorded it from Waydroid, which is Android for Linux — sort of like an emulator (BlueStacks, for example)
```
https://github.com/user-attachments/assets/0c628421-a439-4dea-91bb-7153e8f20ccf
```
#### Using pipx (for isolated environments) #### Using pipx (for isolated environments)
```bash ```bash

View File

@@ -1,28 +1,56 @@
# -*- mode: python ; coding: utf-8 -*- # -*- mode: python ; coding: utf-8 -*-
import sys
from PyInstaller.utils.hooks import collect_data_files, collect_submodules from PyInstaller.utils.hooks import collect_data_files, collect_submodules
block_cipher = None block_cipher = None
# Platform-specific settings
is_windows = sys.platform == 'win32'
is_macos = sys.platform == 'darwin'
# Collect all required data files # Collect all required data files
datas = [ datas = [
('viu/assets/*', 'viu/assets'), ('../viu_media/assets', 'viu_media/assets'),
] ]
# Collect all required hidden imports # Collect all required hidden imports
# Include viu_media and all its submodules to ensure menu modules are bundled
hiddenimports = [ hiddenimports = [
'click', 'click',
'rich', 'rich',
'requests',
'yt_dlp', 'yt_dlp',
'python_mpv', 'viu_media',
'fuzzywuzzy', 'viu_media.cli.interactive.menu',
'viu', 'viu_media.cli.interactive.menu.media',
] + collect_submodules('viu') # Explicit menu modules (PyInstaller doesn't always pick these up)
'viu_media.cli.interactive.menu.media.downloads',
'viu_media.cli.interactive.menu.media.download_episodes',
'viu_media.cli.interactive.menu.media.dynamic_search',
'viu_media.cli.interactive.menu.media.episodes',
'viu_media.cli.interactive.menu.media.main',
'viu_media.cli.interactive.menu.media.media_actions',
'viu_media.cli.interactive.menu.media.media_airing_schedule',
'viu_media.cli.interactive.menu.media.media_characters',
'viu_media.cli.interactive.menu.media.media_review',
'viu_media.cli.interactive.menu.media.player_controls',
'viu_media.cli.interactive.menu.media.play_downloads',
'viu_media.cli.interactive.menu.media.provider_search',
'viu_media.cli.interactive.menu.media.results',
'viu_media.cli.interactive.menu.media.servers',
] + collect_submodules('viu_media')
# Exclude OpenSSL libraries on Linux to avoid version conflicts
import sys
binaries = []
if sys.platform == 'linux':
# Remove any bundled libssl or libcrypto
binaries = [b for b in binaries if not any(lib in b[0] for lib in ['libssl', 'libcrypto'])]
a = Analysis( a = Analysis(
['./viu/viu.py'], # Changed entry point ['../viu_media/viu.py'],
pathex=[], pathex=[],
binaries=[], binaries=binaries,
datas=datas, datas=datas,
hiddenimports=hiddenimports, hiddenimports=hiddenimports,
hookspath=[], hookspath=[],
@@ -32,16 +60,18 @@ a = Analysis(
win_no_prefer_redirects=False, win_no_prefer_redirects=False,
win_private_assemblies=False, win_private_assemblies=False,
cipher=block_cipher, cipher=block_cipher,
strip=True, # Strip debug information noarchive=False,
optimize=2 # Optimize bytecode noarchive=False
) )
pyz = PYZ( pyz = PYZ(
a.pure, a.pure,
a.zipped_data, a.zipped_data,
optimize=2 # Optimize bytecode cipher=block_cipher cipher=block_cipher,
) )
# Icon path - only use .ico on Windows
icon_path = '../viu_media/assets/icons/logo.ico' if is_windows else None
exe = EXE( exe = EXE(
pyz, pyz,
a.scripts, a.scripts,
@@ -52,7 +82,7 @@ exe = EXE(
name='viu', name='viu',
debug=False, debug=False,
bootloader_ignore_signals=False, bootloader_ignore_signals=False,
strip=True, strip=not is_windows, # strip doesn't work well on Windows without proper tools
upx=True, upx=True,
upx_exclude=[], upx_exclude=[],
runtime_tmpdir=None, runtime_tmpdir=None,
@@ -61,5 +91,5 @@ exe = EXE(
target_arch=None, target_arch=None,
codesign_identity=None, codesign_identity=None,
entitlements_file=None, entitlements_file=None,
icon='viu/assets/logo.ico' icon=icon_path,
) )

View File

@@ -1,16 +1,16 @@
[project] [project]
name = "viu-media" name = "viu-media"
version = "3.3.6" version = "3.3.7"
description = "A browser anime site experience from the terminal" description = "A browser anime site experience from the terminal"
license = "UNLICENSE" license = "UNLICENSE"
readme = "README.md" readme = "README.md"
requires-python = ">=3.11" requires-python = ">=3.11"
dependencies = [ dependencies = [
"click>=8.1.7", "click>=8.1.7",
"httpx>=0.28.1", "httpx>=0.28.1",
"inquirerpy>=0.3.4", "inquirerpy>=0.3.4",
"pydantic>=2.11.7", "pydantic>=2.11.7",
"rich>=13.9.2", "rich>=13.9.2",
] ]
[project.scripts] [project.scripts]
@@ -18,32 +18,27 @@ viu = 'viu_media:Cli'
[project.optional-dependencies] [project.optional-dependencies]
standard = [ standard = [
"thefuzz>=0.22.1", "thefuzz>=0.22.1",
"yt-dlp>=2025.7.21", "yt-dlp>=2025.7.21",
"pycryptodomex>=3.23.0", "pycryptodomex>=3.23.0",
"pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality "pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality
"pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality "pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality
"dbus-python; sys_platform == 'linux'", # For Linux-specific functionality (e.g., notifications), "dbus-python; sys_platform == 'linux'", # For Linux-specific functionality (e.g., notifications),
"plyer>=2.1.0", "plyer>=2.1.0",
"lxml>=6.0.0" "lxml>=6.0.0",
] ]
notifications = [ notifications = [
"dbus-python>=1.4.0", "pypiwin32; sys_platform == 'win32'", # For Windows-specific functionality
"pyobjc; sys_platform == 'darwin'", # For macOS-specific functionality
"dbus-python>=1.4.0; sys_platform == 'linux'",
"plyer>=2.1.0", "plyer>=2.1.0",
] ]
mpv = [ mpv = ["mpv>=1.0.7"]
"mpv>=1.0.7",
]
torrent = ["libtorrent>=2.0.11"] torrent = ["libtorrent>=2.0.11"]
lxml = ["lxml>=6.0.0"] lxml = ["lxml>=6.0.0"]
discord = ["pypresence>=4.3.0"] discord = ["pypresence>=4.3.0"]
download = [ download = ["pycryptodomex>=3.23.0", "yt-dlp>=2025.7.21"]
"pycryptodomex>=3.23.0", torrents = ["libtorrent>=2.0.11"]
"yt-dlp>=2025.7.21",
]
torrents = [
"libtorrent>=2.0.11",
]
[build-system] [build-system]
requires = ["hatchling"] requires = ["hatchling"]
@@ -51,12 +46,12 @@ build-backend = "hatchling.build"
[dependency-groups] [dependency-groups]
dev = [ dev = [
"pre-commit>=4.0.1", "pre-commit>=4.0.1",
"pyinstaller>=6.11.1", "pyinstaller>=6.11.1",
"pyright>=1.1.384", "pyright>=1.1.384",
"pytest>=8.3.3", "pytest>=8.3.3",
"pytest-httpx>=0.35.0", "pytest-httpx>=0.35.0",
"ruff>=0.6.9", "ruff>=0.6.9",
] ]
[tool.pytest.ini_options] [tool.pytest.ini_options]

10
uv.lock generated
View File

@@ -3743,7 +3743,7 @@ wheels = [
[[package]] [[package]]
name = "viu-media" name = "viu-media"
version = "3.3.6" version = "3.3.7"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "click" }, { name = "click" },
@@ -3768,8 +3768,10 @@ mpv = [
{ name = "mpv" }, { name = "mpv" },
] ]
notifications = [ notifications = [
{ name = "dbus-python" }, { name = "dbus-python", marker = "sys_platform == 'linux'" },
{ name = "plyer" }, { name = "plyer" },
{ name = "pyobjc", marker = "sys_platform == 'darwin'" },
{ name = "pypiwin32", marker = "sys_platform == 'win32'" },
] ]
standard = [ standard = [
{ name = "dbus-python", marker = "sys_platform == 'linux'" }, { name = "dbus-python", marker = "sys_platform == 'linux'" },
@@ -3801,8 +3803,8 @@ dev = [
[package.metadata] [package.metadata]
requires-dist = [ requires-dist = [
{ name = "click", specifier = ">=8.1.7" }, { name = "click", specifier = ">=8.1.7" },
{ name = "dbus-python", marker = "sys_platform == 'linux' and extra == 'notifications'", specifier = ">=1.4.0" },
{ name = "dbus-python", marker = "sys_platform == 'linux' and extra == 'standard'" }, { name = "dbus-python", marker = "sys_platform == 'linux' and extra == 'standard'" },
{ name = "dbus-python", marker = "extra == 'notifications'", specifier = ">=1.4.0" },
{ name = "httpx", specifier = ">=0.28.1" }, { name = "httpx", specifier = ">=0.28.1" },
{ name = "inquirerpy", specifier = ">=0.3.4" }, { name = "inquirerpy", specifier = ">=0.3.4" },
{ name = "libtorrent", marker = "extra == 'torrent'", specifier = ">=2.0.11" }, { name = "libtorrent", marker = "extra == 'torrent'", specifier = ">=2.0.11" },
@@ -3815,7 +3817,9 @@ requires-dist = [
{ name = "pycryptodomex", marker = "extra == 'download'", specifier = ">=3.23.0" }, { name = "pycryptodomex", marker = "extra == 'download'", specifier = ">=3.23.0" },
{ name = "pycryptodomex", marker = "extra == 'standard'", specifier = ">=3.23.0" }, { name = "pycryptodomex", marker = "extra == 'standard'", specifier = ">=3.23.0" },
{ name = "pydantic", specifier = ">=2.11.7" }, { name = "pydantic", specifier = ">=2.11.7" },
{ name = "pyobjc", marker = "sys_platform == 'darwin' and extra == 'notifications'" },
{ name = "pyobjc", marker = "sys_platform == 'darwin' and extra == 'standard'" }, { name = "pyobjc", marker = "sys_platform == 'darwin' and extra == 'standard'" },
{ name = "pypiwin32", marker = "sys_platform == 'win32' and extra == 'notifications'" },
{ name = "pypiwin32", marker = "sys_platform == 'win32' and extra == 'standard'" }, { name = "pypiwin32", marker = "sys_platform == 'win32' and extra == 'standard'" },
{ name = "pypresence", marker = "extra == 'discord'", specifier = ">=4.3.0" }, { name = "pypresence", marker = "extra == 'discord'", specifier = ">=4.3.0" },
{ name = "rich", specifier = ">=13.9.2" }, { name = "rich", specifier = ">=13.9.2" },

View File

@@ -5,7 +5,8 @@
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka", "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
"Hazurewaku no \"Joutai Ijou Skill\" de Saikyou ni Natta Ore ga Subete wo Juurin suru made": "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made", "Hazurewaku no \"Joutai Ijou Skill\" de Saikyou ni Natta Ore ga Subete wo Juurin suru made": "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
"Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season", "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
"Hanka×Hanka (2011)": "Hunter × Hunter (2011)" "Hanka×Hanka (2011)": "Hunter × Hunter (2011)",
"Burichi -": "bleach"
}, },
"hianime": { "hianime": {
"My Star": "Oshi no Ko" "My Star": "Oshi no Ko"

View File

@@ -0,0 +1,323 @@
#!/usr/bin/env python3
"""
Filter Parser for Dynamic Search
This module provides a parser for the special filter syntax used in dynamic search.
Filter syntax allows users to add filters inline with their search query.
SYNTAX:
@filter:value - Apply a filter with the given value
@filter:value1,value2 - Apply multiple values (for array filters)
@filter:!value - Exclude/negate a filter value
SUPPORTED FILTERS:
@genre:action,comedy - Filter by genres
@genre:!hentai - Exclude genre
@status:airing - Filter by status (airing, finished, upcoming, cancelled, hiatus)
@year:2024 - Filter by season year
@season:winter - Filter by season (winter, spring, summer, fall)
@format:tv,movie - Filter by format (tv, movie, ova, ona, special, music)
@sort:score - Sort by (score, popularity, trending, title, date)
@score:>80 - Minimum score
@score:<50 - Maximum score
@popularity:>10000 - Minimum popularity
@onlist - Only show anime on user's list
@onlist:false - Only show anime NOT on user's list
EXAMPLES:
"naruto @genre:action @status:finished"
"isekai @year:2024 @season:winter @sort:score"
"@genre:action,adventure @status:airing"
"romance @genre:!hentai @format:tv,movie"
"""
import re
from typing import Any, Dict, List, Optional, Tuple
# Mapping of user-friendly filter names to GraphQL variable names.
# NOTE(review): status/format/season/sort aliases share this one flat
# namespace, so a key valid for one type (e.g. "music") also resolves for the
# others — normalize_value() relies on callers passing values of the right
# type; confirm no cross-type collision matters in practice.
FILTER_ALIASES = {
    # Status aliases
    "airing": "RELEASING",
    "releasing": "RELEASING",
    "finished": "FINISHED",
    "completed": "FINISHED",
    "upcoming": "NOT_YET_RELEASED",
    "not_yet_released": "NOT_YET_RELEASED",
    "unreleased": "NOT_YET_RELEASED",
    "cancelled": "CANCELLED",
    "canceled": "CANCELLED",
    "hiatus": "HIATUS",
    "paused": "HIATUS",
    # Format aliases
    "tv": "TV",
    "tv_short": "TV_SHORT",
    "tvshort": "TV_SHORT",
    "movie": "MOVIE",
    "film": "MOVIE",
    "ova": "OVA",
    "ona": "ONA",
    "special": "SPECIAL",
    "music": "MUSIC",
    # Season aliases
    "winter": "WINTER",
    "spring": "SPRING",
    "summer": "SUMMER",
    "fall": "FALL",
    "autumn": "FALL",
    # Sort aliases (bare names default to descending order)
    "score": "SCORE_DESC",
    "score_desc": "SCORE_DESC",
    "score_asc": "SCORE",
    "popularity": "POPULARITY_DESC",
    "popularity_desc": "POPULARITY_DESC",
    "popularity_asc": "POPULARITY",
    "trending": "TRENDING_DESC",
    "trending_desc": "TRENDING_DESC",
    "trending_asc": "TRENDING",
    "title": "TITLE_ROMAJI",
    "title_desc": "TITLE_ROMAJI_DESC",
    "date": "START_DATE_DESC",
    "date_desc": "START_DATE_DESC",
    "date_asc": "START_DATE",
    "newest": "START_DATE_DESC",
    "oldest": "START_DATE",
    "favourites": "FAVOURITES_DESC",
    "favorites": "FAVOURITES_DESC",
    "episodes": "EPISODES_DESC",
}

# Genre name normalization (lowercase -> proper case).
# Unknown genres fall back to str.title() in normalize_value().
GENRE_NAMES = {
    "action": "Action",
    "adventure": "Adventure",
    "comedy": "Comedy",
    "drama": "Drama",
    "ecchi": "Ecchi",
    "fantasy": "Fantasy",
    "horror": "Horror",
    "mahou_shoujo": "Mahou Shoujo",
    "mahou": "Mahou Shoujo",
    "magical_girl": "Mahou Shoujo",
    "mecha": "Mecha",
    "music": "Music",
    "mystery": "Mystery",
    "psychological": "Psychological",
    "romance": "Romance",
    "sci-fi": "Sci-Fi",
    "scifi": "Sci-Fi",
    "sci_fi": "Sci-Fi",
    "slice_of_life": "Slice of Life",
    "sol": "Slice of Life",
    "sports": "Sports",
    "supernatural": "Supernatural",
    "thriller": "Thriller",
    "hentai": "Hentai",
}

# Filter pattern: @key:value or @key (boolean flags).
# NOTE(review): this also matches "@word" inside text such as e-mail
# addresses, which parse_filters() then strips from the clean query — confirm
# that is acceptable for user-typed queries.
FILTER_PATTERN = re.compile(r"@(\w+)(?::([^\s]+))?", re.IGNORECASE)

# Comparison operators for numeric filters, e.g. ">80", "<=50", "80".
COMPARISON_PATTERN = re.compile(r"^([<>]=?)?(\d+)$")
def normalize_value(value: str, value_type: str) -> str:
    """Normalize a filter value based on its type.

    Genres map through GENRE_NAMES (falling back to Title Case); status,
    format, season and sort map through FILTER_ALIASES (falling back to
    UPPER CASE). Any other type is returned untouched.
    """
    lookup_key = value.strip().lower()
    if value_type == "genre":
        return GENRE_NAMES.get(lookup_key, value.title())
    if value_type in {"status", "format", "season", "sort"}:
        return FILTER_ALIASES.get(lookup_key, value.upper())
    return value
def parse_value_list(value_str: str) -> Tuple[List[str], List[str]]:
    """
    Parse a comma-separated value string, separating includes from excludes.

    A leading "!" marks a value as excluded; blank segments are dropped and
    surrounding whitespace is trimmed.

    Returns:
        Tuple of (include_values, exclude_values)
    """
    tokens = [segment.strip() for segment in value_str.split(",")]
    tokens = [t for t in tokens if t]
    includes = [t for t in tokens if not t.startswith("!")]
    excludes = [t[1:] for t in tokens if t.startswith("!")]
    return includes, excludes
def parse_comparison(value: str) -> Tuple[Optional[str], Optional[int]]:
    """
    Parse a comparison value like ">80" or "<50".

    Returns:
        Tuple of (operator, number) or (None, None) if invalid
    """
    parsed = COMPARISON_PATTERN.match(value)
    if parsed is None:
        return None, None
    # A bare number means "at least this much", so default to ">".
    return parsed.group(1) or ">", int(parsed.group(2))
def parse_filters(query: str) -> Tuple[str, Dict[str, Any]]:
    """
    Parse a search query and extract filter directives.

    Args:
        query: The full search query including filter syntax

    Returns:
        Tuple of (clean_query, filters_dict)
        - clean_query: The query with filter syntax removed
        - filters_dict: Dictionary of GraphQL variables to apply
    """
    filters: Dict[str, Any] = {}

    def _extend(key: str, values: List[str]) -> None:
        # Accumulate into an existing bucket so repeated directives merge.
        filters.setdefault(key, []).extend(values)

    def _titled(values: List[str]) -> List[str]:
        # Tags use title case typically.
        return [v.replace("_", " ").title() for v in values]

    for hit in FILTER_PATTERN.finditer(query):
        name = hit.group(1).lower()
        raw = hit.group(2)  # None for bare boolean flags like @onlist

        if name in ("genre", "status") and raw:
            includes, excludes = parse_value_list(raw)
            if includes:
                _extend(f"{name}_in", [normalize_value(v, name) for v in includes])
            if excludes:
                _extend(f"{name}_not_in", [normalize_value(v, name) for v in excludes])
        elif name == "format" and raw:
            # Format supports includes only; "!" exclusions are ignored.
            includes, _ = parse_value_list(raw)
            if includes:
                _extend("format_in", [normalize_value(v, "format") for v in includes])
        elif name == "year" and raw:
            try:
                filters["seasonYear"] = int(raw)
            except ValueError:
                pass  # Invalid year, skip
        elif name == "season" and raw:
            filters["season"] = normalize_value(raw, "season")
        elif name == "sort" and raw:
            # The GraphQL sort variable is a list.
            filters["sort"] = [normalize_value(raw, "sort")]
        elif name in ("score", "popularity") and raw:
            op, num = parse_comparison(raw)
            if num is not None:
                target = "averageScore" if name == "score" else "popularity"
                if op in (">", ">="):
                    filters[f"{target}_greater"] = num
                elif op in ("<", "<="):
                    filters[f"{target}_lesser"] = num
        elif name == "onlist":
            # Bare @onlist means True; unrecognized values are ignored.
            if raw is None or raw.lower() in ("true", "yes", "1"):
                filters["on_list"] = True
            elif raw.lower() in ("false", "no", "0"):
                filters["on_list"] = False
        elif name == "tag" and raw:
            includes, excludes = parse_value_list(raw)
            if includes:
                _extend("tag_in", _titled(includes))
            if excludes:
                _extend("tag_not_in", _titled(excludes))

    # Strip the directives out of the query, then collapse leftover whitespace.
    clean_query = re.sub(r"\s+", " ", FILTER_PATTERN.sub("", query)).strip()
    return clean_query, filters
def get_help_text() -> str:
    """Return a help string describing the filter syntax.

    The text is a pre-drawn box (box-drawing characters) intended for a
    monospace terminal; .strip() trims the leading/trailing blank lines of
    the literal.
    """
    return """
╭─────────────────── Filter Syntax Help ───────────────────╮
│                                                          │
│  @genre:action,comedy     Filter by genres               │
│  @genre:!hentai           Exclude genre                  │
│  @status:airing           Status: airing, finished,      │
│                           upcoming, cancelled, hiatus    │
│  @year:2024               Filter by year                 │
│  @season:winter           winter, spring, summer, fall   │
│  @format:tv,movie         tv, movie, ova, ona, special   │
│  @sort:score              score, popularity, trending,   │
│                           date, title, newest, oldest    │
│  @score:>80               Minimum score                  │
│  @score:<50               Maximum score                  │
│  @popularity:>10000       Minimum popularity             │
│  @onlist                  Only on your list              │
│  @onlist:false            Not on your list               │
│  @tag:isekai,reincarnation  Filter by tags               │
│                                                          │
│  Examples:                                               │
│    naruto @genre:action @status:finished                 │
│    @genre:action,adventure @year:2024 @sort:score        │
│    isekai @season:winter @year:2024                      │
│                                                          │
╰──────────────────────────────────────────────────────────╯
""".strip()
if __name__ == "__main__":
    # Ad-hoc manual test: parse argv as a query, or show the help box when
    # invoked without arguments.
    import json
    import sys

    if len(sys.argv) > 1:
        test_query = " ".join(sys.argv[1:])
        clean, filters = parse_filters(test_query)
        print(f"Original: {test_query}")
        print(f"Clean query: {clean}")
        print(f"Filters: {json.dumps(filters, indent=2)}")
    else:
        print(get_help_text())

View File

@@ -45,6 +45,15 @@ def format_number(num):
return f"{num:,}" return f"{num:,}"
def format_score_stars(score):
    """Format a 0-100 score as a star rating out of 6 plus the raw score.

    Args:
        score: Average score in the 0-100 range, or None when unrated.

    Returns:
        A string like "★★★ (50/100)", or "N/A" when score is None.
    """
    if score is None:
        return "N/A"
    # Convert 0-100 score to 0-6 stars, capped at 6 for consistency
    stars = min(round(score * 6 / 100), 6)
    # Fix: the star glyph was an empty string ("" * stars), so no stars
    # were ever rendered.
    return "★" * stars + f" ({score}/100)"
def format_date(date_obj): def format_date(date_obj):
"""Format date object to string.""" """Format date object to string."""
if not date_obj or date_obj == "null": if not date_obj or date_obj == "null":
@@ -342,31 +351,68 @@ def main():
# Extract data # Extract data
status = media.get("status", "Unknown") status = media.get("status", "Unknown")
format_type = media.get("format", "Unknown") format_type = media.get("format", "Unknown")
episodes = media.get("episodes", "?") episodes = media.get("episodes", "??")
duration = media.get("duration") duration = media.get("duration")
duration_str = f"{duration} min" if duration else "Unknown" duration_str = f"{duration} min/ep" if duration else "Unknown"
score = media.get("averageScore") score = media.get("averageScore")
score_str = f"{score}/100" if score else "N/A" score_str = format_score_stars(score)
favourites = format_number(media.get("favourites", 0)) favourites = format_number(media.get("favourites", 0))
popularity = format_number(media.get("popularity", 0)) popularity = format_number(media.get("popularity", 0))
genres = ", ".join(media.get("genres", [])[:5]) or "Unknown" genres = ", ".join(media.get("genres", [])) or "Unknown"
start_date = format_date(media.get("startDate")) start_date = format_date(media.get("startDate"))
end_date = format_date(media.get("endDate")) end_date = format_date(media.get("endDate"))
studios_list = media.get("studios", {}).get("nodes", []) studios_list = media.get("studios", {}).get("nodes", [])
studios = ", ".join([s.get("name", "") for s in studios_list[:3]]) or "Unknown" # Studios are those with isAnimationStudio=true
studios = ", ".join([s["name"] for s in studios_list if s.get("name") and s.get("isAnimationStudio")]) or "N/A"
# Producers are those with isAnimationStudio=false
producers = ", ".join([s["name"] for s in studios_list if s.get("name") and not s.get("isAnimationStudio")]) or "N/A"
synonyms_list = media.get("synonyms", []) synonyms_list = media.get("synonyms", [])
synonyms = ", ".join(synonyms_list[:3]) or "N/A" # Include romaji in synonyms if different from title
romaji = title_obj.get("romaji")
if romaji and romaji != title and romaji not in synonyms_list:
synonyms_list = [romaji] + synonyms_list
synonyms = ", ".join(synonyms_list) or "N/A"
# Tags
tags_list = media.get("tags", [])
tags = ", ".join([t.get("name", "") for t in tags_list if t.get("name")]) or "N/A"
# Next airing episode
next_airing = media.get("nextAiringEpisode")
if next_airing:
next_ep = next_airing.get("episode", "?")
airing_at = next_airing.get("airingAt")
if airing_at:
from datetime import datetime
try:
dt = datetime.fromtimestamp(airing_at)
next_episode_str = f"Episode {next_ep} on {dt.strftime('%A, %d %B %Y at %H:%M')}"
except (ValueError, OSError):
next_episode_str = f"Episode {next_ep}"
else:
next_episode_str = f"Episode {next_ep}"
else:
next_episode_str = "N/A"
# User list status
media_list_entry = media.get("mediaListEntry")
if media_list_entry:
user_status = media_list_entry.get("status", "NOT_ON_LIST")
user_progress = f"Episode {media_list_entry.get('progress', 0)}"
else:
user_status = "NOT_ON_LIST"
user_progress = "0"
description = media.get("description", "No description available.") description = media.get("description", "No description available.")
description = strip_markdown(description) description = strip_markdown(description)
# Print sections matching media_info.py structure # Print sections matching media_info.py structure exactly
rows = [ rows = [
("Score", score_str), ("Score", score_str),
("Favorites", favourites), ("Favorites", favourites),
@@ -376,16 +422,17 @@ def main():
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
for key, value in rows: for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 0, 0) print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [ rows = [
("Episodes", str(episodes)), ("Episodes", str(episodes)),
("Duration", duration_str), ("Duration", duration_str),
("Next Episode", next_episode_str),
] ]
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
for key, value in rows: for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 0, 0) print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [ rows = [
("Genres", genres), ("Genres", genres),
@@ -394,7 +441,16 @@ def main():
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
for key, value in rows: for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 0, 0) print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [
("List Status", user_status),
("Progress", user_progress),
]
print_rule(SEPARATOR_COLOR)
for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [ rows = [
("Start Date", start_date), ("Start Date", start_date),
@@ -403,15 +459,16 @@ def main():
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
for key, value in rows: for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 0, 0) print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [ rows = [
("Studios", studios), ("Studios", studios),
("Producers", producers),
] ]
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
for key, value in rows: for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 0, 0) print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [ rows = [
("Synonyms", synonyms), ("Synonyms", synonyms),
@@ -419,7 +476,15 @@ def main():
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
for key, value in rows: for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 0, 0) print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
rows = [
("Tags", tags),
]
print_rule(SEPARATOR_COLOR)
for key, value in rows:
print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)
print(wrap_text(description, term_width)) print(wrap_text(description, term_width))

View File

@@ -67,6 +67,7 @@ for key, value in rows:
rows = [ rows = [
("Studios", "{STUDIOS}"), ("Studios", "{STUDIOS}"),
("Producers", "{PRODUCERS}"),
] ]
print_rule(SEPARATOR_COLOR) print_rule(SEPARATOR_COLOR)

View File

@@ -5,6 +5,18 @@
# This script is a template for dynamic search functionality in fzf. # This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {GRAPHQL_ENDPOINT} are dynamically # The placeholders in curly braces, like {GRAPHQL_ENDPOINT} are dynamically
# filled by Python using .replace() during runtime. # filled by Python using .replace() during runtime.
#
# FILTER SYNTAX:
# @genre:action,comedy Filter by genres
# @genre:!hentai Exclude genre
# @status:airing Status: airing, finished, upcoming, cancelled, hiatus
# @year:2024 Filter by year
# @season:winter winter, spring, summer, fall
# @format:tv,movie tv, movie, ova, ona, special
# @sort:score score, popularity, trending, date, title
# @score:>80 / @score:<50 Min/max score
# @onlist / @onlist:false Filter by list status
# @tag:isekai Filter by tags
import json import json
import sys import sys
@@ -12,9 +24,13 @@ from pathlib import Path
from urllib import request from urllib import request
from urllib.error import URLError from urllib.error import URLError
# Import the filter parser
from _filter_parser import parse_filters
# --- Template Variables (Injected by Python) --- # --- Template Variables (Injected by Python) ---
GRAPHQL_ENDPOINT = "{GRAPHQL_ENDPOINT}" GRAPHQL_ENDPOINT = "{GRAPHQL_ENDPOINT}"
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}") SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
LAST_QUERY_FILE = Path("{LAST_QUERY_FILE}")
AUTH_HEADER = "{AUTH_HEADER}" AUTH_HEADER = "{AUTH_HEADER}"
# The GraphQL query is injected as a properly escaped JSON string # The GraphQL query is injected as a properly escaped JSON string
@@ -22,17 +38,29 @@ GRAPHQL_QUERY = "{GRAPHQL_QUERY}"
# --- Get Query from fzf --- # --- Get Query from fzf ---
# fzf passes the current query as the first argument when using --bind change:reload # fzf passes the current query as the first argument when using --bind change:reload
QUERY = sys.argv[1] if len(sys.argv) > 1 else "" RAW_QUERY = sys.argv[1] if len(sys.argv) > 1 else ""
# If query is empty, exit with empty results # Parse the query to extract filters and clean search text
if not QUERY.strip(): QUERY, PARSED_FILTERS = parse_filters(RAW_QUERY)
print("")
# If query is empty and no filters, show help hint
if not RAW_QUERY.strip():
print("💡 Tip: Use @genre:action @status:airing for filters (type @help for syntax)")
sys.exit(0) sys.exit(0)
# Show filter help if requested
if RAW_QUERY.strip().lower() in ("@help", "@?", "@h"):
from _filter_parser import get_help_text
print(get_help_text())
sys.exit(0)
# If we only have filters (no search text), that's valid - we'll search with filters only
# But if we have neither query nor filters, we already showed the help hint above
def make_graphql_request( def make_graphql_request(
endpoint: str, query: str, variables: dict, auth_token: str = "" endpoint: str, query: str, variables: dict, auth_token: str = ""
) -> dict | None: ) -> tuple[dict | None, str | None]:
""" """
Make a GraphQL request to the specified endpoint. Make a GraphQL request to the specified endpoint.
@@ -43,7 +71,7 @@ def make_graphql_request(
auth_token: Optional authorization token (Bearer token) auth_token: Optional authorization token (Bearer token)
Returns: Returns:
Response JSON as a dictionary, or None if request fails Tuple of (Response JSON, error message) - one will be None
""" """
payload = {"query": query, "variables": variables} payload = {"query": query, "variables": variables}
@@ -61,10 +89,13 @@ def make_graphql_request(
) )
with request.urlopen(req, timeout=10) as response: with request.urlopen(req, timeout=10) as response:
return json.loads(response.read().decode("utf-8")) return json.loads(response.read().decode("utf-8")), None
except (URLError, json.JSONDecodeError, Exception) as e: except URLError as e:
print(f"❌ Request failed: {e}", file=sys.stderr) return None, f"Network error: {e.reason}"
return None except json.JSONDecodeError as e:
return None, f"Invalid response: {e}"
except Exception as e:
return None, f"Request error: {e}"
def extract_title(media_item: dict) -> str: def extract_title(media_item: dict) -> str:
@@ -90,34 +121,67 @@ def main():
# Ensure parent directory exists # Ensure parent directory exists
SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True) SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)
# Create GraphQL variables # Base GraphQL variables
variables = { variables = {
"query": QUERY,
"type": "ANIME", "type": "ANIME",
"per_page": 50, "per_page": 50,
"genre_not_in": ["Hentai"], "genre_not_in": ["Hentai"], # Default exclusion
} }
# Add search query if provided
if QUERY:
variables["query"] = QUERY
# Apply parsed filters from the filter syntax
for key, value in PARSED_FILTERS.items():
# Handle array merging for _in and _not_in fields
if key.endswith("_in") or key.endswith("_not_in"):
if key in variables:
# Merge arrays, avoiding duplicates
existing = set(variables[key])
existing.update(value)
variables[key] = list(existing)
else:
variables[key] = value
else:
variables[key] = value
# Make the GraphQL request # Make the GraphQL request
response = make_graphql_request( response, error = make_graphql_request(
GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER
) )
if response is None: if error:
print("Search failed") print(f"{error}")
# Also show what we tried to search for debugging
print(f" Query: {QUERY or '(none)'}")
print(f" Filters: {json.dumps(PARSED_FILTERS) if PARSED_FILTERS else '(none)'}")
sys.exit(1) sys.exit(1)
if response is None:
print("❌ Search failed: No response received")
sys.exit(1)
# Check for GraphQL errors first (these come in the response body)
if "errors" in response:
errors = response["errors"]
if errors:
# Extract error messages
error_msgs = [e.get("message", str(e)) for e in errors]
print(f"❌ API Error: {'; '.join(error_msgs)}")
# Show variables for debugging
print(f" Filters used: {json.dumps(PARSED_FILTERS, indent=2) if PARSED_FILTERS else '(none)'}")
sys.exit(1)
# Save the raw response for later processing by dynamic_search.py # Save the raw response for later processing by dynamic_search.py
try: try:
with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f: with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f:
json.dump(response, f, ensure_ascii=False, indent=2) json.dump(response, f, ensure_ascii=False, indent=2)
# Also save the raw query so it can be restored when going back
with open(LAST_QUERY_FILE, "w", encoding="utf-8") as f:
f.write(RAW_QUERY)
except IOError as e: except IOError as e:
print(f"❌ Failed to save results: {e}", file=sys.stderr) print(f"❌ Failed to save results: {e}")
sys.exit(1)
# Parse and display results
if "errors" in response:
print(f"❌ Search error: {response['errors']}")
sys.exit(1) sys.exit(1)
# Navigate the response structure # Navigate the response structure
@@ -126,7 +190,9 @@ def main():
media_list = page.get("media", []) media_list = page.get("media", [])
if not media_list: if not media_list:
print(" No results found") print("🔍 No results found")
if PARSED_FILTERS:
print(" Try adjusting your filters")
sys.exit(0) sys.exit(0)
# Output titles for fzf (one per line) # Output titles for fzf (one per line)
@@ -141,5 +207,5 @@ if __name__ == "__main__":
except KeyboardInterrupt: except KeyboardInterrupt:
sys.exit(0) sys.exit(0)
except Exception as e: except Exception as e:
print(f"❌ Unexpected error: {e}", file=sys.stderr) print(f"❌ Unexpected error: {type(e).__name__}: {e}")
sys.exit(1) sys.exit(1)

View File

@@ -0,0 +1 @@
# Menu package for interactive session

View File

@@ -0,0 +1,18 @@
# Media menu modules
# Explicit module list for PyInstaller compatibility
__all__ = [
"downloads",
"download_episodes",
"dynamic_search",
"episodes",
"main",
"media_actions",
"media_airing_schedule",
"media_characters",
"media_review",
"player_controls",
"play_downloads",
"provider_search",
"results",
"servers",
]

View File

@@ -1,9 +1,10 @@
import json import json
import logging import logging
import sys import shutil
from pathlib import Path from pathlib import Path
from .....core.constants import APP_CACHE_DIR, SCRIPTS_DIR from .....core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from .....core.utils.detect import get_python_executable
from .....libs.media_api.params import MediaSearchParams from .....libs.media_api.params import MediaSearchParams
from ...session import Context, session from ...session import Context, session
from ...state import InternalDirective, MediaApiState, MenuName, State from ...state import InternalDirective, MediaApiState, MenuName, State
@@ -12,8 +13,36 @@ logger = logging.getLogger(__name__)
SEARCH_CACHE_DIR = APP_CACHE_DIR / "previews" / "dynamic-search" SEARCH_CACHE_DIR = APP_CACHE_DIR / "previews" / "dynamic-search"
SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json" SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json"
LAST_QUERY_FILE = SEARCH_CACHE_DIR / "last_query.txt"
RESTORE_MODE_FILE = SEARCH_CACHE_DIR / ".restore_mode"
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf" FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(encoding="utf-8") SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(encoding="utf-8")
FILTER_PARSER_SCRIPT = FZF_SCRIPTS_DIR / "_filter_parser.py"
def _load_cached_titles() -> list[str]:
"""Load titles from cached search results for display in fzf."""
if not SEARCH_RESULTS_FILE.exists():
return []
try:
with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
data = json.load(f)
media_list = data.get("data", {}).get("Page", {}).get("media", [])
titles = []
for media in media_list:
title_obj = media.get("title", {})
title = (
title_obj.get("english")
or title_obj.get("romaji")
or title_obj.get("native")
or "Unknown"
)
titles.append(title)
return titles
except (IOError, json.JSONDecodeError):
return []
@session.menu @session.menu
@@ -25,6 +54,12 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
# Ensure cache directory exists # Ensure cache directory exists
SEARCH_CACHE_DIR.mkdir(parents=True, exist_ok=True) SEARCH_CACHE_DIR.mkdir(parents=True, exist_ok=True)
# Check if we're in restore mode (coming back from media_actions)
restore_mode = RESTORE_MODE_FILE.exists()
if restore_mode:
# Clear the restore flag
RESTORE_MODE_FILE.unlink(missing_ok=True)
# Read the GraphQL search query # Read the GraphQL search query
from .....libs.media_api.anilist import gql from .....libs.media_api.anilist import gql
@@ -44,6 +79,7 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
"GRAPHQL_ENDPOINT": "https://graphql.anilist.co", "GRAPHQL_ENDPOINT": "https://graphql.anilist.co",
"GRAPHQL_QUERY": search_query_json, "GRAPHQL_QUERY": search_query_json,
"SEARCH_RESULTS_FILE": SEARCH_RESULTS_FILE.as_posix(), "SEARCH_RESULTS_FILE": SEARCH_RESULTS_FILE.as_posix(),
"LAST_QUERY_FILE": LAST_QUERY_FILE.as_posix(),
"AUTH_HEADER": auth_header, "AUTH_HEADER": auth_header,
} }
@@ -54,12 +90,34 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
search_script_file = SEARCH_CACHE_DIR / "search.py" search_script_file = SEARCH_CACHE_DIR / "search.py"
search_script_file.write_text(search_command, encoding="utf-8") search_script_file.write_text(search_command, encoding="utf-8")
# Copy the filter parser module to the cache directory
# This is required for the search script to import it
filter_parser_dest = SEARCH_CACHE_DIR / "_filter_parser.py"
if FILTER_PARSER_SCRIPT.exists():
shutil.copy2(FILTER_PARSER_SCRIPT, filter_parser_dest)
# Make the search script executable by calling it with python3 # Make the search script executable by calling it with python3
# fzf will pass the query as {q} which becomes the first argument # fzf will pass the query as {q} which becomes the first argument
search_command_final = ( search_command_final = (
f"{Path(sys.executable).as_posix()} {search_script_file.as_posix()} {{q}}" f"{Path(get_python_executable()).as_posix()} {search_script_file.as_posix()} {{q}}"
) )
# Header hint for filter syntax
filter_hint = "💡 Filters: @genre:action @status:airing @year:2024 @sort:score (type @help for more)"
# Only load previous query if we're in restore mode (coming back from media_actions)
initial_query = None
cached_results = None
if restore_mode:
# Load previous query
if LAST_QUERY_FILE.exists():
try:
initial_query = LAST_QUERY_FILE.read_text(encoding="utf-8").strip()
except IOError:
pass
# Load cached results to display immediately without network request
cached_results = _load_cached_titles()
try: try:
# Prepare preview functionality # Prepare preview functionality
preview_command = None preview_command = None
@@ -73,11 +131,17 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
prompt="Search Anime", prompt="Search Anime",
search_command=search_command_final, search_command=search_command_final,
preview=preview_command, preview=preview_command,
header=filter_hint,
initial_query=initial_query,
initial_results=cached_results,
) )
else: else:
choice = ctx.selector.search( choice = ctx.selector.search(
prompt="Search Anime", prompt="Search Anime",
search_command=search_command_final, search_command=search_command_final,
header=filter_hint,
initial_query=initial_query,
initial_results=cached_results,
) )
except NotImplementedError: except NotImplementedError:
feedback.error("Dynamic search is not supported by your current selector") feedback.error("Dynamic search is not supported by your current selector")
@@ -116,6 +180,9 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
logger.error(f"Could not find selected media for choice: {choice}") logger.error(f"Could not find selected media for choice: {choice}")
return InternalDirective.MAIN return InternalDirective.MAIN
# Set restore mode flag so we can restore state when user goes back
RESTORE_MODE_FILE.touch()
# Navigate to media actions with the selected item # Navigate to media actions with the selected item
return State( return State(
menu_name=MenuName.MEDIA_ACTIONS, menu_name=MenuName.MEDIA_ACTIONS,

View File

@@ -1,6 +1,7 @@
import importlib
import importlib.util import importlib.util
import logging import logging
import os import pkgutil
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Callable, List, Optional, Union from typing import TYPE_CHECKING, Callable, List, Optional, Union
@@ -309,30 +310,46 @@ class Session:
return decorator return decorator
def load_menus_from_folder(self, package: str): def load_menus_from_folder(self, package: str):
package_path = MENUS_DIR / package """Load menu modules from a subfolder.
package_name = package_path.name
logger.debug(f"Loading menus from '{package_path}'...") Uses pkgutil to discover modules for regular Python, and falls back
to the package's __all__ list for PyInstaller frozen executables.
"""
full_package_name = f"viu_media.cli.interactive.menu.{package}"
logger.debug(f"Loading menus from package '{full_package_name}'...")
for filename in os.listdir(package_path): try:
if filename.endswith(".py") and not filename.startswith("__"): # Import the parent package first
module_name = filename[:-3] parent_package = importlib.import_module(full_package_name)
full_module_name = ( except ImportError as e:
f"viu_media.cli.interactive.menu.{package_name}.{module_name}" logger.error(f"Failed to import menu package '{full_package_name}': {e}")
return
# Try pkgutil first (works in regular Python)
package_path = getattr(parent_package, "__path__", None)
module_names = []
if package_path:
module_names = [
name for _, name, ispkg in pkgutil.iter_modules(package_path)
if not ispkg and not name.startswith("_")
]
# Fallback to __all__ for PyInstaller frozen executables
if not module_names:
module_names = getattr(parent_package, "__all__", [])
logger.debug(f"Using __all__ fallback with {len(module_names)} modules")
for module_name in module_names:
full_module_name = f"{full_package_name}.{module_name}"
try:
# Simply importing the module will execute it,
# which runs the @session.menu decorators
importlib.import_module(full_module_name)
except Exception as e:
logger.error(
f"Failed to load menu module '{full_module_name}': {e}"
) )
file_path = package_path / filename
try:
spec = importlib.util.spec_from_file_location(
full_module_name, file_path
)
if spec and spec.loader:
module = importlib.util.module_from_spec(spec)
# The act of executing the module runs the @session.menu decorators
spec.loader.exec_module(module)
except Exception as e:
logger.error(
f"Failed to load menu module '{full_module_name}': {e}"
)
# Create a single, global instance of the Session to be imported by menu modules. # Create a single, global instance of the Session to be imported by menu modules.

View File

@@ -57,6 +57,9 @@ class MPVIPCClient:
def connect(self, timeout: float = 5.0) -> None: def connect(self, timeout: float = 5.0) -> None:
"""Connect to MPV IPC socket and start the reader thread.""" """Connect to MPV IPC socket and start the reader thread."""
if not hasattr(socket, "AF_UNIX"):
raise MPVIPCError("Unix domain sockets are unavailable on this platform")
start_time = time.time() start_time = time.time()
while time.time() - start_time < timeout: while time.time() - start_time < timeout:
try: try:
@@ -299,6 +302,10 @@ class MpvIPCPlayer(BaseIPCPlayer):
def _play_with_ipc(self, player: BasePlayer, params: PlayerParams) -> PlayerResult: def _play_with_ipc(self, player: BasePlayer, params: PlayerParams) -> PlayerResult:
"""Play media using MPV IPC.""" """Play media using MPV IPC."""
try: try:
if not hasattr(socket, "AF_UNIX"):
raise MPVIPCError(
"MPV IPC requires Unix domain sockets, which are unavailable on this platform."
)
self._start_mpv_process(player, params) self._start_mpv_process(player, params)
self._connect_ipc() self._connect_ipc()
self._setup_event_handling() self._setup_event_handling()

View File

@@ -9,6 +9,8 @@ import importlib.util
import click import click
import httpx import httpx
from viu_media.core.utils import detect
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -138,6 +140,7 @@ def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str
[icat_executable, "--align", "left", url], [icat_executable, "--align", "left", url],
capture_output=capture, capture_output=capture,
text=capture, text=capture,
env=detect.get_clean_env(),
) )
if process.returncode == 0: if process.returncode == 0:
return process.stdout if capture else None return process.stdout if capture else None

View File

@@ -2,7 +2,6 @@ import logging
from pathlib import Path from pathlib import Path
import re import re
from hashlib import sha256 from hashlib import sha256
import sys
from typing import Dict, List, Optional from typing import Dict, List, Optional
import httpx import httpx
@@ -11,6 +10,7 @@ from viu_media.core.utils import formatter
from ...core.config import AppConfig from ...core.config import AppConfig
from ...core.constants import APP_CACHE_DIR, SCRIPTS_DIR from ...core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from ...core.utils.detect import get_python_executable
from ...core.utils.file import AtomicWriter from ...core.utils.file import AtomicWriter
from ...libs.media_api.types import ( from ...libs.media_api.types import (
AiringScheduleResult, AiringScheduleResult,
@@ -327,7 +327,7 @@ def get_anime_preview(
preview_file.write_text(preview_script, encoding="utf-8") preview_file.write_text(preview_script, encoding="utf-8")
preview_script_final = ( preview_script_final = (
f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}" f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
) )
return preview_script_final return preview_script_final
@@ -387,7 +387,7 @@ def get_episode_preview(
preview_file.write_text(preview_script, encoding="utf-8") preview_file.write_text(preview_script, encoding="utf-8")
preview_script_final = ( preview_script_final = (
f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}" f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
) )
return preview_script_final return preview_script_final
@@ -435,7 +435,7 @@ def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -
preview_file.write_text(preview_script, encoding="utf-8") preview_file.write_text(preview_script, encoding="utf-8")
preview_script_final = ( preview_script_final = (
f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}" f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
) )
return preview_script_final return preview_script_final
@@ -483,7 +483,7 @@ def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) ->
preview_file.write_text(preview_script, encoding="utf-8") preview_file.write_text(preview_script, encoding="utf-8")
preview_script_final = ( preview_script_final = (
f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}" f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
) )
return preview_script_final return preview_script_final
@@ -599,7 +599,7 @@ def get_dynamic_anime_preview(config: AppConfig) -> str:
# Return the command to execute the preview script # Return the command to execute the preview script
preview_script_final = ( preview_script_final = (
f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}" f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
) )
return preview_script_final return preview_script_final

View File

@@ -189,7 +189,12 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
), ),
"STUDIOS": formatter.shell_safe( "STUDIOS": formatter.shell_safe(
formatter.format_list_with_commas( formatter.format_list_with_commas(
[t.name for t in media_item.studios if t.name] [t.name for t in media_item.studios if t.name and t.is_animation_studio]
)
),
"PRODUCERS": formatter.shell_safe(
formatter.format_list_with_commas(
[t.name for t in media_item.studios if t.name and not t.is_animation_studio]
) )
), ),
"SYNONYMNS": formatter.shell_safe( "SYNONYMNS": formatter.shell_safe(

View File

@@ -21,7 +21,7 @@ from rich.progress import (
) )
from rich.prompt import Confirm from rich.prompt import Confirm
from ..utils.file import sanitize_filename from ..utils.file import sanitize_filename
from ..utils.detect import get_clean_env
from ..exceptions import ViuError from ..exceptions import ViuError
from ..patterns import TORRENT_REGEX from ..patterns import TORRENT_REGEX
from ..utils.networking import get_remote_filename from ..utils.networking import get_remote_filename
@@ -372,6 +372,7 @@ class DefaultDownloader(BaseDownloader):
capture_output=params.silent, # Only suppress ffmpeg output if silent capture_output=params.silent, # Only suppress ffmpeg output if silent
text=True, text=True,
check=True, check=True,
env=get_clean_env(),
) )
final_output_path = video_path.parent / merged_filename final_output_path = video_path.parent / merged_filename

View File

@@ -11,7 +11,7 @@ from rich.prompt import Confirm
import yt_dlp import yt_dlp
from yt_dlp.utils import sanitize_filename from yt_dlp.utils import sanitize_filename
from ..utils.detect import get_clean_env
from ..exceptions import ViuError from ..exceptions import ViuError
from ..patterns import TORRENT_REGEX from ..patterns import TORRENT_REGEX
from ..utils.networking import get_remote_filename from ..utils.networking import get_remote_filename
@@ -224,7 +224,7 @@ class YtDLPDownloader(BaseDownloader):
# Run the ffmpeg command # Run the ffmpeg command
try: try:
subprocess.run(args) subprocess.run(args, env=get_clean_env())
final_output_path = video_path.parent / merged_filename final_output_path = video_path.parent / merged_filename
if final_output_path.exists(): if final_output_path.exists():

View File

@@ -56,3 +56,48 @@ def is_running_kitty_terminal() -> bool:
def has_fzf() -> bool: def has_fzf() -> bool:
return True if shutil.which("fzf") else False return True if shutil.which("fzf") else False
def is_frozen() -> bool:
"""Check if running as a PyInstaller frozen executable."""
return getattr(sys, "frozen", False)
def get_python_executable() -> str:
"""
Get the Python executable path.
In frozen (PyInstaller) apps, sys.executable points to the .exe,
so we need to find the system Python instead.
Returns:
Path to a Python executable.
"""
if is_frozen():
# We're in a frozen app - find system Python
for python_name in ["python3", "python", "py"]:
python_path = shutil.which(python_name)
if python_path:
return python_path
# Fallback - this likely won't work but is the best we can do
return "python"
else:
return sys.executable
def get_clean_env() -> dict[str, str]:
"""
Returns a copy of the environment with LD_LIBRARY_PATH fixed for system subprocesses
when running as a PyInstaller frozen application.
This prevents system binaries (like mpv, ffmpeg) from loading incompatible
libraries from the PyInstaller bundle.
"""
env = os.environ.copy()
if is_frozen():
# PyInstaller saves the original LD_LIBRARY_PATH in LD_LIBRARY_PATH_ORIG
if "LD_LIBRARY_PATH_ORIG" in env:
env["LD_LIBRARY_PATH"] = env["LD_LIBRARY_PATH_ORIG"]
else:
# If orig didn't exist, LD_LIBRARY_PATH shouldn't exist for the subprocess
env.pop("LD_LIBRARY_PATH", None)
return env

View File

@@ -184,13 +184,22 @@ def format_score(score: Optional[float]) -> str:
def shell_safe(text: Optional[str]) -> str: def shell_safe(text: Optional[str]) -> str:
""" """
Escapes a string for safe inclusion in a shell script, Escapes a string for safe inclusion in a Python script string literal.
specifically for use within double quotes. It escapes backticks, This is used when generating Python cache scripts with embedded text content.
double quotes, and dollar signs.
For Python triple-quoted strings, we need to:
- Escape backslashes first (so existing backslashes don't interfere)
- Escape triple quotes (to not break the string literal)
- Remove or replace problematic characters
""" """
if not text: if not text:
return "" return ""
return text.replace("`", "\\`").replace('"', '\\"').replace("$", "\\$") # Escape backslashes first
result = text.replace("\\", "\\\\")
# Escape triple quotes (both types) for Python triple-quoted string literals
result = result.replace('"""', r'\"\"\"')
result = result.replace("'''", r"\'\'\'")
return result
def extract_episode_number(title: str) -> Optional[float]: def extract_episode_number(title: str) -> Optional[float]:

View File

@@ -97,7 +97,7 @@ class MpvPlayer(BasePlayer):
"is.xyz.mpv/.MPVActivity", "is.xyz.mpv/.MPVActivity",
] ]
subprocess.run(args) subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(params.episode) return PlayerResult(params.episode)
@@ -146,6 +146,7 @@ class MpvPlayer(BasePlayer):
text=True, text=True,
encoding="utf-8", encoding="utf-8",
check=False, check=False,
env=detect.get_clean_env(),
) )
if proc.stdout: if proc.stdout:
for line in reversed(proc.stdout.split("\n")): for line in reversed(proc.stdout.split("\n")):
@@ -185,7 +186,7 @@ class MpvPlayer(BasePlayer):
logger.info(f"Starting MPV with IPC socket: {socket_path}") logger.info(f"Starting MPV with IPC socket: {socket_path}")
process = subprocess.Popen(pre_args + mpv_args) process = subprocess.Popen(pre_args + mpv_args,env=detect.get_clean_env())
return process return process
@@ -210,7 +211,7 @@ class MpvPlayer(BasePlayer):
args.append("--player-args") args.append("--player-args")
args.extend(mpv_args) args.extend(mpv_args)
subprocess.run(args) subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(params.episode) return PlayerResult(params.episode)
def _stream_on_desktop_with_syncplay(self, params: PlayerParams) -> PlayerResult: def _stream_on_desktop_with_syncplay(self, params: PlayerParams) -> PlayerResult:
@@ -232,7 +233,7 @@ class MpvPlayer(BasePlayer):
if mpv_args := self._create_mpv_cli_options(params): if mpv_args := self._create_mpv_cli_options(params):
args.append("--") args.append("--")
args.extend(mpv_args) args.extend(mpv_args)
subprocess.run(args) subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(params.episode) return PlayerResult(params.episode)

View File

@@ -103,7 +103,7 @@ class VlcPlayer(BasePlayer):
params.title, params.title,
] ]
subprocess.run(args) subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(episode=params.episode) return PlayerResult(episode=params.episode)
@@ -134,7 +134,7 @@ class VlcPlayer(BasePlayer):
if self.config.args: if self.config.args:
args.extend(self.config.args.split(",")) args.extend(self.config.args.split(","))
subprocess.run(args, encoding="utf-8") subprocess.run(args, encoding="utf-8",env=detect.get_clean_env())
return PlayerResult(episode=params.episode) return PlayerResult(episode=params.episode)
def _stream_on_desktop_with_webtorrent_cli( def _stream_on_desktop_with_webtorrent_cli(
@@ -159,7 +159,7 @@ class VlcPlayer(BasePlayer):
args.append("--player-args") args.append("--player-args")
args.extend(self.config.args.split(",")) args.extend(self.config.args.split(","))
subprocess.run(args) subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(episode=params.episode) return PlayerResult(episode=params.episode)

View File

@@ -88,6 +88,8 @@ class BaseSelector(ABC):
*, *,
preview: Optional[str] = None, preview: Optional[str] = None,
header: Optional[str] = None, header: Optional[str] = None,
initial_query: Optional[str] = None,
initial_results: Optional[List[str]] = None,
) -> str | None: ) -> str | None:
""" """
Provides dynamic search functionality that reloads results based on user input. Provides dynamic search functionality that reloads results based on user input.
@@ -97,6 +99,8 @@ class BaseSelector(ABC):
search_command: The command to execute for searching/reloading results. search_command: The command to execute for searching/reloading results.
preview: An optional command or string for a preview window. preview: An optional command or string for a preview window.
header: An optional header to display above the choices. header: An optional header to display above the choices.
initial_query: An optional initial query to pre-populate the search.
initial_results: Optional list of results to display initially (avoids network request).
Returns: Returns:
The string of the chosen item. The string of the chosen item.

View File

@@ -5,6 +5,8 @@ import subprocess
from rich.prompt import Prompt from rich.prompt import Prompt
from viu_media.core.utils import detect
from ....core.config import FzfConfig from ....core.config import FzfConfig
from ....core.exceptions import ViuError from ....core.exceptions import ViuError
from ..base import BaseSelector from ..base import BaseSelector
@@ -49,6 +51,7 @@ class FzfSelector(BaseSelector):
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
text=True, text=True,
encoding="utf-8", encoding="utf-8",
env=detect.get_clean_env(),
) )
if result.returncode != 0: if result.returncode != 0:
return None return None
@@ -76,6 +79,7 @@ class FzfSelector(BaseSelector):
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
text=True, text=True,
encoding="utf-8", encoding="utf-8",
env=detect.get_clean_env(),
) )
if result.returncode != 0: if result.returncode != 0:
return [] return []
@@ -117,29 +121,55 @@ class FzfSelector(BaseSelector):
lines = result.stdout.strip().splitlines() lines = result.stdout.strip().splitlines()
return lines[-1] if lines else (default or "") return lines[-1] if lines else (default or "")
def search(self, prompt, search_command, *, preview=None, header=None): def search(
self,
prompt,
search_command,
*,
preview=None,
header=None,
initial_query=None,
initial_results=None,
):
"""Enhanced search using fzf's --reload flag for dynamic search.""" """Enhanced search using fzf's --reload flag for dynamic search."""
# Build the header with optional custom header line
display_header = self.header
if header:
display_header = f"{self.header}\n{header}"
commands = [ commands = [
self.executable, self.executable,
"--prompt", "--prompt",
f"{prompt.title()}: ", f"{prompt.title()}: ",
"--header", "--header",
self.header, display_header,
"--header-first", "--header-first",
"--disabled", # Disable local filtering - rely on external search command
"--bind", "--bind",
f"change:reload({search_command})", f"change:reload({search_command})",
"--ansi", "--ansi",
] ]
# If there's an initial query, set it
if initial_query:
commands.extend(["--query", initial_query])
# Only trigger reload on start if we don't have cached results
if not initial_results:
commands.extend(["--bind", f"start:reload({search_command})"])
if preview: if preview:
commands.extend(["--preview", preview]) commands.extend(["--preview", preview])
# Use cached results as initial input if provided (avoids network request)
fzf_input = "\n".join(initial_results) if initial_results else ""
result = subprocess.run( result = subprocess.run(
commands, commands,
input="", input=fzf_input,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
text=True, text=True,
encoding="utf-8", encoding="utf-8",
env=detect.get_clean_env(),
) )
if result.returncode != 0: if result.returncode != 0:
return None return None

View File

@@ -43,6 +43,7 @@ class RofiSelector(BaseSelector):
input=rofi_input, input=rofi_input,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
text=True, text=True,
env=detect.get_clean_env()
) )
if result.returncode == 0: if result.returncode == 0:
@@ -106,6 +107,7 @@ class RofiSelector(BaseSelector):
input=rofi_input, input=rofi_input,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
text=True, text=True,
env=detect.get_clean_env()
) )
if result.returncode == 0: if result.returncode == 0: