Compare commits

..

12 Commits

Author SHA1 Message Date
Benedict Xavier
4caafda123 Merge branch 'master' into copilot/improve-installation-section 2026-01-01 10:49:48 +03:00
Benexl
9ef834c94c fix: update shell_safe function to improve string literal escaping 2026-01-01 10:43:30 +03:00
copilot-swe-agent[bot]
5e9255b3d5 docs: Restructure binary installation to reduce redundancy
Co-authored-by: Benexl <81157281+Benexl@users.noreply.github.com>
2026-01-01 07:31:33 +00:00
copilot-swe-agent[bot]
fd535ad3e3 docs: Fix link consistency and chmod order in binary installation
Co-authored-by: Benexl <81157281+Benexl@users.noreply.github.com>
2026-01-01 07:30:45 +00:00
copilot-swe-agent[bot]
121e02a7e2 docs: Clarify binary installation with exact filenames and Windows steps
Co-authored-by: Benexl <81157281+Benexl@users.noreply.github.com>
2026-01-01 07:29:46 +00:00
copilot-swe-agent[bot]
2bb62fd0af docs: Make binary installation instructions more specific
Co-authored-by: Benexl <81157281+Benexl@users.noreply.github.com>
2026-01-01 07:28:46 +00:00
copilot-swe-agent[bot]
a752a9efdd docs: Add pre-built binaries section to README installation
Co-authored-by: Benexl <81157281+Benexl@users.noreply.github.com>
2026-01-01 07:27:24 +00:00
copilot-swe-agent[bot]
ac490d9a4b Initial plan 2026-01-01 07:25:17 +00:00
Benexl
1ce2d2740d feat: implement get_clean_env function to manage environment variables for subprocesses 2025-12-31 21:43:43 +03:00
Benexl
ce6294a17b fix: exclude OpenSSL libraries on Linux to avoid version conflicts 2025-12-31 21:14:08 +03:00
Benexl
b550956a3e fix: update Ubuntu version in release binaries workflow to 22.04 2025-12-31 21:03:29 +03:00
Benexl
e382e4c046 chore: bump version to 3.3.7 in pyproject.toml and uv.lock 2025-12-31 20:51:00 +03:00
21 changed files with 241 additions and 942 deletions

View File

@@ -1,152 +1,152 @@
name: Build Release Binaries
on:
release:
types: [published]
workflow_dispatch:
inputs:
tag:
description: "Tag/version to build (leave empty for latest)"
required: false
type: string
release:
types: [published]
workflow_dispatch:
inputs:
tag:
description: "Tag/version to build (leave empty for latest)"
required: false
type: string
permissions:
contents: write
contents: write
jobs:
build:
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-latest
target: linux
asset_name: viu-linux-x86_64
executable: viu
- os: windows-latest
target: windows
asset_name: viu-windows-x86_64.exe
executable: viu.exe
- os: macos-latest
target: macos
asset_name: viu-macos-x86_64
executable: viu
build:
strategy:
fail-fast: false
matrix:
include:
- os: ubuntu-22.04
target: linux
asset_name: viu-linux-x86_64
executable: viu
- os: windows-latest
target: windows
asset_name: viu-windows-x86_64.exe
executable: viu.exe
- os: macos-latest
target: macos
asset_name: viu-macos-x86_64
executable: viu
runs-on: ${{ matrix.os }}
runs-on: ${{ matrix.os }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.tag || github.ref }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.tag || github.ref }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Install system dependencies (Linux)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libdbus-1-dev libglib2.0-dev
- name: Install system dependencies (Linux)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libdbus-1-dev libglib2.0-dev
- name: Install dependencies
run: uv sync --all-extras --all-groups
- name: Install dependencies
run: uv sync --all-extras --all-groups
- name: Build executable with PyInstaller
run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean
- name: Build executable with PyInstaller
run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean
- name: Rename executable
shell: bash
run: mv dist/${{ matrix.executable }} dist/${{ matrix.asset_name }}
- name: Rename executable
shell: bash
run: mv dist/${{ matrix.executable }} dist/${{ matrix.asset_name }}
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.asset_name }}
path: dist/${{ matrix.asset_name }}
if-no-files-found: error
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.asset_name }}
path: dist/${{ matrix.asset_name }}
if-no-files-found: error
- name: Upload to Release
if: github.event_name == 'release'
uses: softprops/action-gh-release@v2
with:
files: dist/${{ matrix.asset_name }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Build for macOS ARM (Apple Silicon)
build-macos-arm:
runs-on: macos-14
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.tag || github.ref }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Install dependencies
run: uv sync --all-extras --all-groups
- name: Build executable with PyInstaller
run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean
- name: Rename executable
run: mv dist/viu dist/viu-macos-arm64
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: viu-macos-arm64
path: dist/viu-macos-arm64
if-no-files-found: error
- name: Upload to Release
if: github.event_name == 'release'
uses: softprops/action-gh-release@v2
with:
files: dist/viu-macos-arm64
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Create checksums after all builds complete
checksums:
needs: [build, build-macos-arm]
runs-on: ubuntu-latest
- name: Upload to Release
if: github.event_name == 'release'
uses: softprops/action-gh-release@v2
with:
files: dist/${{ matrix.asset_name }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: artifacts
merge-multiple: true
# Build for macOS ARM (Apple Silicon)
build-macos-arm:
runs-on: macos-14
- name: Generate checksums
run: |
cd artifacts
sha256sum * > SHA256SUMS.txt
cat SHA256SUMS.txt
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.inputs.tag || github.ref }}
- name: Upload checksums to Release
uses: softprops/action-gh-release@v2
with:
files: artifacts/SHA256SUMS.txt
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Install dependencies
run: uv sync --all-extras --all-groups
- name: Build executable with PyInstaller
run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean
- name: Rename executable
run: mv dist/viu dist/viu-macos-arm64
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: viu-macos-arm64
path: dist/viu-macos-arm64
if-no-files-found: error
- name: Upload to Release
if: github.event_name == 'release'
uses: softprops/action-gh-release@v2
with:
files: dist/viu-macos-arm64
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# Create checksums after all builds complete
checksums:
needs: [build, build-macos-arm]
runs-on: ubuntu-latest
if: github.event_name == 'release'
steps:
- name: Download all artifacts
uses: actions/download-artifact@v4
with:
path: artifacts
merge-multiple: true
- name: Generate checksums
run: |
cd artifacts
sha256sum * > SHA256SUMS.txt
cat SHA256SUMS.txt
- name: Upload checksums to Release
uses: softprops/action-gh-release@v2
with:
files: artifacts/SHA256SUMS.txt
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -49,7 +49,7 @@
## Installation
Viu runs on any platform with Python 3.10+, including Windows, macOS, Linux, and Android (via Termux, see other installation methods).
Viu runs on Windows, macOS, Linux, and Android (via Termux). Pre-built binaries are available for quick installation without Python, or you can install via Python 3.10+ package managers.
### Prerequisites
@@ -64,6 +64,39 @@ For the best experience, please install these external tools:
* [**ffmpeg**](https://www.ffmpeg.org/) - Required for downloading HLS streams and merging subtitles.
* [**webtorrent-cli**](https://github.com/webtorrent/webtorrent-cli) - For streaming torrents directly.
### Pre-built Binaries (Recommended for Quick Start)
The easiest way to get started is to download a pre-built, self-contained binary from the [**releases page**](https://github.com/viu-media/viu/releases/latest). These binaries include all dependencies and **do not require Python** to be installed.
**Available for:**
* **Linux** (x86_64): `viu-linux-x86_64`
* **Windows** (x86_64): `viu-windows-x86_64.exe`
* **macOS** (Intel x86_64): `viu-macos-x86_64`
* **macOS** (Apple Silicon ARM64): `viu-macos-arm64`
**Installation Steps:**
1. Download the appropriate binary for your platform from the [**releases page**](https://github.com/viu-media/viu/releases/latest).
2. **Linux/macOS:** Make it executable:
```bash
# Replace with the actual binary name you downloaded
chmod +x viu-linux-x86_64
```
Then move it to a directory in your PATH:
```bash
# Option 1: System-wide installation (requires sudo)
sudo mv viu-linux-x86_64 /usr/local/bin/viu
# Option 2: User directory installation
mkdir -p ~/.local/bin
mv viu-linux-x86_64 ~/.local/bin/viu
# Make sure ~/.local/bin is in your PATH
```
**Windows:** Simply rename `viu-windows-x86_64.exe` to `viu.exe` and place it in a directory in your PATH, or run it directly.
3. Verify the installation:
```bash
viu --version
```
### Recommended Installation (uv)
The best way to install Viu is with [**uv**](https://github.com/astral-sh/uv), a lightning-fast Python package manager.

View File

@@ -39,10 +39,18 @@ hiddenimports = [
'viu_media.cli.interactive.menu.media.servers',
] + collect_submodules('viu_media')
# Exclude OpenSSL libraries on Linux to avoid version conflicts
import sys
binaries = []
if sys.platform == 'linux':
# Remove any bundled libssl or libcrypto
binaries = [b for b in binaries if not any(lib in b[0] for lib in ['libssl', 'libcrypto'])]
a = Analysis(
['../viu_media/viu.py'],
pathex=[],
binaries=[],
binaries=binaries,
datas=datas,
hiddenimports=hiddenimports,
hookspath=[],

View File

@@ -1,6 +1,6 @@
[project]
name = "viu-media"
version = "3.3.6"
version = "3.3.7"
description = "A browser anime site experience from the terminal"
license = "UNLICENSE"
readme = "README.md"

2
uv.lock generated
View File

@@ -3743,7 +3743,7 @@ wheels = [
[[package]]
name = "viu-media"
version = "3.3.6"
version = "3.3.7"
source = { editable = "." }
dependencies = [
{ name = "click" },

View File

@@ -30,9 +30,6 @@ from ...core.config import AppConfig
\b
# view the current contents of your config
viu config --view
\b
# clear cached GitHub authentication token
viu config --clear-github-auth
""",
)
@click.option("--path", "-p", help="Print the config location and exit", is_flag=True)
@@ -63,11 +60,6 @@ from ...core.config import AppConfig
is_flag=True,
help="Start the interactive configuration wizard.",
)
@click.option(
"--clear-github-auth",
is_flag=True,
help="Clear cached GitHub authentication token.",
)
@click.pass_obj
def config(
user_config: AppConfig,
@@ -77,18 +69,12 @@ def config(
generate_desktop_entry,
update,
interactive,
clear_github_auth,
):
from ...core.constants import USER_CONFIG
from ..config.editor import InteractiveConfigEditor
from ..config.generate import generate_config_toml_from_app_model
if clear_github_auth:
from ..service.github import GitHubContributionService
GitHubContributionService.clear_cached_auth_static()
click.echo("GitHub authentication cache cleared.")
elif path:
if path:
print(USER_CONFIG)
elif view:
from rich.console import Console

View File

@@ -73,21 +73,6 @@ def provider_search(ctx: Context, state: State) -> State | InternalDirective:
update_user_normalizer_json(
chosen_title, media_title, config.general.provider.value
)
# Offer to submit the mapping to GitHub
if selector.confirm(
"Would you like to contribute this mapping to the project on GitHub?"
):
from ....service.github import GitHubContribution
contribution = GitHubContribution(
provider_name=config.general.provider.value,
provider_title=chosen_title,
media_api_title=media_title,
anilist_id=media_item.id if hasattr(media_item, "id") else None,
)
ctx.github.submit_contribution(contribution)
selected_provider_anime = provider_results_map[chosen_title]
with feedback.progress(

View File

@@ -17,7 +17,6 @@ if TYPE_CHECKING:
from ...libs.selectors.base import BaseSelector
from ..service.auth import AuthService
from ..service.feedback import FeedbackService
from ..service.github import GitHubContributionService
from ..service.player import PlayerService
from ..service.registry import MediaRegistryService
from ..service.session import SessionsService
@@ -93,7 +92,6 @@ class Context:
_session: Optional["SessionsService"] = None
_auth: Optional["AuthService"] = None
_player: Optional["PlayerService"] = None
_github: Optional["GitHubContributionService"] = None
@property
def provider(self) -> "BaseAnimeProvider":
@@ -193,17 +191,6 @@ class Context:
self._auth = AuthService(self.config.general.media_api)
return self._auth
@property
def github(self) -> "GitHubContributionService":
if not self._github:
from ..service.github.service import GitHubContributionService
self._github = GitHubContributionService(
selector=self.selector,
feedback=self.feedback,
)
return self._github
MenuFunction = Callable[[Context, State], Union[State, InternalDirective]]

View File

@@ -1,4 +0,0 @@
from .model import AuthMethod, GitHubContribution, GitHubPRResponse
from .service import GitHubContributionService
__all__ = ["GitHubContributionService", "GitHubContribution", "AuthMethod", "GitHubPRResponse"]

View File

@@ -1,66 +0,0 @@
from enum import Enum
from typing import Optional
from pydantic import BaseModel, Field
class AuthMethod(str, Enum):
"""Authentication method for GitHub API."""
BROWSER = "browser"
GH_CLI = "gh"
class GitHubAuth(BaseModel):
"""Stored GitHub authentication credentials."""
access_token: str
token_type: str = "bearer"
scope: str = ""
class GitHubContribution(BaseModel):
"""Represents a normalizer mapping contribution."""
provider_name: str = Field(..., description="The provider name (e.g., 'allanime')")
provider_title: str = Field(
..., description="The title as it appears on the provider"
)
media_api_title: str = Field(..., description="The normalized media API title")
anilist_id: Optional[int] = Field(
default=None, description="Optional AniList ID for reference"
)
class GitHubPRResponse(BaseModel):
"""Response from GitHub API when creating a pull request."""
id: int
number: int
html_url: str
title: str
state: str
class GitHubUser(BaseModel):
"""GitHub user information."""
login: str
id: int
class GitHubRepo(BaseModel):
"""GitHub repository information."""
full_name: str
default_branch: str
fork: bool = False
class GitHubFileContent(BaseModel):
"""GitHub file content response."""
sha: str
content: str
encoding: str = "base64"

View File

@@ -1,674 +0,0 @@
"""
GitHub Contribution Service
Provides functionality to submit normalizer mappings to the viu repository
via Pull Request, using either browser-based OAuth or the GitHub CLI (gh).
"""
import base64
import json
import logging
import shutil
import subprocess
import time
import webbrowser
from typing import TYPE_CHECKING, Optional
import httpx
from ....core.constants import APP_DATA_DIR, AUTHOR, CLI_NAME
from ....core.utils.file import AtomicWriter, FileLock
from ....core.utils.normalizer import USER_NORMALIZER_JSON
from .model import (
AuthMethod,
GitHubAuth,
GitHubContribution,
GitHubFileContent,
GitHubPRResponse,
GitHubRepo,
GitHubUser,
)
if TYPE_CHECKING:
from ....libs.selectors.base import BaseSelector
from ...service.feedback import FeedbackService
logger = logging.getLogger(__name__)
# GitHub OAuth configuration
GITHUB_CLIENT_ID = "Iv23liXUYWot4d4Zvjxa" # Register your OAuth app on GitHub
GITHUB_OAUTH_SCOPES = "public_repo"
GITHUB_API_BASE = "https://api.github.com"
# Repository information
REPO_OWNER = AUTHOR
REPO_NAME = "viu" # Must match GitHub repo name exactly (case-sensitive)
NORMALIZER_FILE_PATH = "viu_media/assets/normalizer.json"
AUTH_FILE = APP_DATA_DIR / "github_auth.json"
class GitHubContributionService:
"""Service for submitting normalizer mappings to GitHub."""
def __init__(
self,
selector: "BaseSelector",
feedback: Optional["FeedbackService"] = None,
):
self.selector = selector
self.feedback = feedback
self._lock = FileLock(APP_DATA_DIR / "github_auth.lock")
self._http_client = httpx.Client(
headers={
"Accept": "application/json",
"User-Agent": f"{CLI_NAME}/1.0",
},
timeout=30.0,
follow_redirects=True, # Follow redirects for all request types
)
def __del__(self):
"""Cleanup HTTP client."""
if hasattr(self, "_http_client"):
self._http_client.close()
def is_gh_cli_available(self) -> bool:
"""Check if GitHub CLI (gh) is installed and available."""
return shutil.which("gh") is not None
def is_gh_cli_authenticated(self) -> bool:
"""Check if GitHub CLI is authenticated."""
if not self.is_gh_cli_available():
return False
try:
result = subprocess.run(
["gh", "auth", "status"],
capture_output=True,
text=True,
timeout=10,
)
return result.returncode == 0
except (subprocess.SubprocessError, OSError):
return False
def get_available_auth_methods(self) -> list[AuthMethod]:
"""Get list of available authentication methods."""
methods = [AuthMethod.BROWSER]
if self.is_gh_cli_available():
methods.insert(0, AuthMethod.GH_CLI) # Prefer gh CLI if available
return methods
def prompt_auth_method(self) -> Optional[AuthMethod]:
"""
Prompt user to select their preferred authentication method.
Returns:
Selected AuthMethod or None if cancelled.
"""
methods = self.get_available_auth_methods()
choices = []
for method in methods:
if method == AuthMethod.GH_CLI:
status = "✓ authenticated" if self.is_gh_cli_authenticated() else ""
choices.append(f"gh CLI {status}".strip())
else:
choices.append("Browser (OAuth)")
choices.append("Cancel")
choice = self.selector.choose(
prompt="Select GitHub authentication method",
choices=choices,
)
if not choice or choice == "Cancel":
return None
if choice.startswith("gh CLI"):
return AuthMethod.GH_CLI
return AuthMethod.BROWSER
def submit_contribution(
self,
contribution: GitHubContribution,
auth_method: Optional[AuthMethod] = None,
) -> Optional[str]:
"""
Submit a normalizer mapping contribution to GitHub as a Pull Request.
This will:
1. Fork the repository (if not already forked)
2. Create a new branch with the updated normalizer.json
3. Open a Pull Request to the upstream repository
Args:
contribution: The mapping contribution to submit.
auth_method: The authentication method to use. If None, will prompt.
Returns:
URL of the created PR, or None if failed.
"""
if auth_method is None:
auth_method = self.prompt_auth_method()
if auth_method is None:
return None
if auth_method == AuthMethod.GH_CLI:
return self._submit_pr_via_gh_cli(contribution)
else:
return self._submit_pr_via_api(contribution)
def _get_user_normalizer_content(self) -> Optional[dict]:
"""Read the user's local normalizer.json file."""
if not USER_NORMALIZER_JSON.exists():
self._log_error(
f"Local normalizer.json not found at {USER_NORMALIZER_JSON}"
)
return None
try:
with USER_NORMALIZER_JSON.open("r", encoding="utf-8") as f:
return json.load(f)
except (json.JSONDecodeError, OSError) as e:
self._log_error(f"Failed to read normalizer.json: {e}")
return None
def _submit_pr_via_gh_cli(
self, contribution: GitHubContribution
) -> Optional[str]:
"""Submit PR using GitHub CLI."""
if not self.is_gh_cli_available():
self._log_error("GitHub CLI (gh) is not installed")
return None
if not self.is_gh_cli_authenticated():
self._log_info("GitHub CLI not authenticated. Running 'gh auth login'...")
try:
subprocess.run(["gh", "auth", "login"], check=True)
except subprocess.SubprocessError:
self._log_error("Failed to authenticate with GitHub CLI")
return None
# Read local normalizer content
normalizer_content = self._get_user_normalizer_content()
if not normalizer_content:
return None
# Get current username
try:
result = subprocess.run(
["gh", "api", "user", "--jq", ".login"],
capture_output=True,
text=True,
timeout=30,
)
if result.returncode != 0:
self._log_error("Failed to get GitHub username")
return None
username = result.stdout.strip()
except subprocess.SubprocessError as e:
self._log_error(f"Failed to get username: {e}")
return None
# Fork the repository if not already forked
self._log_info("Ensuring fork exists...")
try:
subprocess.run(
["gh", "repo", "fork", f"{REPO_OWNER}/{REPO_NAME}", "--clone=false"],
capture_output=True,
text=True,
timeout=60,
)
except subprocess.SubprocessError:
pass # Fork may already exist, continue
# Create branch name
branch_name = f"normalizer/{contribution.provider_name}-{int(time.time())}"
# Create the PR using gh pr create with the file content
title = self._format_pr_title(contribution)
body = self._format_pr_body(contribution)
# We need to create the branch and commit via API since gh doesn't support this directly
# Fall back to API method for the actual PR creation
self._log_info("Creating pull request...")
# Get token from gh CLI
try:
result = subprocess.run(
["gh", "auth", "token"],
capture_output=True,
text=True,
timeout=10,
)
if result.returncode != 0:
self._log_error("Failed to get auth token from gh CLI")
return None
token = result.stdout.strip()
except subprocess.SubprocessError as e:
self._log_error(f"Failed to get token: {e}")
return None
return self._create_pr_via_api(contribution, token, normalizer_content)
def _submit_pr_via_api(self, contribution: GitHubContribution) -> Optional[str]:
"""Submit PR using browser-based OAuth and GitHub API."""
# Authenticate
auth = self._load_cached_auth()
if not auth or not self._validate_token(auth.access_token):
auth = self._perform_device_flow_auth()
if not auth:
self._log_error("Failed to authenticate with GitHub")
return None
self._save_auth(auth)
# Read local normalizer content
normalizer_content = self._get_user_normalizer_content()
if not normalizer_content:
return None
return self._create_pr_via_api(contribution, auth.access_token, normalizer_content)
def _create_pr_via_api(
self,
contribution: GitHubContribution,
token: str,
normalizer_content: dict,
) -> Optional[str]:
"""Create a Pull Request via GitHub API."""
headers = {"Authorization": f"Bearer {token}"}
# Step 1: Get current user
self._log_info("Getting user info...")
try:
response = self._http_client.get(
f"{GITHUB_API_BASE}/user", headers=headers
)
response.raise_for_status()
user = GitHubUser.model_validate(response.json())
except httpx.HTTPError as e:
self._log_error(f"Failed to get user info: {e}")
return None
# Step 2: Fork the repository (if not already forked)
self._log_info("Ensuring fork exists...")
fork_exists = False
fork_full_name = ""
try:
# Check if fork exists by listing user's forks of the repo
response = self._http_client.get(
f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/forks",
headers=headers,
)
response.raise_for_status()
forks = response.json()
# Find user's fork
user_fork = next(
(f for f in forks if f["owner"]["login"].lower() == user.login.lower()),
None
)
if user_fork:
fork_full_name = user_fork["full_name"]
fork_exists = True
else:
# Create fork
self._log_info("Creating fork...")
response = self._http_client.post(
f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/forks",
headers=headers,
)
response.raise_for_status()
fork_data = response.json()
fork_full_name = fork_data["full_name"]
# Wait for fork to be ready
time.sleep(5)
except httpx.HTTPError as e:
self._log_error(f"Failed to create/check fork: {e}")
return None
self._log_info(f"Using fork: {fork_full_name}")
# Step 3: Get the default branch SHA from upstream
self._log_info("Getting upstream branch info...")
try:
response = self._http_client.get(
f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/git/ref/heads/master",
headers=headers,
)
response.raise_for_status()
base_sha = response.json()["object"]["sha"]
except httpx.HTTPError as e:
self._log_error(f"Failed to get base branch: {e}")
return None
# Step 3.5: Sync fork with upstream if it already existed
if fork_exists:
self._log_info("Syncing fork with upstream...")
try:
response = self._http_client.post(
f"{GITHUB_API_BASE}/repos/{fork_full_name}/merge-upstream",
headers=headers,
json={"branch": "master"},
)
# 409 means already up to date, which is fine
if response.status_code not in (200, 409):
response.raise_for_status()
except httpx.HTTPError as e:
self._log_info(f"Could not sync fork (continuing anyway): {e}")
# Step 4: Create a new branch in the fork
branch_name = f"normalizer/{contribution.provider_name}-{int(time.time())}"
self._log_info(f"Creating branch: {branch_name}")
try:
response = self._http_client.post(
f"{GITHUB_API_BASE}/repos/{fork_full_name}/git/refs",
headers=headers,
json={"ref": f"refs/heads/{branch_name}", "sha": base_sha},
)
response.raise_for_status()
except httpx.HTTPStatusError as e:
error_detail = ""
try:
error_detail = str(e.response.json())
except Exception:
pass
self._log_error(f"Failed to create branch: {e} {error_detail}")
return None
except httpx.HTTPError as e:
self._log_error(f"Failed to create branch: {e}")
return None
# Step 5: Get current normalizer.json from the fork's new branch to get SHA
self._log_info("Fetching current normalizer.json...")
try:
response = self._http_client.get(
f"{GITHUB_API_BASE}/repos/{fork_full_name}/contents/{NORMALIZER_FILE_PATH}",
headers=headers,
params={"ref": branch_name},
)
response.raise_for_status()
file_info = GitHubFileContent.model_validate(response.json())
file_sha = file_info.sha
# Decode existing content and merge with user's mappings
existing_content = json.loads(
base64.b64decode(file_info.content).decode("utf-8")
)
# Merge: user's normalizer takes precedence
merged_content = existing_content.copy()
for provider, mappings in normalizer_content.items():
if provider not in merged_content:
merged_content[provider] = {}
merged_content[provider].update(mappings)
except httpx.HTTPError as e:
self._log_error(f"Failed to get normalizer.json: {e}")
return None
# Step 6: Update the file in the fork
self._log_info("Committing changes...")
new_content = json.dumps(merged_content, indent=2, ensure_ascii=False)
encoded_content = base64.b64encode(new_content.encode("utf-8")).decode("utf-8")
commit_message = (
f"feat(normalizer): add mapping for '{contribution.provider_title}'\n\n"
f"Provider: {contribution.provider_name}\n"
f"Maps: {contribution.provider_title} -> {contribution.media_api_title}"
)
try:
response = self._http_client.put(
f"{GITHUB_API_BASE}/repos/{fork_full_name}/contents/{NORMALIZER_FILE_PATH}",
headers=headers,
json={
"message": commit_message,
"content": encoded_content,
"sha": file_sha,
"branch": branch_name,
},
)
response.raise_for_status()
except httpx.HTTPStatusError as e:
error_detail = ""
try:
error_detail = str(e.response.json())
except Exception:
pass
self._log_error(f"Failed to commit changes: {e} {error_detail}")
return None
except httpx.HTTPError as e:
self._log_error(f"Failed to commit changes: {e}")
return None
# Step 7: Create the Pull Request
self._log_info("Creating pull request...")
title = self._format_pr_title(contribution)
body = self._format_pr_body(contribution)
try:
response = self._http_client.post(
f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/pulls",
headers=headers,
json={
"title": title,
"body": body,
"head": f"{user.login}:{branch_name}",
"base": "master",
},
)
response.raise_for_status()
pr = GitHubPRResponse.model_validate(response.json())
self._log_success(f"Created PR #{pr.number}: {pr.html_url}")
return pr.html_url
except httpx.HTTPStatusError as e:
error_detail = ""
try:
error_json = e.response.json()
error_detail = error_json.get("message", "")
# GitHub includes detailed errors in 'errors' array
if "errors" in error_json:
errors = error_json["errors"]
error_detail += " | " + str(errors)
except Exception:
pass
self._log_error(f"Failed to create PR: {e} {error_detail}")
return None
except httpx.HTTPError as e:
self._log_error(f"Failed to create PR: {e}")
return None
def _format_pr_title(self, contribution: GitHubContribution) -> str:
"""Format the PR title."""
return (
f"feat(normalizer): add mapping for '{contribution.provider_title}' "
f"({contribution.provider_name})"
)
def _format_pr_body(self, contribution: GitHubContribution) -> str:
"""Format the PR body."""
return f"""## Normalizer Mapping Contribution
This PR adds a new title mapping to the normalizer.
### Mapping Details
| Field | Value |
|-------|-------|
| **Provider** | `{contribution.provider_name}` |
| **Provider Title** | `{contribution.provider_title}` |
| **Media API Title** | `{contribution.media_api_title}` |
| **AniList ID** | {contribution.anilist_id or 'N/A'} |
### Changes
This PR updates `{NORMALIZER_FILE_PATH}` with the following mapping:
```json
"{contribution.provider_title}": "{contribution.media_api_title.lower()}"
```
---
*Submitted automatically via {CLI_NAME} CLI*
"""
def _perform_device_flow_auth(self) -> Optional[GitHubAuth]:
"""
Perform GitHub Device Flow authentication.
This is more reliable for CLI apps than the web redirect flow.
"""
self._log_info("Starting GitHub authentication...")
# Request device code
try:
response = self._http_client.post(
"https://github.com/login/device/code",
data={
"client_id": GITHUB_CLIENT_ID,
"scope": GITHUB_OAUTH_SCOPES,
},
headers={"Accept": "application/json"},
)
response.raise_for_status()
data = response.json()
except httpx.HTTPError as e:
self._log_error(f"Failed to start authentication: {e}")
return None
device_code = data.get("device_code")
user_code = data.get("user_code")
verification_uri = data.get("verification_uri")
expires_in = data.get("expires_in", 900)
interval = data.get("interval", 5)
if not all([device_code, user_code, verification_uri]):
self._log_error("Invalid response from GitHub")
return None
# Show user the code and open browser
self._log_info(f"\n🔑 Your code: {user_code}")
self._log_info(f"Opening {verification_uri} in your browser...")
self._log_info("Enter the code above to authenticate.\n")
webbrowser.open(verification_uri)
# Poll for token
import time
start_time = time.time()
while time.time() - start_time < expires_in:
time.sleep(interval)
try:
token_response = self._http_client.post(
"https://github.com/login/oauth/access_token",
data={
"client_id": GITHUB_CLIENT_ID,
"device_code": device_code,
"grant_type": "urn:ietf:params:oauth:grant-type:device_code",
},
headers={"Accept": "application/json"},
)
token_data = token_response.json()
if "access_token" in token_data:
self._log_success("Authentication successful!")
return GitHubAuth(
access_token=token_data["access_token"],
token_type=token_data.get("token_type", "bearer"),
scope=token_data.get("scope", ""),
)
error = token_data.get("error")
if error == "authorization_pending":
continue
elif error == "slow_down":
interval += 5
elif error == "expired_token":
self._log_error("Authentication expired. Please try again.")
return None
elif error == "access_denied":
self._log_error("Authentication denied by user.")
return None
else:
self._log_error(f"Authentication error: {error}")
return None
except httpx.HTTPError:
continue
self._log_error("Authentication timed out. Please try again.")
return None
def _validate_token(self, token: str) -> bool:
"""Check if a GitHub token is still valid."""
try:
response = self._http_client.get(
f"{GITHUB_API_BASE}/user",
headers={"Authorization": f"Bearer {token}"},
)
return response.status_code == 200
except httpx.HTTPError:
return False
def _load_cached_auth(self) -> Optional[GitHubAuth]:
    """Load cached GitHub authentication.

    Returns:
        The cached GitHubAuth, or None when no cache file exists or the
        cache is unreadable/corrupt (a bad cache just means the user must
        re-authenticate).
    """
    if not AUTH_FILE.exists():
        return None
    try:
        with AUTH_FILE.open("r", encoding="utf-8") as f:
            data = json.load(f)
        return GitHubAuth.model_validate(data)
    except (OSError, json.JSONDecodeError, ValueError):
        # OSError: the file vanished or became unreadable between the
        # exists() check and open() (TOCTOU race) — previously uncaught.
        # JSONDecodeError/ValueError: corrupt JSON or schema mismatch
        # (pydantic's ValidationError is a ValueError subclass).
        return None
def _save_auth(self, auth: GitHubAuth) -> None:
    """Persist GitHub authentication to the cache file atomically.

    Ensures the data directory exists, then serializes under the instance
    lock so concurrent writers cannot interleave.
    """
    APP_DATA_DIR.mkdir(parents=True, exist_ok=True)
    payload = auth.model_dump()
    with self._lock, AtomicWriter(AUTH_FILE) as f:
        json.dump(payload, f, indent=2)
def clear_cached_auth(self) -> None:
    """Clear cached GitHub authentication.

    No-op (and no log entry) when no cache file exists, matching the
    original exists()-guarded behavior.
    """
    try:
        # Unlink directly instead of exists()+unlink(): the original pair
        # raced with another process deleting the file in between (TOCTOU).
        AUTH_FILE.unlink()
    except FileNotFoundError:
        return
    logger.info("Cleared GitHub authentication cache")
@staticmethod
def clear_cached_auth_static() -> None:
    """Clear cached GitHub authentication (static method for CLI use).

    No-op (and no log entry) when no cache file exists.
    """
    try:
        # Direct unlink avoids the exists()/unlink() TOCTOU race present
        # in the original guard.
        AUTH_FILE.unlink()
    except FileNotFoundError:
        return
    logger.info("Cleared GitHub authentication cache")
def _log_info(self, message: str) -> None:
"""Log info message."""
if self.feedback:
self.feedback.info(message)
else:
logger.info(message)
def _log_success(self, message: str) -> None:
"""Log success message."""
if self.feedback:
self.feedback.success(message)
else:
logger.info(message)
def _log_error(self, message: str) -> None:
"""Log error message."""
if self.feedback:
self.feedback.error(message)
else:
logger.error(message)

View File

@@ -9,6 +9,8 @@ import importlib.util
import click
import httpx
from viu_media.core.utils import detect
logger = logging.getLogger(__name__)
@@ -138,6 +140,7 @@ def render(url: str, capture: bool = False, size: str = "30x30") -> Optional[str
[icat_executable, "--align", "left", url],
capture_output=capture,
text=capture,
env=detect.get_clean_env(),
)
if process.returncode == 0:
return process.stdout if capture else None

View File

@@ -21,7 +21,7 @@ from rich.progress import (
)
from rich.prompt import Confirm
from ..utils.file import sanitize_filename
from ..utils.detect import get_clean_env
from ..exceptions import ViuError
from ..patterns import TORRENT_REGEX
from ..utils.networking import get_remote_filename
@@ -372,6 +372,7 @@ class DefaultDownloader(BaseDownloader):
capture_output=params.silent, # Only suppress ffmpeg output if silent
text=True,
check=True,
env=get_clean_env(),
)
final_output_path = video_path.parent / merged_filename

View File

@@ -11,7 +11,7 @@ from rich.prompt import Confirm
import yt_dlp
from yt_dlp.utils import sanitize_filename
from ..utils.detect import get_clean_env
from ..exceptions import ViuError
from ..patterns import TORRENT_REGEX
from ..utils.networking import get_remote_filename
@@ -224,7 +224,7 @@ class YtDLPDownloader(BaseDownloader):
# Run the ffmpeg command
try:
subprocess.run(args)
subprocess.run(args, env=get_clean_env())
final_output_path = video_path.parent / merged_filename
if final_output_path.exists():

View File

@@ -83,3 +83,21 @@ def get_python_executable() -> str:
return "python"
else:
return sys.executable
def get_clean_env() -> dict[str, str]:
    """Return a copy of the environment with LD_LIBRARY_PATH fixed for
    system subprocesses when running as a PyInstaller frozen application.

    PyInstaller rewrites LD_LIBRARY_PATH to point at the bundle's own
    libraries and stashes the user's original value in
    LD_LIBRARY_PATH_ORIG; restoring (or removing) it prevents system
    binaries (like mpv, ffmpeg) from loading incompatible libraries from
    the PyInstaller bundle.
    """
    env = dict(os.environ)
    if is_frozen():
        if "LD_LIBRARY_PATH_ORIG" in env:
            # Restore the pre-freeze value saved by PyInstaller.
            env["LD_LIBRARY_PATH"] = env["LD_LIBRARY_PATH_ORIG"]
        else:
            # No original value existed, so the subprocess should not
            # inherit the bundle-scoped one either.
            env.pop("LD_LIBRARY_PATH", None)
    return env

View File

@@ -186,19 +186,19 @@ def shell_safe(text: Optional[str]) -> str:
"""
Escapes a string for safe inclusion in a Python script string literal.
This is used when generating Python cache scripts with embedded text content.
For Python triple-quoted strings, we need to:
For Python string literals, we need to:
- Escape backslashes first (so existing backslashes don't interfere)
- Escape triple quotes (to not break the string literal)
- Remove or replace problematic characters
- Escape double quotes (to not break double-quoted string literals)
- Escape single quotes (to not break single-quoted string literals)
"""
if not text:
return ""
# Escape backslashes first
result = text.replace("\\", "\\\\")
# Escape triple quotes (both types) for Python triple-quoted string literals
result = result.replace('"""', r'\"\"\"')
result = result.replace("'''", r"\'\'\'")
# Escape both quote types for safe inclusion in any string literal
result = result.replace('"', r"\"")
result = result.replace("'", r"\'")
return result

View File

@@ -50,10 +50,15 @@ def _load_normalizer_data() -> Dict[str, Dict[str, str]]:
def update_user_normalizer_json(
provider_title: str, media_api_title: str, provider_name: str
):
import time
from .file import AtomicWriter
logger.info(f"Updating user normalizer JSON at: {USER_NORMALIZER_JSON}")
print(
"UPDATING USER NORMALIZER JSON. PLEASE CONTRIBUTE TO THE PROJECT BY OPENING A PR ON GITHUB TO MERGE YOUR NORMALIZER JSON TO MAIN. MAEMOTTE KANSHA SHIMASU :)"
)
print(f"NORMALIZER JSON PATH IS: {USER_NORMALIZER_JSON}")
time.sleep(5)
if not _normalizer_cache:
raise RuntimeError(
"Fatal _normalizer_cache missing this should not be the case : (. Please report"

View File

@@ -97,7 +97,7 @@ class MpvPlayer(BasePlayer):
"is.xyz.mpv/.MPVActivity",
]
subprocess.run(args)
subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(params.episode)
@@ -146,6 +146,7 @@ class MpvPlayer(BasePlayer):
text=True,
encoding="utf-8",
check=False,
env=detect.get_clean_env(),
)
if proc.stdout:
for line in reversed(proc.stdout.split("\n")):
@@ -185,7 +186,7 @@ class MpvPlayer(BasePlayer):
logger.info(f"Starting MPV with IPC socket: {socket_path}")
process = subprocess.Popen(pre_args + mpv_args)
process = subprocess.Popen(pre_args + mpv_args,env=detect.get_clean_env())
return process
@@ -210,7 +211,7 @@ class MpvPlayer(BasePlayer):
args.append("--player-args")
args.extend(mpv_args)
subprocess.run(args)
subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(params.episode)
def _stream_on_desktop_with_syncplay(self, params: PlayerParams) -> PlayerResult:
@@ -232,7 +233,7 @@ class MpvPlayer(BasePlayer):
if mpv_args := self._create_mpv_cli_options(params):
args.append("--")
args.extend(mpv_args)
subprocess.run(args)
subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(params.episode)

View File

@@ -103,7 +103,7 @@ class VlcPlayer(BasePlayer):
params.title,
]
subprocess.run(args)
subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(episode=params.episode)
@@ -134,7 +134,7 @@ class VlcPlayer(BasePlayer):
if self.config.args:
args.extend(self.config.args.split(","))
subprocess.run(args, encoding="utf-8")
subprocess.run(args, encoding="utf-8",env=detect.get_clean_env())
return PlayerResult(episode=params.episode)
def _stream_on_desktop_with_webtorrent_cli(
@@ -159,7 +159,7 @@ class VlcPlayer(BasePlayer):
args.append("--player-args")
args.extend(self.config.args.split(","))
subprocess.run(args)
subprocess.run(args,env=detect.get_clean_env())
return PlayerResult(episode=params.episode)

View File

@@ -5,6 +5,8 @@ import subprocess
from rich.prompt import Prompt
from viu_media.core.utils import detect
from ....core.config import FzfConfig
from ....core.exceptions import ViuError
from ..base import BaseSelector
@@ -49,6 +51,7 @@ class FzfSelector(BaseSelector):
stdout=subprocess.PIPE,
text=True,
encoding="utf-8",
env=detect.get_clean_env(),
)
if result.returncode != 0:
return None
@@ -76,6 +79,7 @@ class FzfSelector(BaseSelector):
stdout=subprocess.PIPE,
text=True,
encoding="utf-8",
env=detect.get_clean_env(),
)
if result.returncode != 0:
return []
@@ -117,7 +121,16 @@ class FzfSelector(BaseSelector):
lines = result.stdout.strip().splitlines()
return lines[-1] if lines else (default or "")
def search(self, prompt, search_command, *, preview=None, header=None, initial_query=None, initial_results=None):
def search(
self,
prompt,
search_command,
*,
preview=None,
header=None,
initial_query=None,
initial_results=None,
):
"""Enhanced search using fzf's --reload flag for dynamic search."""
# Build the header with optional custom header line
display_header = self.header
@@ -156,6 +169,7 @@ class FzfSelector(BaseSelector):
stdout=subprocess.PIPE,
text=True,
encoding="utf-8",
env=detect.get_clean_env(),
)
if result.returncode != 0:
return None

View File

@@ -43,6 +43,7 @@ class RofiSelector(BaseSelector):
input=rofi_input,
stdout=subprocess.PIPE,
text=True,
env=detect.get_clean_env()
)
if result.returncode == 0:
@@ -106,6 +107,7 @@ class RofiSelector(BaseSelector):
input=rofi_input,
stdout=subprocess.PIPE,
text=True,
env=detect.get_clean_env()
)
if result.returncode == 0: