Compare commits

...

148 Commits

Author SHA1 Message Date
benex
3ac4e1ac71 chore: bump version (v2.7.4) 2024-11-16 22:10:15 +03:00
benex
d62f580d7a docs: update readme 2024-11-16 22:09:41 +03:00
benex
02e35b66cb fix: implement another way to get timestamps from mpv due to issues on nixos 2024-11-16 22:09:41 +03:00
benex
7b11e0a301 feat: add rofi-theme-preview option 2024-11-16 22:09:41 +03:00
benex
aa8b91aed3 chore: remove unnecessary yt-dlp extras 2024-11-16 22:09:41 +03:00
benex
fe0fa97576 fix(player): workaround weird problem with mpv in nixos 2024-11-16 22:09:41 +03:00
Benedict Xavier
92059cd5ed Update README.md 2024-11-16 12:01:55 +03:00
benex
ed3064e3b1 feat(anime_provider): add yugen provider 2024-11-13 20:53:18 +03:00
Benex254
441d1e5e6c chore: bump version (v2.7.3) 2024-11-11 12:57:50 +03:00
Benex254
653b2cf4eb chore: add pyinstaller as dev dep 2024-11-11 12:57:29 +03:00
Benex254
8d4b71e0c8 feat: add entry point for pyinstaller executable 2024-11-11 12:57:29 +03:00
Benex254
29cc6cad09 build: pyinstaller spec 2024-11-11 12:57:28 +03:00
Benex254
8119eef263 docs(readme): update table of contents 2024-11-11 12:57:28 +03:00
Benex254
912c8674cf refactor(player): remove unnecessary print statement 2024-11-11 12:57:28 +03:00
Benex254
6b3ca236dd fix: auto next episode 2024-11-11 12:57:28 +03:00
benex
f1c352d4ff chore: bump version (v2.7.2) 2024-11-10 12:29:46 +03:00
benex
714533d845 refactor(cli): update config options and set fastanime config environs 2024-11-10 12:06:44 +03:00
benex
56dd25df8d refactor(cli): update config options
This commit updates the configuration options in the CLI module: the "image_previews" option is now platform-dependent, set to "True" on non-Windows platforms and "False" on Windows, and the "normalize_titles" option is set to "True".
2024-11-10 12:06:17 +03:00
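A minimal sketch of what that platform-dependent default could look like; the dictionary and option names below are illustrative assumptions, not the actual contents of the config module:

import sys

# Hypothetical sketch of the platform-dependent defaults described above.
IS_WINDOWS = sys.platform == "win32"

defaults = {
    "image_previews": not IS_WINDOWS,  # True on non-Windows platforms, False on Windows
    "normalize_titles": True,
}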
benex
8248dc53df fix: text preview not showing on windows 2024-11-10 12:05:32 +03:00
Benex254
1a8a187de6 docs: update readme 2024-11-09 00:27:49 +03:00
Benex254
bc86be8c93 chore: bump version 2024-11-09 00:11:20 +03:00
Benex254
75026d4fc5 feat(api): add watch endpoint 2024-11-09 00:11:20 +03:00
Benedict Xavier
f8a5ccb8d2 Update README.md 2024-11-08 22:33:16 +03:00
Benex254
719d1bd187 chore: update deps 2024-11-08 16:26:43 +03:00
Benex254
0dd83463c6 docs: update readme 2024-10-20 11:28:39 +03:00
Benex254
1ee50e8a55 chore: bump version (v2.6.9) 2024-10-20 10:06:02 +03:00
Benex254
ae95c5ea3d docs: update readme 2024-10-20 10:04:58 +03:00
Benex254
d64ad5e11d fix: move quality to stream section in config 2024-10-20 10:03:49 +03:00
Benex254
d1a47c6d44 chore: bump version (v2.6.8) 2024-10-18 22:59:18 +03:00
Benex254
51a834a62f chore: update deps 2024-10-18 22:53:39 +03:00
Benex254
3a030bf6f7 feat: add ability to update fastanime uv installations 2024-10-18 22:53:26 +03:00
Benex254
eb6a6fc82c chore: use uv in fa script 2024-10-18 22:46:44 +03:00
Benex254
437ccd94e4 ci: update to use uv 2024-10-18 22:37:14 +03:00
Benex254
d65868cc30 chore: update workflows to work with uv 2024-10-18 21:50:20 +03:00
Benex254
8678aa6544 Merge branch 'master' into uv 2024-10-18 20:26:55 +03:00
Benex254
00e5141152 chore: bump version (v2.6.7) 2024-10-12 01:08:14 +03:00
Benex254
90e757dfe1 feat: init switch to uv 2024-10-11 11:57:29 +03:00
Benex254
8b471b08e8 chore: init switch to uv 2024-10-11 10:52:18 +03:00
Benex254
158bc5710f docs: update readme 2024-10-11 10:49:53 +03:00
Benex254
a0b946a13d feat: add recent menu 2024-10-11 10:22:23 +03:00
Benex254
b547b75f03 feat: add environment variable that force updating of the cache db 2024-10-11 09:34:40 +03:00
Benex254
58c7427a47 feat(cli:serve): use the full executable path to python 2024-10-06 01:25:22 +03:00
Benex254
6220b9c55d chore: bump version (v2.6.6) 2024-10-06 01:15:15 +03:00
Benex254
6b9b5c131c fix(cli): use str instead of ints in serve 2024-10-06 01:15:05 +03:00
Benex254
212f2af39c chore: bump version (v2.6.5) 2024-10-06 01:05:28 +03:00
Benex254
f7b2b4e0c9 feat: add serve command 2024-10-06 01:04:20 +03:00
Benedict Xavier
a747529279 Update README.md 2024-10-05 19:37:19 +03:00
Benex254
1dfdcc27ce chore: bump version (v2.6.4) 2024-10-05 12:33:32 +03:00
Benex254
3c03289453 fix: add git push to make_release 2024-10-05 12:33:23 +03:00
Benex254
06fd446a72 chore: bump version (v2.6.3) 2024-10-05 12:29:29 +03:00
Benex254
172d912d8b chore(release): improve the make release script to also stage changes after bumping version 2024-10-05 12:29:15 +03:00
Benex254
2396018607 feat: make script to automate releases 2024-10-05 12:19:03 +03:00
Benex254
a9be9779c5 feat(fa): improve fa script 2024-10-05 12:14:45 +03:00
Benex254
2f76b26a99 feat(fzf): add some bindings 2024-10-05 11:54:22 +03:00
Benex254
2fe5edf810 feat(cli): make all threads daemon threads 2024-10-05 11:47:52 +03:00
Benex254
d67ee6a779 feat(downloader): add progress hook option to be passed to yt-dlp 2024-10-05 11:47:30 +03:00
Benex254
e06ec5dbd4 feat(cli): make the image previews optional 2024-10-05 11:31:13 +03:00
Benex254
c1b24ba2aa feat(cli): save images with .png extension to enable easier viewing by external apps 2024-10-05 11:05:07 +03:00
Benex254
59e9cf9fd0 feat: improve previews 2024-10-05 10:12:14 +03:00
Benex254
58761f5b96 chore: bump version 2024-10-04 19:44:54 +03:00
Benex254
ac959da229 feat: re-enable bg downloading function 2024-10-04 19:42:53 +03:00
benex
bacc8c48ec fix: image previews not showing up on windows 2024-10-04 11:03:54 +03:00
Benex254
905a159428 chore: add a mapping for re:zero s3 in normalizer 2024-10-03 15:09:51 +03:00
Benex254
20f734cab2 feat: also compare synonyms 2024-10-03 15:09:14 +03:00
Benex254
7c2c644aef chore: bump version 2024-10-03 14:18:22 +03:00
Benex254
0efc92081a feat: use .get in normalizer 2024-10-03 14:18:05 +03:00
Benex254
fafeee2367 chore: bump version 2024-10-03 12:48:41 +03:00
Benex254
e03063cd76 feat: let configuration of providers be managed by AnimeProvider wrapper 2024-10-03 12:34:34 +03:00
Benex254
93b38b055f docs: update readme 2024-10-03 12:33:52 +03:00
Benex254
045635fb55 feat: update config.py 2024-10-03 12:33:40 +03:00
Benex254
de7f773e9e feat: make the threads non-daemon 2024-10-03 11:47:27 +03:00
Benex254
ef6a465bd2 fix: typing issue 2024-10-02 22:19:02 +03:00
Benex254
0c623af8a4 chore: update poetry lockfile 2024-10-02 21:57:17 +03:00
Benex254
0589f83998 chore: bump version 2024-10-02 21:56:50 +03:00
Benex254
e17608afd5 feat: add provider store 2024-10-02 21:33:41 +03:00
Benex254
b915654685 feat: make the requests cache allow multiple connections by switching to wal 2024-10-02 16:49:36 +03:00
Benex254
2ce9bf6c47 feat: use a more meaningful name for the request caching files 2024-09-30 13:20:14 +03:00
Benex254
3c22232432 feat: add option to delete the db file 2024-09-30 13:19:33 +03:00
Benex254
3474e9520c tests: pass custom env 2024-09-29 22:09:57 +03:00
Benex254
e9bacf4f9c fix: extra atexit callbacks 2024-09-29 21:31:58 +03:00
Benex254
ef422ed6fd fix: inability to reload provider dynamically when using cached sessions 2024-09-29 21:19:15 +03:00
Benex254
d0f5366908 feat: allow access of fastanime config from environment variables 2024-09-29 21:00:41 +03:00
Benex254
3557205feb chore: cleanup requests cacher 2024-09-29 20:41:55 +03:00
Benex254
ba4c41d888 feat: implement usage of the requests cacher 2024-09-29 20:40:14 +03:00
Benex254
1427a3193c feat: implement requests cacher for fastanime 2024-09-29 20:39:27 +03:00
Benex254
b5cee20e56 fix: episodes range generated by mini anilist 2024-09-28 10:31:33 +03:00
Benex254
be7f464073 chore: update deps 2024-09-24 15:56:09 +03:00
Benex254
c7f8f168f5 docs: update readme 2024-09-24 15:55:59 +03:00
Benex254
ba59fbdcb0 chore: bump version 2024-09-24 15:55:47 +03:00
Benex254
9f54fa4998 feat: handle absence of webtorrent-cli 2024-09-24 15:55:24 +03:00
Benex254
3c9688b32c feat: add nyaa as provider 2024-09-24 15:45:34 +03:00
Benex254
1f046447bb chore: update all instances of aniwatch to hianime 2024-09-24 10:02:49 +03:00
Benex254
87e3a275bb chore: bump version 2024-09-23 11:29:22 +03:00
Benex254
037b5c36a4 fix: normalize unknown_video to mp4 2024-09-23 11:04:31 +03:00
Benex254
7d8b60fb14 fix: change aniwatch to hianime in data.py 2024-09-23 11:04:06 +03:00
Benex254
0ad16fee53 fix: typing issue in player 2024-09-22 22:34:07 +03:00
Benex254
249243aeb4 chore: use --all-extras flag in poetry install 2024-09-22 22:31:35 +03:00
Benex254
c208dc3579 chore: bump version 2024-09-22 22:27:04 +03:00
Benex254
ea93f2ba23 chore: make some dependencies optional 2024-09-22 22:26:37 +03:00
Benex254
d910a0bb6a chore: update dependencies 2024-09-22 22:25:52 +03:00
Benex254
550fcfeddc feat: make plyer an optional dependency 2024-09-22 22:13:12 +03:00
Benex254
c6910e5a1c feat: improve prompt text 2024-09-22 22:13:12 +03:00
Benex254
8555edb521 feat: dont pass obj to providers 2024-09-22 22:13:12 +03:00
Benex254
139193ce29 chore: remove aniwave as a provider; you shall forever live in our hearts 2024-09-22 22:13:12 +03:00
Benex254
1a87375ccd feat: add debug mode for providers 2024-09-22 22:13:12 +03:00
BeneX254
83cbef40f6 Update README.md 2024-09-21 18:06:09 +03:00
Benex254
85b4fc75a1 docs: update the readme 2024-09-20 17:58:29 +03:00
Benex254
f2e2da378f feat: improved media list tracking 2024-09-20 17:58:06 +03:00
Benex254
7c34bc9120 feat: restrict some genres in mini_anilist 2024-09-19 19:12:10 +03:00
Benex254
6f153f2acb feat: improve help messages for all cli commands 2024-09-19 19:11:15 +03:00
Benex254
8171083978 chore: update deps 2024-09-18 20:10:35 +03:00
Benex254
db5b9a59b4 fix: fastanime update not working with pip installs 2024-09-18 20:09:34 +03:00
Benex254
6fa656ba11 chore: bump version 2024-09-18 19:59:54 +03:00
Benex254
de0682c1bb fix: invalid cmd 2024-09-18 19:59:40 +03:00
Benex254
a6a32d8de4 chore: bump version 2024-09-18 19:44:10 +03:00
Benex254
bb14b269de feat: add --player option 2024-09-18 19:42:47 +03:00
Benex254
14331d8bc2 feat: workaround image previews on android 2024-09-18 19:42:47 +03:00
BeneX254
1729464844 Update README.md 2024-09-17 21:44:35 +03:00
benex
5fb9747285 fix: default ui not persisting when using config --update 2024-09-15 14:59:01 +03:00
benex
394228d391 chore: bump version 2024-09-15 14:42:54 +03:00
benex
5d3c0cc6ec fix: unicode error on windows when writing the config file 2024-09-15 14:38:50 +03:00
BeneX254
3ef7c5248c Update README.md 2024-09-15 13:46:50 +03:00
Benex254
8bebc401fd fix: rename use_mpv_mod to use_python_mpv in config 2024-09-15 13:40:20 +03:00
Benex254
215b28457b docs: update readme 2024-09-15 13:39:58 +03:00
Benex254
dfd2bfc857 docs: update readme 2024-09-15 13:29:32 +03:00
Benex254
f991292e94 chore: bump version 2024-09-15 13:29:20 +03:00
Benex254
d837457f80 feat: improve config file docs 2024-09-15 13:22:14 +03:00
Benex254
343bdba31b feat: add the --update option to the config command which causes all config options passed to fastanime to be persisted to your config file 2024-09-15 13:22:14 +03:00
benex
1c1c2457e8 feat: improve the preview with a workaround 2024-09-15 10:05:20 +03:00
benex
b083bfb074 fix: previews not working on windows 2024-09-15 09:36:15 +03:00
benex
ea1abcb2ae feat: dont use roaming folder for the config file 2024-09-15 08:54:02 +03:00
benex
001030ba2b fix: unicode error when running fzf on windows 2024-09-15 08:53:22 +03:00
BeneX254
eda8984781 Update README.md 2024-09-13 21:57:15 +03:00
Benex254
d8dc6f0a34 chore: bump version 2024-09-10 19:15:43 +03:00
Benex254
2d711a7a7f docs: update readme 2024-09-10 19:15:25 +03:00
Benex254
30ca25626a feat: add --titles option to downloads 2024-09-10 19:11:52 +03:00
Benex254
b1f5a558c8 feat: improve animepahe utils 2024-09-10 19:11:19 +03:00
Benex254
8062c8dc83 feat: stat command ?? 2024-08-23 20:51:53 +03:00
Benex254
cb7eed46bc docs: update readme 2024-08-23 17:44:23 +03:00
Benex254
4626eca89e feat: improvements on media list integration 2024-08-23 17:44:10 +03:00
Benex254
0d549c5915 docs: update readme 2024-08-23 17:18:49 +03:00
Benex254
33c518ed4c chore: cleanup codebase 2024-08-23 17:18:36 +03:00
Benex254
8e155dcc74 chore: bump version 2024-08-23 16:05:45 +03:00
Benex254
7743b0423e chore: clean up codebase 2024-08-23 16:05:26 +03:00
Benex254
6346ea7343 docs: update readme 2024-08-23 11:40:08 +03:00
Benex254
32de01047f chore: bump version 2024-08-23 11:39:57 +03:00
Benex254
35c7f81afb fix: no chapter title 2024-08-23 11:39:45 +03:00
Benex254
2dbbb1c4df feat: add experimental manga support 2024-08-23 11:19:25 +03:00
99 changed files with 6973 additions and 3772 deletions

View File

@@ -8,31 +8,24 @@ jobs:
debug_build:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Install Python
- name: "Set up Python"
uses: actions/setup-python@v5
- name: Install poetry
uses: abatilo/actions-poetry@v2
- name: Setup a local virtual environment (if no poetry.toml file)
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v3
name: Define a cache for the virtual environment based on the dependencies lock file
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install
- name: build app
run: poetry build
enable-cache: true
- name: Build fastanime
run: uv build
- name: Archive production artifacts
uses: actions/upload-artifact@v4
with:
name: fastanime_debug_build
path: |
dist
!dist/*.whl
# - name: Run the automated tests (for example)
# run: poetry run pytest -v

View File

@@ -27,11 +27,13 @@ jobs:
with:
python-version: "3.10"
- name: Build release distributions
run: |
# NOTE: put your own distribution build steps here.
python -m pip install build
python -m build
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true
- name: Build fastanime
run: uv build
- name: Upload distributions
uses: actions/upload-artifact@v4

View File

@@ -6,37 +6,35 @@ on:
pull_request:
branches:
- master
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.10", "3.11"] # List the Python versions you want to test
steps:
- uses: actions/checkout@v4
- name: Install Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install poetry
uses: abatilo/actions-poetry@v2
- name: Setup a local virtual environment (if no poetry.toml file)
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v3
name: Define a cache for the virtual environment based on the dependencies lock file
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install
- name: run linter, formatters and sort imports
run: |
poetry run black .
poetry run ruff check --output-format=github . --fix
poetry run isort . --profile black
- name: run type checking
run: poetry run pyright
- name: run tests
run: poetry run pytest
enable-cache: true
- name: Install the project
run: uv sync --all-extras --dev
- name: Run linter and formater
run: uv run ruff check --output-format=github
- name: Run type checking
run: uv run pyright
- name: Run tests
run: uv run pytest tests

View File

@@ -1,10 +1,7 @@
FROM ubuntu
RUN apt-get update
RUN apt-get -y install python3
RUN apt-get update
RUN apt-get -y install pipx
RUN pipx ensurepath
FROM python:3.12-slim-bookworm
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
COPY . /fastanime
ENV PATH=/root/.local/bin:$PATH
WORKDIR /fastanime
RUN pipx install .
RUN uv tool install .
CMD ["bash"]

867
README.md

File diff suppressed because it is too large

5
fa
View File

@@ -1,4 +1,3 @@
#!/usr/bin/env sh
# exec "${PYTHON:-python3}" -Werror -Xdev -m "$(dirname "$(realpath "$0")")/fastanime" "$@"
cd "$(dirname "$(realpath "$0")")" || exit 1
exec python -m fastanime "$@"
CLI_DIR="$(dirname "$(realpath "$0")")"
exec uv run --directory "$CLI_DIR/../" fastanime "$@"

View File

@@ -1,10 +1,8 @@
"""An abstraction over all providers offering added features with a simple and well typed api
[TODO:description]
"""
"""An abstraction over all providers offering added features with a simple and well typed api"""
import importlib
import logging
import os
from typing import TYPE_CHECKING
from .libs.anime_provider import anime_sources
@@ -12,7 +10,6 @@ from .libs.anime_provider import anime_sources
if TYPE_CHECKING:
from typing import Iterator
from .libs.anilist.types import AnilistBaseMediaDataSchema
from .libs.anime_provider.types import Anime, SearchResults, Server
logger = logging.getLogger(__name__)
@@ -33,25 +30,41 @@ class AnimeProvider:
PROVIDERS = list(anime_sources.keys())
provider = PROVIDERS[0]
def __init__(self, provider, dynamic=False, retries=0) -> None:
def __init__(
self,
provider,
cache_requests=os.environ.get("FASTANIME_CACHE_REQUESTS", "false"),
use_persistent_provider_store=os.environ.get(
"FASTANIME_USE_PERSISTENT_PROVIDER_STORE", "false"
),
dynamic=False,
retries=0,
) -> None:
self.provider = provider
self.dynamic = dynamic
self.retries = retries
self.cache_requests = cache_requests
self.use_persistent_provider_store = use_persistent_provider_store
self.lazyload_provider(self.provider)
def lazyload_provider(self, provider):
"""updates the current provider being used"""
try:
self.anime_provider.session.kill_connection_to_db()
except Exception:
pass
_, anime_provider_cls_name = anime_sources[provider].split(".", 1)
package = f"fastanime.libs.anime_provider.{provider}"
provider_api = importlib.import_module(".api", package)
anime_provider = getattr(provider_api, anime_provider_cls_name)
self.anime_provider = anime_provider()
self.anime_provider = anime_provider(
self.cache_requests, self.use_persistent_provider_store
)
def search_for_anime(
self,
user_query,
translation_type,
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
nsfw=True,
unknown=True,
) -> "SearchResults | None":
@@ -68,19 +81,15 @@ class AnimeProvider:
[TODO:return]
"""
anime_provider = self.anime_provider
try:
results = anime_provider.search_for_anime(
user_query, translation_type, nsfw, unknown
)
except Exception as e:
logger.error(e)
results = None
results = anime_provider.search_for_anime(
user_query, translation_type, nsfw, unknown
)
return results
def get_anime(
self,
anime_id: str,
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
) -> "Anime | None":
"""core abstraction over getting info of an anime from all providers
@@ -92,19 +101,15 @@ class AnimeProvider:
[TODO:return]
"""
anime_provider = self.anime_provider
try:
results = anime_provider.get_anime(anime_id)
except Exception as e:
logger.error(e)
results = None
results = anime_provider.get_anime(anime_id)
return results
def get_episode_streams(
self,
anime,
anime_id,
episode: str,
translation_type: str,
anilist_obj: "AnilistBaseMediaDataSchema|None" = None,
) -> "Iterator[Server] | None":
"""core abstractions for getting juicy streams from all providers
@@ -118,11 +123,7 @@ class AnimeProvider:
[TODO:return]
"""
anime_provider = self.anime_provider
try:
results = anime_provider.get_episode_streams(
anime, episode, translation_type
)
except Exception as e:
logger.error(e)
results = None
return results # pyright:ignore
results = anime_provider.get_episode_streams(
anime_id, episode, translation_type
)
return results
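A minimal usage sketch of the updated constructor, assuming the environment variables are set before the module is imported so that the string defaults shown above pick them up; the provider name and query are illustrative:

import os

# The defaults in __init__ are evaluated when the module is imported,
# so export these before importing AnimeProvider.
os.environ["FASTANIME_CACHE_REQUESTS"] = "true"
os.environ["FASTANIME_USE_PERSISTENT_PROVIDER_STORE"] = "true"

from fastanime.AnimeProvider import AnimeProvider

provider = AnimeProvider("allanime")
results = provider.search_for_anime("one piece", "sub")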

105
fastanime/MangaProvider.py Normal file
View File

@@ -0,0 +1,105 @@
"""An abstraction over all providers offering added features with a simple and well typed api
[TODO:description]
"""
import importlib
import logging
from typing import TYPE_CHECKING
from .libs.manga_provider import manga_sources
if TYPE_CHECKING:
pass
logger = logging.getLogger(__name__)
class MangaProvider:
Class that manages all manga sources, adding some extra functionality to them.
Attributes:
PROVIDERS: [TODO:attribute]
provider: [TODO:attribute]
provider: [TODO:attribute]
dynamic: [TODO:attribute]
retries: [TODO:attribute]
manga_provider: [TODO:attribute]
"""
PROVIDERS = list(manga_sources.keys())
provider = PROVIDERS[0]
def __init__(self, provider="mangadex", dynamic=False, retries=0) -> None:
self.provider = provider
self.dynamic = dynamic
self.retries = retries
self.lazyload_provider(self.provider)
def lazyload_provider(self, provider):
"""updates the current provider being used"""
_, anime_provider_cls_name = manga_sources[provider].split(".", 1)
package = f"fastanime.libs.manga_provider.{provider}"
provider_api = importlib.import_module(".api", package)
manga_provider = getattr(provider_api, anime_provider_cls_name)
self.manga_provider = manga_provider()
def search_for_manga(
self,
user_query,
nsfw=True,
unknown=True,
):
"""core abstraction over all providers search functionality
Args:
user_query ([TODO:parameter]): [TODO:description]
translation_type ([TODO:parameter]): [TODO:description]
nsfw ([TODO:parameter]): [TODO:description]
manga_provider ([TODO:parameter]): [TODO:description]
anilist_obj: [TODO:description]
Returns:
[TODO:return]
"""
manga_provider = self.manga_provider
try:
results = manga_provider.search_for_manga(user_query, nsfw, unknown)
except Exception as e:
logger.error(e)
results = None
return results
def get_manga(
self,
anime_id: str,
):
"""core abstraction over getting info of an anime from all providers
Args:
anime_id: [TODO:description]
anilist_obj: [TODO:description]
Returns:
[TODO:return]
"""
manga_provider = self.manga_provider
try:
results = manga_provider.get_manga(anime_id)
except Exception as e:
logger.error(e)
results = None
return results
def get_chapter_thumbnails(
self,
manga_id: str,
chapter: str,
):
manga_provider = self.manga_provider
try:
results = manga_provider.get_chapter_thumbnails(manga_id, chapter)
except Exception as e:
logger.error(e)
results = None
return results # pyright:ignore

View File

@@ -1,13 +1,16 @@
import re
from datetime import datetime
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from ..libs.anilist.types import AnilistDateObject, AnilistMediaNextAiringEpisode
COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")
# TODO: Add formating options for the final date
def format_anilist_date_object(anilist_date_object: "AnilistDateObject"):
if anilist_date_object:
if anilist_date_object and anilist_date_object["day"]:
return f"{anilist_date_object['day']}/{anilist_date_object['month']}/{anilist_date_object['year']}"
else:
return "Unknown"
@@ -27,6 +30,12 @@ def format_list_data_with_comma(data: list | None):
return "None"
def format_number_with_commas(number: int | None):
if not number:
return "0"
return COMMA_REGEX.sub(lambda match: f"{match.group(1)},", str(number)[::-1])[::-1]
def extract_next_airing_episode(airing_episode: "AnilistMediaNextAiringEpisode"):
if airing_episode:
return f"{airing_episode['episode']} on {format_anilist_timestamp(airing_episode['airingAt'])}"
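For reference, a self-contained sketch of the format_number_with_commas helper added in this hunk, with a sample call showing the reverse, substitute, reverse trick; output is shown in the comment:

import re

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")

def format_number_with_commas(number):
    # Reverse the digits, insert a comma after every group of three that is
    # still followed by another digit, then reverse back.
    if not number:
        return "0"
    return COMMA_REGEX.sub(lambda m: f"{m.group(1)},", str(number)[::-1])[::-1]

print(format_number_with_commas(1234567))  # -> 1,234,567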

View File

@@ -9,9 +9,12 @@ anime_normalizer_raw = {
"Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
"Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
},
"aniwatch": {"My Star": "Oshi no Ko"},
"animepahe": {},
"hianime": {"My Star": "Oshi no Ko"},
"animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
"nyaa": {},
"yugen": {},
}
@@ -19,7 +22,7 @@ def get_anime_normalizer():
"""Used because there are different providers"""
import os
current_provider = os.environ["CURRENT_FASTANIME_PROVIDER"]
current_provider = os.environ.get("FASTANIME_PROVIDER", "allanime")
return anime_normalizer_raw[current_provider]

View File

@@ -0,0 +1,6 @@
from yt_dlp import YoutubeDL
# TODO: create a class that makes yt-dlp's YoutubeDL fit in more with fastanime
class YtDlp(YoutubeDL):
pass

View File

@@ -16,6 +16,7 @@ logger = logging.getLogger(__name__)
class YtDLPDownloader:
downloads_queue = Queue()
_thread = None
def _worker(self):
while True:
@@ -26,13 +27,6 @@ class YtDLPDownloader:
logger.error(f"Something went wrong {e}")
self.downloads_queue.task_done()
def __init__(self):
self._thread = Thread(target=self._worker)
self._thread.daemon = True
self._thread.start()
# Function to download the file
# TODO: unpack the title to its actual values episode_title and anime_title
def _download_file(
self,
url: str,
@@ -40,6 +34,7 @@ class YtDLPDownloader:
episode_title: str,
download_dir: str,
silent: bool,
progress_hooks=[],
vid_format: str = "best",
force_unknown_ext=False,
verbose=False,
@@ -61,6 +56,25 @@ class YtDLPDownloader:
"""
anime_title = sanitize_filename(anime_title)
episode_title = sanitize_filename(episode_title)
if url.endswith(".torrent"):
WEBTORRENT_CLI = shutil.which("webtorrent")
if not WEBTORRENT_CLI:
import time
print(
"webtorrent cli is not installed which is required for downloading and streaming from nyaa\nplease install it or use another provider"
)
time.sleep(120)
return
cmd = [
WEBTORRENT_CLI,
"download",
url,
"--out",
os.path.join(download_dir, anime_title, episode_title),
]
subprocess.run(cmd)
return
ydl_opts = {
# Specify the output path and template
"http_headers": headers,
@@ -69,6 +83,7 @@ class YtDLPDownloader:
"verbose": verbose,
"format": vid_format,
"compat_opts": ("allow-unsafe-ext",) if force_unknown_ext else tuple(),
"progress_hooks": progress_hooks,
}
urls = [url]
if sub:
@@ -81,7 +96,14 @@ class YtDLPDownloader:
if not info:
continue
if i == 0:
vid_path = info["requested_downloads"][0]["filepath"]
vid_path: str = info["requested_downloads"][0]["filepath"]
if vid_path.endswith(".unknown_video"):
print("Normalizing path...")
_vid_path = vid_path.replace(".unknown_video", ".mp4")
shutil.move(vid_path, _vid_path)
vid_path = _vid_path
print("successfully normalized path")
else:
sub_path = info["requested_downloads"][0]["filepath"]
if sub_path and vid_path and merge:
@@ -150,8 +172,15 @@ class YtDLPDownloader:
except Exception as e:
print(f"[red bold]An error[/] occurred: {e}")
# WARN: May remove this legacy functionality
def download_file(self, url: str, title, silent=True):
def download_file(
self,
url: str,
anime_title: str,
episode_title: str,
download_dir: str,
silent: bool = True,
**kwargs,
):
"""A helper that just does things in the background
Args:
@@ -159,7 +188,17 @@ class YtDLPDownloader:
silent ([TODO:parameter]): [TODO:description]
url: [TODO:description]
"""
self.downloads_queue.put((self._download_file, (url, title, silent)))
if not self._thread:
self._thread = Thread(target=self._worker)
self._thread.daemon = True
self._thread.start()
self.downloads_queue.put(
(
self._download_file,
(url, anime_title, episode_title, download_dir, silent),
)
)
downloader = YtDLPDownloader()
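Since the downloader now forwards progress_hooks into the yt-dlp options, a hook could look roughly like the sketch below; yt-dlp calls each hook with a status dict, and the exact fields handled here are only an illustration:

from yt_dlp import YoutubeDL

def on_progress(d):
    # yt-dlp passes a dict whose "status" is "downloading", "finished" or "error".
    if d.get("status") == "downloading":
        total = d.get("total_bytes") or d.get("total_bytes_estimate") or 0
        done = d.get("downloaded_bytes", 0)
        if total:
            print(f"\rdownloading: {done * 100 / total:.1f}%", end="")
    elif d.get("status") == "finished":
        print(f"\nfinished: {d.get('filename')}")

# The same shape of hook is what the new progress_hooks parameter ends up
# handing to yt-dlp via ydl_opts["progress_hooks"].
ydl_opts = {"progress_hooks": [on_progress]}
with YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://example.com/video"])  # illustrative URL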

View File

@@ -37,6 +37,10 @@ def anime_title_percentage_match(
title_a = str(anime["title"]["romaji"])
title_b = str(anime["title"]["english"])
percentage_ratio = max(
*[
fuzz.ratio(title.lower(), possible_user_requested_anime_title.lower())
for title in anime["synonyms"]
],
fuzz.ratio(title_a.lower(), possible_user_requested_anime_title.lower()),
fuzz.ratio(title_b.lower(), possible_user_requested_anime_title.lower()),
)

View File

@@ -2,11 +2,11 @@ import sys
if sys.version_info < (3, 10):
raise ImportError(
"You are using an unsupported version of Python. Only Python versions 3.8 and above are supported by yt-dlp"
"You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by FastAnime"
) # noqa: F541
__version__ = "v2.4.2"
__version__ = "v2.7.4"
APP_NAME = "FastAnime"
AUTHOR = "Benex254"

93
fastanime/api/__init__.py Normal file
View File

@@ -0,0 +1,93 @@
from typing import Literal
from fastapi import FastAPI
from requests import post
from thefuzz import fuzz
from ..AnimeProvider import AnimeProvider
from ..Utility.data import anime_normalizer
app = FastAPI()
anime_provider = AnimeProvider("allanime", "true", "true")
ANILIST_ENDPOINT = "https://graphql.anilist.co"
@app.get("/search")
def search_for_anime(title: str, translation_type: Literal["dub", "sub"] = "sub"):
return anime_provider.search_for_anime(title, translation_type)
@app.get("/anime/{anime_id}")
def get_anime(anime_id: str):
return anime_provider.get_anime(anime_id)
@app.get("/anime/{anime_id}/watch")
def get_episode_streams(
anime_id: str, episode: str, translation_type: Literal["sub", "dub"]
):
return anime_provider.get_episode_streams(anime_id, episode, translation_type)
def get_anime_by_anilist_id(anilist_id: int):
query = f"""
query {{
Media(id: {anilist_id}) {{
id
title {{
romaji
english
native
}}
synonyms
episodes
duration
}}
}}
"""
response = post(ANILIST_ENDPOINT, json={"query": query}).json()
return response["data"]["Media"]
@app.get("/watch/{anilist_id}")
def get_episode_streams_by_anilist_id(
anilist_id: int, episode: str, translation_type: Literal["sub", "dub"]
):
anime = get_anime_by_anilist_id(anilist_id)
if not anime:
return
if search_results := anime_provider.search_for_anime(
str(anime["title"]["romaji"] or anime["title"]["english"]), translation_type
):
if not search_results["results"]:
return
def match_title(possible_user_requested_anime_title):
possible_user_requested_anime_title = anime_normalizer.get(
possible_user_requested_anime_title, possible_user_requested_anime_title
)
title_a = str(anime["title"]["romaji"])
title_b = str(anime["title"]["english"])
percentage_ratio = max(
*[
fuzz.ratio(
title.lower(), possible_user_requested_anime_title.lower()
)
for title in anime["synonyms"]
],
fuzz.ratio(
title_a.lower(), possible_user_requested_anime_title.lower()
),
fuzz.ratio(
title_b.lower(), possible_user_requested_anime_title.lower()
),
)
return percentage_ratio
provider_anime = max(
search_results["results"], key=lambda x: match_title(x["title"])
)
anime_provider.get_anime(provider_anime["id"])
return anime_provider.get_episode_streams(
provider_anime["id"], episode, translation_type
)
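Assuming the serve command exposes this FastAPI app on a local port, the endpoints could be exercised as sketched below; the base URL and the AniList id are assumptions for illustration only:

import requests

BASE = "http://localhost:8000"  # hypothetical; depends on how the app is served

search = requests.get(
    f"{BASE}/search", params={"title": "one piece", "translation_type": "sub"}
).json()

# Resolve streams straight from an AniList media id (path param) plus query params.
streams = requests.get(
    f"{BASE}/watch/21", params={"episode": "1", "translation_type": "sub"}
).json()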

View File

@@ -16,6 +16,7 @@ commands = {
"completions": "completions.completions",
"update": "update.update",
"grab": "grab.grab",
"serve": "serve.serve",
}
@@ -38,8 +39,32 @@ signal.signal(signal.SIGINT, handle_exit)
cls=LazyGroup,
help="A command line application for streaming anime that provides a complete and featureful interface",
short_help="Stream Anime",
epilog="""
\b
\b\bExamples:
# example of syncplay integration
fastanime --sync-play --server sharepoint search -t <anime-title>
\b
# --- or ---
\b
# to watch with anilist integration
fastanime --sync-play --server sharepoint anilist
\b
# downloading dubbed anime
fastanime --dub download -t <anime>
\b
# use icons and fzf for a more elegant ui with preview
fastanime --icons --preview --fzf anilist
\b
# use icons with default ui
fastanime --icons --default anilist
\b
# viewing manga
fastanime --manga search -t <manga-title>
""",
)
@click.version_option(__version__, "--version")
@click.option("--manga", "-m", help="Enable manga mode", is_flag=True)
@click.option("--log", help="Allow logging to stdout", is_flag=True)
@click.option("--log-file", help="Allow logging to a file", is_flag=True)
@click.option("--rich-traceback", help="Use rich to output tracebacks", is_flag=True)
@@ -133,6 +158,9 @@ signal.signal(signal.SIGINT, handle_exit)
@click.option("--sub", help="Set the translation type to sub", is_flag=True)
@click.option("--rofi", help="Use rofi for the ui", is_flag=True)
@click.option("--rofi-theme", help="Rofi theme to use", type=click.Path())
@click.option(
"--rofi-theme-preview", help="Rofi theme to use for previews", type=click.Path()
)
@click.option(
"--rofi-theme-confirm",
help="Rofi theme to use for the confirm prompt",
@@ -144,12 +172,22 @@ signal.signal(signal.SIGINT, handle_exit)
type=click.Path(),
)
@click.option(
"--use-mpv-mod/--use-default-player", help="Whether to use python-mpv", type=bool
"--use-python-mpv/--use-default-player", help="Whether to use python-mpv", type=bool
)
@click.option("--sync-play", "-sp", help="Use sync play", is_flag=True)
@click.option(
"--player",
"-P",
help="the player to use when streaming",
type=click.Choice(["mpv", "vlc"]),
)
@click.option(
"--fresh-requests", is_flag=True, help="Force the requests cache to be updated"
)
@click.pass_context
def run_cli(
ctx: click.Context,
manga,
log,
log_file,
rich_traceback,
@@ -175,14 +213,20 @@ def run_cli(
sub,
rofi,
rofi_theme,
rofi_theme_preview,
rofi_theme_confirm,
rofi_theme_input,
use_mpv_mod,
use_python_mpv,
sync_play,
player,
fresh_requests,
):
import os
from .config import Config
ctx.obj = Config()
ctx.obj.manga = manga
if log:
import logging
@@ -217,6 +261,8 @@ def run_cli(
install()
if fresh_requests:
os.environ["FASTANIME_FRESH_REQUESTS"] = "1"
if sync_play:
ctx.obj.sync_play = sync_play
if provider:
@@ -229,6 +275,8 @@ def run_cli(
ctx.obj.sub_lang = sub_lang
if ctx.get_parameter_source("continue_") == click.core.ParameterSource.COMMANDLINE:
ctx.obj.continue_from_history = continue_
if ctx.get_parameter_source("player") == click.core.ParameterSource.COMMANDLINE:
ctx.obj.player = player
if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE:
ctx.obj.skip = skip
if (
@@ -254,18 +302,19 @@ def run_cli(
):
ctx.obj.auto_select = auto_select
if (
ctx.get_parameter_source("use_mpv_mod")
ctx.get_parameter_source("use_python_mpv")
== click.core.ParameterSource.COMMANDLINE
):
ctx.obj.use_mpv_mod = use_mpv_mod
ctx.obj.use_python_mpv = use_python_mpv
if downloads_dir:
ctx.obj.downloads_dir = downloads_dir
if translation_type:
ctx.obj.translation_type = translation_type
if fzf:
ctx.obj.use_fzf = True
if default:
ctx.obj.use_fzf = False
ctx.obj.use_rofi = False
if fzf:
ctx.obj.use_fzf = True
if preview:
ctx.obj.preview = True
if no_preview:
@@ -280,6 +329,10 @@ def run_cli(
if rofi:
from ..libs.rofi import Rofi
if rofi_theme_preview:
ctx.obj.rofi_theme_preview = rofi_theme_preview
Rofi.rofi_theme_preview = rofi_theme_preview
if rofi_theme:
ctx.obj.rofi_theme = rofi_theme
Rofi.rofi_theme = rofi_theme
@@ -291,3 +344,4 @@ def run_cli(
if rofi_theme_confirm:
ctx.obj.rofi_theme_confirm = rofi_theme_confirm
Rofi.rofi_theme_confirm = rofi_theme_confirm
ctx.obj.set_fastanime_config_environs()

View File

@@ -75,9 +75,9 @@ def is_git_repo(author, repository):
return bool(match) and match.group(1) == f"{author}/{repository}"
def update_app():
def update_app(force=False):
is_latest, release_json = check_for_updates()
if is_latest:
if is_latest and not force:
print("[green]App is up to date[/]")
return False, release_json
tag_name = release_json["tag_name"]
@@ -101,8 +101,10 @@ def update_app():
)
else:
if PIPX_EXECUTABLE := shutil.which("pipx"):
process = subprocess.run([PIPX_EXECUTABLE, "upgrade", APP_NAME])
if UV := shutil.which("uv"):
process = subprocess.run([UV, "tool", "upgrade", APP_NAME])
elif PIPX := shutil.which("pipx"):
process = subprocess.run([PIPX, "upgrade", APP_NAME])
else:
PYTHON_EXECUTABLE = sys.executable
@@ -112,6 +114,7 @@ def update_app():
"pip",
"install",
APP_NAME,
"-U",
"--user",
"--no-warn-script-location",
]

View File

@@ -20,6 +20,7 @@ commands = {
"completed": "completed.completed",
"planning": "planning.planning",
"notifier": "notifier.notifier",
"stats": "stats.stats",
}
@@ -29,6 +30,53 @@ commands = {
invoke_without_command=True,
help="A beautiful interface that gives you access to a complete streaming experience",
short_help="Access all streaming options",
epilog="""
\b
\b\bExamples:
# ---- search ----
\b
# get anime with the tag of isekai
fastanime anilist search -T isekai
\b
# get anime of 2024 and sort by popularity
# that has already finished airing or is releasing
# and is not in your anime lists
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
\b
# get anime of 2024 season WINTER
fastanime anilist search -y 2024 --season WINTER
\b
# get anime with genre Action and tags Isekai and Magic
fastanime anilist search -g Action -T Isekai -T Magic
\b
# get anime of 2024 that's finished airing
fastanime anilist search -y 2024 -S FINISHED
\b
# get the most favourite anime movies
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
\b
# ---- login ----
\b
# To sign in just run
fastanime anilist login
\b
# To view your login status
fastanime anilist login --status
\b
# To erase login data
fastanime anilist login --erase
\b
# ---- notifier ----
\b
# basic form
fastanime anilist notifier
\b
# with logging to stdout
fastanime --log anilist notifier
\b
# with logging to a file. stored in the same place as your config
fastanime --log-file anilist notifier
""",
)
@click.pass_context
def anilist(ctx: click.Context):

View File

@@ -42,5 +42,5 @@ def completed(config: "Config", dump_json):
from ...interfaces import anilist_interfaces
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_list[1]
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -42,5 +42,5 @@ def dropped(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_list[1]
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -26,7 +26,7 @@ def favourites(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_data[1]
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -16,7 +16,12 @@ def notifier(config: "Config"):
from sys import exit
import requests
from plyer import notification
try:
from plyer import notification
except ImportError:
print("Please install plyer to use this command")
exit(1)
from ....anilist import AniList
from ....constants import APP_CACHE_DIR, APP_DATA_DIR, APP_NAME, ICON_PATH, PLATFORM

View File

@@ -42,5 +42,5 @@ def paused(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
anilist_config = FastAnimeRuntimeState()
anilist_config.data = anime_list[1]
anilist_config.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, anilist_config)

View File

@@ -42,5 +42,5 @@ def planning(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_list[1]
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -25,7 +25,7 @@ def popular(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_data[1]
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -33,7 +33,7 @@ def random_anime(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_data[1]
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
exit(1)

View File

@@ -26,7 +26,7 @@ def recent(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_data[1]
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -42,5 +42,5 @@ def rewatching(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_list[1]
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -25,7 +25,7 @@ def scores(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.data = anime_data[1]
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -4,7 +4,7 @@ from ...completion_functions import anime_titles_shell_complete
tags_available = {
"Cast": ["Polyamorous"],
"Cast / Main Cast": [
"Cast Main Cast": [
"Anti-Hero",
"Elderly Protagonist",
"Ensemble Cast",
@@ -18,7 +18,7 @@ tags_available = {
"Primarily Male Cast",
"Primarily Teen Cast",
],
"Cast / Traits": [
"Cast Traits": [
"Age Regression",
"Agender",
"Aliens",
@@ -94,7 +94,7 @@ tags_available = {
],
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
"Setting": ["Matriarchy"],
"Setting / Scene": [
"Setting Scene": [
"Bar",
"Boarding School",
"Circus",
@@ -117,7 +117,7 @@ tags_available = {
"Urban",
"Work",
],
"Setting / Time": [
"Setting Time": [
"Achronological Order",
"Anachronism",
"Ancient China",
@@ -125,7 +125,7 @@ tags_available = {
"Historical",
"Time Skip",
],
"Setting / Universe": [
"Setting Universe": [
"Afterlife",
"Alternate Universe",
"Augmented Reality",
@@ -152,7 +152,7 @@ tags_available = {
"Rotoscoping",
"Stop Motion",
],
"Theme / Action": [
"Theme Action": [
"Archery",
"Battle Royale",
"Espionage",
@@ -162,7 +162,7 @@ tags_available = {
"Spearplay",
"Swordplay",
],
"Theme / Arts": [
"Theme Arts": [
"Acting",
"Calligraphy",
"Classic Literature",
@@ -174,7 +174,7 @@ tags_available = {
"Rakugo",
"Writing",
],
"Theme / Arts-Music": [
"Theme Arts-Music": [
"Band",
"Classical Music",
"Dancing",
@@ -184,8 +184,8 @@ tags_available = {
"Musical Theater",
"Rock Music",
],
"Theme / Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
"Theme / Drama": [
"Theme Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
"Theme Drama": [
"Bullying",
"Class Struggle",
"Coming of Age",
@@ -198,7 +198,7 @@ tags_available = {
"Suicide",
"Tragedy",
],
"Theme / Fantasy": [
"Theme Fantasy": [
"Alchemy",
"Body Swapping",
"Cultivation",
@@ -216,8 +216,8 @@ tags_available = {
"Wuxia",
"Youkai",
],
"Theme / Game": ["Board Game", "E-Sports", "Video Games"],
"Theme / Game-Card & Board Game": [
"Theme Game": ["Board Game", "E-Sports", "Video Games"],
"Theme Game-Card & Board Game": [
"Card Battle",
"Go",
"Karuta",
@@ -225,7 +225,7 @@ tags_available = {
"Poker",
"Shogi",
],
"Theme / Game-Sport": [
"Theme Game-Sport": [
"Acrobatics",
"Airsoft",
"American Football",
@@ -258,7 +258,7 @@ tags_available = {
"Volleyball",
"Wrestling",
],
"Theme / Other": [
"Theme Other": [
"Adoption",
"Animals",
"Astronomy",
@@ -308,7 +308,7 @@ tags_available = {
"Travel",
"War",
],
"Theme / Other-Organisations": [
"Theme Other-Organisations": [
"Assassins",
"Criminal Organization",
"Cult",
@@ -320,7 +320,7 @@ tags_available = {
"Triads",
"Yakuza",
],
"Theme / Other-Vehicle": [
"Theme Other-Vehicle": [
"Aviation",
"Cars",
"Mopeds",
@@ -329,7 +329,7 @@ tags_available = {
"Tanks",
"Trains",
],
"Theme / Romance": [
"Theme Romance": [
"Age Gap",
"Bisexual",
"Boys' Love",
@@ -343,15 +343,15 @@ tags_available = {
"Unrequited Love",
"Yuri",
],
"Theme / Sci Fi": [
"Theme Sci Fi": [
"Cyberpunk",
"Space Opera",
"Time Loop",
"Time Manipulation",
"Tokusatsu",
],
"Theme / Sci Fi-Mecha": ["Real Robot", "Super Robot"],
"Theme / Slice of Life": [
"Theme Sci Fi-Mecha": ["Real Robot", "Super Robot"],
"Theme Slice of Life": [
"Agriculture",
"Cute Boys Doing Cute Things",
"Cute Girls Doing Cute Things",
@@ -386,6 +386,7 @@ for tag_category, tags_in_category in tags_available.items():
"--status",
"-S",
help="The media status of the anime",
multiple=True,
type=click.Choice(
["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]
),
@@ -486,57 +487,74 @@ for tag_category, tags_in_category in tags_available.items():
"-y",
type=click.Choice(
[
"2024",
"2023",
"2022",
"2021",
"2020",
"2019",
"2018",
"2017",
"2016",
"2015",
"2014",
"2013",
"2012",
"2011",
"2010",
"2009",
"2008",
"2007",
"2006",
"2005",
"2004",
"2000",
"1990",
"1980",
"1970",
"1960",
"1950",
"1940",
"1930",
"1920",
"1910",
"1900",
"1910",
"1920",
"1930",
"1940",
"1950",
"1960",
"1970",
"1980",
"1990",
"2000",
"2004",
"2005",
"2006",
"2007",
"2008",
"2009",
"2010",
"2011",
"2012",
"2013",
"2014",
"2015",
"2016",
"2017",
"2018",
"2019",
"2020",
"2021",
"2022",
"2023",
"2024",
]
),
help="the year the media was released",
)
@click.option(
"--on-list/--not-on-list",
"-L/-no-L",
help="Whether the anime should be in your list or not",
type=bool,
)
@click.pass_obj
def search(
config, title, dump_json, season, status, sort, genres, tags, media_format, year
config,
title,
dump_json,
season,
status,
sort,
genres,
tags,
media_format,
year,
on_list,
):
from ....anilist import AniList
success, search_results = AniList.search(
query=title,
sort=sort,
status=status,
status_in=list(status),
genre_in=list(genres),
season=season,
tag_in=list(tags),
seasonYear=year,
format_in=list(media_format),
on_list=on_list,
)
if success:
if dump_json:
@@ -548,7 +566,7 @@ def search(
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = search_results
fastanime_runtime_state.anilist_results_data = search_results
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -0,0 +1,63 @@
from typing import TYPE_CHECKING
import click
if TYPE_CHECKING:
from ...config import Config
@click.command(help="Print out your anilist stats")
@click.pass_obj
def stats(
config: "Config",
):
import shutil
import subprocess
from sys import exit
from rich.console import Console
console = Console()
from rich.markdown import Markdown
from rich.panel import Panel
from ....anilist import AniList
user_data = AniList.get_user_info()
if not user_data[0] or not user_data[1]:
print("Failed to get user info")
print(user_data[1])
exit(1)
KITTEN_EXECUTABLE = shutil.which("kitten")
if not KITTEN_EXECUTABLE:
print("Kitten not found")
exit(1)
image_url = user_data[1]["data"]["User"]["avatar"]["medium"]
user_name = user_data[1]["data"]["User"]["name"]
about = user_data[1]["data"]["User"]["about"] or ""
console.clear()
image_x = int(console.size.width * 0.1)
image_y = int(console.size.height * 0.1)
img_w = console.size.width // 3
img_h = console.size.height // 3
image_process = subprocess.run(
[
KITTEN_EXECUTABLE,
"icat",
"--clear",
"--place",
f"{img_w}x{img_h}@{image_x}x{image_y}",
image_url,
],
)
if not image_process.returncode == 0:
print("failed to get image from icat")
exit(1)
console.print(
Panel(
Markdown(about),
title=user_name,
)
)

View File

@@ -26,7 +26,7 @@ def trending(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = data
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -25,7 +25,7 @@ def upcoming(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = data
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit

View File

@@ -42,5 +42,5 @@ def watching(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_data = anime_list[1]
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -1,7 +1,24 @@
import click
@click.command(help="Helper command to manage cache")
@click.command(
help="Helper command to manage cache",
epilog="""
\b
\b\bExamples:
# delete everything in the cache dir
fastanime cache --clean
\b
# print the path to the cache dir and exit
fastanime cache --path
\b
# print the current size of the cache dir and exit
fastanime cache --size
\b
# open the cache dir and exit
fastanime cache
""",
)
@click.option("--clean", help="Clean the cache dir", is_flag=True)
@click.option("--path", help="The path to the cache dir", is_flag=True)
@click.option("--size", help="The size of the cache dir", is_flag=True)

View File

@@ -1,7 +1,24 @@
import click
@click.command(help="Helper command to get shell completions")
@click.command(
help="Helper command to get shell completions",
epilog="""
\b
\b\bExamples:
# try to detect your shell and print completions
fastanime completions
\b
# print fish completions
fastanime completions --fish
\b
# print bash completions
fastanime completions --bash
\b
# print zsh completions
fastanime completions --zsh
""",
)
@click.option("--fish", is_flag=True, help="print fish completions")
@click.option("--zsh", is_flag=True, help="print zsh completions")
@click.option("--bash", is_flag=True, help="print bash completions")

View File

@@ -7,8 +7,27 @@ if TYPE_CHECKING:
@click.command(
help="Opens up your fastanime config in your preferred editor",
help="Manage your config with ease",
short_help="Edit your config",
epilog="""
\b
\b\bExamples:
# Edit your config in your default editor
# NB: If it opens vim or vi exit with `:q`
fastanime config
\b
# get the path of the config file
fastanime config --path
\b
# print desktop entry info
fastanime config --desktop-entry
\b
# update your config without opening an editor
fastanime --icons --fzf --preview config --update
\b
# view the current contents of your config
fastanime config --view
""",
)
@click.option("--path", "-p", help="Print the config location and exit", is_flag=True)
@click.option(
@@ -20,8 +39,14 @@ if TYPE_CHECKING:
help="Configure the desktop entry of fastanime",
is_flag=True,
)
@click.option(
"--update",
"-u",
help="Persist all the config options passed to fastanime to your config file",
is_flag=True,
)
@click.pass_obj
def config(config: "Config", path, view, desktop_entry):
def config(user_config: "Config", path, view, desktop_entry, update):
import sys
from rich import print
@@ -32,7 +57,7 @@ def config(config: "Config", path, view, desktop_entry):
if path:
print(USER_CONFIG_PATH)
elif view:
print(config)
print(user_config)
elif desktop_entry:
import os
import shutil
@@ -87,7 +112,9 @@ def config(config: "Config", path, view, desktop_entry):
with open(desktop_entry_path) as f:
print(f"Successfully wrote \n{f.read()}")
exit_app(0)
elif update:
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as file:
file.write(user_config.__str__())
print("update successful")
else:
import click
click.edit(filename=USER_CONFIG_PATH)

View File

@@ -11,6 +11,53 @@ if TYPE_CHECKING:
@click.command(
help="Download anime using the anime provider for a specified range",
short_help="Download anime",
epilog="""
\b
\b\bExamples:
# Download all available episodes
# multiple titles can be specified with -t option
fastanime download -t <anime-title> -t <anime-title>
# -- or --
fastanime download -t <anime-title> -t <anime-title> -r ':'
\b
# download latest episode for the two anime titles
# the number can be any no of latest episodes but a minus sign
# must be present
fastanime download -t <anime-title> -t <anime-title> -r '-1'
\b
# latest 5
fastanime download -t <anime-title> -t <anime-title> -r '-5'
\b
# Download specific episode range
# be sure to observe the range syntax
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>:<step>'
\b
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>'
\b
fastanime download -t <anime-title> -r '<episodes-start>:'
\b
fastanime download -t <anime-title> -r ':<episodes-end>'
\b
# download specific episode
# remember python indexing starts at 0
fastanime download -t <anime-title> -r '<episode-1>:<episode>'
\b
# merge subtitles with ffmpeg into mkv format; hianime tends to give subs as separate files,
# don't prompt for anything (e.g. if a file already exists in the destination, remove it),
# and clean, i.e. remove the original files (sub file and vid file)
# keeping only the merged files
fastanime download -t <anime-title> --merge --clean --no-prompt
\b
# EOF is used since -t always expects a title
# you can supply anime titles from a file (or stdin) and with -t at the same time
# from stdin
echo -e "<anime-title>\\n<anime-title>\\n<anime-title>" | fastanime download -t "EOF" -r <range> -f -
\b
# from file
fastanime download -t "EOF" -r <range> -f <file-path>
""",
)
@click.option(
"--anime-titles",
@@ -160,7 +207,7 @@ def download(
choices = list(search_results_.keys())
if config.use_fzf:
selected_anime_title = fzf.run(
choices, "Please Select title: ", "FastAnime"
choices, "Please Select title", "FastAnime"
)
else:
selected_anime_title = fuzzy_inquirer(
@@ -237,7 +284,7 @@ def download(
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams(
anime, episode, config.translation_type
anime["id"], episode, config.translation_type
)
if not streams:
print("No streams skipping")
@@ -272,7 +319,7 @@ def download(
server_name = config.server
else:
if config.use_fzf:
server_name = fzf.run(servers_names, "Select a link: ")
server_name = fzf.run(servers_names, "Select a link")
else:
server_name = fuzzy_inquirer(
servers_names,
@@ -314,9 +361,9 @@ def download(
episode_title,
download_dir,
silent,
config.format,
force_unknown_ext,
verbose,
vid_format=config.format,
force_unknown_ext=force_unknown_ext,
verbose=verbose,
headers=provider_headers,
sub=subtitles[0]["url"] if subtitles else "",
merge=merge,

View File

@@ -3,25 +3,60 @@ from typing import TYPE_CHECKING
import click
from ..completion_functions import downloaded_anime_titles
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from ..config import Config
@click.command(
help="View and watch your downloads using mpv", short_help="Watch downloads"
help="View and watch your downloads using mpv",
short_help="Watch downloads",
epilog="""
\b
\b\bExamples:
fastanime downloads
\b
# view individual episodes
fastanime downloads --view-episodes
# --- or ---
fastanime downloads -v
\b
# to set seek time when using ffmpegthumbnailer for local previews
# -1 means random and is the default
fastanime downloads --time-to-seek <intRange(-1,100)>
# --- or ---
fastanime downloads -t <intRange(-1,100)>
\b
# to watch a specific title
# be sure to get the completions for the best experience
fastanime downloads --title <title>
\b
# to get the path to the downloads folder set
fastanime downloads --path
# useful when you want to use the value for other programs
""",
)
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
@click.option(
"--title",
"-T",
shell_complete=downloaded_anime_titles,
help="watch a specific title",
)
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
@click.option(
"--ffmpegthumbnailer-seek-time",
"--time-to-seek",
"-t",
type=click.IntRange(-1, 100),
help="ffmpegthumbnailer seek time [0-100]",
help="ffmpegthumbnailer seek time",
)
@click.pass_obj
def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_seek_time):
def downloads(
config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
):
import os
from ...cli.utils.mpv import run_mpv
@@ -239,6 +274,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
os.listdir(anime_playlist_path), key=sort_by_episode_number
)
downloaded_episodes = [*episodes, "Back"]
if config.use_fzf:
if not config.preview:
episode_title = fzf.run(
@@ -257,7 +293,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
else:
episode_title = fuzzy_inquirer(
downloaded_episodes,
"Enter Playlist Name: ",
"Enter Playlist Name",
)
if episode_title == "Back":
stream_anime()
@@ -268,11 +304,18 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
SyncPlayer(episode_path)
else:
run_mpv(episode_path)
run_mpv(
episode_path,
player=config.player,
)
stream_episode(anime_playlist_path)
def stream_anime():
if config.use_fzf:
def stream_anime(title=None):
if title:
from thefuzz import fuzz
playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
elif config.use_fzf:
if not config.preview:
playlist_name = fzf.run(
anime_downloads,
@@ -290,7 +333,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
else:
playlist_name = fuzzy_inquirer(
anime_downloads,
"Enter Playlist Name: ",
"Enter Playlist Name",
)
if playlist_name == "Exit":
exit_app()
@@ -306,7 +349,10 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
SyncPlayer(playlist)
else:
run_mpv(playlist)
run_mpv(
playlist,
player=config.player,
)
stream_anime()
stream_anime()
stream_anime(title)
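
The new --title option above picks the closest downloaded playlist with thefuzz; a minimal standalone sketch of that matching (the helper name is illustrative, the real code inlines the max() call):

from thefuzz import fuzz

def best_match(title: str, candidates: list[str]) -> str:
    # score every candidate against the requested title and keep the highest scorer
    return max(candidates, key=lambda candidate: fuzz.ratio(title, candidate))

# e.g. best_match("one piece", ["One Piece", "One Punch Man"]) -> "One Piece"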

View File

@@ -11,6 +11,41 @@ if TYPE_CHECKING:
@click.command(
help="Helper command to get streams for anime to use externally in a non-python application",
short_help="Print anime streams to standard out",
epilog="""
\b
\b\bExamples:
# --- print anime info + episode streams ---
\b
# multiple titles can be specified with the -t option
fastanime grab -t <anime-title> -t <anime-title>
# -- or --
# print all available episodes
fastanime grab -t <anime-title> -r ':'
\b
# print the latest episode
fastanime grab -t <anime-title> -r '-1'
\b
# print a specific episode range
# be sure to observe the range syntax
fastanime grab -t <anime-title> -r '<start>:<stop>'
\b
fastanime grab -t <anime-title> -r '<start>:<stop>:<step>'
\b
fastanime grab -t <anime-title> -r '<start>:'
\b
fastanime grab -t <anime-title> -r ':<end>'
\b
# --- grab options ---
\b
# print search results only
fastanime grab -t <anime-title> -r <range> --search-results-only
\b
# print anime info only
fastanime grab -t <anime-title> -r <range> --anime-info-only
\b
# print episode streams only
fastanime grab -t <anime-title> -r <range> --episode-streams-only
""",
)
@click.option(
"--anime-titles",
@@ -56,26 +91,19 @@ def grab(
from thefuzz import fuzz
from ...AnimeProvider import AnimeProvider
logger = getLogger(__name__)
if config.manga:
manga_title = anime_titles[0]
from ...MangaProvider import MangaProvider
anime_provider = AnimeProvider(config.provider)
grabbed_animes = []
for anime_title in anime_titles:
# ---- search for anime ----
search_results = anime_provider.search_for_anime(
anime_title, translation_type=config.translation_type
)
if not search_results:
manga_provider = MangaProvider()
search_data = manga_provider.search_for_manga(manga_title)
if not search_data:
exit(1)
if search_results_only:
# grab only search results skipping all lines after this
grabbed_animes.append(search_results)
continue
search_results = search_results["results"]
print(json.dumps(search_data))
exit(0)
search_results = search_data["results"]
if not search_results:
logger.error("no results for your search")
exit(1)
@@ -83,83 +111,133 @@ def grab(
search_result["title"]: search_result for search_result in search_results
}
search_result = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
search_result_anime_title = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_titles[0])
)
# ---- fetch anime ----
anime = anime_provider.get_anime(search_results_[search_result]["id"])
if not anime:
exit(1)
manga_info = manga_provider.get_manga(
search_results_[search_result_anime_title]["id"]
)
if not manga_info:
return
if anime_info_only:
# grab only the anime data skipping all lines after this
grabbed_animes.append(anime)
continue
episodes = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
print(json.dumps(manga_info))
exit(0)
chapter_info = manga_provider.get_chapter_thumbnails(
manga_info["id"], str(episode_range)
)
if not chapter_info:
exit(1)
print(json.dumps(chapter_info))
# where the magic happens
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
else:
episodes_range = sorted(episodes, key=float)
if not episode_streams_only:
grabbed_anime = dict(anime)
grabbed_anime["requested_episodes"] = episodes_range
grabbed_anime["translation_type"] = config.translation_type
grabbed_anime["episodes_streams"] = {}
else:
grabbed_anime = {}
# lets download em
for episode in episodes_range:
try:
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime, episode, config.translation_type
)
if not streams:
continue
episode_streams = {server["server"]: server for server in streams}
if episode_streams_only:
grabbed_anime[episode] = episode_streams
else:
grabbed_anime["episodes_streams"][ # pyright:ignore
episode
] = episode_streams
except Exception as e:
logger.error(e)
# grab the full data for a single title and append to the final result or episode streams
grabbed_animes.append(grabbed_anime)
# print out the final result, either {} or [], depending on whether more than one title is requested
if len(grabbed_animes) == 1:
print(json.dumps(grabbed_animes[0]))
else:
print(json.dumps(grabbed_animes))
from ...AnimeProvider import AnimeProvider
anime_provider = AnimeProvider(config.provider)
grabbed_animes = []
for anime_title in anime_titles:
# ---- search for anime ----
search_results = anime_provider.search_for_anime(
anime_title, translation_type=config.translation_type
)
if not search_results:
exit(1)
if search_results_only:
# grab only search results skipping all lines after this
grabbed_animes.append(search_results)
continue
search_results = search_results["results"]
if not search_results:
logger.error("no results for your search")
exit(1)
search_results_ = {
search_result["title"]: search_result
for search_result in search_results
}
search_result_anime_title = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
)
# ---- fetch anime ----
anime = anime_provider.get_anime(
search_results_[search_result_anime_title]["id"]
)
if not anime:
exit(1)
if anime_info_only:
# grab only the anime data skipping all lines after this
grabbed_animes.append(anime)
continue
episodes = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
)
# where the magic happens
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end)
]
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
else:
episodes_range = sorted(episodes, key=float)
if not episode_streams_only:
grabbed_anime = dict(anime)
grabbed_anime["requested_episodes"] = episodes_range
grabbed_anime["translation_type"] = config.translation_type
grabbed_anime["episodes_streams"] = {}
else:
grabbed_anime = {}
# lets download em
for episode in episodes_range:
try:
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime["id"], episode, config.translation_type
)
if not streams:
continue
episode_streams = {server["server"]: server for server in streams}
if episode_streams_only:
grabbed_anime[episode] = episode_streams
else:
grabbed_anime["episodes_streams"][ # pyright:ignore
episode
] = episode_streams
except Exception as e:
logger.error(e)
# grab the full data for a single title and append to the final result or episode streams
grabbed_animes.append(grabbed_anime)
# print out the final result, either {} or [], depending on whether more than one title is requested
if len(grabbed_animes) == 1:
print(json.dumps(grabbed_animes[0]))
else:
print(json.dumps(grabbed_animes))
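
The -r/--episode-range syntax documented in the epilog above boils down to Python list slicing over the float-sorted episode list; a minimal standalone sketch of that parsing, assuming well-formed input (the function name is illustrative, the real command inlines this logic):

def slice_episodes(episodes, episode_range=None):
    """Map the -r/--episode-range string onto Python list slicing."""
    episodes = sorted(episodes, key=float)
    if not episode_range:
        return episodes
    if ":" not in episode_range:
        # a single number is treated as a start index, so '-1' selects the latest episode
        return episodes[int(episode_range):]
    parts = episode_range.split(":")
    if len(parts) == 3 and all(parts):
        start, stop, step = parts
        return episodes[int(start):int(stop):int(step)]
    if len(parts) == 2 and all(parts):
        start, stop = parts
        return episodes[int(start):int(stop)]
    start, stop = parts[0], parts[-1]
    if start.strip():
        return episodes[int(start):]
    if stop.strip():
        return episodes[:int(stop)]
    return episodes  # ':' selects every available episode

# e.g. slice_episodes(["1", "2", "3", "4"], "-1") -> ["4"]
#      slice_episodes(["1", "2", "3", "4"], "1:3") -> ["2", "3"]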

View File

@@ -1,12 +1,39 @@
from typing import TYPE_CHECKING
import click
from ...cli.config import Config
from ..completion_functions import anime_titles_shell_complete
if TYPE_CHECKING:
from ...cli.config import Config
@click.command(
help="This subcommand directly interacts with the provider to enable basic streaming. Useful for binging anime.",
short_help="Binge anime",
epilog="""
\b
\b\bExamples:
# basic form where you will still be prompted for the episode number
# multiple titles can be specified with the -t option
fastanime search -t <anime-title> -t <anime-title>
\b
# binge all episodes with this command
fastanime search -t <anime-title> -r ':'
\b
# watch latest episode
fastanime search -t <anime-title> -r '-1'
\b
# binge a specific episode range with this command
# be sure to observe the range syntax
fastanime search -t <anime-title> -r '<start>:<stop>'
\b
fastanime search -t <anime-title> -r '<start>:<stop>:<step>'
\b
fastanime search -t <anime-title> -r '<start>:'
\b
fastanime search -t <anime-title> -r ':<end>'
""",
)
@click.option(
"--anime-titles",
@@ -23,240 +50,336 @@ from ..completion_functions import anime_titles_shell_complete
help="A range of episodes to binge (start-end)",
)
@click.pass_obj
def search(config: Config, anime_titles: str, episode_range: str):
def search(config: "Config", anime_titles: str, episode_range: str):
from click import clear
from rich import print
from rich.progress import Progress
from thefuzz import fuzz
from ...AnimeProvider import AnimeProvider
from ...libs.anime_provider.types import Anime
from ...libs.fzf import fzf
from ...libs.rofi import Rofi
from ...Utility.data import anime_normalizer
from ..utils.mpv import run_mpv
from ..utils.tools import exit_app
from ..utils.utils import (
filter_by_quality,
fuzzy_inquirer,
move_preferred_subtitle_lang_to_top,
)
from ..utils.utils import fuzzy_inquirer
anime_provider = AnimeProvider(config.provider)
anilist_anime_info = None
if config.manga:
from InquirerPy.prompts.number import NumberPrompt
from yt_dlp.utils import sanitize_filename
from ...MangaProvider import MangaProvider
from ..utils.feh import feh_manga_viewer
manga_title = anime_titles[0]
manga_provider = MangaProvider()
search_data = manga_provider.search_for_manga(manga_title)
if not search_data:
print("No search results")
exit(1)
search_results = search_data["results"]
print(f"[green bold]Streaming:[/] {anime_titles}")
for anime_title in anime_titles:
# ---- search for anime ----
with Progress() as progress:
progress.add_task("Fetching Search Results...", total=None)
search_results = anime_provider.search_for_anime(
anime_title, config.translation_type
)
if not search_results:
print("Search results not found")
input("Enter to retry")
search(config, anime_title, episode_range)
return
search_results = search_results["results"]
if not search_results:
print("Anime not found :cry:")
exit_app()
search_results_ = {
search_result["title"]: search_result for search_result in search_results
sanitize_filename(search_result["title"]): search_result
for search_result in search_results
}
if config.auto_select:
search_result = max(
search_result_manga_title = max(
search_results_.keys(),
key=lambda title: fuzz.ratio(
anime_normalizer.get(title, title), anime_title
),
key=lambda title: fuzz.ratio(title, manga_title),
)
print("[cyan]Auto Selecting:[/] ", search_result)
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
else:
choices = list(search_results_.keys())
preview = None
if config.preview:
from ..interfaces.utils import get_fzf_manga_preview
preview = get_fzf_manga_preview(search_results)
if config.use_fzf:
search_result = fzf.run(choices, "Please Select title: ", "FastAnime")
search_result_manga_title = fzf.run(
choices, "Please Select title", preview=preview
)
elif config.use_rofi:
search_result = Rofi.run(choices, "Please Select Title")
search_result_manga_title = Rofi.run(choices, "Please Select Title")
else:
search_result = fuzzy_inquirer(
search_result_manga_title = fuzzy_inquirer(
choices,
"Please Select Title",
)
# ---- fetch selected anime ----
with Progress() as progress:
progress.add_task("Fetching Anime...", total=None)
anime: Anime | None = anime_provider.get_anime(
search_results_[search_result]["id"]
anilist_id = search_results_[search_result_manga_title]["id"]
manga_info = manga_provider.get_manga(anilist_id)
if not manga_info:
print("No manga info")
exit(1)
anilist_helper = None
if config.user:
from ...anilist import AniList
AniList.login_user(config.user["token"])
anilist_helper = AniList
def _manga_viewer():
chapter_number = NumberPrompt("Select a chapter number").execute()
chapter_info = manga_provider.get_chapter_thumbnails(
manga_info["id"], str(chapter_number)
)
if not anime:
print("Sth went wring anime no found")
input("Enter to continue...")
search(config, anime_title, episode_range)
return
episodes_range = []
episodes: list[str] = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
)
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
if not chapter_info:
print("No chapter info")
input("Enter to retry...")
_manga_viewer()
return
print(
f"[purple bold]Now Reading: [/] {search_result_manga_title} [cyan bold]Chapter:[/] {chapter_info['title']}"
)
feh_manga_viewer(chapter_info["thumbnails"], str(chapter_info["title"]))
if anilist_helper:
anilist_helper.update_anime_list(
{"mediaId": anilist_id, "progress": chapter_number}
)
_manga_viewer()
_manga_viewer()
else:
from ...AnimeProvider import AnimeProvider
from ...libs.anime_provider.types import Anime
from ...Utility.data import anime_normalizer
from ..utils.mpv import run_mpv
from ..utils.utils import filter_by_quality, move_preferred_subtitle_lang_to_top
anime_provider = AnimeProvider(config.provider)
anilist_anime_info = None
print(f"[green bold]Streaming:[/] {anime_titles}")
for anime_title in anime_titles:
# ---- search for anime ----
with Progress() as progress:
progress.add_task("Fetching Search Results...", total=None)
search_results = anime_provider.search_for_anime(
anime_title, config.translation_type
)
if not search_results:
print("Search results not found")
input("Enter to retry")
search(config, anime_title, episode_range)
return
search_results = search_results["results"]
if not search_results:
print("Anime not found :cry:")
exit_app()
search_results_ = {
search_result["title"]: search_result
for search_result in search_results
}
if config.auto_select:
search_result_manga_title = max(
search_results_.keys(),
key=lambda title: fuzz.ratio(
anime_normalizer.get(title, title), anime_title
),
)
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
episodes_range = iter(episodes_range)
if config.normalize_titles:
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
def stream_anime():
clear()
episode = None
if episodes_range:
try:
episode = next(episodes_range) # pyright:ignore
print(
f"[cyan]Auto selecting:[/] {search_result} [cyan]Episode:[/] {episode}"
)
except StopIteration:
print("[green]Completed binge sequence[/]:smile:")
return
if not episode or episode not in episodes:
choices = [*episodes, "end"]
choices = list(search_results_.keys())
if config.use_fzf:
episode = fzf.run(
choices, "Select an episode: ", header=search_result
search_result_manga_title = fzf.run(
choices, "Please Select title", "FastAnime"
)
elif config.use_rofi:
episode = Rofi.run(choices, "Select an episode")
search_result_manga_title = Rofi.run(choices, "Please Select Title")
else:
episode = fuzzy_inquirer(
search_result_manga_title = fuzzy_inquirer(
choices,
"Select episode",
"Please Select Title",
)
if episode == "end":
return
# ---- fetch streams ----
# ---- fetch selected anime ----
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams(
anime, episode, config.translation_type
progress.add_task("Fetching Anime...", total=None)
anime: Anime | None = anime_provider.get_anime(
search_results_[search_result_manga_title]["id"]
)
if not streams:
print("Failed to get streams")
if not anime:
print("Sth went wring anime no found")
input("Enter to continue...")
search(config, anime_title, episode_range)
return
episodes_range = []
episodes: list[str] = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
)
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
episodes_range = iter(episodes_range)
if config.normalize_titles:
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
def stream_anime(anime: "Anime"):
clear()
episode = None
if episodes_range:
try:
episode = next(episodes_range) # pyright:ignore
print(
f"[cyan]Auto selecting:[/] {search_result_manga_title} [cyan]Episode:[/] {episode}"
)
except StopIteration:
print("[green]Completed binge sequence[/]:smile:")
return
if not episode or episode not in episodes:
choices = [*episodes, "end"]
if config.use_fzf:
episode = fzf.run(
choices,
"Select an episode",
header=search_result_manga_title,
)
elif config.use_rofi:
episode = Rofi.run(choices, "Select an episode")
else:
episode = fuzzy_inquirer(
choices,
"Select episode",
)
if episode == "end":
return
try:
# ---- fetch servers ----
if config.server == "top":
with Progress() as progress:
progress.add_task("Fetching top server...", total=None)
server = next(streams, None)
if not server:
print("Sth went wrong when fetching the episode")
# ---- fetch streams ----
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams(
anime["id"], episode, config.translation_type
)
if not streams:
print("Failed to get streams")
return
try:
# ---- fetch servers ----
if config.server == "top":
with Progress() as progress:
progress.add_task("Fetching top server...", total=None)
server = next(streams, None)
if not server:
print("Sth went wrong when fetching the episode")
input("Enter to continue")
stream_anime(anime)
return
stream_link = filter_by_quality(config.quality, server["links"])
if not stream_link:
print("Quality not found")
input("Enter to continue")
stream_anime()
stream_anime(anime)
return
stream_link = filter_by_quality(config.quality, server["links"])
if not stream_link:
print("Quality not found")
input("Enter to continue")
stream_anime()
return
link = stream_link["link"]
subtitles = server["subtitles"]
stream_headers = server["headers"]
episode_title = server["episode_title"]
else:
with Progress() as progress:
progress.add_task("Fetching servers", total=None)
# prompt for server selection
servers = {server["server"]: server for server in streams}
servers_names = list(servers.keys())
if config.server in servers_names:
server = config.server
link = stream_link["link"]
subtitles = server["subtitles"]
stream_headers = server["headers"]
episode_title = server["episode_title"]
else:
if config.use_fzf:
server = fzf.run(servers_names, "Select an link: ")
elif config.use_rofi:
server = Rofi.run(servers_names, "Select an link")
with Progress() as progress:
progress.add_task("Fetching servers", total=None)
# prompt for server selection
servers = {server["server"]: server for server in streams}
servers_names = list(servers.keys())
if config.server in servers_names:
server = config.server
else:
server = fuzzy_inquirer(
servers_names,
"Select link",
)
stream_link = filter_by_quality(
config.quality, servers[server]["links"]
)
if not stream_link:
print("Quality not found")
input("Enter to continue")
stream_anime()
return
link = stream_link["link"]
stream_headers = servers[server]["headers"]
subtitles = servers[server]["subtitles"]
episode_title = servers[server]["episode_title"]
if config.use_fzf:
server = fzf.run(servers_names, "Select a link")
elif config.use_rofi:
server = Rofi.run(servers_names, "Select a link")
else:
server = fuzzy_inquirer(
servers_names,
"Select link",
)
stream_link = filter_by_quality(
config.quality, servers[server]["links"]
)
if not stream_link:
print("Quality not found")
input("Enter to continue")
stream_anime(anime)
return
link = stream_link["link"]
stream_headers = servers[server]["headers"]
subtitles = servers[server]["subtitles"]
episode_title = servers[server]["episode_title"]
selected_anime_title = search_result
if anilist_anime_info:
selected_anime_title = (
anilist_anime_info["title"][config.preferred_language]
or anilist_anime_info["title"]["romaji"]
or anilist_anime_info["title"]["english"]
)
import re
selected_anime_title = search_result_manga_title
if anilist_anime_info:
selected_anime_title = (
anilist_anime_info["title"][config.preferred_language]
or anilist_anime_info["title"]["romaji"]
or anilist_anime_info["title"]["english"]
)
import re
for episode_detail in anilist_anime_info["episodes"]:
if re.match(f"Episode {episode} ", episode_detail["title"]):
episode_title = episode_detail["title"]
break
print(
f"[purple]Now Playing:[/] {selected_anime_title} Episode {episode}"
)
subtitles = move_preferred_subtitle_lang_to_top(
subtitles, config.sub_lang
)
if config.sync_play:
from ..utils.syncplay import SyncPlayer
SyncPlayer(
link, episode_title, headers=stream_headers, subtitles=subtitles
for episode_detail in anilist_anime_info["episodes"]:
if re.match(f"Episode {episode} ", episode_detail["title"]):
episode_title = episode_detail["title"]
break
print(
f"[purple]Now Playing:[/] {selected_anime_title} Episode {episode}"
)
else:
run_mpv(
link, episode_title, headers=stream_headers, subtitles=subtitles
subtitles = move_preferred_subtitle_lang_to_top(
subtitles, config.sub_lang
)
except IndexError as e:
print(e)
input("Enter to continue")
stream_anime()
if config.sync_play:
from ..utils.syncplay import SyncPlayer
stream_anime()
SyncPlayer(
link,
episode_title,
headers=stream_headers,
subtitles=subtitles,
)
else:
run_mpv(
link,
episode_title,
headers=stream_headers,
subtitles=subtitles,
player=config.player,
)
except IndexError as e:
print(e)
input("Enter to continue")
stream_anime(anime)
stream_anime(anime)
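
The fzf/rofi/fuzzy_inquirer branching above is repeated for every prompt in this command; a hedged sketch of the shared pattern (the helper name is illustrative and assumes the same fzf, Rofi and fuzzy_inquirer imports already used by the search command):

def choose(config, choices: list[str], prompt: str, header: str = "FastAnime") -> str:
    # pick whichever selector backend the user configured
    if config.use_fzf:
        return fzf.run(choices, prompt, header=header)
    if config.use_rofi:
        return Rofi.run(choices, prompt)
    return fuzzy_inquirer(choices, prompt)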

View File

@@ -0,0 +1,31 @@
import click
@click.command(
help="Command that automates the starting of the builtin fastanime server",
epilog="""
\b
\b\bExamples:
# default
fastanime serve
# specify host and port
fastanime serve --host 127.0.0.1 --port 8080
""",
)
@click.option("--host", "-H", help="Specify the host to run the server on")
@click.option("--port", "-p", help="Specify the port to run the server on")
def serve(host, port):
import os
import sys
from ...constants import APP_DIR
args = [sys.executable, "-m", "fastapi", "run"]
if host:
args.extend(["--host", host])
if port:
args.extend(["--port", port])
args.append(os.path.join(APP_DIR, "api"))
os.execv(sys.executable, args)
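
The os.execv call above replaces the fastanime process with the FastAPI CLI (args[0] is sys.executable on purpose, since execv treats the second argument as the full argv). A hedged, non-replacing equivalent that can be handy for quick testing (function name is illustrative):

import subprocess
import sys

def serve_blocking(app_dir: str, host: str | None = None, port: str | None = None) -> int:
    # same command line as above, but run as a child process instead of exec-replacing
    args = [sys.executable, "-m", "fastapi", "run"]
    if host:
        args.extend(["--host", host])
    if port:
        args.extend(["--port", port])
    args.append(app_dir)
    return subprocess.call(args)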

View File

@@ -1,11 +1,24 @@
import click
@click.command(help="Helper command to update fastanime to latest")
@click.command(
help="Helper command to update fastanime to latest",
epilog="""
\b
\b\bExamples:
# update fastanime to latest
fastanime update
\b
# check for latest release
fastanime update --check
# Force an update regardless of the current version
fastanime update --force
""",
)
@click.option("--check", "-c", help="Check for the latest release", is_flag=True)
def update(
check,
):
@click.option("--force", "-c", help="Force update", is_flag=True)
def update(check, force):
from rich.console import Console
from rich.markdown import Markdown
@@ -34,7 +47,7 @@ def update(
print(f"You are running the latest version ({__version__}) of fastanime")
_print_release(github_release_data)
else:
success, github_release_data = update_app()
success, github_release_data = update_app(force)
_print_release(github_release_data)
if success:
print("Successfully updated")

View File

@@ -6,20 +6,20 @@ ANILIST_ENDPOINT = "https://graphql.anilist.co"
anime_title_query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
}
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
}
media(search: $query, type: ANIME) {
id
idMal
title {
romaji
english
}
}
}
}
"""
@@ -63,6 +63,22 @@ def get_anime_titles(query: str, variables: dict = {}):
return []
def downloaded_anime_titles(ctx, param, incomplete):
import os
from ..constants import USER_VIDEOS_DIR
try:
titles = [
title
for title in os.listdir(USER_VIDEOS_DIR)
if title.lower().startswith(incomplete.lower()) or not incomplete
]
return titles
except Exception:
return []
def anime_titles_shell_complete(ctx, param, incomplete):
incomplete = incomplete.strip()
if not incomplete:

View File

@@ -4,9 +4,13 @@ import os
from configparser import ConfigParser
from typing import TYPE_CHECKING
from rich import print
from ..constants import USER_CONFIG_PATH, USER_DATA_PATH, USER_VIDEOS_DIR
from ..constants import (
USER_CONFIG_PATH,
USER_DATA_PATH,
USER_VIDEOS_DIR,
USER_WATCH_HISTORY_PATH,
S_PLATFORM,
)
from ..libs.rofi import Rofi
logger = logging.getLogger(__name__)
@@ -15,159 +19,172 @@ if TYPE_CHECKING:
class Config(object):
"""class that handles and manages configuration and user data throughout the clis lifespan
Attributes:
anime_list: [TODO:attribute]
watch_history: [TODO:attribute]
fastanime_anilist_app_login_url: [TODO:attribute]
anime_provider: [TODO:attribute]
user_data: [TODO:attribute]
configparser: [TODO:attribute]
downloads_dir: [TODO:attribute]
provider: [TODO:attribute]
use_fzf: [TODO:attribute]
use_rofi: [TODO:attribute]
skip: [TODO:attribute]
icons: [TODO:attribute]
preview: [TODO:attribute]
translation_type: [TODO:attribute]
sort_by: [TODO:attribute]
continue_from_history: [TODO:attribute]
auto_next: [TODO:attribute]
auto_select: [TODO:attribute]
use_mpv_mod: [TODO:attribute]
quality: [TODO:attribute]
notification_duration: [TODO:attribute]
error: [TODO:attribute]
server: [TODO:attribute]
format: [TODO:attribute]
force_window: [TODO:attribute]
preferred_language: [TODO:attribute]
rofi_theme: [TODO:attribute]
rofi_theme: [TODO:attribute]
rofi_theme_input: [TODO:attribute]
rofi_theme_input: [TODO:attribute]
rofi_theme_confirm: [TODO:attribute]
rofi_theme_confirm: [TODO:attribute]
watch_history: [TODO:attribute]
anime_list: [TODO:attribute]
user: [TODO:attribute]
"""
manga = False
sync_play = False
anime_list: list
watch_history: dict
watch_history: dict = {}
fastanime_anilist_app_login_url = (
"https://anilist.co/api/v2/oauth/authorize?client_id=20148&response_type=token"
)
anime_provider: "AnimeProvider"
user_data = {"watch_history": {}, "animelist": [], "user": {}}
user_data = {"recent_anime": [], "animelist": [], "user": {}}
default_config = {
"auto_next": "False",
"auto_select": "True",
"cache_requests": "true",
"continue_from_history": "True",
"default_media_list_tracking": "None",
"downloads_dir": USER_VIDEOS_DIR,
"disable_mpv_popen": "True",
"episode_complete_at": "80",
"ffmpegthumbnailer_seek_time": "-1",
"force_forward_tracking": "true",
"force_window": "immediate",
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"icons": "false",
"image_previews": "True" if S_PLATFORM != "win32" else "False",
"normalize_titles": "True",
"notification_duration": "2",
"player": "mpv",
"preferred_history": "local",
"preferred_language": "english",
"preview": "False",
"provider": "allanime",
"quality": "1080",
"recent": "50",
"rofi_theme": "",
"rofi_theme_preview": "",
"rofi_theme_confirm": "",
"rofi_theme_input": "",
"server": "top",
"skip": "false",
"sort_by": "search match",
"sub_lang": "eng",
"translation_type": "sub",
"use_fzf": "False",
"use_persistent_provider_store": "false",
"use_python_mpv": "false",
"use_rofi": "false",
}
def __init__(self) -> None:
self.initialize_user_data()
self.initialize_user_data_and_watch_history_recent_anime()
self.load_config()
def load_config(self):
self.configparser = ConfigParser(
{
"quality": "1080",
"auto_next": "False",
"auto_select": "True",
"sort_by": "search match",
"downloads_dir": USER_VIDEOS_DIR,
"translation_type": "sub",
"server": "top",
"continue_from_history": "True",
"preferred_history": "local",
"use_mpv_mod": "false",
"force_window": "immediate",
"preferred_language": "english",
"use_fzf": "False",
"preview": "False",
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"provider": "allanime",
"error": "3",
"icons": "false",
"notification_duration": "2",
"skip": "false",
"use_rofi": "false",
"rofi_theme": "",
"rofi_theme_input": "",
"rofi_theme_confirm": "",
"ffmpegthumnailer_seek_time": "-1",
"sub_lang": "eng",
"normalize_titles": "true",
}
)
self.configparser = ConfigParser(self.default_config)
self.configparser.add_section("stream")
self.configparser.add_section("general")
self.configparser.add_section("anilist")
if not os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "w") as config:
self.configparser.write(config)
self.configparser.read(USER_CONFIG_PATH)
# --- set config values from file or using defaults ---
self.downloads_dir = self.get_downloads_dir()
self.sub_lang = self.get_sub_lang()
self.provider = self.get_provider()
self.use_fzf = self.get_use_fzf()
self.use_rofi = self.get_use_rofi()
self.skip = self.get_skip()
self.icons = self.get_icons()
self.preview = self.get_preview()
self.translation_type = self.get_translation_type()
self.sort_by = self.get_sort_by()
self.continue_from_history = self.get_continue_from_history()
if os.path.exists(USER_CONFIG_PATH):
self.configparser.read(USER_CONFIG_PATH, encoding="utf-8")
# TODO: rewrite all this removing the useless functions
# hate technical debt
# why did i do this lol
self.auto_next = self.get_auto_next()
self.normalize_titles = self.get_normalize_titles()
self.auto_select = self.get_auto_select()
self.use_mpv_mod = self.get_use_mpv_mod()
self.quality = self.get_quality()
self.notification_duration = self.get_notification_duration()
self.error = self.get_error()
self.server = self.get_server()
self.format = self.get_format()
self.force_window = self.get_force_window()
self.preferred_language = self.get_preferred_language()
self.preferred_history = self.get_preferred_history()
self.rofi_theme = self.get_rofi_theme()
Rofi.rofi_theme = self.rofi_theme
self.rofi_theme_input = self.get_rofi_theme_input()
Rofi.rofi_theme_input = self.rofi_theme_input
self.rofi_theme_confirm = self.get_rofi_theme_confirm()
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
self.cache_requests = self.get_cache_requests()
self.continue_from_history = self.get_continue_from_history()
self.default_media_list_tracking = self.get_default_media_list_tracking()
self.disable_mpv_popen = self.configparser.getboolean(
"stream", "disable_mpv_popen"
)
self.downloads_dir = self.get_downloads_dir()
self.episode_complete_at = self.get_episode_complete_at()
self.ffmpegthumbnailer_seek_time = self.get_ffmpegthumnailer_seek_time()
self.force_forward_tracking = self.get_force_forward_tracking()
self.force_window = self.get_force_window()
self.format = self.get_format()
self.icons = self.get_icons()
self.image_previews = self.get_image_previews()
self.normalize_titles = self.get_normalize_titles()
self.notification_duration = self.get_notification_duration()
self.player = self.get_player()
self.preferred_history = self.get_preferred_history()
self.preferred_language = self.get_preferred_language()
self.preview = self.get_preview()
self.provider = self.get_provider()
self.quality = self.get_quality()
self.recent = self.get_recent()
self.rofi_theme_confirm = self.get_rofi_theme_confirm()
self.rofi_theme_input = self.get_rofi_theme_input()
self.rofi_theme = self.get_rofi_theme()
self.rofi_theme_preview = self.get_rofi_theme_preview()
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
Rofi.rofi_theme_input = self.rofi_theme_input
Rofi.rofi_theme = self.rofi_theme
Rofi.rofi_theme_preview = self.rofi_theme_preview
self.server = self.get_server()
self.skip = self.get_skip()
self.sort_by = self.get_sort_by()
self.sub_lang = self.get_sub_lang()
self.translation_type = self.get_translation_type()
self.use_fzf = self.get_use_fzf()
self.use_python_mpv = self.get_use_mpv_mod()
self.use_rofi = self.get_use_rofi()
self.use_persistent_provider_store = self.get_use_persistent_provider_store()
# ---- setup user data ------
self.watch_history: dict = self.user_data.get("watch_history", {})
self.anime_list: list = self.user_data.get("animelist", [])
self.user: dict = self.user_data.get("user", {})
os.environ["CURRENT_FASTANIME_PROVIDER"] = self.provider
if not os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as config:
config.write(self.__repr__())
def set_fastanime_config_environs(self):
current_config = []
for key in self.default_config:
current_config.append((f"FASTANIME_{key.upper()}", str(getattr(self, key))))
os.environ.update(current_config)
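
set_fastanime_config_environs above exports every default_config key as FASTANIME_<KEY> with the stringified current value; a hedged sketch of how an external helper script (for example an fzf preview script) could read them back (variable names below are illustrative):

import os

provider = os.environ.get("FASTANIME_PROVIDER", "allanime")
seek_time = int(os.environ.get("FASTANIME_FFMPEGTHUMBNAILER_SEEK_TIME", "-1"))
# booleans arrive as the strings "True"/"False" because the values are stringified
image_previews = os.environ.get("FASTANIME_IMAGE_PREVIEWS", "False") == "True"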
def update_user(self, user):
self.user = user
self.user_data["user"] = user
self._update_user_data()
def update_watch_history(
self, anime_id: int, episode: str, start_time="0", total_time="0"
def update_recent(self, recent_anime: list):
recent_anime_ids = []
_recent_anime = []
for anime in recent_anime[::-1]:
if (
anime["id"] not in recent_anime_ids
and len(recent_anime_ids) <= self.recent
):
_recent_anime.append(anime)
recent_anime_ids.append(anime["id"])
self.user_data["recent_anime"] = _recent_anime
self._update_user_data()
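
update_recent above walks the combined list newest-first and keeps only the first occurrence of each id, capped by the recent config value; a minimal standalone sketch of that dedup (helper name illustrative, mirroring the <= comparison used above):

def dedupe_recent(recent_anime: list[dict], limit: int = 50) -> list[dict]:
    seen_ids: list = []
    deduped: list[dict] = []
    # newest entries come last in the stored list, so iterate in reverse
    for anime in recent_anime[::-1]:
        if anime["id"] not in seen_ids and len(seen_ids) <= limit:
            deduped.append(anime)
            seen_ids.append(anime["id"])
    return deduped

# dedupe_recent([{"id": 1}, {"id": 2}, {"id": 1}]) keeps the most recent {"id": 1} and drops the older duplicate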
def media_list_track(
self,
anime_id: int,
episode_no: str,
episode_stopped_at="0",
episode_total_length="0",
progress_tracking="prompt",
):
self.watch_history.update(
{
str(anime_id): {
"episode": episode,
"start_time": start_time,
"total_time": total_time,
"episode_no": episode_no,
"episode_stopped_at": episode_stopped_at,
"episode_total_length": episode_total_length,
"progress_tracking": progress_tracking,
}
}
)
self.user_data["watch_history"] = self.watch_history
self._update_user_data()
with open(USER_WATCH_HISTORY_PATH, "w") as f:
json.dump(self.watch_history, f)
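
For reference, media_list_track above persists entries shaped like the following to watch_history.json (the anilist id and timestamps here are made up; timestamps are the H:MM:SS strings reported by the player):

example_watch_history = {
    "21": {  # anilist media id as a string key
        "episode_no": "1090",
        "episode_stopped_at": "00:19:32",
        "episode_total_length": "00:23:40",
        "progress_tracking": "track",
    }
}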
def initialize_user_data(self):
def initialize_user_data_and_watch_history_recent_anime(self):
try:
if os.path.isfile(USER_DATA_PATH):
with open(USER_DATA_PATH, "r") as f:
@@ -175,6 +192,13 @@ class Config(object):
self.user_data.update(user_data)
except Exception as e:
logger.error(e)
try:
if os.path.isfile(USER_WATCH_HISTORY_PATH):
with open(USER_WATCH_HISTORY_PATH, "r") as f:
watch_history = json.load(f)
self.watch_history.update(watch_history)
except Exception as e:
logger.error(e)
def _update_user_data(self):
"""method that updates the actual user data file"""
@@ -188,7 +212,7 @@ class Config(object):
return self.configparser.get("general", "provider")
def get_ffmpegthumnailer_seek_time(self):
return self.configparser.getint("general", "ffmpegthumnailer_seek_time")
return self.configparser.getint("general", "ffmpegthumbnailer_seek_time")
def get_preferred_language(self):
return self.configparser.get("general", "preferred_language")
@@ -202,12 +226,18 @@ class Config(object):
def get_icons(self):
return self.configparser.getboolean("general", "icons")
def get_image_previews(self):
return self.configparser.getboolean("general", "image_previews")
def get_preview(self):
return self.configparser.getboolean("general", "preview")
def get_use_fzf(self):
return self.configparser.getboolean("general", "use_fzf")
def get_use_persistent_provider_store(self):
return self.configparser.getboolean("general", "use_persistent_provider_store")
# rofi configuration
def get_use_rofi(self):
return self.configparser.getboolean("general", "use_rofi")
@@ -215,15 +245,30 @@ class Config(object):
def get_rofi_theme(self):
return self.configparser.get("general", "rofi_theme")
def get_rofi_theme_preview(self):
return self.configparser.get("general", "rofi_theme_preview")
def get_rofi_theme_input(self):
return self.configparser.get("general", "rofi_theme_input")
def get_rofi_theme_confirm(self):
return self.configparser.get("general", "rofi_theme_confirm")
def get_force_forward_tracking(self):
return self.configparser.getboolean("general", "force_forward_tracking")
def get_cache_requests(self):
return self.configparser.getboolean("general", "cache_requests")
def get_default_media_list_tracking(self):
return self.configparser.get("general", "default_media_list_tracking")
def get_normalize_titles(self):
return self.configparser.getboolean("general", "normalize_titles")
def get_recent(self):
return self.configparser.getint("general", "recent")
# --- stream section ---
def get_skip(self):
return self.configparser.getboolean("stream", "skip")
@@ -238,13 +283,13 @@ class Config(object):
return self.configparser.getboolean("stream", "continue_from_history")
def get_use_mpv_mod(self):
return self.configparser.getboolean("stream", "use_mpv_mod")
return self.configparser.getboolean("stream", "use_python_mpv")
def get_notification_duration(self):
return self.configparser.getint("general", "notification_duration")
def get_error(self):
return self.configparser.getint("stream", "error")
def get_episode_complete_at(self):
return self.configparser.getint("stream", "episode_complete_at")
def get_force_window(self):
return self.configparser.get("stream", "force_window")
@@ -264,6 +309,9 @@ class Config(object):
def get_format(self):
return self.configparser.get("stream", "format")
def get_player(self):
return self.configparser.get("stream", "player")
def get_sort_by(self):
return self.configparser.get("anilist", "sort_by")
@@ -273,111 +321,248 @@ class Config(object):
self.configparser.write(config)
def __repr__(self):
current_config_state = f"""
current_config_state = f"""\
#
# ███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░
# ██╔════╝██╔══██╗██╔════╝╚══██╔══╝██╔══██╗████╗░██║██║████╗░████║██╔════╝ ██╔══██╗██╔══██╗████╗░██║██╔════╝██║██╔════╝░
# █████╗░░███████║╚█████╗░░░░██║░░░███████║██╔██╗██║██║██╔████╔██║█████╗░░ ██║░░╚═╝██║░░██║██╔██╗██║█████╗░░██║██║░░██╗░
# ██╔══╝░░██╔══██║░╚═══██╗░░░██║░░░██╔══██║██║╚████║██║██║╚██╔╝██║██╔══╝░░ ██║░░██╗██║░░██║██║╚████║██╔══╝░░██║██║░░╚██╗
# ██║░░░░░██║░░██║██████╔╝░░░██║░░░██║░░██║██║░╚███║██║██║░╚═╝░██║███████╗ ╚█████╔╝╚█████╔╝██║░╚███║██║░░░░░██║╚██████╔╝
# ╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝ ░╚════╝░░╚════╝░╚═╝░░╚══╝╚═╝░░░░░╚═╝░╚═════╝░
#
[general]
# whether to show the icons in the tui [True/False]
# more like emojis
# by the way if you have any recommendations to which should be used where please
# don't hesitate to share your opinion
# cause it's a lot of work to look for the right one for each menu option
# be sure to also give the replacement emoji
icons = {self.icons}
# whether to normalize provider titles [True/False]
# basically takes the provider titles and finds the corresponding anilist title then changes the title to that
# useful for uniformity especially when downloading from different providers
# this also applies to episode titles
normalize_titles = {self.normalize_titles}
# can be [allanime, animepahe, hianime]
# allanime is the most reliable
# animepahe provides different links to streams of different quality so a quality can be selected reliably with the --quality option
# hianime usually provides subs in different languages and its servers are generally faster
provider = {self.provider}
# Display language [english, romaji]
# this is passed to anilist directly and is used to set the language which the anime titles will be in
# when using the anilist interface
preferred_language = {self.preferred_language}
# Download directory
# where you will find your videos after downloading them with 'fastanime download' command
downloads_dir = {self.downloads_dir}
# whether to show a preview window when using fzf or rofi [True/False]
# the preview requires you have a commandline image viewer as documented in the README
# this only applies when using fzf
# if you don't care about image previews it doesn't matter
# though it's awesome
# try it and you will see
preview = {self.preview}
# whether to show images in the preview [true/false]
image_previews = {self.image_previews}
# the time to seek when using ffmpegthumbnailer [-1 to 100]
# -1 means random and is the default
# ffmpegthumbnailer is used to generate previews and you can select at what time in the video to extract an image
# random makes things quite exciting since you never know at what time it will extract the image
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
# whether to use fzf as the interface for the anilist command and others. [True/False]
use_fzf = {self.use_fzf}
# whether to use rofi for the ui [True/False]
# it's more useful if you want to create a desktop entry
# which can be set up with 'fastanime config --desktop-entry'
# though you can also make it your sole interface even when fastanime is run directly from the terminal
use_rofi = {self.use_rofi}
# rofi themes to use
# the value of each of these options is the path to a rofi config file to use
# i chose to split it into three since it gives the best look and feel
# you can refer to the rofi demo on github to see for yourself
# by the way i recommend getting the rofi themes from this project;
rofi_theme = {self.rofi_theme}
rofi_theme_preview = {self.rofi_theme_preview}
rofi_theme_input = {self.rofi_theme_input}
rofi_theme_confirm = {self.rofi_theme_confirm}
# the duration in minutes a notification will stay in the screen
# used by notifier command
notification_duration = {self.notification_duration}
# used when the provider gives subs of different languages
# currently it's the case for:
# hianime
# the values for this option are the short names for countries
# regex is used to determine what you selected
sub_lang = {self.sub_lang}
# what is your default media list tracking [track/disabled/prompt]
# only affects your anilist anime list
# track - means your progress will always be reflected in your anilist anime list
# disabled - means progress tracking will no longer be reflected in your anime list
# prompt - means for every anime you will be prompted whether you want your progress to be tracked or not
default_media_list_tracking = {self.default_media_list_tracking}
# whether media list tracking should only be updated when the next episode is greater than the previous
# this affects only your anilist anime list
force_forward_tracking = {self.force_forward_tracking}
# whether to cache requests [true/false]
# this makes the experience better and faster
# as data need not always be fetched from the web server
# and can instead be fetched locally
# from the cached_requests_db
cache_requests = {self.cache_requests}
# whether to use a persistent store (basically a sqlitedb) for storing some data the provider requires
# to enable a seamless experience [true/false]
# this option exists primarily because i think it may help in the optimization
# of fastanime as a library in a website project
# for now i don't recommend changing it
# leave it as is
use_persistent_provider_store = {self.use_persistent_provider_store}
# number of recent anime to keep [0-50]
# 0 will disable recent anime tracking
recent = {self.recent}
[stream]
# Auto continue from watch history
# the quality of the stream [1080,720,480,360]
# this option is usually only reliable when:
# provider=animepahe
# since it provides links that actually point to streams of different qualities
# while the rest just point to another link that can provide the anime from the same server
quality = {self.quality}
# Auto continue from watch history [True/False]
# this will make fastanime to choose the episode that you last watched to completion
# and increment it by one
# and use that to auto select the episode you want to watch
continue_from_history = {self.continue_from_history}
# which hostory to use [local/remote]
# which history to use [local/remote]
# local history means it will just use the watch history stored locally in your device
# the file that stores it is called watch_history.json and is stored next to your config file
# remote means it ignores the last episode stored locally and instead uses the one in your anilist anime list
# this config option is useful if you want to overwrite your local history or import history covered from another device or platform
# since remote history will take precedence over what's available locally
preferred_history = {self.preferred_history}
# Preferred language for anime (options: dub, sub)
# Preferred language for anime [dub/sub]
translation_type = {self.translation_type}
# Default server (options: dropbox, sharepoint, wetransfer.gogoanime, top, wixmp)
# what server to use for a particular provider
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
# animepahe: [kwik]
# hianime: [HD1, HD2, StreamSB, StreamTape]
# 'top' can also be used as a value for this option
# 'top' will cause fastanime to auto select the first server it sees
# this saves on resources and is faster since not all servers are being fetched
server = {self.server}
# Auto-select next episode
# Auto select next episode [True/False]
# this makes fastanime increment the current episode number
# and then use that value to fetch the next episode instead of prompting
# this option is useful for binging
auto_next = {self.auto_next}
# Auto select the anime provider results with fuzzy find.
# Note this wont always be correct.But 99% of the time will be.
# Auto select the anime provider results with fuzzy find. [True/False]
# Note this won't always be correct
# this is because the providers sometimes use non-standard names
# that are their own preference rather than the official names
# But 99% of the time it will be accurate
# if this happens just turn off auto_select in the menus or from the commandline and manually select the correct anime title
# and then please open an issue at <> highlighting the normalized title and the title given by the provider for the anime you wished to watch
# or even better edit this file <> and open a pull request
auto_select = {self.auto_select}
# whether to skip the opening and ending theme songs
# whether to skip the opening and ending theme songs [True/False]
# NOTE: requires ani-skip to be in path
# for python-mpv users i am planning to create this functionality in python without the use of an external script
# so it's disabled for now
skip = {self.skip}
# the maximum delta time in minutes after which the episode should be considered as completed
# used in the continue from time stamp
error = {self.error}
# at what percentage progress should the episode be considered as completed [0-100]
# this value is used to determine whether to increment the current episode number and save it to your local list
# so you can continue immediately to the next episode without selecting it the next time you decide to watch the anime
# it is also used to determine whether your anilist anime list should be updated or not
episode_complete_at = {self.episode_complete_at}
# whether to use python-mpv
# whether to use python-mpv [True/False]
# to enable superior control over the player
# adding more options to it
use_mpv_mod = {self.use_mpv_mod}
# Enable this one and you will wonder why you did not discover fastanime sooner
# Since you basically don't have to close the player window
# to go to the next or previous episode, switch servers,
# change translation type or change to a given episode x
# so try it if you haven't already
# if you have any issues setting it up
# don't be afraid to ask
# especially on windows
# honestly it can be a pain to set it up there
# personally it took me quite some time to figure it out
# this is because of how windows handles shared libraries
# so just ask when you find yourself stuck
# or just switch to arch linux
use_python_mpv = {self.use_python_mpv}
# whether to use popen to get the timestamps for continue_from_history
# implemented because popen does not work for some reason in nixos
# if you are on nixos and you have a solution to this problem please share
# i will be glad to hear it 😄
# So for now ignore this option
# and anyway the new method of getting timestamps is better
disable_mpv_popen = {self.disable_mpv_popen}
# force mpv window
# the default 'immediate' just makes mpv open the window even if the video has not yet loaded
# done for aesthetics
# passed directly to mpv so values are same
force_window = immediate
# the format of downloaded anime and trailer
# based on yt-dlp format and passed directly to it
# learn more by looking it up on their site
# only works for downloaded anime if server=gogoanime
# since its the only one that offers different formats
# the others tend not to
# only works for downloaded anime if:
# provider=allanime, server=gogoanime
# provider=allanime, server=wixmp
# provider=hianime
# this is because they provide an m3u8 file that contains multiple quality streams
format = {self.format}
[general]
# set the player to use for streaming [mpv/vlc]
# while this option exists i will still recommend that you use mpv
# since you will miss out on some features if you use the others
player = {self.player}
# whether to normalize provider titles
normalize_titles = {self.normalize_titles}
# can be [allanime,animepahe]
provider = {self.provider}
# Display language (options: english, romaji)
preferred_language = {self.preferred_language}
# Download directory
downloads_dir = {self.downloads_dir}
# whether to show a preview window when using fzf or rofi
preview = {self.preview}
# the time to seek when using ffmpegthumbnailer [-1 to 100]
# -1 means random and is the default
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
# whether to use fzf as the interface for the anilist command and others.
use_fzf = {self.use_fzf}
# whether to use rofi for the ui
use_rofi = {self.use_rofi}
# rofi theme to use
rofi_theme = {self.rofi_theme}
rofi_theme_input = {self.rofi_theme_input}
rofi_theme_confirm = {self.rofi_theme_confirm}
# whether to show the icons
icons = {self.icons}
# the duration in minutes a notification will stay in the screen
# used by notifier command
notification_duration = {self.notification_duration}
"""
# NOTE:
# if you have any trouble setting up your config
# please don't be afraid to ask in our discord
# plus if there are any errors, improvements or suggestions please tell us in the discord
# or help us by contributing
# we appreciate all the help we can get
# since we may not always have the time to immediately implement the changes
#
# HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
#
"""
return current_config_state
def __str__(self):
return self.__repr__()
# WARNING: deprecated and will probably be removed
def update_anime_list(self, anime_id: int, remove=False):
if remove:
try:
self.anime_list.remove(anime_id)
print("Succesfully removed :cry:")
except Exception:
print(anime_id, "Nothing to remove :confused:")
else:
self.anime_list.append(anime_id)
self.user_data["animelist"] = list(set(self.anime_list))
self._update_user_data()
print("Succesfully added :smile:")
input("Enter to continue...")

View File

@@ -2,7 +2,6 @@ from __future__ import annotations
import os
import random
from datetime import datetime
from typing import TYPE_CHECKING
from click import clear
@@ -35,8 +34,7 @@ if TYPE_CHECKING:
from ..utils.tools import FastAnimeRuntimeState
# TODO: make the error handling more sane
def calculate_time_delta(start_time, end_time):
def calculate_percentage_completion(start_time, end_time):
"""helper function used to calculate the difference between two timestamps in seconds
Args:
@@ -46,16 +44,12 @@ def calculate_time_delta(start_time, end_time):
Returns:
[TODO:return]
"""
time_format = "%H:%M:%S"
# Convert string times to datetime objects
start = datetime.strptime(start_time, time_format)
end = datetime.strptime(end_time, time_format)
# Calculate the difference
delta = end - start
return delta
start = start_time.split(":")
end = end_time.split(":")
start_secs = int(start[0]) * 3600 + int(start[1]) * 60 + int(start[2])
end_secs = int(end[0]) * 3600 + int(end[1]) * 60 + int(end[2])
return start_secs / end_secs * 100
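
A quick sanity check of the helper above, with values worked out by hand (timestamps are the H:MM:SS strings stored in the watch history):

# worked examples for calculate_percentage_completion
assert round(calculate_percentage_completion("00:19:32", "00:23:40"), 1) == 82.5
assert round(calculate_percentage_completion("00:10:00", "00:23:40"), 1) == 42.3
# with the default episode_complete_at of 80, the first counts as completed
# (history advances to the next episode) while the second keeps the current one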
def media_player_controls(
@@ -103,10 +97,12 @@ def media_player_controls(
)
if (
config.watch_history[str(anime_id_anilist)]["episode"]
config.watch_history[str(anime_id_anilist)]["episode_no"]
== current_episode_number
):
start_time = config.watch_history[str(anime_id_anilist)]["start_time"]
start_time = config.watch_history[str(anime_id_anilist)][
"episode_stopped_at"
]
print("[green]Continuing from:[/] ", start_time)
else:
start_time = "0"
@@ -141,7 +137,7 @@ def media_player_controls(
headers=selected_server["headers"],
subtitles=subtitles,
)
elif config.use_mpv_mod:
elif config.use_python_mpv:
from ..utils.player import player
player.create_player(
@@ -164,15 +160,17 @@ def media_player_controls(
custom_args=custom_args,
headers=selected_server["headers"],
subtitles=subtitles,
player=config.player,
)
# either update the watch history to the next episode or current depending on progress
if stop_time == "0" or total_time == "0":
episode = str(int(current_episode_number) + 1)
else:
error = 5 * 60
delta = calculate_time_delta(stop_time, total_time)
if delta.total_seconds() > error:
percentage_completion_of_episode = calculate_percentage_completion(
stop_time, total_time
)
if percentage_completion_of_episode < config.episode_complete_at:
episode = current_episode_number
else:
episode = str(int(current_episode_number) + 1)
@@ -180,28 +178,34 @@ def media_player_controls(
total_time = "0"
clear()
config.update_watch_history(anime_id_anilist, episode, stop_time, total_time)
config.media_list_track(
anime_id_anilist,
episode_no=episode,
episode_stopped_at=stop_time,
episode_total_length=total_time,
progress_tracking=fastanime_runtime_state.progress_tracking,
)
media_player_controls(config, fastanime_runtime_state)
def _next_episode():
"""watch the next episode"""
# ensures you don't accidentally erase your progress for an incomplete episode
stop_time = config.watch_history.get(str(anime_id_anilist), {}).get(
"start_time", "0"
"episode_stopped_at", "0"
)
total_time = config.watch_history.get(str(anime_id_anilist), {}).get(
"total_time", "0"
"episode_total_length", "0"
)
# compute if the episode is actually completed
error = config.error * 60
if stop_time == "0" or total_time == "0":
dt = 0
percentage_completion_of_episode = 0
else:
delta = calculate_time_delta(stop_time, total_time)
dt = delta.total_seconds()
if dt > error:
percentage_completion_of_episode = calculate_percentage_completion(
stop_time, total_time
)
if percentage_completion_of_episode < config.episode_complete_at:
if config.auto_next:
if config.use_rofi:
if not Rofi.confirm(
@@ -221,7 +225,7 @@ def media_player_controls(
"Are you sure you wish to continue to the next episode, your progress for the current episodes will be erased?",
default=True,
):
media_player_controls(config, fastanime_runtime_state)
media_actions_menu(config, fastanime_runtime_state)
return
# all checks have passed, let's go to the next episode
@@ -235,7 +239,11 @@ def media_player_controls(
]
# update user config
config.update_watch_history(anime_id_anilist, available_episodes[next_episode])
config.media_list_track(
anime_id_anilist,
episode_no=available_episodes[next_episode],
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# call interface
provider_anime_episode_servers_menu(config, fastanime_runtime_state)
@@ -259,7 +267,11 @@ def media_player_controls(
]
# update user config
config.update_watch_history(anime_id_anilist, available_episodes[prev_episode])
config.media_list_track(
anime_id_anilist,
episode_no=available_episodes[prev_episode],
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# call interface
provider_anime_episode_servers_menu(config, fastanime_runtime_state)
@@ -272,7 +284,7 @@ def media_player_controls(
# prompt for new quality
if config.use_fzf:
quality = fzf.run(
options, prompt="Select Quality:", header="Quality Options"
options, prompt="Select Quality", header="Quality Options"
)
elif config.use_rofi:
quality = Rofi.run(options, "Select Quality")
@@ -290,7 +302,7 @@ def media_player_controls(
options = ["sub", "dub"]
if config.use_fzf:
translation_type = fzf.run(
options, prompt="Select Translation Type: ", header="Lang Options"
options, prompt="Select Translation Type", header="Lang Options"
).lower()
elif config.use_rofi:
translation_type = Rofi.run(options, "Select Translation Type")
@@ -336,7 +348,7 @@ def media_player_controls(
if config.use_fzf:
action = fzf.run(
choices,
prompt="Select Action:",
prompt="Select Action",
)
elif config.use_rofi:
action = Rofi.run(choices, "Select Action")
@@ -370,15 +382,14 @@ def provider_anime_episode_servers_menu(
anime_id_anilist: int = fastanime_runtime_state.selected_anime_id_anilist
provider_anime: "Anime" = fastanime_runtime_state.provider_anime
server_name = None
server_name = ""
# get streams for episode from provider
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
episode_streams_generator = anime_provider.get_episode_streams(
provider_anime,
provider_anime["id"],
current_episode_number,
translation_type,
fastanime_runtime_state.selected_anime_anilist,
)
if not episode_streams_generator:
if not config.use_rofi:
@@ -387,7 +398,7 @@ def provider_anime_episode_servers_menu(
else:
if not Rofi.confirm("Sth went wrong!!Enter to continue..."):
exit(1)
provider_anime_episode_servers_menu(config, fastanime_runtime_state)
media_actions_menu(config, fastanime_runtime_state)
return
if config.server == "top":
@@ -435,7 +446,7 @@ def provider_anime_episode_servers_menu(
if config.use_fzf:
server_name = fzf.run(
choices,
prompt="Select Server: ",
prompt="Select Server",
header="Servers",
)
elif config.use_rofi:
@@ -496,21 +507,12 @@ def provider_anime_episode_servers_menu(
"[bold magenta] Episode: [/]",
current_episode_number,
)
# -- update anilist progress if user --
if config.user and current_episode_number:
AniList.update_anime_list(
{
"mediaId": anime_id_anilist,
"progress": int(float(current_episode_number)),
}
)
# try to get the timestamp you left off at, if available
start_time = config.watch_history.get(str(anime_id_anilist), {}).get(
"start_time", "0"
"episode_stopped_at", "0"
)
episode_in_history = config.watch_history.get(str(anime_id_anilist), {}).get(
"episode", ""
"episode_no", ""
)
if start_time != "0" and episode_in_history == current_episode_number:
print("[green]Continuing from:[/] ", start_time)
@@ -537,6 +539,14 @@ def provider_anime_episode_servers_menu(
episode_title = episode_detail["title"]
break
if config.recent:
config.update_recent(
[
*config.user_data["recent_anime"],
fastanime_runtime_state.selected_anime_anilist,
]
)
print("Updating recent anime...")
if config.sync_play:
from ..utils.syncplay import SyncPlayer
@@ -546,7 +556,7 @@ def provider_anime_episode_servers_menu(
headers=selected_server["headers"],
subtitles=subtitles,
)
elif config.use_mpv_mod:
elif config.use_python_mpv:
from ..utils.player import player
if start_time == "0" and episode_in_history != current_episode_number:
@@ -575,6 +585,7 @@ def provider_anime_episode_servers_menu(
custom_args=custom_args,
headers=selected_server["headers"],
subtitles=subtitles,
player=config.player,
)
print("Finished at: ", stop_time)
@@ -582,34 +593,56 @@ def provider_anime_episode_servers_menu(
# this will try to update the episode to be the next episode if the delta has reached a specific threshold
# this update will only apply locally
# the remote (AniList) is only updated when it's certain you are going to open the player
available_episodes: list = sorted(
fastanime_runtime_state.provider_available_episodes, key=float
)
if stop_time == "0" or total_time == "0":
# increment the episodes
next_episode = available_episodes.index(current_episode_number) + 1
if next_episode >= len(available_episodes):
next_episode = len(available_episodes) - 1
episode = available_episodes[next_episode]
# next_episode = available_episodes.index(current_episode_number) + 1
# if next_episode >= len(available_episodes):
# next_episode = len(available_episodes) - 1
# episode = available_episodes[next_episode]
pass
else:
error = config.error * 60
delta = calculate_time_delta(stop_time, total_time)
if delta.total_seconds() > error:
episode = current_episode_number
else:
# increment the episodes
next_episode = available_episodes.index(current_episode_number) + 1
if next_episode >= len(available_episodes):
next_episode = len(available_episodes) - 1
episode = available_episodes[next_episode]
stop_time = "0"
total_time = "0"
percentage_completion_of_episode = calculate_percentage_completion(
stop_time, total_time
)
if percentage_completion_of_episode > config.episode_complete_at:
# -- update AniList progress if the user is logged in --
remote_progress = (
fastanime_runtime_state.selected_anime_anilist["mediaListEntry"] or {}
).get("progress")
disable_anilist_update = False
if remote_progress:
if (
float(remote_progress) > float(current_episode_number)
and config.force_forward_tracking
):
disable_anilist_update = True
if (
fastanime_runtime_state.progress_tracking == "track"
and config.user
and not disable_anilist_update
and current_episode_number
):
AniList.update_anime_list(
{
"mediaId": anime_id_anilist,
"progress": int(float(current_episode_number)),
}
)
config.update_watch_history(
# increment the episodes
# next_episode = available_episodes.index(current_episode_number) + 1
# if next_episode >= len(available_episodes):
# next_episode = len(available_episodes) - 1
# episode = available_episodes[next_episode]
# stop_time = "0"
# total_time = "0"
config.media_list_track(
anime_id_anilist,
episode,
start_time=stop_time,
total_time=total_time,
episode_no=current_episode_number,
episode_stopped_at=stop_time,
episode_total_length=total_time,
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# switch to controls
@@ -641,7 +674,9 @@ def provider_anime_episodes_menu(
)
# prompt for episode number
total_episodes = provider_anime["availableEpisodesDetail"][translation_type]
available_episodes = sorted(
provider_anime["availableEpisodesDetail"][translation_type], key=float
)
current_episode_number = ""
# auto-select the episode when continuing from history, otherwise prompt for the episode number
@@ -649,16 +684,39 @@ def provider_anime_episodes_menu(
# the user's watch history that's locally available
# will be preferred over the remote one
if (
user_watch_history.get(str(anime_id_anilist), {}).get("episode")
in total_episodes
user_watch_history.get(str(anime_id_anilist), {}).get("episode_no")
in available_episodes
):
if (
config.preferred_history == "local"
or not selected_anime_anilist["mediaListEntry"]
):
current_episode_number = user_watch_history[str(anime_id_anilist)][
"episode"
"episode_no"
]
stop_time = user_watch_history.get(str(anime_id_anilist), {}).get(
"episode_stopped_at", "0"
)
total_time = user_watch_history.get(str(anime_id_anilist), {}).get(
"episode_total_length", "0"
)
if stop_time != "0" or total_time != "0":
percentage_completion_of_episode = calculate_percentage_completion(
stop_time, total_time
)
if percentage_completion_of_episode > config.episode_complete_at:
# increment the episodes
next_episode = (
available_episodes.index(current_episode_number) + 1
)
if next_episode >= len(available_episodes):
next_episode = len(available_episodes) - 1
episode = available_episodes[next_episode]
stop_time = "0"
total_time = "0"
current_episode_number = episode
else:
current_episode_number = str(
(selected_anime_anilist["mediaListEntry"] or {"progress": 0}).get(
@@ -676,7 +734,7 @@ def provider_anime_episodes_menu(
"progress"
)
)
if current_episode_number not in total_episodes:
if current_episode_number not in available_episodes:
current_episode_number = ""
print(
f"[bold cyan]Continuing from Episode:[/] [bold]{current_episode_number}[/]"
@@ -686,8 +744,8 @@ def provider_anime_episodes_menu(
current_episode_number = ""
# prompt for episode number if not set
if not current_episode_number or current_episode_number not in total_episodes:
choices = [*total_episodes, "Back"]
if not current_episode_number or current_episode_number not in available_episodes:
choices = [*available_episodes, "Back"]
preview = None
if config.preview:
from .utils import get_fzf_episode_preview
@@ -696,13 +754,13 @@ def provider_anime_episodes_menu(
if e:
eps = range(0, e + 1)
else:
eps = total_episodes
eps = available_episodes
preview = get_fzf_episode_preview(
fastanime_runtime_state.selected_anime_anilist, eps
)
if config.use_fzf:
current_episode_number = fzf.run(
choices, prompt="Select Episode:", header=anime_title, preview=preview
choices, prompt="Select Episode", header=anime_title, preview=preview
)
elif config.use_rofi:
current_episode_number = Rofi.run(choices, "Select Episode")
@@ -725,14 +783,16 @@ def provider_anime_episodes_menu(
# )
# update runtime data
fastanime_runtime_state.provider_available_episodes = total_episodes
fastanime_runtime_state.provider_available_episodes = available_episodes
fastanime_runtime_state.provider_current_episode_number = current_episode_number
# next interface
provider_anime_episode_servers_menu(config, fastanime_runtime_state)
def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"):
def fetch_anime_episode(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
selected_anime: "SearchResult" = (
fastanime_runtime_state.provider_anime_search_result
)
@@ -740,7 +800,7 @@ def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"
with Progress() as progress:
progress.add_task("Fetching Anime Info...", total=None)
provider_anime = anime_provider.get_anime(
selected_anime["id"], fastanime_runtime_state.selected_anime_anilist
selected_anime["id"],
)
if not provider_anime:
print(
@@ -751,7 +811,7 @@ def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"
else:
if not Rofi.confirm("Sth went wrong!!Enter to continue..."):
exit(1)
return fetch_anime_episode(config, fastanime_runtime_state)
return media_actions_menu(config, fastanime_runtime_state)
fastanime_runtime_state.provider_anime = provider_anime
provider_anime_episodes_menu(config, fastanime_runtime_state)
@@ -760,6 +820,39 @@ def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"
#
# ---- ANIME PROVIDER SEARCH RESULTS MENU ----
#
def set_prefered_progress_tracking(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState", update=False
):
if (
fastanime_runtime_state.progress_tracking == ""
or update
or fastanime_runtime_state.progress_tracking == "prompt"
):
if config.default_media_list_tracking == "track":
fastanime_runtime_state.progress_tracking = "track"
elif config.default_media_list_tracking == "disabled":
fastanime_runtime_state.progress_tracking = "disabled"
else:
options = ["disabled", "track"]
if config.use_fzf:
fastanime_runtime_state.progress_tracking = fzf.run(
options,
"Enter your preferred progress tracking for the current anime",
)
elif config.use_rofi:
fastanime_runtime_state.progress_tracking = Rofi.run(
options,
"Enter your preferred progress tracking for the current anime",
)
else:
fastanime_runtime_state.progress_tracking = fuzzy_inquirer(
options,
"Enter your preferred progress tracking for the current anime",
)
def anime_provider_search_results_menu(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
@@ -786,7 +879,6 @@ def anime_provider_search_results_menu(
provider_search_results = anime_provider.search_for_anime(
selected_anime_title,
translation_type,
selected_anime_anilist,
)
if not provider_search_results:
print(
@@ -797,7 +889,7 @@ def anime_provider_search_results_menu(
else:
if not Rofi.confirm("Sth went wrong!!Enter to continue..."):
exit(1)
return anime_provider_search_results_menu(config, fastanime_runtime_state)
return media_actions_menu(config, fastanime_runtime_state)
provider_search_results = {
anime["title"]: anime for anime in provider_search_results["results"]
@@ -826,7 +918,7 @@ def anime_provider_search_results_menu(
if config.use_fzf:
provider_anime_title = fzf.run(
choices,
prompt="Select Search Result:",
prompt="Select Search Result",
header="Anime Search Results",
)
@@ -848,6 +940,11 @@ def anime_provider_search_results_menu(
fastanime_runtime_state.provider_anime_search_result = provider_search_results[
provider_anime_title
]
fastanime_runtime_state.progress_tracking = config.watch_history.get(
str(fastanime_runtime_state.selected_anime_id_anilist), {}
).get("progress_tracking", "prompt")
set_prefered_progress_tracking(config, fastanime_runtime_state)
fetch_anime_episode(config, fastanime_runtime_state)
@@ -891,11 +988,12 @@ def media_actions_menu(
run_mpv(
trailer_url,
ytdl_format=config.format,
player=config.player,
)
media_actions_menu(config, fastanime_runtime_state)
else:
if not config.use_rofi:
print("no trailer available :confused:")
print("no trailer available :confused")
input("Enter to continue...")
else:
if not Rofi.confirm("No trailler found!!Enter to continue"):
@@ -1028,7 +1126,7 @@ def media_actions_menu(
options = ["Sub", "Dub"]
if config.use_fzf:
translation_type = fzf.run(
options, prompt="Select Translation Type:", header="Language Options"
options, prompt="Select Translation Type", header="Language Options"
)
elif config.use_rofi:
translation_type = Rofi.run(options, "Select Translation Type")
@@ -1057,10 +1155,10 @@ def media_actions_menu(
if config.use_fzf:
player = fzf.run(
options,
prompt="Select Player:",
prompt="Select Player",
)
elif config.use_rofi:
player = Rofi.run(options, "Select Player: ")
player = Rofi.run(options, "Select Player")
else:
player = fuzzy_inquirer(
options,
@@ -1070,13 +1168,13 @@ def media_actions_menu(
# update internal config
if player == "syncplay":
config.sync_play = True
config.use_mpv_mod = False
config.use_python_mpv = False
else:
config.sync_play = False
if player == "mpv-mod":
config.use_mpv_mod = True
config.use_python_mpv = True
else:
config.use_mpv_mod = False
config.use_python_mpv = False
media_actions_menu(config, fastanime_runtime_state)
def _view_info(config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"):
@@ -1197,7 +1295,7 @@ def media_actions_menu(
options = list(anime_sources.keys())
if config.use_fzf:
provider = fzf.run(
options, prompt="Select Translation Type:", header="Language Options"
options, prompt="Select Translation Type", header="Language Options"
)
elif config.use_rofi:
provider = Rofi.run(options, "Select Provider")
@@ -1236,12 +1334,19 @@ def media_actions_menu(
config.continue_from_history = False
anime_provider_search_results_menu(config, fastanime_runtime_state)
def _set_progress_tracking(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
set_prefered_progress_tracking(config, fastanime_runtime_state, update=True)
media_actions_menu(config, fastanime_runtime_state)
icons = config.icons
options = {
f"{'📽️ ' if icons else ''}Stream ({progress}/{episodes_total})": _stream_anime,
f"{'📽️ ' if icons else ''}Episodes": _select_episode_to_stream,
f"{'📼 ' if icons else ''}Watch Trailer": _watch_trailer,
f"{'' if icons else ''}Score Anime": _score_anime,
f"{'' if icons else ''}Progress Tracking": _set_progress_tracking,
f"{'📥 ' if icons else ''}Add to List": _add_to_list,
f"{'📤 ' if icons else ''}Remove from List": _remove_from_list,
f"{'📖 ' if icons else ''}View Info": _view_info,
@@ -1256,7 +1361,7 @@ def media_actions_menu(
}
choices = list(options.keys())
if config.use_fzf:
action = fzf.run(choices, prompt="Select Action:", header="Anime Menu")
action = fzf.run(choices, prompt="Select Action", header="Anime Menu")
elif config.use_rofi:
action = Rofi.run(choices, "Select Action")
else:
@@ -1279,7 +1384,9 @@ def anilist_results_menu(
config: [TODO:description]
fastanime_runtime_state: [TODO:description]
"""
search_results = fastanime_runtime_state.anilist_data["data"]["Page"]["media"]
search_results = fastanime_runtime_state.anilist_results_data["data"]["Page"][
"media"
]
anime_data = {}
for anime in search_results:
@@ -1322,14 +1429,14 @@ def anilist_results_menu(
preview = get_fzf_anime_preview(search_results, anime_data.keys())
selected_anime_title = fzf.run(
choices,
prompt="Select Anime: ",
prompt="Select Anime",
header="Search Results",
preview=preview,
)
else:
selected_anime_title = fzf.run(
choices,
prompt="Select Anime: ",
prompt="Select Anime",
header="Search Results",
)
elif config.use_rofi:
@@ -1340,7 +1447,7 @@ def anilist_results_menu(
choices = []
for title in anime_data.keys():
icon_path = os.path.join(IMAGES_CACHE_DIR, title)
choices.append(f"{title}\0icon\x1f{icon_path}")
choices.append(f"{title}\0icon\x1f{icon_path}.png")
choices.append("Back")
selected_anime_title = Rofi.run_with_icons(choices, "Select Anime")
else:
@@ -1482,6 +1589,9 @@ def fastanime_main_menu(
watch_history = list(map(int, config.watch_history.keys()))
return AniList.search(id_in=watch_history, sort="TRENDING_DESC")
def _recent():
return (True, {"data": {"Page": {"media": config.user_data["recent_anime"]}}})
# WARNING: Will probably be deprecated
def _anime_list():
anime_list = config.anime_list
@@ -1500,6 +1610,8 @@ def fastanime_main_menu(
else:
config.load_config()
config.set_fastanime_config_environs()
config.anime_provider.provider = config.provider
config.anime_provider.lazyload_provider(config.provider)
@@ -1509,6 +1621,7 @@ def fastanime_main_menu(
# each option maps to anilist data that is described by the option name
options = {
f"{'🔥 ' if icons else ''}Trending": AniList.get_trending,
f"{'🎞️ ' if icons else ''}Recent": _recent,
f"{'📺 ' if icons else ''}Watching": lambda media_list_type="Watching": handle_animelist(
config, fastanime_runtime_state, media_list_type
),
@@ -1544,7 +1657,7 @@ def fastanime_main_menu(
if config.use_fzf:
action = fzf.run(
choices,
prompt="Select Action: ",
prompt="Select Action",
header="Anilist Menu",
)
elif config.use_rofi:
@@ -1558,7 +1671,7 @@ def fastanime_main_menu(
# anilist data is a (bool,data)
# the bool indicated success
if anilist_data[0]:
fastanime_runtime_state.anilist_data = anilist_data[1]
fastanime_runtime_state.anilist_results_data = anilist_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -7,9 +7,9 @@ import textwrap
from threading import Thread
import requests
from yt_dlp.utils import clean_html
from yt_dlp.utils import clean_html, sanitize_filename
from ...constants import APP_CACHE_DIR
from ...constants import APP_CACHE_DIR, S_PLATFORM
from ...libs.anilist.types import AnilistBaseMediaDataSchema
from ...Utility import anilist_data_helper
from ..utils.scripts import fzf_preview
@@ -46,7 +46,9 @@ def aniskip(mal_id: int, episode: str):
# NOTE: May change this to a temp dir, but there were issues, so that is deferred for later
WORKING_DIR = APP_CACHE_DIR # tempfile.gettempdir()
HEADER_COLOR = 215, 0, 95
SEPARATOR_COLOR = 208, 208, 208
SINGLE_QUOTE = "'"
IMAGES_CACHE_DIR = os.path.join(WORKING_DIR, "images")
if not os.path.exists(IMAGES_CACHE_DIR):
os.mkdir(IMAGES_CACHE_DIR)
@@ -63,7 +65,7 @@ def save_image_from_url(url: str, file_name: str):
file_name: filename to use
"""
image = requests.get(url)
with open(f"{IMAGES_CACHE_DIR}/{file_name}", "wb") as f:
with open(os.path.join(IMAGES_CACHE_DIR,f"{file_name}.png"), "wb") as f:
f.write(image.content)
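Since sanitize_filename is now imported from yt_dlp.utils, callers are expected to sanitize titles before handing them over as cache filenames; a small illustration of the intended flow (the URL and title are placeholders):

from yt_dlp.utils import sanitize_filename

title = sanitize_filename("Kaguya-sama: Love is War?")  # sanitizes characters that are unsafe in filenames
# the poster ends up cached as <IMAGES_CACHE_DIR>/<sanitized title>.png
save_image_from_url("https://example.org/covers/kaguya.png", title)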
@@ -74,7 +76,7 @@ def save_info_from_str(info: str, file_name: str):
info: the information anilist has on the anime
file_name: the filename to use
"""
with open(f"{ANIME_INFO_CACHE_DIR}/{file_name}", "w") as f:
with open(os.path.join(ANIME_INFO_CACHE_DIR,file_name,), "w",encoding="utf-8") as f:
f.write(info)
@@ -91,40 +93,75 @@ def write_search_results(
workers: number of threads to use; defaults to as many as possible
"""
# NOTE: Will probably make this a configurable option
HEADER_COLOR = 215, 0, 95
SEPARATOR_COLOR = 208, 208, 208
SEPARATOR_WIDTH = 45
# use concurrency to download and write as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
future_to_task = {}
for anime, title in zip(anilist_results, titles):
# actual image url
image_url = anime["coverImage"]["large"]
future_to_task[executor.submit(save_image_from_url, image_url, title)] = (
image_url
)
if os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower() == "true":
image_url = anime["coverImage"]["large"]
future_to_task[
executor.submit(save_image_from_url, image_url, title)
] = image_url
mediaListName = "Not in any of your lists"
progress = "UNKNOWN"
if anime_list := anime["mediaListEntry"]:
mediaListName = anime_list["status"]
progress = anime_list["progress"]
# handle the text data
template = f"""
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
{get_true_fg('Title(jp):',*HEADER_COLOR)} {anime['title']['romaji']}
{get_true_fg('Title(eng):',*HEADER_COLOR)} {anime['title']['english']}
{get_true_fg('Popularity:',*HEADER_COLOR)} {anime['popularity']}
{get_true_fg('Favourites:',*HEADER_COLOR)} {anime['favourites']}
{get_true_fg('Status:',*HEADER_COLOR)} {anime['status']}
{get_true_fg('Episodes:',*HEADER_COLOR)} {anime['episodes']}
{get_true_fg('Genres:',*HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres'])}
{get_true_fg('Next Episode:',*HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode'])}
{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate'])}
{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate'])}
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
{get_true_fg('Description:',*HEADER_COLOR)}
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo
echo "{get_true_fg('Title(jp):',*HEADER_COLOR)} {(anime['title']['romaji'] or "").replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Title(eng):',*HEADER_COLOR)} {(anime['title']['english'] or "").replace('"',SINGLE_QUOTE)}"
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo
echo "{get_true_fg('Popularity:',*HEADER_COLOR)} {anilist_data_helper.format_number_with_commas(anime['popularity'])}"
echo "{get_true_fg('Favourites:',*HEADER_COLOR)} {anilist_data_helper.format_number_with_commas(anime['favourites'])}"
echo "{get_true_fg('Status:',*HEADER_COLOR)} {str(anime['status']).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Next Episode:',*HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode']).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Genres:',*HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres']).replace('"',SINGLE_QUOTE)}"
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo
echo "{get_true_fg('Episodes:',*HEADER_COLOR)} {(anime['episodes']) or 'UNKNOWN'}"
echo "{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate']).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate']).replace('"',SINGLE_QUOTE)}"
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo
echo "{get_true_fg('Media List:',*HEADER_COLOR)} {mediaListName.replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Progress:',*HEADER_COLOR)} {progress}"
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo
# echo "{get_true_fg('Description:',*HEADER_COLOR).replace('"',SINGLE_QUOTE)}"
"""
template = textwrap.dedent(template)
template = f"""
{template}
echo "
{textwrap.fill(clean_html(
str(anime['description'])), width=45)}
(anime['description']) or "").replace('"',SINGLE_QUOTE), width=45)}
"
"""
future_to_task[executor.submit(save_info_from_str, template, title)] = title
@@ -168,6 +205,63 @@ def get_rofi_icons(
logger.error("%r generated an exception: %s" % (url, e))
# get rofi icons
def get_fzf_manga_preview(manga_results, workers=None, wait=False):
"""A helper function to make sure that the images are downloaded so they can be used as icons
Args:
titles (list[str]): sanitized titles of the anime; NOTE: its important that they are sanitized since they are used as the filenames of the images
workers ([TODO:parameter]): Number of threads to use to download the images; defaults to as many as possible
anilist_results: the anilist results from an anilist action
"""
def _worker():
# use concurrency to download the images as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
# load the jobs
future_to_url = {}
for manga in manga_results:
image_url = manga["poster"]
future_to_url[
executor.submit(
save_image_from_url,
image_url,
sanitize_filename(manga["title"]),
)
] = image_url
# execute the jobs
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
future.result()
except Exception as e:
logger.error("%r generated an exception: %s" % (url, e))
background_worker = Thread(
target=_worker,
)
background_worker.daemon = True
# ensure images and info exists
background_worker.start()
# the preview script is written in bash, so make sure fzf doesn't use any other shell to process it
os.environ["SHELL"] = shutil.which("bash") or "bash"
preview = """
%s
if [ -s %s/{} ]; then fzf-preview %s/{}
else echo Loading...
fi
""" % (
fzf_preview,
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
)
if wait:
background_worker.join()
return preview
# get rofi icons
def get_fzf_episode_preview(
anilist_result: AnilistBaseMediaDataSchema, episodes, workers=None, wait=False
@@ -205,8 +299,13 @@ def get_fzf_episode_preview(
] = image_url
template = textwrap.dedent(
f"""
{get_true_fg('Anime Title:',*HEADER_COLOR)} {anilist_result['title']['romaji'] or anilist_result['title']['english']}
{get_true_fg('Episode Title:',*HEADER_COLOR)} {episode_title}
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo "{get_true_fg('Anime Title:',*HEADER_COLOR)} {(anilist_result['title']['romaji'] or anilist_result['title']['english']).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Episode Title:',*HEADER_COLOR)} {str(episode_title).replace('"',SINGLE_QUOTE)}"
"""
)
future_to_url[
@@ -224,27 +323,61 @@ def get_fzf_episode_preview(
background_worker = Thread(
target=_worker,
)
# ensure images and info exists
background_worker.daemon = True
# ensure images and info exists
background_worker.start()
# the preview script is written in bash, so make sure fzf doesn't use any other shell to process it
os.environ["SHELL"] = shutil.which("bash") or "bash"
preview = """
%s
if [ -s %s/{} ]; then fzf-preview %s/{}
else echo Loading...
fi
if [ -s %s/{} ]; then cat %s/{}
else echo Loading...
fi
""" % (
fzf_preview,
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
)
if S_PLATFORM == "win32":
preview = """
%s
title={}
show_image_previews="%s"
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
if [ $show_image_previews = "true" ];then
if [ -s "%s\\\\\\${title}.png" ]; then
if command -v "chafa">/dev/null;then
chafa -s $dim "%s\\\\\\${title}.png"
else
echo please install chafa to enjoy image previews
fi
echo
else
echo Loading...
fi
fi
if [ -s "%s\\\\\\$title" ]; then source "%s\\\\\\$title"
else echo Loading...
fi
""" % (
fzf_preview,
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
)
else:
preview = """
%s
show_image_previews="%s"
if [ $show_image_previews = "true" ];then
if [ -s %s/{} ]; then fzf-preview %s/{}
else echo Loading...
fi
fi
if [ -s %s/{} ]; then source %s/{}
else echo Loading...
fi
""" % (
fzf_preview,
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
)
if wait:
background_worker.join()
return preview
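For clarity, the .replace("\\", "\\\\\\") calls above turn every single backslash in the Windows cache paths into three, presumably so the path survives the quoting inside the generated bash preview snippet; a tiny illustration with a hypothetical Windows cache location (the path itself is not part of this diff):

win_path = r"C:\Users\me\AppData\Local\fastanime\images"
escaped = win_path.replace("\\", "\\\\\\")  # same replace as above: one backslash becomes three
print(escaped)  # C:\\\Users\\\me\\\AppData\\\Local\\\fastanime\\\images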
@@ -264,6 +397,7 @@ def get_fzf_anime_preview(
The fzf preview script to use
"""
# ensure images and info exists
background_worker = Thread(
target=write_search_results, args=(anilist_results, titles)
)
@@ -272,21 +406,56 @@ def get_fzf_anime_preview(
# the preview script is written in bash, so make sure fzf doesn't use any other shell to process it
os.environ["SHELL"] = shutil.which("bash") or "bash"
preview = """
%s
if [ -s %s/{} ]; then fzf-preview %s/{}
else echo Loading...
fi
if [ -s %s/{} ]; then cat %s/{}
else echo Loading...
fi
""" % (
fzf_preview,
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
)
if S_PLATFORM == "win32":
preview = """
%s
title={}
show_image_previews="%s"
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
if [ $show_image_previews = "true" ];then
if [ -s "%s\\\\\\${title}.png" ]; then
if command -v "chafa">/dev/null;then
chafa -s $dim "%s\\\\\\${title}.png"
else
echo please install chafa to enjoy image previews
fi
echo
else
echo Loading...
fi
fi
if [ -s "%s\\\\\\$title" ]; then source "%s\\\\\\$title"
else echo Loading...
fi
""" % (
fzf_preview,
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
)
else:
preview = """
%s
title={}
show_image_previews="%s"
if [ $show_image_previews = "true" ];then
if [ -s "%s/${title}.png" ]; then fzf-preview "%s/${title}.png"
else echo Loading...
fi
fi
if [ -s "%s/$title" ]; then source "%s/$title"
else echo Loading...
fi
""" % (
fzf_preview,
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
)
if wait:
background_worker.join()
return preview

View File

@@ -0,0 +1,12 @@
import shutil
import subprocess
from sys import exit
def feh_manga_viewer(image_links: list[str], window_title: str):
FEH_EXECUTABLE = shutil.which("feh")
if not FEH_EXECUTABLE:
print("feh not found")
exit(1)
commands = [FEH_EXECUTABLE, *image_links, "--title", window_title]
subprocess.run(commands)
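A hypothetical invocation of the new viewer (the URLs are placeholders); feh receives the page images directly on its command line together with a window title:

feh_manga_viewer(
    [
        "https://example.org/one-piece/chapter-1/page-01.png",
        "https://example.org/one-piece/chapter-1/page-02.png",
    ],
    "One Piece - Chapter 1",
)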

View File

@@ -1,131 +1,209 @@
import re
import os
import shutil
import subprocess
import logging
import time
from fastanime.constants import S_PLATFORM
from ...constants import S_PLATFORM
logger = logging.getLogger(__name__)
mpv_av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")
def stream_video(MPV, url, mpv_args, custom_args):
process = subprocess.Popen(
[MPV, url, *mpv_args, *custom_args],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
)
last_time = None
av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")
last_time = "0"
total_time = "0"
if os.environ.get("FASTANIME_DISABLE_MPV_POPEN", "False") == "False":
process = subprocess.Popen(
[
MPV,
url,
*mpv_args,
*custom_args,
"--no-terminal",
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
bufsize=1,
encoding="utf-8",
)
try:
while True:
if not process.stderr:
continue
output = process.stderr.readline()
try:
while True:
if not process.stderr:
time.sleep(0.1)
continue
output = process.stderr.readline()
if output:
# Match the timestamp in the output
match = av_time_pattern.search(output.strip())
if output:
# Match the timestamp in the output
match = mpv_av_time_pattern.search(output.strip())
if match:
current_time = match.group(1)
total_time = match.group(2)
last_time = current_time
# Check if the process has terminated
retcode = process.poll()
if retcode is not None:
break
except Exception as e:
print(f"An error occurred: {e}")
logger.error(f"An error occurred: {e}")
finally:
process.terminate()
process.wait()
else:
proc = subprocess.run(
[MPV, url, *mpv_args, *custom_args], capture_output=True, text=True
)
if proc.stdout:
for line in reversed(proc.stdout.split("\n")):
match = mpv_av_time_pattern.search(line.strip())
if match:
current_time = match.group(1)
last_time = match.group(1)
total_time = match.group(2)
match.group(3)
last_time = current_time
# print(f"Current stream time: {current_time}, Total time: {total_time}, Progress: {percentage}%")
# Check if the process has terminated
retcode = process.poll()
if retcode is not None:
print("Finshed at: ", last_time)
break
except Exception as e:
print(f"An error occurred: {e}")
finally:
process.terminate()
break
return last_time, total_time
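The timestamps are scraped from mpv's terminal status line; a short, self-contained illustration of what mpv_av_time_pattern extracts from a typical line (the line itself is a representative example, not captured output):

import re

mpv_av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")

line = "AV: 00:12:43 / 00:23:40 (53%) A-V:  0.000 Cache: 10s/3MB"
if match := mpv_av_time_pattern.search(line):
    current_time, total_time, percentage = match.groups()
    # current_time == "00:12:43", total_time == "00:23:40", percentage == "53"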
def run_mpv(
link: str,
title: str | None = "",
title: str = "",
start_time: str = "0",
ytdl_format="",
custom_args=[],
headers={},
subtitles=[],
player="",
):
# Determine if mpv is available
MPV = shutil.which("mpv")
# If title is None, set a default value
# Regex to check if the link is a YouTube URL
youtube_regex = r"(https?://)?(www\.)?(youtube|youtu|youtube-nocookie)\.(com|be)/.+"
if not MPV and not S_PLATFORM == "win32":
# Determine if the link is a YouTube URL
if re.match(youtube_regex, link):
# Android specific commands to launch mpv with a YouTube URL
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"com.google.android.youtube/.UrlActivity",
]
if link.endswith(".torrent"):
WEBTORRENT_CLI = shutil.which("webtorrent")
if not WEBTORRENT_CLI:
import time
print(
"webtorrent cli is not installed which is required for downloading and streaming from nyaa\nplease install it or use another provider"
)
time.sleep(120)
return "0", "0"
cmd = [WEBTORRENT_CLI, link, f"--{player}"]
subprocess.run(cmd)
return "0", "0"
if player == "vlc":
VLC = shutil.which("vlc")
if not VLC and not S_PLATFORM == "win32":
# Determine if the link is a YouTube URL
if re.match(youtube_regex, link):
# Android specific commands to launch mpv with a YouTube URL
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"com.google.android.youtube/.UrlActivity",
]
return "0", "0"
else:
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"org.videolan.vlc/org.videolan.vlc.gui.video.VideoPlayerActivity",
"-e",
"title",
title,
]
subprocess.run(args)
return "0", "0"
else:
# Android specific commands to launch mpv with a regular URL
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"is.xyz.mpv/.MPVActivity",
]
subprocess.run(args)
return "0", "0"
args = ["vlc", link]
for subtitle in subtitles:
args.append("--sub-file")
args.append(subtitle["url"])
break
if title:
args.append("--video-title")
args.append(title)
subprocess.run(args)
return "0", "0"
else:
# General mpv command with custom arguments
mpv_args = []
if headers:
mpv_headers = "--http-header-fields="
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_args.append(mpv_headers)
for subtitle in subtitles:
mpv_args.append(f"--sub-file={subtitle['url']}")
if start_time != "0":
mpv_args.append(f"--start={start_time}")
if title:
mpv_args.append(f"--title={title}")
if ytdl_format:
mpv_args.append(f"--ytdl-format={ytdl_format}")
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
return stop_time, total_time
# Determine if mpv is available
MPV = shutil.which("mpv")
if not MPV and not S_PLATFORM == "win32":
# Determine if the link is a YouTube URL
if re.match(youtube_regex, link):
# Android specific commands to launch mpv with a YouTube URL
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"com.google.android.youtube/.UrlActivity",
]
return "0", "0"
else:
# Android specific commands to launch mpv with a regular URL
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"is.xyz.mpv/.MPVActivity",
]
# Example usage
if __name__ == "__main__":
run_mpv(
"https://www.youtube.com/watch?v=dQw4w9WgXcQ",
"Example Video",
"--fullscreen",
"--volume=50",
)
subprocess.run(args)
return "0", "0"
else:
# General mpv command with custom arguments
mpv_args = []
if headers:
mpv_headers = "--http-header-fields="
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_args.append(mpv_headers)
for subtitle in subtitles:
mpv_args.append(f"--sub-file={subtitle['url']}")
if start_time != "0":
mpv_args.append(f"--start={start_time}")
if title:
mpv_args.append(f"--title={title}")
if ytdl_format:
mpv_args.append(f"--ytdl-format={ytdl_format}")
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
return stop_time, total_time
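A hypothetical call that exercises the new keyword parameters (the URLs and header values are placeholders):

stop_time, total_time = run_mpv(
    "https://example.org/streams/frieren-ep1/master.m3u8",
    title="Sousou no Frieren; Episode 1",
    start_time="00:05:30",
    custom_args=["--fullscreen"],
    headers={"Referer": "https://example.org/"},
    subtitles=[{"url": "https://example.org/subs/frieren-ep1.vtt"}],
    player="mpv",
)
print(stop_time, total_time)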

View File

@@ -10,6 +10,7 @@ if TYPE_CHECKING:
from ...AnimeProvider import AnimeProvider
from ..config import Config
from .tools import FastAnimeRuntimeState
def format_time(duration_in_secs: float):
@@ -67,7 +68,11 @@ class MpvPlayer(object):
current_episode_number = (
fastanime_runtime_state.provider_current_episode_number
)
config.update_watch_history(anime_id_anilist, str(current_episode_number))
config.media_list_track(
anime_id_anilist,
episode_no=str(current_episode_number),
progress_tracking=fastanime_runtime_state.progress_tracking,
)
elif type == "reload":
if current_episode_number not in total_episodes:
self.mpv_player.show_text("Episode not available")
@@ -83,7 +88,11 @@ class MpvPlayer(object):
self.mpv_player.show_text(f"Fetching episode {ep_no}")
current_episode_number = ep_no
config.update_watch_history(anime_id_anilist, str(ep_no))
config.media_list_track(
anime_id_anilist,
episode_no=str(ep_no),
progress_tracking=fastanime_runtime_state.progress_tracking,
)
fastanime_runtime_state.provider_current_episode_number = str(ep_no)
else:
self.mpv_player.show_text("Fetching previous episode...")
@@ -96,7 +105,11 @@ class MpvPlayer(object):
current_episode_number = (
fastanime_runtime_state.provider_current_episode_number
)
config.update_watch_history(anime_id_anilist, str(current_episode_number))
config.media_list_track(
anime_id_anilist,
episode_no=str(current_episode_number),
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# update episode progress
if config.user and current_episode_number:
AniList.update_anime_list(
@@ -107,10 +120,9 @@ class MpvPlayer(object):
)
# get them juicy streams
episode_streams = anime_provider.get_episode_streams(
provider_anime,
provider_anime["id"],
current_episode_number,
translation_type,
fastanime_runtime_state.selected_anime_anilist,
)
if not episode_streams:
self.mpv_player.show_text("No streams were found")
@@ -164,7 +176,7 @@ class MpvPlayer(object):
self,
stream_link,
anime_provider: "AnimeProvider",
fastanime_runtime_state,
fastanime_runtime_state: "FastAnimeRuntimeState",
config: "Config",
title,
start_time,
@@ -270,7 +282,6 @@ class MpvPlayer(object):
mpv_player.show_text("Changing translation type...")
anime = anime_provider.get_anime(
fastanime_runtime_state.provider_anime_search_result["id"],
fastanime_runtime_state.selected_anime_anilist,
)
if not anime:
mpv_player.show_text("Failed to update translation type")

View File

@@ -1,78 +1,53 @@
# this script was written by the fzf devs as an example of how to preview images
# it's only here for convenience
fzf_preview = r"""
#
# The purpose of this script is to demonstrate how to preview a file or an
# image in the preview window of fzf.
# Adapted from the preview script in the fzf repo
#
# Dependencies:
# - https://github.com/sharkdp/bat
# - https://github.com/hpjansson/chafa
# - https://iterm2.com/utilities/imgcat
fzf-preview(){
if [[ $# -ne 1 ]]; then
>&2 echo "usage: $0 FILENAME"
exit 1
fi
#
fzf-preview() {
file=${1/#\~\//$HOME/}
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
if [[ $dim = x ]]; then
dim=$(stty size </dev/tty | awk '{print $2 "x" $1}')
elif ! [[ $KITTY_WINDOW_ID ]] && ((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size </dev/tty | awk '{print $1}'))); then
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
# * https://github.com/junegunn/fzf/issues/2544
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
fi
file=${1/#\~\//$HOME/}
type=$(file --dereference --mime -- "$file")
# 1. Use kitty icat on kitty terminal
if [[ $KITTY_WINDOW_ID ]]; then
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
# you have to use 'stream'.
#
# 2. The last line of the output is the ANSI reset code without newline.
# This confuses fzf and makes it render scroll offset indicator.
# So we remove the last line and append the reset code to its previous line.
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
if [[ ! $type =~ image/ ]]; then
if [[ $type =~ =binary ]]; then
file "$1"
exit
fi
# 2. Use chafa with Sixel output
elif command -v chafa >/dev/null; then
case "$(uname -a)" in
# termux does not support sixel graphics
# and produces weird output
*ndroid*) chafa -s "$dim" "$file";;
*) chafa -f sixel -s "$dim" "$file";;
esac
# Add a new line character so that fzf can display multiple images in the preview window
echo
# Sometimes bat is installed as batcat.
if command -v batcat > /dev/null; then
batname="batcat"
elif command -v bat > /dev/null; then
batname="bat"
else
cat "$1"
exit
fi
# 3. If chafa is not found but imgcat is available, use it on iTerm2
elif command -v imgcat >/dev/null; then
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
# user is running iTerm2. But for the sake of simplicity, we just assume
# that's the case here.
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
${batname} --style="${BAT_STYLE:-numbers}" --color=always --pager=never -- "$file"
exit
fi
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
if [[ $dim = x ]]; then
dim=$(stty size < /dev/tty | awk '{print $2 "x" $1}')
elif ! [[ $KITTY_WINDOW_ID ]] && (( FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size < /dev/tty | awk '{print $1}') )); then
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
# * https://github.com/junegunn/fzf/issues/2544
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
fi
# 1. Use kitty icat on kitty terminal
if [[ $KITTY_WINDOW_ID ]]; then
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
# you have to use 'stream'.
#
# 2. The last line of the output is the ANSI reset code without newline.
# This confuses fzf and makes it render scroll offset indicator.
# So we remove the last line and append the reset code to its previous line.
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
# 2. Use chafa with Sixel output
elif command -v chafa > /dev/null; then
chafa -f sixel -s "$dim" "$file"
# Add a new line character so that fzf can display multiple images in the preview window
echo
# 3. If chafa is not found but imgcat is available, use it on iTerm2
elif command -v imgcat > /dev/null; then
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
# user is running iTerm2. But for the sake of simplicity, we just assume
# that's the case here.
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
# 4. Cannot find any suitable method to preview the image
else
file "$file"
fi
# 4. Cannot find any suitable method to preview the image
else
echo install chafa or imgcat, or use the kitty terminal, to enjoy image previews
fi
}
"""

View File

@@ -1,17 +1,31 @@
# TODO: add typing
class FastAnimeRuntimeState(dict):
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any
from ...libs.anilist.types import AnilistBaseMediaDataSchema
from ...libs.anime_provider.types import Anime, EpisodeStream, SearchResult, Server
class FastAnimeRuntimeState(object):
"""A class that manages fastanime runtime during anilist command runtime"""
def __getattr__(self, attr):
try:
return self.__getitem__(attr)
except KeyError:
raise AttributeError(
"%r object has no attribute %r" % (self.__class__.__name__, attr)
)
provider_current_episode_stream_link: str
provider_current_server: "Server"
provider_current_server_name: str
provider_available_episodes: list[str]
provider_current_episode_number: str
provider_server_episode_streams: list["EpisodeStream"]
provider_anime_title: str
provider_anime: "Anime"
provider_anime_search_result: "SearchResult"
progress_tracking: str = ""
def __setattr__(self, attr, value):
self.__setitem__(attr, value)
selected_anime_anilist: "AnilistBaseMediaDataSchema"
selected_anime_id_anilist: int
selected_anime_title_anilist: str
# current_anilist_data: "AnilistDataSchema | AnilistMediaList"
anilist_results_data: "Any"
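The runtime state thus changes from a dict subclass with __getattr__/__setattr__ forwarding to a plain object with class-level annotations; attribute-style usage stays the same, but the fields are now visible to type checkers, e.g. (a sketch):

state = FastAnimeRuntimeState()
state.progress_tracking = "track"              # ordinary attribute assignment, no dict indirection
state.provider_current_episode_number = "12"
print(state.progress_tracking)                 # annotated fields are now known to type checkers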
def exit_app(exit_code=0, *args):
@@ -23,8 +37,13 @@ def exit_app(exit_code=0, *args):
console = Console()
if not console.is_terminal:
from plyer import notification
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify(
app_name=APP_NAME,
app_icon=ICON_PATH,

View File

@@ -25,7 +25,7 @@ else:
# ----- user configs and data -----
S_PLATFORM = sys.platform
APP_DATA_DIR = click.get_app_dir(APP_NAME)
APP_DATA_DIR = click.get_app_dir(APP_NAME, roaming=False)
if S_PLATFORM == "win32":
# app data
# app_data_dir_base = os.getenv("LOCALAPPDATA")
@@ -78,6 +78,7 @@ Path(USER_VIDEOS_DIR).mkdir(parents=True, exist_ok=True)
# useful paths
USER_DATA_PATH = os.path.join(APP_DATA_DIR, "user_data.json")
USER_WATCH_HISTORY_PATH = os.path.join(APP_DATA_DIR, "watch_history.json")
USER_CONFIG_PATH = os.path.join(APP_DATA_DIR, "config.ini")
LOG_FILE_PATH = os.path.join(APP_DATA_DIR, "fastanime.log")

fastanime/fastanime.py Executable file
View File

@@ -0,0 +1,14 @@
#!/usr/bin/env python3
import os
import sys
# Add the application root directory to Python path
if getattr(sys, "frozen", False):
application_path = os.path.dirname(sys.executable)
sys.path.insert(0, application_path)
# Import and run the main application
from fastanime import FastAnime
if __name__ == "__main__":
FastAnime()

View File

@@ -15,6 +15,7 @@ from .queries_graphql import (
delete_list_entry_query,
get_logged_in_user_query,
get_medialist_item_query,
get_user_info,
media_list_mutation,
media_list_query,
most_favourite_query,
@@ -34,8 +35,9 @@ if TYPE_CHECKING:
AnilistMediaLists,
AnilistMediaListStatus,
AnilistNotifications,
AnilistUser,
AnilistUser_,
AnilistUserData,
AnilistViewerData,
)
logger = logging.getLogger(__name__)
ANILIST_ENDPOINT = "https://graphql.anilist.co"
@@ -77,7 +79,7 @@ class AniListApi:
return
if not success or not user:
return
user_info: AnilistUser = user["data"]["Viewer"]
user_info: "AnilistUser_" = user["data"]["Viewer"]
self.user_id = user_info["id"]
return user_info
@@ -91,7 +93,7 @@ class AniListApi:
"""
return self._make_authenticated_request(notification_query)
def update_login_info(self, user: "AnilistUser", token: str):
def update_login_info(self, user: "AnilistUser_", token: str):
"""method used to login a user enabling authenticated requests
Args:
@@ -103,7 +105,18 @@ class AniListApi:
self.session.headers.update(self.headers)
self.user_id = user["id"]
def get_logged_in_user(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
def get_user_info(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
"""get the details of the user who is currently logged in
Returns:
an anilist user
"""
return self._make_authenticated_request(get_user_info, {"userId": self.user_id})
def get_logged_in_user(
self,
) -> tuple[bool, "AnilistViewerData"] | tuple[bool, None]:
"""get the details of the user who is currently logged in
Returns:
@@ -316,6 +329,7 @@ class AniListApi:
page: int | None = None,
season: str | None = None,
format_in: list[str] | None = None,
on_list: bool | None = None,
type="ANIME",
**kwargs,
):
@@ -324,7 +338,7 @@ class AniListApi:
"""
variables = {}
for key, val in list(locals().items())[1:]:
if val and key not in ["variables"]:
if (val or val is False) and key not in ["variables"]:
variables[key] = val
search_results = self.get_data(search_query, variables=variables)
return search_results
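For readers unfamiliar with the API these queries are sent to: AniList exposes a single GraphQL endpoint that accepts a JSON body containing the query and its variables. A minimal standalone request, independent of this client class (the media id is just an example):

import requests

ANILIST_ENDPOINT = "https://graphql.anilist.co"

query = """
query ($id: Int) {
  Media(id: $id, type: ANIME) {
    title { romaji english }
    episodes
  }
}
"""

response = requests.post(
    ANILIST_ENDPOINT,
    json={"query": query, "variables": {"id": 21}},
    timeout=10,
)
print(response.json()["data"]["Media"]["title"]["romaji"])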

View File

@@ -3,7 +3,6 @@ This module contains all the preset queries for the sake of neatness and convini
Mostly for internal usage
"""
# TODO: Format the queries
mark_as_read_mutation = """
mutation{
UpdateUser{
@@ -17,7 +16,6 @@ query($id:Int){
pageInfo{
total
}
reviews(mediaId:$id){
summary
user{
@@ -35,50 +33,48 @@ query($id:Int){
"""
notification_query = """
query{
Page(perPage:5){
pageInfo {
total
}
notifications(resetNotificationCount:true,type:AIRING) {
... on AiringNotification {
id
type
episode
contexts
createdAt
media {
id
idMal
title {
romaji
english
}
coverImage{
medium
}
}
}
}
query {
Page(perPage: 5) {
pageInfo {
total
}
notifications(resetNotificationCount: true, type: AIRING) {
... on AiringNotification {
id
type
episode
contexts
createdAt
media {
id
idMal
title {
romaji
english
}
coverImage {
medium
}
}
}
}
}
}
"""
get_medialist_item_query = """
query($mediaId:Int){
MediaList(mediaId:$mediaId){
id
}
query ($mediaId: Int) {
MediaList(mediaId: $mediaId) {
id
}
}
"""
delete_list_entry_query = """
mutation($id:Int){
DeleteMediaListEntry(id:$id){
deleted
}
mutation ($id: Int) {
DeleteMediaListEntry(id: $id) {
deleted
}
}
"""
@@ -97,9 +93,85 @@ query{
}
"""
get_user_info = """
query ($userId: Int) {
User(id: $userId) {
name
about
avatar {
large
medium
}
bannerImage
statistics {
anime {
count
minutesWatched
episodesWatched
genres {
count
meanScore
genre
}
tags {
tag {
id
}
count
meanScore
}
}
manga {
count
meanScore
chaptersRead
volumesRead
tags {
count
meanScore
}
genres {
count
meanScore
}
}
}
favourites {
anime {
nodes {
title {
romaji
english
}
}
}
manga {
nodes {
title {
romaji
english
}
}
}
}
}
}
"""
media_list_mutation = """
mutation($mediaId:Int,$scoreRaw:Int,$repeat:Int,$progress:Int,$status:MediaListStatus){
SaveMediaListEntry(mediaId:$mediaId,scoreRaw:$scoreRaw,progress:$progress,repeat:$repeat,status:$status){
mutation (
$mediaId: Int
$scoreRaw: Int
$repeat: Int
$progress: Int
$status: MediaListStatus
) {
SaveMediaListEntry(
mediaId: $mediaId
scoreRaw: $scoreRaw
progress: $progress
repeat: $repeat
status: $status
) {
id
status
mediaId
@@ -116,21 +188,19 @@ mutation($mediaId:Int,$scoreRaw:Int,$repeat:Int,$progress:Int,$status:MediaListS
month
day
}
}
}
"""
media_list_query = """
query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
query ($userId: Int, $status: MediaListStatus, $type: MediaType) {
Page {
pageInfo {
currentPage
total
currentPage
total
}
mediaList(userId: $userId, status: $status, type: $type) {
mediaId
media {
id
idMal
@@ -147,15 +217,15 @@ query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
id
}
popularity
streamingEpisodes{
title
thumbnail
}
streamingEpisodes {
title
thumbnail
}
favourites
averageScore
episodes
genres
synonyms
studios {
nodes {
name
@@ -177,10 +247,10 @@ query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
}
status
description
mediaListEntry{
status
id
progress
mediaListEntry {
status
id
progress
}
nextAiringEpisode {
timeUntilAiring
@@ -204,7 +274,6 @@ query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
day
}
createdAt
}
}
}
@@ -235,76 +304,75 @@ $endDate_lesser:FuzzyDateInt,\
$format_in:[MediaFormat],\
$type:MediaType\
$season:MediaSeason\
$on_list:Boolean\
"
# FuzzyDateInt = (yyyymmdd)
# MediaStatus = (FINISHED,RELEASING,NOT_YET_RELEASED,CANCELLED,HIATUS)
search_query = (
"""
query($query:String,%s){
Page(perPage:50,page:$page){
pageInfo{
Page(perPage: 50, page: $page) {
pageInfo {
total
currentPage
hasNextPage
}
media(
search:$query,
id_in:$id_in,
genre_in:$genre_in,
genre_not_in:$genre_not_in,
tag_in:$tag_in,
tag_not_in:$tag_not_in,
status_in:$status_in,
status:$status,
startDate:$startDate,
status_not_in:$status_not_in,
popularity_greater:$popularity_greater,
popularity_lesser:$popularity_lesser,
averageScore_greater:$averageScore_greater,
averageScore_lesser:$averageScore_lesser,
startDate_greater:$startDate_greater,
startDate_lesser:$startDate_lesser,
endDate_greater:$endDate_greater,
endDate_lesser:$endDate_lesser,
format_in:$format_in,
sort:$sort,
season:$season,
seasonYear:$seasonYear,
type:$type
)
{
search: $query
id_in: $id_in
genre_in: $genre_in
genre_not_in: $genre_not_in
tag_in: $tag_in
tag_not_in: $tag_not_in
status_in: $status_in
status: $status
startDate: $startDate
status_not_in: $status_not_in
popularity_greater: $popularity_greater
popularity_lesser: $popularity_lesser
averageScore_greater: $averageScore_greater
averageScore_lesser: $averageScore_lesser
startDate_greater: $startDate_greater
startDate_lesser: $startDate_lesser
endDate_greater: $endDate_greater
endDate_lesser: $endDate_lesser
format_in: $format_in
sort: $sort
season: $season
seasonYear: $seasonYear
type: $type
onList:$on_list
) {
id
idMal
title{
idMal
title {
romaji
english
}
coverImage{
coverImage {
medium
large
}
trailer {
site
id
}
mediaListEntry{
status
mediaListEntry {
status
id
progress
}
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
favourites
averageScore
episodes
genres
studios{
nodes{
synonyms
studios {
nodes {
name
isAnimationStudio
}
@@ -337,17 +405,16 @@ query($query:String,%s){
)
trending_query = """
query($type:MediaType){
Page(perPage:15){
media(sort:TRENDING_DESC,type:$type,genre_not_in:["hentai"]){
query ($type: MediaType) {
Page(perPage: 15) {
media(sort: TRENDING_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
title{
idMal
title {
romaji
english
}
coverImage{
coverImage {
medium
large
}
@@ -356,14 +423,14 @@ query($type:MediaType){
id
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
favourites
averageScore
genres
synonyms
episodes
description
studios {
@@ -373,18 +440,18 @@ query($type:MediaType){
}
}
tags {
name
name
}
startDate {
year
month
day
}
mediaListEntry{
mediaListEntry {
status
id
progress
}
}
endDate {
year
month
@@ -403,44 +470,43 @@ query($type:MediaType){
# mosts
most_favourite_query = """
query($type:MediaType){
Page(perPage:15){
media(sort:FAVOURITES_DESC,type:$type,genre_not_in:["hentai"]){
query ($type: MediaType) {
Page(perPage: 15) {
media(sort: FAVOURITES_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
title{
idMal
title {
romaji
english
}
coverImage{
coverImage {
medium
large
}
trailer {
site
id
}
mediaListEntry{
mediaListEntry {
status
id
progress
}
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
streamingEpisodes{
}
streamingEpisodes {
title
thumbnail
}
}
favourites
averageScore
episodes
description
genres
synonyms
studios {
nodes {
name
@@ -448,7 +514,7 @@ query($type:MediaType){
}
}
tags {
name
name
}
startDate {
year
@@ -472,40 +538,39 @@ query($type:MediaType){
"""
most_scored_query = """
query($type:MediaType){
Page(perPage:15){
media(sort:SCORE_DESC,type:$type,genre_not_in:["hentai"]){
query ($type: MediaType) {
Page(perPage: 15) {
media(sort: SCORE_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
title{
idMal
title {
romaji
english
}
coverImage{
coverImage {
medium
large
}
trailer {
site
id
}
mediaListEntry{
mediaListEntry {
status
id
progress
}
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
episodes
favourites
averageScore
description
genres
synonyms
studios {
nodes {
name
@@ -513,7 +578,7 @@ query($type:MediaType){
}
}
tags {
name
name
}
startDate {
year
@@ -537,40 +602,39 @@ query($type:MediaType){
"""
most_popular_query = """
query($type:MediaType){
Page(perPage:15){
media(sort:POPULARITY_DESC,type:$type,genre_not_in:["hentai"]){
query ($type: MediaType) {
Page(perPage: 15) {
media(sort: POPULARITY_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
title{
idMal
title {
romaji
english
}
coverImage{
coverImage {
medium
large
}
trailer {
site
id
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
favourites
averageScore
description
episodes
genres
mediaListEntry{
synonyms
mediaListEntry {
status
id
progress
}
}
studios {
nodes {
name
@@ -578,8 +642,8 @@ query($type:MediaType){
}
}
tags {
name
}
name
}
startDate {
year
month
@@ -595,45 +659,52 @@ query($type:MediaType){
timeUntilAiring
airingAt
episode
}
}
}
}
}
"""
most_recently_updated_query = """
query($type:MediaType){
Page(perPage:15){
media(sort:UPDATED_AT_DESC,type:$type,averageScore_greater:50,genre_not_in:["hentai"],status:RELEASING){
query ($type: MediaType) {
Page(perPage: 15) {
media(
sort: UPDATED_AT_DESC
type: $type
averageScore_greater: 50
genre_not_in: ["hentai"]
status: RELEASING
) {
id
idMal
title{
idMal
title {
romaji
english
}
coverImage{
coverImage {
medium
large
}
trailer {
site
id
id
}
mediaListEntry{
mediaListEntry {
status
id
progress
}
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
favourites
averageScore
description
genres
synonyms
episodes
studios {
nodes {
@@ -642,7 +713,7 @@ query($type:MediaType){
}
}
tags {
name
name
}
startDate {
year
@@ -666,43 +737,42 @@ query($type:MediaType){
"""
recommended_query = """
query($type:MediaType){
Page(perPage:15) {
media( type: $type,genre_not_in:["hentai"]) {
recommendations(sort:RATING_DESC){
nodes{
media{
query ($type: MediaType) {
Page(perPage: 15) {
media(type: $type, genre_not_in: ["hentai"]) {
recommendations(sort: RATING_DESC) {
nodes {
media {
id
idMal
title{
idMal
title {
english
romaji
native
}
coverImage{
coverImage {
medium
large
}
mediaListEntry{
status
id
progress
mediaListEntry {
status
id
progress
}
description
episodes
trailer{
trailer {
site
id
}
genres
synonyms
averageScore
popularity
streamingEpisodes{
title
thumbnail
}
streamingEpisodes {
title
thumbnail
}
favourites
tags {
name
@@ -732,9 +802,9 @@ query($type:MediaType){
"""
anime_characters_query = """
query($id:Int,$type:MediaType){
query ($id: Int, $type: MediaType) {
Page {
media(id:$id, type: $type) {
media(id: $id, type: $type) {
characters {
nodes {
name {
@@ -767,13 +837,18 @@ query($id:Int,$type:MediaType){
anime_relations_query = """
query ($id: Int,$type:MediaType) {
query ($id: Int, $type: MediaType) {
Page(perPage: 20) {
media(id: $id, sort: POPULARITY_DESC, type: $type,genre_not_in:["hentai"]) {
media(
id: $id
sort: POPULARITY_DESC
type: $type
genre_not_in: ["hentai"]
) {
relations {
nodes {
id
idMal
idMal
title {
english
romaji
@@ -783,11 +858,11 @@ query ($id: Int,$type:MediaType) {
medium
large
}
mediaListEntry{
status
id
progress
}
mediaListEntry {
status
id
progress
}
description
episodes
trailer {
@@ -795,33 +870,33 @@ query ($id: Int,$type:MediaType) {
id
}
genres
synonyms
averageScore
popularity
streamingEpisodes{
title
thumbnail
}
streamingEpisodes {
title
thumbnail
}
favourites
tags {
name
}
startDate {
year
month
day
}
endDate {
year
month
day
}
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
year
month
day
}
endDate {
year
month
day
}
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
@@ -847,7 +922,7 @@ query ($id: Int,$type:MediaType) {
"""
upcoming_anime_query = """
query ($page: Int,$type:MediaType) {
query ($page: Int, $type: MediaType) {
Page(page: $page) {
pageInfo {
total
@@ -855,9 +930,14 @@ query ($page: Int,$type:MediaType) {
currentPage
hasNextPage
}
media(type: $type, status: NOT_YET_RELEASED,sort:POPULARITY_DESC,genre_not_in:["hentai"]) {
media(
type: $type
status: NOT_YET_RELEASED
sort: POPULARITY_DESC
genre_not_in: ["hentai"]
) {
id
idMal
idMal
title {
romaji
english
@@ -870,20 +950,20 @@ query ($page: Int,$type:MediaType) {
site
id
}
mediaListEntry{
status
mediaListEntry {
status
id
progress
}
}
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
favourites
averageScore
genres
synonyms
episodes
description
studios {
@@ -917,20 +997,20 @@ query ($page: Int,$type:MediaType) {
"""
anime_query = """
query($id:Int){
Page{
media(id:$id) {
query ($id: Int) {
Page {
media(id: $id) {
id
idMal
idMal
title {
romaji
english
}
mediaListEntry{
status
mediaListEntry {
status
id
progress
}
}
nextAiringEpisode {
timeUntilAiring
airingAt
@@ -944,7 +1024,6 @@ query($id:Int){
node {
name {
full
}
gender
dateOfBirth {
@@ -997,10 +1076,10 @@ query($id:Int){
countryOfOrigin
averageScore
popularity
streamingEpisodes{
streamingEpisodes {
title
thumbnail
}
}
favourites
source
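
Editor's note: every one of these AniList documents is executed the same way — the query string plus a variables dict are POSTed as JSON to the public AniList GraphQL endpoint (the same https://graphql.anilist.co endpoint referenced elsewhere in this changeset). A minimal sketch, assuming the requests library; the trending_query/variables pairing is only an example:

    import requests

    ANILIST_ENDPOINT = "https://graphql.anilist.co"

    def run_anilist_query(query: str, variables: dict) -> dict:
        # AniList expects a JSON body containing the GraphQL document and its variables
        response = requests.post(
            ANILIST_ENDPOINT,
            json={"query": query, "variables": variables},
            timeout=10,
        )
        response.raise_for_status()
        return response.json()["data"]

    # e.g. trending = run_anilist_query(trending_query, {"type": "ANIME"})
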

View File

@@ -19,7 +19,7 @@ class AnilistImage(TypedDict):
large: str
class AnilistUser(TypedDict):
class AnilistUser_(TypedDict):
id: int
name: str
bannerImage: str | None
@@ -28,11 +28,26 @@ class AnilistUser(TypedDict):
class AnilistViewer(TypedDict):
Viewer: AnilistUser
Viewer: AnilistUser_
class AnilistViewerData(TypedDict):
data: AnilistViewer
class AnilistUser(TypedDict):
name: str
about: str | None
avatar: AnilistImage
bannerImage: str | None
class AnilistUserInfo(TypedDict):
User: AnilistUser
class AnilistUserData(TypedDict):
data: AnilistViewer
data: AnilistUserInfo
class AnilistMediaTrailer(TypedDict):
@@ -69,7 +84,7 @@ class AnilistMediaNextAiringEpisode(TypedDict):
class AnilistReview(TypedDict):
summary: str
user: AnilistUser
user: AnilistUser_
class AnilistReviewNodes(TypedDict):
@@ -114,16 +129,17 @@ class AnilistCharactersEdges(TypedDict):
edges: list[AnilistCharactersEdge]
class AnilistMediaList_(TypedDict):
id: int
progress: int
AnilistMediaListStatus = Literal[
"CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"
]
class AnilistMediaList_(TypedDict):
id: int
progress: int
status: AnilistMediaListStatus
class AnilistMediaListProperties(TypedDict):
status: AnilistMediaListStatus
score: float
@@ -165,6 +181,7 @@ class AnilistBaseMediaDataSchema(TypedDict):
nextAiringEpisode: AnilistMediaNextAiringEpisode
season: str
streamingEpisodes: list[StreamingEpisode]
chapters: int
seasonYear: int
duration: int
synonyms: list[str]

View File

@@ -1,11 +1,12 @@
from .allanime.constants import SERVERS_AVAILABLE as ALLANIME_SERVERS
from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHESERVERS
from .aniwatch.constants import SERVERS_AVAILABLE as ANIWATCHSERVERS
from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHE_SERVERS
from .hianime.constants import SERVERS_AVAILABLE as HIANIME_SERVERS
anime_sources = {
"allanime": "api.AllAnimeAPI",
"animepahe": "api.AnimePaheApi",
"aniwatch": "api.AniWatchApi",
"aniwave": "api.AniWaveApi",
"hianime": "api.HiAnimeApi",
"nyaa": "api.NyaaApi",
"yugen": "api.YugenApi"
}
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHESERVERS, *ANIWATCHSERVERS]
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHE_SERVERS, *HIANIME_SERVERS]
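
Editor's note: the values in anime_sources are dotted paths relative to each provider package, which suggests the registry is resolved lazily with importlib. The actual loader is not part of this diff, so the helper below is only an illustration; the fastanime.libs.anime_provider package prefix is an assumption inferred from the relative imports visible in these files.

    import importlib

    def load_provider(provider_name: str):
        # "allanime" -> module ".allanime.api", attribute "AllAnimeAPI" (assumed layout)
        module_path, _, class_name = anime_sources[provider_name].rpartition(".")
        module = importlib.import_module(
            f"fastanime.libs.anime_provider.{provider_name}.{module_path}"
        )
        return getattr(module, class_name)
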

View File

@@ -7,18 +7,14 @@ import json
import logging
from typing import TYPE_CHECKING
from requests.exceptions import Timeout
from ...anime_provider.base_provider import AnimeProvider
from ..decorators import debug_provider
from ..utils import give_random_quality, one_digit_symmetric_xor
from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL
if TYPE_CHECKING:
from typing import Iterator
from ....libs.anime_provider.allanime.types import AllAnimeEpisode
from ....libs.anime_provider.types import Anime, Server
from .types import AllAnimeEpisode
logger = logging.getLogger(__name__)
@@ -30,6 +26,7 @@ class AllAnimeAPI(AnimeProvider):
Provides a fast and effective interface to AllAnime site.
"""
PROVIDER = "allanime"
api_endpoint = ALLANIME_API_ENDPOINT
HEADERS = {
"Referer": ALLANIME_REFERER,
@@ -45,29 +42,21 @@ class AllAnimeAPI(AnimeProvider):
Returns:
[TODO:return]
"""
try:
response = self.session.get(
self.api_endpoint,
params={
"variables": json.dumps(variables),
"query": query,
},
timeout=10,
)
if response.status_code == 200:
return response.json()["data"]
else:
logger.error("allanime(ERROR): ", response.text)
return {}
except Timeout:
logger.error(
"allanime(Error):Timeout exceeded this could mean allanime is down or you have lost internet connection"
)
return {}
except Exception as e:
logger.error(f"allanime:Error: {e}")
response = self.session.get(
self.api_endpoint,
params={
"variables": json.dumps(variables),
"query": query,
},
timeout=10,
)
if response.ok:
return response.json()["data"]
else:
logger.error("[ALLANIME-ERROR]: ", response.text)
return {}
@debug_provider(PROVIDER.upper())
def search_for_anime(
self,
user_query: str,
@@ -100,29 +89,25 @@ class AllAnimeAPI(AnimeProvider):
"translationtype": translationtype,
"countryorigin": countryorigin,
}
try:
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
page_info = search_results["shows"]["pageInfo"]
results = []
for result in search_results["shows"]["edges"]:
normalized_result = {
"id": result["_id"],
"title": result["name"],
"type": result["__typename"],
"availableEpisodes": result["availableEpisodes"],
}
results.append(normalized_result)
normalized_search_results = {
"pageInfo": page_info,
"results": results,
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
page_info = search_results["shows"]["pageInfo"]
results = []
for result in search_results["shows"]["edges"]:
normalized_result = {
"id": result["_id"],
"title": result["name"],
"type": result["__typename"],
"availableEpisodes": result["availableEpisodes"],
}
return normalized_search_results
results.append(normalized_result)
except Exception as e:
logger.error(f"FA(AllAnime): {e}")
return {}
normalized_search_results = {
"pageInfo": page_info,
"results": results,
}
return normalized_search_results
@debug_provider(PROVIDER.upper())
def get_anime(self, allanime_show_id: str):
"""get an anime details given its id
@@ -133,25 +118,23 @@ class AllAnimeAPI(AnimeProvider):
[TODO:return]
"""
variables = {"showId": allanime_show_id}
try:
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
id: str = anime["show"]["_id"]
title: str = anime["show"]["name"]
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
type = anime.get("__typename")
normalized_anime = {
"id": id,
"title": title,
"availableEpisodesDetail": availableEpisodesDetail,
"type": type,
}
return normalized_anime
except Exception as e:
logger.error(f"AllAnime(get_anime): {e}")
return None
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
id: str = anime["show"]["_id"]
title: str = anime["show"]["name"]
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
self.store.set(allanime_show_id, "anime_info", {"title": title})
type = anime.get("__typename")
normalized_anime = {
"id": id,
"title": title,
"availableEpisodesDetail": availableEpisodesDetail,
"type": type,
}
return normalized_anime
@debug_provider(PROVIDER.upper())
def _get_anime_episode(
self, allanime_show_id: str, episode_string: str, translation_type: str = "sub"
self, allanime_show_id: str, episode, translation_type: str = "sub"
) -> "AllAnimeEpisode | dict":
"""get the episode details and sources info
@@ -166,18 +149,15 @@ class AllAnimeAPI(AnimeProvider):
variables = {
"showId": allanime_show_id,
"translationType": translation_type,
"episodeString": episode_string,
"episodeString": episode,
}
try:
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
return episode["episode"]
except Exception as e:
logger.error(f"FA(AllAnime): {e}")
return {}
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
return episode["episode"]
@debug_provider(PROVIDER.upper())
def get_episode_streams(
self, anime: "Anime", episode_number: str, translation_type="sub"
) -> "Iterator[Server] | None":
self, anime_id, episode_number: str, translation_type="sub"
):
"""get the streams of an episode
Args:
@@ -188,7 +168,10 @@ class AllAnimeAPI(AnimeProvider):
Yields:
[TODO:description]
"""
anime_id = anime["id"]
anime_title = (self.store.get(anime_id, "anime_info", "") or {"title": ""})[
"title"
]
allanime_episode = self._get_anime_episode(
anime_id, episode_number, translation_type
)
@@ -196,128 +179,117 @@ class AllAnimeAPI(AnimeProvider):
return []
embeds = allanime_episode["sourceUrls"]
try:
for embed in embeds:
try:
# filter the working streams no need to get all since the others are mostly hsl
# TODO: should i just get all the servers and handle the hsl??
if embed.get("sourceName", "") not in (
# priorities based on death note
"Sak", # 7
"S-mp4", # 7.9
"Luf-mp4", # 7.7
"Default", # 8.5
"Yt-mp4", # 7.9
"Kir", # NA
# "Vid-mp4" # 4
# "Ok", # 3.5
# "Ss-Hls", # 5.5
# "Mp4", # 4
):
continue
url = embed.get("sourceUrl")
#
if not url:
continue
if url.startswith("--"):
url = url[2:]
url = one_digit_symmetric_xor(56, url)
if "tools.fast4speed.rsvp" in url:
yield {
"server": "Yt",
"episode_title": f'{anime["title"]}; Episode {episode_number}',
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
@debug_provider(self.PROVIDER.upper())
def _get_server(embed):
# filter the working streams no need to get all since the others are mostly hsl
# TODO: should i just get all the servers and handle the hsl??
if embed.get("sourceName", "") not in (
# priorities based on death note
"Sak", # 7
"S-mp4", # 7.9
"Luf-mp4", # 7.7
"Default", # 8.5
"Yt-mp4", # 7.9
"Kir", # NA
# "Vid-mp4" # 4
# "Ok", # 3.5
# "Ss-Hls", # 5.5
# "Mp4", # 4
):
return
url = embed.get("sourceUrl")
#
if not url:
return
if url.startswith("--"):
url = url[2:]
url = one_digit_symmetric_xor(56, url)
if "tools.fast4speed.rsvp" in url:
return {
"server": "Yt",
"episode_title": f"{anime_title}; Episode {episode_number}",
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
"subtitles": [],
"links": [
{
"link": url,
"quality": "1080",
}
],
}
# get the stream url for an episode of the defined source names
embed_url = f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}"
resp = self.session.get(
embed_url,
timeout=10,
)
if resp.ok:
match embed["sourceName"]:
case "Luf-mp4":
logger.debug("allanime:Found streams from gogoanime")
return {
"server": "gogoanime",
"headers": {},
"subtitles": [],
"links": [
{
"link": url,
"quality": "1080",
}
],
} # pyright:ignore
continue
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Kir":
logger.debug("allanime:Found streams from wetransfer")
return {
"server": "wetransfer",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "S-mp4":
logger.debug("allanime:Found streams from sharepoint")
return {
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Sak":
logger.debug("allanime:Found streams from dropbox")
return {
"server": "dropbox",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Default":
logger.debug("allanime:Found streams from wixmp")
return {
"server": "wixmp",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
# get the stream url for an episode of the defined source names
embed_url = (
f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}"
)
resp = self.session.get(
embed_url,
timeout=10,
)
if resp.status_code == 200:
match embed["sourceName"]:
case "Luf-mp4":
logger.debug("allanime:Found streams from gogoanime")
yield {
"server": "gogoanime",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "Kir":
logger.debug("allanime:Found streams from wetransfer")
yield {
"server": "wetransfer",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "S-mp4":
logger.debug("allanime:Found streams from sharepoint")
yield {
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "Sak":
logger.debug("allanime:Found streams from dropbox")
yield {
"server": "dropbox",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "Default":
logger.debug("allanime:Found streams from wixmp")
yield {
"server": "wixmp",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
except Timeout:
logger.error(
"Timeout has been exceeded this could mean allanime is down or you have lost internet connection"
)
except Exception as e:
logger.error(f"FA(Allanime): {e}")
except Exception as e:
logger.error(f"FA(Allanime): {e}")
return []
for embed in embeds:
if server := _get_server(embed):
yield server
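
Editor's note on the "--"-prefixed sourceUrl handling above: one_digit_symmetric_xor lives in ..utils and is not shown in this diff, but from the way it is called it appears to hex-decode the string and XOR every byte with the one-digit key (56), which both encodes and decodes. A hedged sketch of that assumption:

    def one_digit_symmetric_xor(key: int, encoded: str) -> str:
        # interpret the payload as hex byte pairs and XOR each byte with the key
        decoded = bytes(
            int(encoded[i : i + 2], 16) ^ key for i in range(0, len(encoded), 2)
        )
        return decoded.decode(errors="ignore")

    # under this sketch, one_digit_symmetric_xor(56, "175b54575b53") -> "/clock"
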

View File

@@ -1,56 +1,56 @@
ALLANIME_SEARCH_GQL = """
query(
$search: SearchInput
$limit: Int
$page: Int
$translationType: VaildTranslationTypeEnumType
$countryOrigin: VaildCountryOriginEnumType
) {
shows(
search: $search
limit: $limit
page: $page
translationType: $translationType
countryOrigin: $countryOrigin
) {
pageInfo {
total
}
edges {
_id
name
availableEpisodes
__typename
}
query (
$search: SearchInput
$limit: Int
$page: Int
$translationType: VaildTranslationTypeEnumType
$countryOrigin: VaildCountryOriginEnumType
) {
shows(
search: $search
limit: $limit
page: $page
translationType: $translationType
countryOrigin: $countryOrigin
) {
pageInfo {
total
}
edges {
_id
name
availableEpisodes
__typename
}
}
}
"""
ALLANIME_EPISODES_GQL = """\
query ($showId: String!, $translationType: VaildTranslationTypeEnumType!, $episodeString: String!) {
episode(
showId: $showId
translationType: $translationType
episodeString: $episodeString
) {
episodeString
sourceUrls
notes
}
}"""
query (
$showId: String!
$translationType: VaildTranslationTypeEnumType!
$episodeString: String!
) {
episode(
showId: $showId
translationType: $translationType
episodeString: $episodeString
) {
episodeString
sourceUrls
notes
}
}
"""
ALLANIME_SHOW_GQL = """
query ($showId: String!) {
show(
_id: $showId
) {
_id
name
availableEpisodesDetail
}
show(_id: $showId) {
_id
name
availableEpisodesDetail
}
}
"""

View File

@@ -11,6 +11,7 @@ from yt_dlp.utils import (
)
from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from .constants import (
ANIMEPAHE_BASE,
ANIMEPAHE_ENDPOINT,
@@ -20,65 +21,63 @@ from .constants import (
from .utils import process_animepahe_embed_page
if TYPE_CHECKING:
from ..types import Anime
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimeSearchResult
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimePaheSearchResult
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
logger = logging.getLogger(__name__)
KWIK_RE = re.compile(r"Player\|(.+?)'")
# TODO: hack this to completion
class AnimePaheApi(AnimeProvider):
search_page: "AnimePaheSearchPage"
anime: "AnimePaheAnimePage"
HEADERS = REQUEST_HEADERS
PROVIDER = "animepahe"
@debug_provider(PROVIDER.upper())
def search_for_anime(self, user_query: str, *args):
try:
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
response = self.session.get(
url,
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
response = self.session.get(
url,
)
if not response.ok:
return
data: "AnimePaheSearchPage" = response.json()
self.search_page = data
for animepahe_search_result in data["data"]:
self.store.set(
str(animepahe_search_result["session"]),
"search_result",
animepahe_search_result,
)
if not response.status_code == 200:
return
data: "AnimePaheSearchPage" = response.json()
self.search_page = data
return {
"pageInfo": {
"total": data["total"],
"perPage": data["per_page"],
"currentPage": data["current_page"],
},
"results": [
{
"availableEpisodes": list(range(result["episodes"])),
"id": result["session"],
"title": result["title"],
"type": result["type"],
"year": result["year"],
"score": result["score"],
"status": result["status"],
"season": result["season"],
"poster": result["poster"],
}
for result in data["data"]
],
}
except Exception as e:
logger.error(f"AnimePahe(search): {e}")
return {}
return {
"pageInfo": {
"total": data["total"],
"perPage": data["per_page"],
"currentPage": data["current_page"],
},
"results": [
{
"availableEpisodes": list(range(result["episodes"])),
"id": result["session"],
"title": result["title"],
"type": result["type"],
"year": result["year"],
"score": result["score"],
"status": result["status"],
"season": result["season"],
"poster": result["poster"],
}
for result in data["data"]
],
}
@debug_provider(PROVIDER.upper())
def get_anime(self, session_id: str, *args):
page = 1
try:
anime_result: "AnimeSearchResult" = [
anime
for anime in self.search_page["data"]
if anime["session"] == session_id
][0]
if d := self.store.get(str(session_id), "search_result"):
anime_result: "AnimePaheSearchResult" = d
data: "AnimePaheAnimePage" = {} # pyright:ignore
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
@@ -90,7 +89,7 @@ class AnimePaheApi(AnimeProvider):
response = self.session.get(
url,
)
if response.status_code == 200:
if response.ok:
if not data:
data.update(response.json())
else:
@@ -123,7 +122,8 @@ class AnimePaheApi(AnimeProvider):
if not data:
return {}
self.anime = data # pyright:ignore
data["title"] = anime_result["title"] # pyright:ignore
self.store.set(str(session_id), "anime_info", data)
episodes = list(map(str, [episode["episode"] for episode in data["data"]]))
title = ""
return {
@@ -150,91 +150,87 @@ class AnimePaheApi(AnimeProvider):
for episode in data["data"]
],
}
except Exception as e:
logger.error(f"AnimePahe(anime): {e}")
return {}
@debug_provider(PROVIDER.upper())
def get_episode_streams(
self, anime: "Anime", episode_number: str, translation_type, *args
self, anime_id, episode_number: str, translation_type, *args
):
try:
# extract episode details from memory
anime_title = ""
episode = None
# extract episode details from memory
if d := self.store.get(str(anime_id), "anime_info"):
anime_title = d["title"]
episode = [
episode
for episode in self.anime["data"]
for episode in d["data"]
if float(episode["episode"]) == float(episode_number)
]
if not episode:
logger.error(
f"AnimePahe(streams): episode {episode_number} doesn't exist"
if not episode:
logger.error(f"[ANIMEPAHE-ERROR]: episode {episode_number} doesn't exist")
return []
episode = episode[0]
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url)
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
# convert the elements containing embed links to a neat dict containing:
# data-src
# data-audio
# data-resolution
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
# get the episode title
episode_title = (
f"{episode['title'] or anime_title}; Episode {episode['episode']}"
)
# get all links
streams = {
"server": "kwik",
"links": [],
"episode_title": episode_title,
"subtitles": [],
"headers": {},
}
for res_dict in res_dicts:
# get embed url
embed_url = res_dict["data-src"]
data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
# filter streams by translation_type
if data_audio != translation_type:
continue
if not embed_url:
logger.warning(
"[ANIMEPAHE-WARN]: embed url not found please report to the developers"
)
return []
episode = episode[0]
anime_id = anime["id"]
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url)
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
# convert the elements containing embed links to a neat dict containing:
# data-src
# data-audio
# data-resolution
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
# get the episode title
episode_title = (
f"{episode['title'] or anime['title']}; Episode {episode['episode']}"
# get embed page
embed_response = self.session.get(
embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
)
# get all links
streams = {
"server": "kwik",
"links": [],
"episode_title": episode_title,
"subtitles": [],
"headers": {},
}
for res_dict in res_dicts:
# get embed url
embed_url = res_dict["data-src"]
data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
# filter streams by translation_type
if data_audio != translation_type:
continue
if not response.ok:
continue
embed_page = embed_response.text
if not embed_url:
logger.warn(
"AnimePahe: embed url not found please report to the developers"
)
return []
# get embed page
embed_response = self.session.get(
embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
)
if not response.status_code == 200:
continue
embed_page = embed_response.text
decoded_js = process_animepahe_embed_page(embed_page)
if not decoded_js:
logger.error("Animepahe: failed to decode embed page")
return
juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
if not juicy_stream:
logger.error("Animepahe: failed to find juicy stream")
return
juicy_stream = juicy_stream.group(1)
# add the link
streams["links"].append(
{
"quality": res_dict["data-resolution"],
"translation_type": data_audio,
"link": juicy_stream,
}
)
yield streams
except Exception as e:
logger.error(f"Animepahe: {e}")
decoded_js = process_animepahe_embed_page(embed_page)
if not decoded_js:
logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
return
juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
if not juicy_stream:
logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
return
juicy_stream = juicy_stream.group(1)
# add the link
streams["links"].append(
{
"quality": res_dict["data-resolution"],
"translation_type": data_audio,
"link": juicy_stream,
}
)
yield streams
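
Editor's note: the decoded embed JS is expected to contain a source='…'; assignment, which is what JUICY_STREAM_REGEX pulls out. A small illustration; the decoded snippet and stream URL are invented:

    import re

    JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")

    decoded_js = "const source='https://example.kwik.test/stream.m3u8';"  # hypothetical decoder output
    match = JUICY_STREAM_REGEX.search(decoded_js)
    if match:
        juicy_stream = match.group(1)  # -> https://example.kwik.test/stream.m3u8
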

View File

@@ -1,7 +1,7 @@
from typing import Literal, TypedDict
class AnimeSearchResult(TypedDict):
class AnimePaheSearchResult(TypedDict):
id: int
title: str
type: str
@@ -21,7 +21,7 @@ class AnimePaheSearchPage(TypedDict):
last_page: int
_from: int
to: int
data: list[AnimeSearchResult]
data: list[AnimePaheSearchResult]
class Episode(TypedDict):

View File

@@ -21,12 +21,12 @@ def animepahe_embed_decoder(
encoded_js_p: str,
base_a: int,
no_of_keys_c: int,
key_values_k: list,
decode_mapper_d: dict = {},
values_to_replace_with_k: list,
):
decode_mapper_d: dict = {}
for i in range(no_of_keys_c):
key = animepahe_key_creator(i, base_a)
val = key_values_k[i] or key
val = values_to_replace_with_k[i] or key
decode_mapper_d[key] = val
return re.sub(
r"\b\w+\b", lambda match: decode_mapper_d[match.group(0)], encoded_js_p
@@ -64,18 +64,12 @@ def process_animepahe_embed_page(embed_page: str):
if __name__ == "__main__":
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))"""
a = 62
c = 102
k = "player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength||180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|cda74eaebce25a12f5e548f7c220bb5dc245700b0280bdb45ff98b2fe4803d2b|06|stream|org|nextcdn|files|eu|https".split(
"|"
)
# Testing time
filepath = input("Enter file name: ")
if filepath:
with open(filepath, "r") as file:
data = file.read()
else:
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))</script>"""
p = "h o='1D://1C-11.1B.1A.1z/1y/11/1x/1w/1v.1u';h d=s.r('d');h 0=B 1t(d,{'1s':{'1r':i},'1q':'16:9','D':1,'1p':5,'1o':{'1n':'1m'},1l:['7-1k','7','1j','1i-1h','1g','1f-1e','1d','D','1c','1b','1a','19','C','18'],'C':{'17':i}});8(!A.15()){d.14=o}x{j z={13:12,10:Z,Y:X,W:i,V:i};h c=B A(z);c.U(o);c.T(d);g.c=c}0.3(\"S\",6=>{g.R.Q.P(\"O\")});0.N=1;k v(b,n,m){8(b.y){b.y(n,m,M)}x 8(b.w){b.w('3'+n,m)}}j 4=k(l){g.L.K(l,'*')};v(g,'l',k(e){j a=e.a;8(a==='7')0.7();8(a==='f')0.f();8(a==='u')0.u()});0.3('t',6=>{4('t')});0.3('7',6=>{4('7')});0.3('f',6=>{4('f')});0.3('J',6=>{4(0.q);s.r('.I-H').G=F(0.q.E(2))});0.3('p',6=>{4('p')});"
result = animepahe_embed_decoder(
p,
a,
c,
k,
)
print(result) # Output: j player = B A();
print(process_animepahe_embed_page(data))
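
Editor's note on the decoder signature change above: the old version used a mutable default argument (decode_mapper_d: dict = {}), which Python evaluates once, so the mapping would leak between calls; the new version builds the dict locally. A standalone illustration of the pitfall, unrelated to the specific decoder:

    def bad(acc: dict = {}):           # default dict is created once and shared across calls
        acc["n"] = acc.get("n", 0) + 1
        return acc

    print(bad())   # {'n': 1}
    print(bad())   # {'n': 2}  <- state carried over from the previous call

    def good(acc: dict | None = None):
        acc = {} if acc is None else acc
        acc["n"] = acc.get("n", 0) + 1
        return acc

    print(good())  # {'n': 1}
    print(good())  # {'n': 1}
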

View File

@@ -1,236 +0,0 @@
import logging
import re
from html.parser import HTMLParser
from itertools import cycle
from urllib.parse import quote_plus
from yt_dlp.utils import (
clean_html,
extract_attributes,
get_element_by_class,
get_element_html_by_class,
get_elements_by_class,
get_elements_html_by_class,
)
from ..base_provider import AnimeProvider
from ..utils import give_random_quality
from .constants import SERVERS_AVAILABLE
from .types import AniWatchStream
logger = logging.getLogger(__name__)
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
class ParseAnchorAndImgTag(HTMLParser):
def __init__(self):
super().__init__()
self.img_tag = None
self.a_tag = None
def handle_starttag(self, tag, attrs):
if tag == "img":
self.img_tag = {attr[0]: attr[1] for attr in attrs}
if tag == "a":
self.a_tag = {attr[0]: attr[1] for attr in attrs}
class AniWatchApi(AnimeProvider):
# HEADERS = {"Referer": "https://hianime.to/home"}
def search_for_anime(self, anime_title: str, *args):
try:
query = quote_plus(anime_title)
url = f"https://hianime.to/search?keyword={query}"
response = self.session.get(url)
if response.status_code != 200:
return
search_page = response.text
search_results_html_items = get_elements_by_class("flw-item", search_page)
results = []
for search_results_html_item in search_results_html_items:
film_poster_html = get_element_by_class(
"film-poster", search_results_html_item
)
if not film_poster_html:
continue
# get availableEpisodes
episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
episodes = clean_html(episodes_html) or 12
# get anime id and poster image url
parser = ParseAnchorAndImgTag()
parser.feed(film_poster_html)
image_data = parser.img_tag
anime_link_data = parser.a_tag
if not image_data or not anime_link_data:
continue
episodes = int(episodes)
# finally!!
image_link = image_data["data-src"]
anime_id = anime_link_data["data-id"]
title = anime_link_data["title"]
results.append(
{
"availableEpisodes": list(range(1, episodes)),
"id": anime_id,
"title": title,
"poster": image_link,
}
)
self.search_results = results
return {"pageInfo": {}, "results": results}
except Exception as e:
logger.error(e)
def get_anime(self, aniwatch_id, *args):
try:
anime_result = {}
for anime in self.search_results:
if anime["id"] == aniwatch_id:
anime_result = anime
break
anime_url = f"https://hianime.to/ajax/v2/episode/list/{aniwatch_id}"
response = self.session.get(anime_url, timeout=10)
if response.status_code == 200:
response_json = response.json()
aniwatch_anime_page = response_json["html"]
episodes_info_container_html = get_element_html_by_class(
"ss-list", aniwatch_anime_page
)
episodes_info_html_list = get_elements_html_by_class(
"ep-item", episodes_info_container_html
)
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
episodes_info_dicts = [
extract_attributes(episode_dict)
for episode_dict in episodes_info_html_list
]
episodes = [episode["data-number"] for episode in episodes_info_dicts]
self.episodes_info = [
{
"id": episode["data-id"],
"title": (
(episode["title"] or "").replace(
f"Episode {episode['data-number']}", ""
)
or anime_result["title"]
)
+ f"; Episode {episode['data-number']}",
"episode": episode["data-number"],
}
for episode in episodes_info_dicts
]
return {
"id": aniwatch_id,
"availableEpisodesDetail": {
"dub": episodes,
"sub": episodes,
"raw": episodes,
},
"poster": anime_result["poster"],
"title": anime_result["title"],
"episodes_info": self.episodes_info,
}
except Exception as e:
logger.error(e)
def get_episode_streams(self, anime, episode, translation_type, *args):
try:
episode_details = [
episode_details
for episode_details in self.episodes_info
if episode_details["episode"] == episode
]
if not episode_details:
return
episode_details = episode_details[0]
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
response = self.session.get(episode_url)
if response.status_code == 200:
response_json = response.json()
episode_page_html = response_json["html"]
servers_containers_html = get_elements_html_by_class(
"ps__-list", episode_page_html
)
if not servers_containers_html:
return
# sub servers
try:
servers_html_sub = get_elements_html_by_class(
"server-item", servers_containers_html[0]
)
except Exception:
logger.warn("AniWatch: sub not found")
servers_html_sub = None
# dub servers
try:
servers_html_dub = get_elements_html_by_class(
"server-item", servers_containers_html[1]
)
except Exception:
logger.warn("AniWatch: dub not found")
servers_html_dub = None
if translation_type == "dub":
servers_html = servers_html_dub
else:
servers_html = servers_html_sub
if not servers_html:
return
for server_name, server_html in zip(
cycle(SERVERS_AVAILABLE), servers_html
):
try:
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
servers_info = extract_attributes(server_html)
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
embed_response = self.session.get(embed_url)
if embed_response.status_code == 200:
embed_json = embed_response.json()
raw_link_to_streams = embed_json["link"]
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if not match:
continue
provider_domain = match.group(1)
embed_type = match.group(2)
episode_number = match.group(3)
source_id = match.group(4)
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
link_to_streams_response = self.session.get(link_to_streams)
if link_to_streams_response.status_code == 200:
juicy_streams_json: "AniWatchStream" = (
link_to_streams_response.json()
)
yield {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
}
for track in juicy_streams_json["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["file"], "type": link["type"]}
for link in juicy_streams_json["sources"]
]
),
}
except Exception as e:
logger.error(e)
except Exception as e:
logger.error(e)

View File

@@ -1,26 +0,0 @@
from typing import Literal, TypedDict
class AniWatchSkipTime(TypedDict):
start: int
end: int
class AniWatchSource(TypedDict):
file: str
type: str
class AniWatchTrack(TypedDict):
file: str
label: str
kind: Literal["captions", "thumbnails", "audio"]
class AniWatchStream(TypedDict):
sources: list[AniWatchSource]
tracks: list[AniWatchTrack]
encrypted: bool
intro: AniWatchSkipTime
outro: AniWatchSkipTime
server: int

View File

@@ -1,65 +0,0 @@
from html.parser import HTMLParser
from yt_dlp.utils import clean_html, get_element_by_class, get_elements_by_class
from ..base_provider import AnimeProvider
from .constants import ANIWAVE_BASE, SEARCH_HEADERS
class ParseAnchorAndImgTag(HTMLParser):
def __init__(self):
super().__init__()
self.img_tag = None
self.a_tag = None
def handle_starttag(self, tag, attrs):
if tag == "img":
self.img_tag = {attr[0]: attr[1] for attr in attrs}
if tag == "a":
self.a_tag = {attr[0]: attr[1] for attr in attrs}
class AniWaveApi(AnimeProvider):
def search_for_anime(self, anime_title, *args):
self.session.headers.update(SEARCH_HEADERS)
search_url = f"{ANIWAVE_BASE}/filter"
params = {"keyword": anime_title}
res = self.session.get(search_url, params=params)
search_page = res.text
search_results_html_list = get_elements_by_class("item", search_page)
results = []
for result_html in search_results_html_list:
aniposter_html = get_element_by_class("poster", result_html)
episode_html = get_element_by_class("sub", aniposter_html)
episodes = clean_html(episode_html) or 12
if not aniposter_html:
return
parser = ParseAnchorAndImgTag()
parser.feed(aniposter_html)
image_data = parser.img_tag
anime_link_data = parser.a_tag
if not image_data or not anime_link_data:
continue
episodes = int(episodes)
# finally!!
image_link = image_data["src"]
title = image_data["alt"]
anime_id = anime_link_data["href"]
results.append(
{
"availableEpisodes": list(range(1, episodes)),
"id": anime_id,
"title": title,
"poster": image_link,
}
)
self.search_results = results
return {"pageInfo": {}, "results": results}
def get_anime(self, anime_id, *args):
anime_page_url = f"{ANIWAVE_BASE}{anime_id}"
self.session.get(anime_page_url)
# TODO: to be continued; mostly js so very difficult

View File

@@ -1,20 +0,0 @@
ANIWAVE_BASE = "https://aniwave.to"
SEARCH_HEADERS = {
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
# 'Accept-Encoding': 'Utf-8',
"Referer": "https://aniwave.to/filter",
"DNT": "1",
"Upgrade-Insecure-Requests": "1",
"Sec-Fetch-Dest": "document",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "same-origin",
"Sec-Fetch-User": "?1",
"Connection": "keep-alive",
"Alt-Used": "aniwave.to",
# 'Cookie': '__pf=1; usertype=guest; session=BElk9DJdO3sFdDmLiGxuNiM9eGYO1TjktGsmdwjV',
"Priority": "u=0, i",
# Requests doesn't support trailers
# 'TE': 'trailers',
}

View File

@@ -1,13 +1,34 @@
import os
import requests
from yt_dlp.utils.networking import random_user_agent
from ...constants import APP_CACHE_DIR
from .providers_store import ProviderStore
class AnimeProvider:
session: requests.Session
PROVIDER = ""
USER_AGENT = random_user_agent()
HEADERS = {}
def __init__(self) -> None:
self.session = requests.session()
def __init__(self, cache_requests, use_persistent_provider_store) -> None:
if cache_requests.lower() == "true":
from ..common.requests_cacher import CachedRequestsSession
self.session = CachedRequestsSession(
os.path.join(APP_CACHE_DIR, "cached_requests.db")
)
else:
self.session = requests.session()
self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
if use_persistent_provider_store.lower() == "true":
self.store = ProviderStore(
"persistent",
self.PROVIDER,
os.path.join(APP_CACHE_DIR, "anime_providers_store.db"),
)
else:
self.store = ProviderStore("memory")
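
Editor's note: both new constructor flags are compared as lowercase strings rather than booleans, so callers are expected to pass "true"/"false" (presumably straight from config or environment values). A hedged usage sketch; the import path is an assumption:

    from fastanime.libs.anime_provider.allanime.api import AllAnimeAPI  # assumed import path

    # string flags, matching how the config values appear to be passed through
    provider = AllAnimeAPI(cache_requests="false", use_persistent_provider_store="true")
    # -> a plain requests session plus a persistent ProviderStore backed by
    #    anime_providers_store.db under APP_CACHE_DIR
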

View File

@@ -0,0 +1,39 @@
import functools
import logging
import os
logger = logging.getLogger(__name__)
def debug_provider(provider_name: str):
def _provider_function_decorator(provider_function):
@functools.wraps(provider_function)
def _provider_function_wrapper(*args, **kwargs):
if not os.environ.get("FASTANIME_DEBUG"):
try:
return provider_function(*args, **kwargs)
except Exception as e:
logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
else:
return provider_function(*args, **kwargs)
return _provider_function_wrapper
return _provider_function_decorator
def ensure_internet_connection(provider_function):
@functools.wraps(provider_function)
def _wrapper(*args, **kwargs):
import requests
try:
requests.get("https://google.com", timeout=5)
except requests.ConnectionError:
from sys import exit
print("You are not connected to the internet;Aborting...")
exit(1)
return provider_function(*args, **kwargs)
return _wrapper
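
Editor's note: debug_provider swallows and logs any exception raised by the wrapped provider method unless the FASTANIME_DEBUG environment variable is set, in which case the call runs unguarded and errors propagate. A minimal illustration:

    import logging
    import os

    logging.basicConfig(level=logging.ERROR)

    @debug_provider("DEMO")
    def flaky():
        raise RuntimeError("boom")

    flaky()                               # logged as "[DEMO@flaky]: boom", returns None
    os.environ["FASTANIME_DEBUG"] = "1"
    flaky()                               # now raises RuntimeError
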

View File

@@ -0,0 +1,243 @@
import logging
import re
from html.parser import HTMLParser
from itertools import cycle
from urllib.parse import quote_plus
from yt_dlp.utils import (
clean_html,
extract_attributes,
get_element_by_class,
get_element_html_by_class,
get_elements_by_class,
get_elements_html_by_class,
)
from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from ..utils import give_random_quality
from .constants import SERVERS_AVAILABLE
from .types import HiAnimeStream
logger = logging.getLogger(__name__)
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
class ParseAnchorAndImgTag(HTMLParser):
def __init__(self):
super().__init__()
self.img_tag = None
self.a_tag = None
def handle_starttag(self, tag, attrs):
if tag == "img":
self.img_tag = {attr[0]: attr[1] for attr in attrs}
if tag == "a":
self.a_tag = {attr[0]: attr[1] for attr in attrs}
class HiAnimeApi(AnimeProvider):
# HEADERS = {"Referer": "https://hianime.to/home"}
PROVIDER = "hianime"
@debug_provider(PROVIDER.upper())
def search_for_anime(self, anime_title: str, *args):
query = quote_plus(anime_title)
url = f"https://hianime.to/search?keyword={query}"
response = self.session.get(url)
if not response.ok:
return
search_page = response.text
search_results_html_items = get_elements_by_class("flw-item", search_page)
results = []
for search_results_html_item in search_results_html_items:
film_poster_html = get_element_by_class(
"film-poster", search_results_html_item
)
if not film_poster_html:
continue
# get availableEpisodes
episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
episodes = clean_html(episodes_html) or 12
# get anime id and poster image url
parser = ParseAnchorAndImgTag()
parser.feed(film_poster_html)
image_data = parser.img_tag
anime_link_data = parser.a_tag
if not image_data or not anime_link_data:
continue
episodes = int(episodes)
# finally!!
image_link = image_data["data-src"]
anime_id = anime_link_data["data-id"]
title = anime_link_data["title"]
result = {
"availableEpisodes": list(range(1, episodes)),
"id": anime_id,
"title": title,
"poster": image_link,
}
results.append(result)
self.store.set(result["id"], "search_result", result)
return {"pageInfo": {}, "results": results}
@debug_provider(PROVIDER.upper())
def get_anime(self, hianime_id, *args):
anime_result = {}
if d := self.store.get(str(hianime_id), "search_result"):
anime_result = d
anime_url = f"https://hianime.to/ajax/v2/episode/list/{hianime_id}"
response = self.session.get(anime_url, timeout=10)
if response.ok:
response_json = response.json()
hianime_anime_page = response_json["html"]
episodes_info_container_html = get_element_html_by_class(
"ss-list", hianime_anime_page
)
episodes_info_html_list = get_elements_html_by_class(
"ep-item", episodes_info_container_html
)
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
episodes_info_dicts = [
extract_attributes(episode_dict)
for episode_dict in episodes_info_html_list
]
episodes = [episode["data-number"] for episode in episodes_info_dicts]
episodes_info = [
{
"id": episode["data-id"],
"title": (
(episode["title"] or "").replace(
f"Episode {episode['data-number']}", ""
)
or anime_result["title"]
)
+ f"; Episode {episode['data-number']}",
"episode": episode["data-number"],
}
for episode in episodes_info_dicts
]
self.store.set(
str(hianime_id),
"anime_info",
episodes_info,
)
return {
"id": hianime_id,
"availableEpisodesDetail": {
"dub": episodes,
"sub": episodes,
"raw": episodes,
},
"poster": anime_result["poster"],
"title": anime_result["title"],
"episodes_info": episodes_info,
}
@debug_provider(PROVIDER.upper())
def get_episode_streams(self, anime_id, episode, translation_type, *args):
if d := self.store.get(str(anime_id), "anime_info"):
episodes_info = d
episode_details = [
episode_details
for episode_details in episodes_info
if episode_details["episode"] == episode
]
if not episode_details:
return
episode_details = episode_details[0]
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
response = self.session.get(episode_url)
if response.ok:
response_json = response.json()
episode_page_html = response_json["html"]
servers_containers_html = get_elements_html_by_class(
"ps__-list", episode_page_html
)
if not servers_containers_html:
return
# sub servers
try:
servers_html_sub = get_elements_html_by_class(
"server-item", servers_containers_html[0]
)
except Exception:
logger.warning("HiAnime: sub not found")
servers_html_sub = None
# dub servers
try:
servers_html_dub = get_elements_html_by_class(
"server-item", servers_containers_html[1]
)
except Exception:
logger.warning("HiAnime: dub not found")
servers_html_dub = None
if translation_type == "dub":
servers_html = servers_html_dub
else:
servers_html = servers_html_sub
if not servers_html:
return
@debug_provider(self.PROVIDER.upper())
def _get_server(server_name, server_html):
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
servers_info = extract_attributes(server_html)
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
embed_response = self.session.get(embed_url)
if embed_response.ok:
embed_json = embed_response.json()
raw_link_to_streams = embed_json["link"]
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if not match:
return
provider_domain = match.group(1)
embed_type = match.group(2)
episode_number = match.group(3)
source_id = match.group(4)
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
link_to_streams_response = self.session.get(link_to_streams)
if link_to_streams_response.ok:
juicy_streams_json: "HiAnimeStream" = (
link_to_streams_response.json()
)
# TODO: Hianime decided to fucking encrypt shit
# so got to fix it later
return {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
}
for track in juicy_streams_json["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["file"]}
for link in juicy_streams_json["tracks"]
]
),
}
for server_name, server_html in zip(
cycle(SERVERS_AVAILABLE), servers_html
):
if server := _get_server(server_name, server_html):
yield server
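
Editor's note: to make LINK_TO_STREAMS_REGEX concrete, this is how a raw embed link gets rewritten into the getSources endpoint; the sample URL is invented purely to show the four capture groups:

    import re

    LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")

    raw_link = "https://example-embed.test/embed-2/e-1/abc123?k=1"   # hypothetical
    m = LINK_TO_STREAMS_REGEX.match(raw_link)
    if m:
        domain, embed_type, episode, source_id = m.groups()
        link_to_streams = (
            f"https://{domain}/embed-{embed_type}/ajax/e-{episode}/getSources?id={source_id}"
        )
        # -> https://example-embed.test/embed-2/ajax/e-1/getSources?id=abc123
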

View File

@@ -0,0 +1,26 @@
from typing import Literal, TypedDict
class HiAnimeSkipTime(TypedDict):
start: int
end: int
class HiAnimeSource(TypedDict):
file: str
type: str
class HiAnimeTrack(TypedDict):
file: str
label: str
kind: Literal["captions", "thumbnails", "audio"]
class HiAnimeStream(TypedDict):
sources: list[HiAnimeSource]
tracks: list[HiAnimeTrack]
encrypted: bool
intro: HiAnimeSkipTime
outro: HiAnimeSkipTime
server: int

View File

@@ -1,153 +0,0 @@
import logging
from typing import TYPE_CHECKING
from requests import post
from thefuzz import fuzz
if TYPE_CHECKING:
from ..anilist.types import AnilistDataSchema
logger = logging.getLogger(__name__)
ANILIST_ENDPOINT = "https://graphql.anilist.co"
"""
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
def search_for_anime_with_anilist(anime_title: str):
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": {"query": anime_title}},
timeout=10,
)
if response.status_code == 200:
anilist_data: "AnilistDataSchema" = response.json()
return {
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
"results": [
{
"id": anime_result["id"],
"title": anime_result["title"]["romaji"]
or anime_result["title"]["english"],
"type": "anime",
"availableEpisodes": list(
range(
1,
(
anime_result["episodes"]
if not anime_result["status"] == "RELEASING"
and anime_result["episodes"]
else (
anime_result["nextAiringEpisode"]["episode"] - 1
if anime_result["nextAiringEpisode"]
else 0
)
),
)
),
}
for anime_result in anilist_data["data"]["Page"]["media"]
],
}
def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
"""the abstraction over all none authenticated requests and that returns data of a similar type
Args:
query: the anilist query
variables: the anilist api variables
Returns:
a boolean indicating success and none or an anilist object depending on success
"""
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
}
}
}
"""
try:
variables = {"query": anime_title}
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": variables},
timeout=10,
)
anilist_data: "AnilistDataSchema" = response.json()
if response.status_code == 200:
anime = max(
anilist_data["data"]["Page"]["media"],
key=lambda anime: max(
(
fuzz.ratio(anime, str(anime["title"]["romaji"])),
fuzz.ratio(anime_title, str(anime["title"]["english"])),
)
),
)
return {"id_anilist": anime["id"], "id_mal": anime["idMal"]}
except Exception as e:
logger.error(f"Something unexpected occured {e}")

View File

@@ -0,0 +1,345 @@
import os
import re
from logging import getLogger
from yt_dlp.utils import (
extract_attributes,
get_element_html_by_attribute,
get_element_html_by_class,
get_element_text_and_html_by_tag,
get_elements_html_by_class,
)
from ...common.mini_anilist import search_for_anime_with_anilist
from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from ..types import SearchResults
from .constants import NYAA_ENDPOINT
logger = getLogger(__name__)
EXTRACT_USEFUL_INFO_PATTERN_1 = re.compile(
r"\[(\w+)\] (.+) - (\d+) [\[\(](\d+)p[\]\)].*"
)
EXTRACT_USEFUL_INFO_PATTERN_2 = re.compile(
r"\[(\w+)\] (.+)E(\d+) [\[\(]?(\d+)p.*[\]\)]?.*"
)
class NyaaApi(AnimeProvider):
search_results: SearchResults
PROVIDER = "nyaa"
@debug_provider(PROVIDER.upper())
def search_for_anime(self, user_query: str, *args, **_):
self.search_results = search_for_anime_with_anilist(
user_query, True
) # pyright: ignore
self.user_query = user_query
return self.search_results
@debug_provider(PROVIDER.upper())
def get_anime(self, anilist_id: str, *_):
for anime in self.search_results["results"]:
if anime["id"] == anilist_id:
self.titles = [anime["title"], *anime["otherTitles"], self.user_query]
return {
"id": anime["id"],
"title": anime["title"],
"poster": anime["poster"],
"availableEpisodesDetail": {
"dub": anime["availableEpisodes"],
"sub": anime["availableEpisodes"],
"raw": anime["availableEpisodes"],
},
}
@debug_provider(PROVIDER.upper())
def get_episode_streams(
self,
anime_id: str,
episode_number: str,
translation_type: str,
trusted_only=bool(int(os.environ.get("FA_NYAA_TRUSTED_ONLY", "0"))),
allow_dangerous=bool(int(os.environ.get("FA_NYAA_ALLOW_DANGEROUS", "0"))),
sort_by="seeders",
*args,
):
anime_title = self.titles[0]
logger.debug(f"Searching nyaa for query: '{anime_title} {episode_number}'")
servers = {}
torrents_table = ""
for title in self.titles:
try:
url_arguments: dict[str, str] = {
"c": "1_2", # Language (English)
"q": f"{title} {'0' if len(episode_number)==1 else ''}{episode_number}", # Search Query
}
# url_arguments["q"] = anime_title
# if trusted_only:
# url_arguments["f"] = "2" # Trusted uploaders only
# What to sort torrents by
if sort_by == "seeders":
url_arguments["s"] = "seeders"
elif sort_by == "date":
url_arguments["s"] = "id"
elif sort_by == "size":
url_arguments["s"] = "size"
elif sort_by == "comments":
url_arguments["s"] = "comments"
logger.debug(f"URL Arguments: {url_arguments}")
response = self.session.get(NYAA_ENDPOINT, params=url_arguments)
if not response.ok:
logger.error(f"[NYAA]: {response.text}")
return
try:
torrents_table = get_element_text_and_html_by_tag(
"table", response.text
)
except Exception as e:
logger.error(f"[NYAA]: {e}")
continue
if not torrents_table:
continue
for anime_torrent in get_elements_html_by_class(
"success", torrents_table[1]
):
td_title = get_element_html_by_attribute(
"colspan", "2", anime_torrent
)
if not td_title:
continue
title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)
if not title_anchor_tag:
continue
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
if not title_anchor_tag_attrs:
continue
if "class" in title_anchor_tag_attrs:
td_title = td_title.replace(title_anchor_tag[1], "")
title_anchor_tag = get_element_text_and_html_by_tag(
"a", td_title
)
if not title_anchor_tag:
continue
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
if not title_anchor_tag_attrs:
continue
anime_title_info = title_anchor_tag_attrs["title"]
if not anime_title_info:
continue
match = EXTRACT_USEFUL_INFO_PATTERN_1.search(
anime_title_info.strip()
)
if not match:
continue
server = match[1]
# match[2] is the release's anime title, which is not needed here
_episode_number = match[3]
quality = match[4]
if float(episode_number) != float(_episode_number):
continue
links_td = get_element_html_by_class("text-center", anime_torrent)
if not links_td:
continue
torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
if not torrent_anchor_tag:
continue
torrent_anchor_tag_atrrs = extract_attributes(torrent_anchor_tag[1])
if not torrent_anchor_tag_atrrs:
continue
torrent_file_url = (
f'{NYAA_ENDPOINT}{torrent_anchor_tag_atrrs["href"]}'
)
if server in servers:
link = {
"translation_type": "sub",
"link": torrent_file_url,
"quality": quality,
}
if link not in servers[server]["links"]:
servers[server]["links"].append(link)
else:
servers[server] = {
"server": server,
"headers": {},
"episode_title": f"{anime_title}; Episode {episode_number}",
"subtitles": [],
"links": [
{
"translation_type": "sub",
"link": torrent_file_url,
"quality": quality,
}
],
}
for anime_torrent in get_elements_html_by_class(
"default", torrents_table[1]
):
td_title = get_element_html_by_attribute(
"colspan", "2", anime_torrent
)
if not td_title:
continue
title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)
if not title_anchor_tag:
continue
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
if not title_anchor_tag_attrs:
continue
if "class" in title_anchor_tag_attrs:
td_title = td_title.replace(title_anchor_tag[1], "")
title_anchor_tag = get_element_text_and_html_by_tag(
"a", td_title
)
if not title_anchor_tag:
continue
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
if not title_anchor_tag_attrs:
continue
anime_title_info = title_anchor_tag_attrs["title"]
if not anime_title_info:
continue
match = EXTRACT_USEFUL_INFO_PATTERN_2.search(
anime_title_info.strip()
)
if not match:
continue
server = match[1]
# match[2] is the release's anime title, which is not needed here
_episode_number = match[3]
quality = match[4]
if float(episode_number) != float(_episode_number):
continue
links_td = get_element_html_by_class("text-center", anime_torrent)
if not links_td:
continue
torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
if not torrent_anchor_tag:
continue
torrent_anchor_tag_atrrs = extract_attributes(torrent_anchor_tag[1])
if not torrent_anchor_tag_atrrs:
continue
torrent_file_url = (
f'{NYAA_ENDPOINT}{torrent_anchor_tag_atrrs["href"]}'
)
if server in servers:
link = {
"translation_type": "sub",
"link": torrent_file_url,
"quality": quality,
}
if link not in servers[server]["links"]:
servers[server]["links"].append(link)
else:
servers[server] = {
"server": server,
"headers": {},
"episode_title": f"{anime_title}; Episode {episode_number}",
"subtitles": [],
"links": [
{
"translation_type": "sub",
"link": torrent_file_url,
"quality": quality,
}
],
}
if not allow_dangerous:
break
for anime_torrent in get_elements_html_by_class(
"danger", torrents_table[1]
):
td_title = get_element_html_by_attribute(
"colspan", "2", anime_torrent
)
if not td_title:
continue
title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)
if not title_anchor_tag:
continue
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
if not title_anchor_tag_attrs:
continue
if "class" in title_anchor_tag_attrs:
td_title = td_title.replace(title_anchor_tag[1], "")
title_anchor_tag = get_element_text_and_html_by_tag(
"a", td_title
)
if not title_anchor_tag:
continue
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
if not title_anchor_tag_attrs:
continue
anime_title_info = title_anchor_tag_attrs["title"]
if not anime_title_info:
continue
match = EXTRACT_USEFUL_INFO_PATTERN_2.search(
anime_title_info.strip()
)
if not match:
continue
server = match[1]
# match[2] is the release's anime title, which is not needed here
_episode_number = match[3]
quality = match[4]
if float(episode_number) != float(_episode_number):
continue
links_td = get_element_html_by_class("text-center", anime_torrent)
if not links_td:
continue
torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
if not torrent_anchor_tag:
continue
torrent_anchor_tag_atrrs = extract_attributes(torrent_anchor_tag[1])
if not torrent_anchor_tag_atrrs:
continue
torrent_file_url = (
f'{NYAA_ENDPOINT}{torrent_anchor_tag_atrrs["href"]}'
)
if server in servers:
link = {
"translation_type": "sub",
"link": torrent_file_url,
"quality": quality,
}
if link not in servers[server]["links"]:
servers[server]["links"].append(link)
else:
servers[server] = {
"server": server,
"headers": {},
"episode_title": f"{anime_title}; Episode {episode_number}",
"subtitles": [],
"links": [
{
"translation_type": "sub",
"link": torrent_file_url,
"quality": quality,
}
],
}
except Exception as e:
logger.error(f"[NYAA]: {e}")
continue
for server in servers:
yield servers[server]
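A rough usage sketch for the provider above; it assumes AnimeProvider can be constructed with no arguments and supplies self.session (as the MangaProvider base further down does), and the search title is illustrative:
provider = NyaaApi()

search = provider.search_for_anime("frieren")
if search and search["results"]:
    anime = provider.get_anime(search["results"][0]["id"])
    if anime:
        # get_episode_streams is a generator; each item describes one uploader ("server")
        for server in provider.get_episode_streams(anime["id"], "1", "sub"):
            for link in server["links"]:
                print(server["server"], link["quality"], link["link"])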


@@ -0,0 +1 @@
NYAA_ENDPOINT = "https://nyaa.si"


@@ -0,0 +1,126 @@
import logging
import os
import sys
import time
import libtorrent # pyright: ignore
from rich import print
from rich.progress import (
BarColumn,
DownloadColumn,
Progress,
TextColumn,
TimeRemainingColumn,
TransferSpeedColumn,
)
logger = logging.getLogger("nyaa")
def download_torrent(
filename: str,
result_filename: str | None = None,
show_progress: bool = True,
base_path: str = "Anime",
) -> str:
session = libtorrent.session({"listen_interfaces": "0.0.0.0:6881"})
logger.debug("Started libtorrent session")
base_path = os.path.expanduser(base_path)
logger.debug(f"Downloading output to: '{base_path}'")
info = libtorrent.torrent_info(filename)
logger.debug("Started downloading torrent")
handle: libtorrent.torrent_handle = session.add_torrent(
{"ti": info, "save_path": base_path}
)
status: libtorrent.torrent_status = handle.status()
progress_bar = Progress(
"[progress.description]{task.description}",
BarColumn(bar_width=None),
"[progress.percentage]{task.percentage:>3.1f}%",
"",
DownloadColumn(),
"",
TransferSpeedColumn(),
"",
TimeRemainingColumn(),
"",
TextColumn("[green]Peers: {task.fields[peers]}[/green]"),
)
if show_progress:
with progress_bar:
download_task = progress_bar.add_task(
"downloading",
filename=status.name,
total=status.total_wanted,
peers=0,
start=False,
)
while not status.total_done:
# Checking files
status = handle.status()
description = "[bold yellow]Checking files[/bold yellow]"
progress_bar.update(
download_task,
completed=status.total_done,
peers=status.num_peers,
description=description,
)
# Started download
progress_bar.start_task(download_task)
description = f"[bold blue]Downloading[/bold blue] [bold yellow]{result_filename}[/bold yellow]"
while not status.is_seeding:
status = handle.status()
progress_bar.update(
download_task,
completed=status.total_done,
peers=status.num_peers,
description=description,
)
alerts = session.pop_alerts()
alert: libtorrent.alert
for alert in alerts:
if (
alert.category()
& libtorrent.alert.category_t.error_notification
):
logger.debug(f"[Alert] {alert}")
time.sleep(1)
progress_bar.update(
download_task,
description=f"[bold blue]Finished Downloading[/bold blue] [bold green]{result_filename}[/bold green]",
completed=status.total_wanted,
)
if result_filename:
old_name = f"{base_path}/{status.name}"
new_name = f"{base_path}/{result_filename}"
os.rename(old_name, new_name)
logger.debug(f"Finished torrent download, renamed '{old_name}' to '{new_name}'")
return new_name
return ""
if __name__ == "__main__":
if len(sys.argv) < 2:
print("You need to pass in the .torrent file path.")
sys.exit(1)
download_torrent(sys.argv[1])
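For context, a hedged sketch of how a .torrent link yielded by the nyaa provider could be handed to download_torrent; the helper name and temp-file handling are illustrative, and the real CLI wiring is not shown in this diff:
import tempfile

import requests


def download_from_torrent_link(torrent_file_url: str, episode_title: str) -> str:
    # Fetch the .torrent file referenced by a provider "link"
    response = requests.get(torrent_file_url, timeout=10)
    response.raise_for_status()
    with tempfile.NamedTemporaryFile(suffix=".torrent", delete=False) as f:
        f.write(response.content)
        torrent_path = f.name
    # Hand the saved .torrent file to the libtorrent helper defined above
    return download_torrent(torrent_path, result_filename=episode_title)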


@@ -0,0 +1,114 @@
import json
import logging
import time
logger = logging.getLogger(__name__)
class ProviderStoreDB:
def __init__(
self,
provider_name,
cache_db_path: str,
max_lifetime: int = 604800,
max_size: int = (1024**2) * 10,
table_name: str = "fastanime_providers_store",
clean_db=False,
):
from ..common.sqlitedb_helper import SqliteDB
self.cache_db_path = cache_db_path
self.clean_db = clean_db
self.provider_name = provider_name
self.max_lifetime = max_lifetime
self.max_size = max_size
self.table_name = table_name
self.sqlite_db_connection = SqliteDB(self.cache_db_path)
# Prepare the cache table if it doesn't exist
self._create_store_table()
def _create_store_table(self):
"""Create cache table if it doesn't exist."""
with self.sqlite_db_connection as conn:
conn.execute(
f"""
CREATE TABLE IF NOT EXISTS {self.table_name} (
id TEXT,
data_type TEXT,
provider_name TEXT,
data TEXT,
cache_expiry INTEGER
)"""
)
def get(self, id: str, data_type: str, default=None):
with self.sqlite_db_connection as conn:
cursor = conn.cursor()
cursor.execute(
f"""
SELECT
data
FROM {self.table_name}
WHERE
id = ?
AND data_type = ?
AND provider_name = ?
AND cache_expiry > ?
""",
(id, data_type, self.provider_name, int(time.time())),
)
cached_data = cursor.fetchone()
if cached_data:
logger.debug("Found existing request in cache")
(json_data,) = cached_data
return json.loads(json_data)
return default
def set(self, id: str, data_type: str, data):
with self.sqlite_db_connection as connection:
cursor = connection.cursor()
cursor.execute(
f"""
INSERT INTO {self.table_name}
VALUES ( ?, ?,?, ?, ?)
""",
(
id,
data_type,
self.provider_name,
json.dumps(data),
int(time.time()) + self.max_lifetime,
),
)
class ProviderStoreMem:
def __init__(self) -> None:
from collections import defaultdict
self._store = defaultdict(dict)
def get(self, id: str, data_type: str, default=None):
return self._store[id].get(data_type, default)
def set(self, id: str, data_type: str, data):
self._store[id][data_type] = data
def ProviderStore(store_type, *args, **kwargs):
if store_type == "persistent":
return ProviderStoreDB(*args, **kwargs)
else:
return ProviderStoreMem()
if __name__ == "__main__":
store = ProviderStore("persistent", "test_provider", "provider_store")
store.set("123", "test", {"hello": "world"})
print(store.get("123", "test"))
print("-------------------------------")
store = ProviderStore("memory")
store.set("1", "test", {"hello": "world"})
print(store.get("1", "test"))


@@ -19,6 +19,7 @@ class PageInfo(TypedDict):
class SearchResult(TypedDict):
id: str
title: str
otherTitles: list[str]
availableEpisodes: list[str]
type: str
score: int


@@ -0,0 +1,216 @@
import base64
from itertools import cycle
from yt_dlp.utils import (
get_element_text_and_html_by_tag,
get_elements_text_and_html_by_attribute,
extract_attributes,
get_element_by_attribute,
)
import re
from yt_dlp.utils.traversal import get_element_html_by_attribute
from .constants import YUGEN_ENDPOINT, SEARCH_URL
from ..decorators import debug_provider
from ..base_provider import AnimeProvider
# ** Adapted from anipy-cli **
class YugenApi(AnimeProvider):
"""
Provides a fast and effective interface to the YugenAnime site.
"""
PROVIDER = "yugen"
api_endpoint = YUGEN_ENDPOINT
# HEADERS = {
# "Referer": ALLANIME_REFERER,
# }
@debug_provider(PROVIDER.upper())
def search_for_anime(
self,
user_query: str,
translation_type: str = "sub",
nsfw=True,
unknown=True,
**kwargs,
):
results = []
has_next = True
page = 0
while has_next:
page += 1
response = self.session.get(
SEARCH_URL, params={"q": user_query, "page": page}
)
search_results = response.json()
has_next = search_results["hasNext"]
results_html = search_results["query"]
anime = get_elements_text_and_html_by_attribute(
"class", "anime-meta", results_html, tag="a"
)
id_regex = re.compile(r"(\d+)\/([^\/]+)")
for _a in anime:
if not _a:
continue
a = extract_attributes(_a[1])
if not a:
continue
uri = a["href"]
identifier = id_regex.search(uri) # pyright:ignore
if identifier is None:
continue
if len(identifier.groups()) != 2:
continue
identifier = base64.b64encode(
f"{identifier.group(1)}/{identifier.group(2)}".encode()
).decode()
anime_title = a["title"]
languages = {"sub": 1, "dub": 0}
excl = get_element_by_attribute(
"class", "ani-exclamation", _a[1], tag="div"
)
if excl is not None:
if "dub" in excl.lower():
languages["dub"] = 1
#
results.append(
{
"id": identifier,
"title": anime_title,
"availableEpisodes": languages,
}
)
# the page counter is already advanced at the top of the while loop
return {
"pageInfo": {"total": len(results)},
"results": results,
}
@debug_provider(PROVIDER.upper())
def get_anime(self, anime_id: str, **kwargs):
identifier = base64.b64decode(anime_id).decode()
response = self.session.get(f"{YUGEN_ENDPOINT}/anime/{identifier}")
html_page = response.text
data_map = {
"id": anime_id,
"title": None,
"poster": None,
"genres": [],
"synopsis": None,
"release_year": None,
"status": None,
"otherTitles": [],
"availableEpisodesDetail": {},
}
sub_match = re.search(
r'<div class="ap-.+?">Episodes</div><span class="description" .+?>(\d+)</span></div>',
html_page,
)
if sub_match:
eps = int(sub_match.group(1))
data_map["availableEpisodesDetail"]["sub"] = list(map(str,range(1, eps + 1)))
dub_match = re.search(
r'<div class="ap-.+?">Episodes \(Dub\)</div><span class="description" .+?>(\d+)</span></div>',
html_page,
)
if dub_match:
eps = int(dub_match.group(1))
data_map["availableEpisodesDetail"]["dub"] = list(map(str,range(1, eps + 1)))
name = get_element_text_and_html_by_tag("h1", html_page)
if name is not None:
data_map["title"] = name[0].strip()
synopsis = get_element_by_attribute("class", "description", html_page, tag="p")
if synopsis is not None:
data_map["synopsis"] = synopsis
# FIXME: This is not working because ytdl is too strict on also getting a closing tag
try:
image = get_element_html_by_attribute(
"class", "cover", html_page, tag="img"
)
img_attrs = extract_attributes(image)
if img_attrs is not None:
data_map["image"] = img_attrs.get("src")
except Exception:
pass
data = get_elements_text_and_html_by_attribute(
"class", "data", html_page, tag="div"
)
for d in data:
title = get_element_text_and_html_by_tag("div", d[1])
desc = get_element_text_and_html_by_tag("span", d[1])
if title is None or desc is None:
continue
title = title[0]
desc = desc[0]
if title in ["Native", "Romaji"]:
data_map["alternative_names"].append(desc)
elif title == "Synonyms":
data_map["alternative_names"].extend(desc.split(","))
elif title == "Premiered":
try:
data_map["release_year"] = int(desc.split()[-1])
except (ValueError, TypeError):
pass
elif title == "Status":
data_map["status"] = title
elif title == "Genres":
data_map["genres"].extend([g.strip() for g in desc.split(",")])
return data_map
@debug_provider(PROVIDER.upper())
def get_episode_streams(
self, anime_id, episode_number: str, translation_type="sub"
):
"""get the streams of an episode
Args:
translation_type ([TODO:parameter]): [TODO:description]
anime: [TODO:description]
episode_number: [TODO:description]
Yields:
[TODO:description]
"""
identifier = base64.b64decode(anime_id).decode()
id_num, anime_title = identifier.split("/")
if translation_type == "dub":
video_query = f"{id_num}|{episode_number}|dub"
else:
video_query = f"{id_num}|{episode_number}"
#
res = self.session.post(
f"{YUGEN_ENDPOINT}/api/embed/",
data={
"id": base64.b64encode(video_query.encode()).decode(),
"ac": "0",
},
headers={"x-requested-with": "XMLHttpRequest"},
)
res = res.json()
yield {
"server": "gogoanime",
"episode_title": f"{anime_title}; Episode {episode_number}",
"headers": {},
"subtitles": [],
"links": [{"quality": quality, "link": link} for quality,link in zip(cycle(["1080","720","480","360"]),res["hls"])],
}
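For reference, the anime_id handled above is just a base64 encoding of the "<numeric id>/<slug>" pair lifted from the result URL; a small round-trip sketch with made-up values:
import base64

identifier = base64.b64encode(b"543/one-piece").decode()  # hypothetical id/slug pair

# get_anime() and get_episode_streams() reverse this step:
id_num, slug = base64.b64decode(identifier).decode().split("/")
print(id_num, slug)  # -> 543 one-piece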


@@ -0,0 +1,5 @@
YUGEN_ENDPOINT: str = "https://yugenanime.tv"
SEARCH_URL = YUGEN_ENDPOINT + "/api/discover/"
SERVERS_AVAILABLE = ["gogoanime"]


@@ -0,0 +1,15 @@
import logging
from requests import get
logger = logging.getLogger(__name__)
def fetch_anime_info_from_bal(anilist_id):
try:
url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{anilist_id}.json"
response = get(url, timeout=11)
if response.status_code == 200:
return response.json()
except Exception as e:
logger.error(e)


@@ -10,59 +10,122 @@ logger = logging.getLogger(__name__)
ANILIST_ENDPOINT = "https://graphql.anilist.co"
"""
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
currentPage
hasNextPage
}
media(search: $query, type: ANIME) {
id
idMal
title {
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
def search_foranime_with_anilist(anime_title: str):
def search_for_manga_with_anilist(manga_title: str):
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
query ($query: String) {
Page(perPage: 50) {
pageInfo {
currentPage
}
media(search: $query, type: MANGA,genre_not_in: ["hentai"]) {
id
idMal
title {
romaji
english
}
chapters
status
coverImage {
medium
large
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": {"query": manga_title}},
timeout=10,
)
if response.status_code == 200:
anilist_data: "AnilistDataSchema" = response.json()
return {
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
"results": [
{
"id": anime_result["id"],
"poster": anime_result["coverImage"]["large"],
"title": (
anime_result["title"]["romaji"]
or anime_result["title"]["english"]
)
+ f" [Chapters: {anime_result['chapters']}]",
"type": "manga",
"availableChapters": list(
range(
1,
(
anime_result["chapters"]
if anime_result["chapters"]
else 0
),
)
),
}
for anime_result in anilist_data["data"]["Page"]["media"]
],
}
def search_for_anime_with_anilist(anime_title: str, prefer_eng_titles=False):
query = """
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
currentPage
hasNextPage
}
media(search: $query, type: ANIME, genre_not_in: ["hentai"]) {
id
idMal
title {
romaji
english
}
episodes
status
synonyms
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
coverImage {
large
}
}
}
}
"""
response = post(
ANILIST_ENDPOINT,
@@ -75,22 +138,55 @@ def search_foranime_with_anilist(anime_title: str):
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
"results": [
{
"id": anime_result["id"],
"title": anime_result["title"]["romaji"]
or anime_result["title"]["english"],
"type": "anime",
"availableEpisodes": list(
range(
1,
"id": str(anime_result["id"]),
"title": (
(
anime_result["title"]["english"]
or anime_result["title"]["romaji"]
)
if prefer_eng_titles
else (
anime_result["title"]["romaji"]
or anime_result["title"]["english"]
)
),
"otherTitles": [
(
(
anime_result["episodes"]
if not anime_result["status"] == "RELEASING"
and anime_result["episodes"]
else (
anime_result["nextAiringEpisode"]["episode"] - 1
if anime_result["nextAiringEpisode"]
else 0
anime_result["title"]["romaji"]
or anime_result["title"]["english"]
)
if prefer_eng_titles
else (
anime_result["title"]["english"]
or anime_result["title"]["romaji"]
)
),
*(anime_result["synonyms"] or []),
],
"type": "anime",
"poster": anime_result["coverImage"]["large"],
"availableEpisodes": list(
map(
str,
range(
1,
(
anime_result["episodes"]
if not anime_result["status"] == "RELEASING"
and anime_result["episodes"]
else (
(
anime_result["nextAiringEpisode"]["episode"]
- 1
if anime_result["nextAiringEpisode"]
else 0
)
if not anime_result["episodes"]
else anime_result["episodes"]
)
)
+ 1,
),
)
),
@@ -111,23 +207,23 @@ def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
a boolean indicating success and none or an anilist object depending on success
"""
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
currentPage
hasNextPage
}
media(search: $query, type: ANIME) {
id
idMal
title {
romaji
english
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
}
}
}
}
"""
try:
@@ -164,24 +260,24 @@ def get_basic_anime_info_by_title(anime_title: str):
a boolean indicating success and none or an anilist object depending on success
"""
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
streamingEpisodes{
media(search: $query, type: ANIME,genre_not_in: ["hentai"]) {
id
idMal
title {
romaji
english
}
streamingEpisodes {
title
}
}
}
}
}
}
"""
from ...Utility.data import anime_normalizer


@@ -0,0 +1,209 @@
import json
import logging
import os
import re
import time
from datetime import datetime
from urllib.parse import urlencode
import requests
from .sqlitedb_helper import SqliteDB
logger = logging.getLogger(__name__)
caching_mimetypes = {
"application": {
"json",
"xml",
"x-www-form-urlencoded",
"x-javascript",
"javascript",
},
"text": {"html", "css", "javascript", "plain", "xml", "xsl", "x-javascript"},
}
class CachedRequestsSession(requests.Session):
__request_functions__ = (
"get",
"options",
"head",
"post",
"put",
"patch",
"delete",
)
def __new__(cls, *args, **kwargs):
def caching_params(name: str):
def wrapper(self, *args, **kwargs):
return cls.request(self, name, *args, **kwargs)
return wrapper
for func in cls.__request_functions__:
setattr(cls, func, caching_params(func))
return super().__new__(cls)
def __init__(
self,
cache_db_path: str,
max_lifetime: int = 259200,
max_size: int = (1024**2) * 10,
table_name: str = "fastanime_requests_cache",
clean_db=False,
*args,
**kwargs,
):
super().__init__(*args, **kwargs)
self.cache_db_path = cache_db_path
self.max_lifetime = max_lifetime
self.max_size = max_size
self.table_name = table_name
self.sqlite_db_connection = SqliteDB(self.cache_db_path)
# Prepare the cache table if it doesn't exist
self._create_cache_table()
def _create_cache_table(self):
"""Create cache table if it doesn't exist."""
with self.sqlite_db_connection as conn:
conn.execute(
f"""
CREATE TABLE IF NOT EXISTS {self.table_name} (
url TEXT,
status_code INTEGER,
request_headers TEXT,
response_headers TEXT,
data BLOB,
redirection_policy INT,
cache_expiry INTEGER
)"""
)
def request(
self,
method,
url,
params=None,
force_caching=False,
fresh=int(os.environ.get("FASTANIME_FRESH_REQUESTS", 0)),
*args,
**kwargs,
):
if params:
url += "?" + urlencode(params)
redirection_policy = int(kwargs.get("force_redirects", False))
with self.sqlite_db_connection as conn:
cursor = conn.cursor()
time_before_access_db = datetime.now()
logger.debug("Checking for existing request in cache")
cursor.execute(
f"""
SELECT
status_code,
request_headers,
response_headers,
data,
redirection_policy
FROM {self.table_name}
WHERE
url = ?
AND redirection_policy = ?
AND cache_expiry > ?
""",
(url, redirection_policy, int(time.time())),
)
cached_request = cursor.fetchone()
time_after_access_db = datetime.now()
if cached_request and not fresh:
logger.debug("Found existing request in cache")
(
status_code,
request_headers,
response_headers,
data,
redirection_policy,
) = cached_request
response = requests.Response()
response.headers.update(json.loads(response_headers))
response.status_code = status_code
response._content = data
if "timeout" in kwargs:
kwargs.pop("timeout")
if "headers" in kwargs:
kwargs.pop("headers")
_request = requests.Request(
method, url, headers=json.loads(request_headers), *args, **kwargs
)
response.request = _request.prepare()
response.elapsed = time_after_access_db - time_before_access_db
return response
# Perform the request and cache it
response = super().request(method, url, *args, **kwargs)
if response.ok and (
force_caching
or self.is_content_type_cachable(
response.headers.get("content-type"), caching_mimetypes
)
and len(response.content) < self.max_size
):
logger.debug("Caching the current request")
cursor.execute(
f"""
INSERT INTO {self.table_name}
VALUES (?, ?, ?, ?, ?, ?, ?)
""",
(
url,
response.status_code,
json.dumps(dict(response.request.headers)),
json.dumps(dict(response.headers)),
response.content,
redirection_policy,
int(time.time()) + self.max_lifetime,
),
)
return response
@staticmethod
def is_content_type_cachable(content_type, caching_mimetypes):
"""Checks whether the given encoding is supported by the cacher"""
if content_type is None:
return True
mime, contents = content_type.split("/")
contents = re.sub(r";.*$", "", contents)
return mime in caching_mimetypes and any(
content in caching_mimetypes[mime] for content in contents.split("+")
)
if __name__ == "__main__":
with CachedRequestsSession("cache.db") as session:
response = session.get(
"https://google.com",
)
response_b = session.get(
"https://google.com",
)
print("A: ", response.elapsed)
print("B: ", response_b.elapsed)
print(response_b.text[0:30])
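Two knobs worth noting in the session above: force_caching can be passed per request, and fresh defaults to the FASTANIME_FRESH_REQUESTS environment variable, which is read once at import time because it is a default argument, so it must be set before the module is imported. A hedged sketch with an illustrative URL:
with CachedRequestsSession("cache.db") as session:
    # Cache even responses whose content-type would not normally be cached
    cached = session.get("https://example.com/data.json", force_caching=True)
    # Bypass the cache for this single call
    live = session.get("https://example.com/data.json", fresh=True)
    print(cached.status_code, live.status_code)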


@@ -0,0 +1,34 @@
import logging
import sqlite3
import time
logger = logging.getLogger(__name__)
class SqliteDB:
def __init__(self, db_path: str) -> None:
self.db_path = db_path
self.connection = sqlite3.connect(self.db_path)
logger.debug("Enabling WAL mode for concurrent access")
self.connection.execute("PRAGMA journal_mode=WAL;")
self.connection.close()
self.connection = None
def __enter__(self):
logger.debug("Starting new connection...")
start_time = time.time()
self.connection = sqlite3.connect(self.db_path)
logger.debug(
"Successfully got a new connection in {} seconds".format(
time.time() - start_time
)
)
return self.connection
def __exit__(self, exc_type, exc_val, exc_tb):
if self.connection:
logger.debug("Closing connection to cache db")
self.connection.commit()
self.connection.close()
self.connection = None
logger.debug("Successfully closed connection to cache db")
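A minimal usage sketch of the helper above; the database path and table are made up. Each with block opens a fresh connection and commits and closes it on exit:
db = SqliteDB("example.db")

with db as conn:
    conn.execute("CREATE TABLE IF NOT EXISTS demo (k TEXT, v TEXT)")
    conn.execute("INSERT INTO demo VALUES (?, ?)", ("hello", "world"))

with db as conn:
    row = conn.execute("SELECT v FROM demo WHERE k = ?", ("hello",)).fetchone()
    print(row)  # -> ('world',)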


@@ -5,7 +5,6 @@ import subprocess
import sys
from typing import Callable, List
# TODO: will probably scrap art, not too useful
from click import clear
from rich import print
@@ -50,7 +49,7 @@ class FZF:
"--info=hidden",
"--layout=reverse",
"--height=100%",
"--bind=right:accept",
"--bind=right:accept,ctrl-/:toggle-preview,ctrl-space:toggle-wrap+toggle-preview-wrap",
"--no-margin",
"+m",
"-i",
@@ -123,7 +122,9 @@ class FZF:
[self.FZF_EXECUTABLE, *commands],
input=fzf_input,
stdout=subprocess.PIPE,
universal_newlines=True,
text=True,
encoding="utf-8",
)
if not result or result.returncode != 0 or not result.stdout:
print("sth went wrong:confused:")
@@ -162,7 +163,7 @@ class FZF:
HEADER,
"--header-first",
"--prompt",
prompt.title(),
f"{prompt.title()}: ",
] # pyright:ignore
if preview:


@@ -0,0 +1 @@
manga_sources = {"mangadex": "api.MangaDexApi"}


@@ -0,0 +1,13 @@
import requests
from yt_dlp.utils.networking import random_user_agent
class MangaProvider:
session: requests.Session
USER_AGENT = random_user_agent()
HEADERS = {}
def __init__(self) -> None:
self.session = requests.session()
self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})


@@ -0,0 +1,15 @@
import logging
from requests import get
logger = logging.getLogger(__name__)
def fetch_manga_info_from_bal(anilist_id):
try:
url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/manga/{anilist_id}.json"
response = get(url, timeout=11)
if response.ok:
return response.json()
except Exception as e:
logger.error(e)


@@ -0,0 +1,51 @@
import logging
from ...common.mini_anilist import search_for_manga_with_anilist
from ..base_provider import MangaProvider
from ..common import fetch_manga_info_from_bal
logger = logging.getLogger(__name__)
class MangaDexApi(MangaProvider):
def search_for_manga(self, title: str, *args):
try:
search_results = search_for_manga_with_anilist(title)
return search_results
except Exception as e:
logger.error(f"[MANGADEX-ERROR]: {e}")
def get_manga(self, anilist_manga_id: str):
bal_data = fetch_manga_info_from_bal(anilist_manga_id)
if not bal_data:
return
manga_id, MangaDexManga = next(iter(bal_data["Sites"]["Mangadex"].items()))
return {
"id": manga_id,
"title": MangaDexManga["title"],
"poster": MangaDexManga["image"],
"availableChapters": [],
}
def get_chapter_thumbnails(self, manga_id, chapter):
chapter_info_url = f"https://api.mangadex.org/chapter?manga={manga_id}&translatedLanguage[]=en&chapter={chapter}&includeEmptyPages=0"
chapter_info_response = self.session.get(chapter_info_url)
if not chapter_info_response.ok:
return
chapter_info = next(iter(chapter_info_response.json()["data"]))
chapters_thumbnails_url = (
f"https://api.mangadex.org/at-home/server/{chapter_info['id']}"
)
chapter_thumbnails_response = self.session.get(chapters_thumbnails_url)
if not chapter_thumbnails_response.ok:
return
chapter_thumbnails_info = chapter_thumbnails_response.json()
base_url = chapter_thumbnails_info["baseUrl"]
hash = chapter_thumbnails_info["chapter"]["hash"]
return {
"thumbnails": [
f"{base_url}/data/{hash}/{chapter_thumbnail}"
for chapter_thumbnail in chapter_thumbnails_info["chapter"]["data"]
],
"title": chapter_info["attributes"]["title"],
}


@@ -2,8 +2,6 @@ import subprocess
from shutil import which
from sys import exit
from plyer import notification
from fastanime import APP_NAME
from ...constants import ICON_PATH
@@ -13,6 +11,7 @@ class RofiApi:
ROFI_EXECUTABLE = which("rofi")
rofi_theme = ""
rofi_theme_preview = ""
rofi_theme_confirm = ""
rofi_theme_input = ""
@@ -23,9 +22,9 @@ class RofiApi:
raise Exception("Rofi not found")
args = [self.ROFI_EXECUTABLE]
if self.rofi_theme:
args.extend(["-no-config", "-theme", self.rofi_theme])
args.extend(["-p", prompt_text, "-i", "-show-icons", "-dmenu"])
if self.rofi_theme_preview:
args.extend(["-no-config", "-theme", self.rofi_theme_preview])
args.extend(["-p", f"{prompt_text.title()}", "-i", "-show-icons", "-dmenu"])
result = subprocess.run(
args,
input=rofi_input,
@@ -35,6 +34,13 @@ class RofiApi:
choice = result.stdout.strip()
if not choice:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify(
app_name=APP_NAME,
app_icon=ICON_PATH,
@@ -64,6 +70,13 @@ class RofiApi:
choice = result.stdout.strip()
if not choice or choice not in options:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify(
app_name=APP_NAME,
app_icon=ICON_PATH,
@@ -91,6 +104,13 @@ class RofiApi:
choice = result.stdout.strip()
if not choice:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify(
app_name=APP_NAME,
app_icon=ICON_PATH,
@@ -120,6 +140,13 @@ class RofiApi:
user_input = result.stdout.strip()
if not user_input:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify(
app_name=APP_NAME,
app_icon=ICON_PATH,

make_release Executable file

@@ -0,0 +1,11 @@
#! /usr/bin/env sh
CLI_DIR="$(dirname "$(realpath "$0")")"
VERSION=$1
[ -z "$VERSION" ] && echo no version provided && exit 1
[ "$VERSION" = "current" ] && fastanime --version && exit 0
sed -i "s/^version.*/version = \"$VERSION\"/" "$CLI_DIR/pyproject.toml" &&
sed -i "s/__version__.*/__version__ = \"v$VERSION\"/" "$CLI_DIR/fastanime/__init__.py" &&
git stage "$CLI_DIR/pyproject.toml" "$CLI_DIR/fastanime/__init__.py" &&
git commit -m "chore: bump version (v$VERSION)" &&
git push &&
gh release create "v$VERSION"

poetry.lock generated

File diff suppressed because it is too large

pyinstaller.spec Normal file

@@ -0,0 +1,65 @@
# -*- mode: python ; coding: utf-8 -*-
from PyInstaller.utils.hooks import collect_data_files, collect_submodules
block_cipher = None
# Collect all required data files
datas = [
('fastanime/assets/*', 'fastanime/assets'),
]
# Collect all required hidden imports
hiddenimports = [
'click',
'rich',
'requests',
'yt_dlp',
'python_mpv',
'fuzzywuzzy',
'fastanime',
] + collect_submodules('fastanime')
a = Analysis(
['./fastanime/fastanime.py'], # Changed entry point
pathex=[],
binaries=[],
datas=datas,
hiddenimports=hiddenimports,
hookspath=[],
hooksconfig={},
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
strip=True, # Strip debug information
optimize=2 # Optimize bytecode noarchive=False
)
pyz = PYZ(
a.pure,
a.zipped_data,
optimize=2 # Optimize bytecode cipher=block_cipher
)
exe = EXE(
pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='fastanime',
debug=False,
bootloader_ignore_signals=False,
strip=True,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True,
disable_windowed_traceback=False,
target_arch=None,
codesign_identity=None,
entitlements_file=None,
icon='fastanime/assets/logo.ico'
)


@@ -1,35 +1,36 @@
[tool.poetry]
[project]
name = "fastanime"
version = "2.4.2"
version = "2.7.4"
description = "A browser anime site experience from the terminal"
authors = ["Benextempest <benextempest@gmail.com>"]
license = "UNLICENSE"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"click>=8.1.7",
"inquirerpy>=0.3.4",
"requests>=2.32.3",
"rich>=13.9.2",
"thefuzz>=0.22.1",
"yt-dlp>=2024.10.7",
]
[tool.poetry.dependencies]
python = "^3.10"
yt-dlp = "^2024.5.27"
rich = "^13.7.1"
click = "^8.1.7"
inquirerpy = "^0.3.4"
thefuzz = "^0.22.1"
requests = "^2.32.3"
plyer = "^2.1.0"
mpv = "^1.0.7"
[tool.poetry.group.dev.dependencies]
black = "^24.4.2"
isort = "^5.13.2"
pytest = "^8.2.2"
ruff = "^0.4.10"
pre-commit = "^3.7.1"
autoflake = "^2.3.1"
tox = "^4.16.0"
pyright = "^1.1.374"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.poetry.scripts]
[project.scripts]
fastanime = 'fastanime:FastAnime'
[project.optional-dependencies]
standard = ["fastapi[standard]>=0.115.0", "mpv>=1.0.7", "plyer>=2.1.0"]
api = ["fastapi[standard]>=0.115.0"]
notifications = ["plyer>=2.1.0"]
mpv = ["mpv>=1.0.7"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.uv]
dev-dependencies = [
"pyinstaller>=6.11.1",
"pyright>=1.1.384",
"pytest>=8.3.3",
"ruff>=0.6.9",
]


@@ -1,4 +1,3 @@
# TODO: Write tests to make sure all click commands work
import pytest
from click.testing import CliRunner
@@ -7,7 +6,7 @@ from fastanime.cli import run_cli
@pytest.fixture
def runner():
return CliRunner()
return CliRunner(env={"FASTANIME_CACHE_REQUESTS": "false"})
def test_main_help(runner: CliRunner):

tox.ini

@@ -5,23 +5,23 @@ env_list = lint, pyright, py{310,311}
[testenv]
description = run unit tests
deps =poetry
deps =uv
commands =
poetry install
poetry run pytest
uv sync --dev --all-extras
uv run pytest
[testenv:lint]
description = run linters
skip_install = true
deps =poetry
deps =uv
commands =
poetry install
poetry run black .
uv sync --dev --all-extras
uv run ruff format .
[testenv:pyright]
description = run type checking
skip_install = true
deps =poetry
deps =uv
commands =
poetry install --no-root
poetry run pyright
uv sync --dev --all-extras
uv run pyright

uv.lock generated Normal file

File diff suppressed because it is too large