Compare commits

...

171 Commits

Author SHA1 Message Date
benexl
4c8ff2ae9b chore: update lock files 2025-01-04 23:47:48 +03:00
benexl
23274de367 chore: bump version (v2.8.7) 2025-01-04 23:47:45 +03:00
benexl
2aec40ead0 refactor: temporarily remove nix from make - release 2025-01-04 23:47:42 +03:00
benexl
172f2bb1de chore: bump version in uv.lock 2025-01-04 23:37:55 +03:00
benexl
2f5684a93a feat(cli): add option to disable user config 2025-01-04 23:37:30 +03:00
benexl
1d40160abf chore: update rofi configs 2025-01-04 23:37:11 +03:00
Benedict Xavier
af84d80137 Merge pull request #46 from piradata/patch-2
docs(config): correct some typos
2025-01-01 01:31:58 +03:00
Piradata
e6412631ae correct some typos 2024-12-31 19:06:53 -03:00
Benedict Xavier
8023edcf3a Update README.md 2024-12-23 20:45:07 +03:00
Benedict Xavier
0cb50cd506 Update README.md 2024-12-20 13:55:39 +03:00
Benedict Xavier
50c048e158 Merge pull request #40 from serialjaywalker/master
feat: Check if in venv before attempting user install/update.
2024-12-17 20:34:51 +03:00
Serial_Jaywalker
c0a57c7814 Check if in venv before attempting user install/update. 2024-12-16 01:52:03 -08:00
benexl
bcdd88c725 chore: bump version 2024-12-13 08:34:24 +03:00
benexl
d45d438663 chore: bump version (vv2.8.6) 2024-12-13 08:18:54 +03:00
Benexl
3d12059e27 Update README.md 2024-12-12 20:04:31 +03:00
benexl
677f4690fa feat(anilist): make perPage configurable 2024-12-07 21:49:37 +03:00
Benexl
a79b59f727 Merge pull request #38 from she11sh0cked/master
fix(nix): add pypresence to flake.nix
2024-12-03 15:54:12 +03:00
she11sh0cked
5641c245e7 fix(nix): add pypresence to flake.nix 2024-12-03 13:49:52 +01:00
Benexl
058fc285cd Merge pull request #36 from gand0rf/master
Added pypresence discord function
2024-12-03 12:47:46 +03:00
Benexl
71cfe667c9 Merge branch 'master' into master 2024-12-03 12:39:56 +03:00
benex
d9692201aa fix: type errors 2024-12-03 12:38:29 +03:00
benex
1fd4087b41 fix: type errors 2024-12-03 12:32:08 +03:00
benex
787eb0c9ca refactor: conform all provider functions 2024-12-03 12:29:47 +03:00
benex
acd937f8ab fix(allanime): argument parsing 2024-12-03 12:07:53 +03:00
Gand0rf
52af68d13f minor chagne to discord.py 2024-12-02 20:26:49 -05:00
Gand0rf
1ff3074fad updated discord.py 2024-12-02 20:11:40 -05:00
Gand0rf
debaa2ffa6 Moved where threading switch is created to remove error 2024-12-02 19:37:49 -05:00
Gand0rf
5b6ccbe748 Fixing erros per github python test 2024-12-02 19:32:07 -05:00
Gand0rf
d6ca923951 Delete line from config.py 2024-12-02 19:25:50 -05:00
Gand0rf
0e9bf7f2de added pypresence with uv command 2024-12-02 19:16:25 -05:00
Gand0rf
ccad2435b0 Added pypresence discord function 2024-12-02 19:08:51 -05:00
benex
30fa9851dd feat(animepahe): refactor API calls to use query parameters and improve stream retrieval logic 2024-12-03 00:25:18 +03:00
benex
000bae9bb7 refactor(animepahe): update import path for process_animepahe_embed_page 2024-12-02 23:57:18 +03:00
benex
8c2bb71e08 feat(animepahe): enhance search functionality and improve error handling in episode retrieval 2024-12-02 23:54:35 +03:00
benex
57393b085a feat(animepahe): refactor episode stream retrieval and enhance error handling 2024-12-02 23:46:28 +03:00
benex
5f721847d7 feat(allanime): improve error handling for stream source retrieval 2024-12-02 23:19:18 +03:00
benex
383cb62ede style: format files 2024-12-02 22:26:03 +03:00
benex
434ac947dd chore: add pre-commit as dev dep 2024-12-02 21:06:48 +03:00
benex
d0fb39cede style: format files with ruff 2024-12-02 18:01:20 +03:00
benex
f98ae77587 feat(allanime): enhance stream source handling with new cases and regex for MP4 server (mp4-upload: success) 2024-12-02 16:27:58 +03:00
benex
33e1b0fb6f refactor(allanime): introduce default constants for search parameters and improve code readability 2024-12-02 12:19:03 +03:00
benex
7134702eb9 feat(allanime): add command-line interface for anime search and streaming 2024-12-02 12:08:38 +03:00
benex
cac7586a86 refactor(constants): update API_ENDPOINT to use f-string for improved readability 2024-12-02 12:08:37 +03:00
benex
0b9da27def refactor(allanime): enhance error handling in API response processing 2024-12-02 12:08:37 +03:00
benex
ddbb4ca451 refactor(anime_provider): simplify URL processing in AllAnime class 2024-12-02 12:08:37 +03:00
benex
757393aa36 refactor(allanime): update constants and improve naming for clarity 2024-12-02 12:08:37 +03:00
benex
eb54d5e995 chore: add VSCode settings for Python auto import completions 2024-12-02 12:08:37 +03:00
benex
0d95a38321 refactor(anime_provider): replace PROVIDER attribute with class name for improved clarity 2024-12-02 12:08:37 +03:00
benex
8d2734db74 refactor(allanime): streamline API methods and improve naming conventions 2024-12-02 12:08:37 +03:00
benex
b3abcb958b refactor: simplify debug_provider decorator and remove redundant provider name usage 2024-12-02 12:08:37 +03:00
benex
0667749e4c refactor: rename API classes for consistency and clarity 2024-12-02 12:08:37 +03:00
Benexl
57e73e6799 Update README.md 2024-11-28 20:00:45 +03:00
Benexl
7d890b9719 Update README.md 2024-11-28 19:51:28 +03:00
benex
8cbbcf458d chore: update lock files 2024-11-28 09:16:32 +03:00
benex
67bc25a527 chore: bump version (v2.8.4) 2024-11-28 09:16:23 +03:00
benex
e668f9326a feat(anilist): add support for relations and recommendations 2024-11-25 14:16:07 +03:00
benex
a02db6471f fix(hianim) always use fresh requests in extractors 2024-11-25 12:53:16 +03:00
Benexl
08b1f0c90c Merge pull request #31 from iMithrellas/feature-menu-order
feat: add  menu order feature
2024-11-24 22:56:31 +03:00
Benexl
3ec8dbee8c Merge branch 'master' into feature-menu-order 2024-11-24 22:55:19 +03:00
iMithrellas
473c11faca Added 'menu_order' into the default object and default config. 2024-11-24 20:45:23 +01:00
benex
320e3799d3 chore: update username 2024-11-24 15:05:33 +03:00
benex
a0f28ddf6d feat(cli): don't check for update if running notifier 2024-11-24 14:55:38 +03:00
benex
9512c3530a feat: check for updates after every 12hrs 2024-11-24 14:43:43 +03:00
iMithrellas
72602a0ec1 Removed an accidentaly added import by my LSP. 2024-11-24 12:02:23 +01:00
iMithrellas
4daf6a2b07 Merge branch 'Benexl:master' into feature-menu-order 2024-11-24 11:49:24 +01:00
benex
8b37927f6a fix(anilist-download): prefer romaji title over english when searching provider 2024-11-24 13:44:02 +03:00
benex
9d6f785a7f fix(anilist-interface): check if both total and stop time are defined 2024-11-24 13:26:57 +03:00
benex
897c34d98c chore(hianime): include source to solution 2024-11-24 13:26:28 +03:00
benex
28c75215bd chore: update flake.nix 2024-11-24 13:26:08 +03:00
benex
8697b27fe0 Merge branch 'hianime'
Recovers HD2 server of hianime
2024-11-24 13:14:22 +03:00
benex
b6e05c877b feat(hianime): finish HD2 server recovery 2024-11-24 13:10:52 +03:00
benex
d8c3ba6181 feat(hianime): finish megacloud extractor 2024-11-24 13:10:08 +03:00
benex
8b5c917038 Merge branch 'master' into hianime 2024-11-24 12:44:02 +03:00
Benexl
856f62c245 Update README.md 2024-11-24 11:48:30 +03:00
Vlastimil Urban
02dfc9d71c feat: configurable main_menu 2024-11-24 02:17:30 +01:00
benex
cef0bae528 feat(anilist-interface): finish next and previous page implementation 2024-11-23 17:44:53 +03:00
benex
4867720ad2 feat: implement experimental next and previous page. 2024-11-23 17:44:53 +03:00
benex
8d85e30150 feat(runtime): add current-page and current-data-loader to runtime 2024-11-23 17:44:53 +03:00
benex
eb99b7e6ba feat(anilist_api): also make the page configurable 2024-11-23 17:44:53 +03:00
Benexl
089c049f26 Merge pull request #28 from Type-Delta/psflag-fix
Fix(downloader): corrupted Parametric Stereo (PS) flag in downloaded .m3u8 videos
2024-11-23 14:15:30 +03:00
benex
a33e47d205 fix(config): use separate var for the config file val 2024-11-23 14:08:48 +03:00
benex
25dc35eaaf feat: print update message to stderr + disable auto check for updates (needs better implementation) 2024-11-23 14:08:22 +03:00
Type-Delta
525586e955 Merge branch 'Benexl:master' into psflag-fix 2024-11-23 17:11:58 +07:00
Type-Delta
5129219e23 fix: added --force-ffmpeg & --hls-use-mpegts options to properly handle some m3u8 streams 2024-11-23 17:10:31 +07:00
benex
7cd97c78b1 feat(config): make the max_cache_lifetime configurable 2024-11-22 23:21:58 +03:00
benex
27b4422ef3 feat(requests_cacher): make more reliable by ordering the results by created_at 2024-11-22 23:21:32 +03:00
benex
1c367c8aa1 feat(anilist-interface): add resume flag to auto continue from the most recent anime 2024-11-22 22:28:12 +03:00
Benex
7b6cc48b90 Update README.md 2024-11-22 09:56:26 +03:00
Benex
812d0110a7 Update README.md 2024-11-22 08:41:04 +03:00
Benex
60b05bf0ac Merge pull request #29 from Pixelizer09/master
feat: Update data.py
2024-11-22 08:22:12 +03:00
benex
d830cca3bc feat: init resurrection hianime 2024-11-22 08:14:45 +03:00
Pixelizer09
209e93b6d9 Merge branch 'master' into master 2024-11-22 11:28:44 +08:00
Pixelizer09
b10d9dc39a Update data.py 2024-11-22 11:27:29 +08:00
Benex
fe8cda094c Update README.md 2024-11-21 18:55:55 +03:00
Benex
33c06eab0a Update README.md 2024-11-21 18:07:13 +03:00
benex
f3f4be7410 chore: update lock files 2024-11-21 16:52:30 +03:00
benex
3915ef0fb6 chore: bump version (v2.8.3) 2024-11-21 16:52:23 +03:00
benex
20d26166dd docs: update readme 2024-11-21 16:50:07 +03:00
benex
ddca724bd8 chore: update deps 2024-11-21 16:40:21 +03:00
benex
b86c1a0479 feat: add fastanime anilist download beta 2024-11-21 16:40:09 +03:00
benex
1fa7830ddf Merge branch 'master' into fastanime-anilist-download 2024-11-21 15:35:36 +03:00
Benex
59abafbe16 Update README.md 2024-11-21 13:16:56 +03:00
Benex
b6eebb9736 Update README.md 2024-11-21 10:43:19 +03:00
benex
7797053102 chore: update lock files 2024-11-21 10:30:06 +03:00
benex
d763445f72 chore: bump version (v2.8.2) 2024-11-21 10:30:03 +03:00
benex
7bc6b14b5f docs: update config file docs 2024-11-21 10:29:39 +03:00
benex
f70d2ac8af feat(make_release): update to include nix files 2024-11-21 10:29:39 +03:00
benex
defdfc5a47 refactor: update todo 2024-11-21 10:29:39 +03:00
benex
e67eeda492 feat(updater): add ability to update on nix 2024-11-21 10:29:39 +03:00
benex
a17588d02c fix(interfaces): episode previews 2024-11-21 10:29:39 +03:00
Benex
67b59305c4 Update README.md 2024-11-21 07:43:53 +03:00
Pixelizer09
61db9aeea6 Update data.py 2024-11-21 10:57:02 +08:00
Benex
4f0768a060 Merge pull request #24 from gand0rf/chafa_call_update
fix(anilist_interface):  Updated chafa call
2024-11-19 22:14:29 +03:00
Gand0rf
21704cbbea Updated chafa call 2024-11-19 14:00:06 -05:00
Benex
886bc4d011 Update README.md 2024-11-19 19:23:51 +03:00
Benex
e3437e066a Update README.md 2024-11-19 19:18:49 +03:00
Benex
8f2795843a Update README.md 2024-11-19 19:03:53 +03:00
Benex
c6290592e8 Merge pull request #23 from she11sh0cked/master
fix(cli): prevent update check during completions
2024-11-19 19:01:57 +03:00
she11sh0cked
050ba740b8 fix(cli): prevent update check during completions 2024-11-19 15:24:55 +01:00
benex
0b1a27b223 chore: bump version (v2.8.1) 2024-11-19 14:08:42 +03:00
benex
bafd04b788 chore: update lock file 2024-11-19 14:08:08 +03:00
benex
fb5f51eea5 feat(config): add customization options 2024-11-19 14:08:08 +03:00
benex
799e1f0681 fix(updater): handle no internet 2024-11-19 14:08:08 +03:00
benex
53a2d953f8 feat: enable customization of the preview window 2024-11-19 14:08:08 +03:00
Benex
9ce5bc3c76 Update README.md 2024-11-19 11:38:33 +03:00
benex
dc58fc8536 chore: bump version (v2.8.0) 2024-11-19 10:06:57 +03:00
benex
1d5c3016fc chore: update lock file 2024-11-19 10:06:38 +03:00
benex
8737aea746 fix(player): set character encoding for compatibility with windows 2024-11-19 10:06:38 +03:00
Benedict Xavier
bd03866f5e Update README.md 2024-11-19 08:40:09 +03:00
Benedict Xavier
81690a8015 Update README.md 2024-11-19 08:37:26 +03:00
Benedict Xavier
933112a52b Update README.md 2024-11-19 08:34:23 +03:00
Benedict Xavier
eb513dfe0e Update README.md 2024-11-19 08:23:45 +03:00
Benedict Xavier
3928b77506 Update README.md 2024-11-19 08:17:44 +03:00
Benedict Xavier
95cb2bd78c Update README.md 2024-11-19 08:03:33 +03:00
Benedict Xavier
4fa1c45eb2 Update DISCLAIMER.md 2024-11-19 08:02:41 +03:00
Benedict Xavier
b9051bc792 Create DISCLAIMER.md 2024-11-19 08:01:23 +03:00
Benedict Xavier
a590024f1c Merge pull request #15 from piradata/patch-1
Update README.md typo
2024-11-19 07:42:10 +03:00
Benedict Xavier
2f51936679 Merge pull request #16 from AtticusHelvig/uvREADME
Update README.md for uv Instructions
2024-11-19 07:37:26 +03:00
Atticus Helvig
327c50d290 Update README.md 2024-11-18 20:32:38 -07:00
Piradata
031dfbb9b5 Update README.md typo 2024-11-19 00:15:47 -03:00
benex
050365302a chore: bump flake.nix 2024-11-18 20:10:21 +03:00
benex
0f248b1119 chore: update flake.nix to include external deps and all python deps 2024-11-18 20:04:57 +03:00
benex
871d5cf758 chore: add result to .gitignore 2024-11-18 20:04:22 +03:00
benex
320376d2e8 chore: bump version (v2.7.9) 2024-11-18 18:19:21 +03:00
benex
02e7fdff6f chore: update lock file 2024-11-18 18:17:03 +03:00
benex
2c5c28f295 refactor(config): remove useless functions 2024-11-18 18:17:03 +03:00
benex
2d3509ccc1 chore: bump version (v2.7.8) 2024-11-18 17:10:26 +03:00
benex
30babf2d69 feat(cli): alert the user that the cli is checking for updates 2024-11-18 17:10:10 +03:00
benex
cfbbabf898 chore: update uv.lock 2024-11-18 17:10:10 +03:00
benex
5ac6c45fdf fix(allanime_provider): give all the servers the referer header 2024-11-18 17:10:10 +03:00
Benedict Xavier
a14645b563 Update README.md 2024-11-18 15:17:32 +03:00
Benedict Xavier
90dbc26c46 Merge pull request #9 from KaylorBen/master
fix: for nix flake
2024-11-18 13:31:31 +03:00
Benjamin Kaylor
54cc830c35 sed flake preBuild 2024-11-18 03:23:32 -07:00
Benjamin Kaylor
4928ff5b74 undo pyproject.toml edit 2024-11-18 03:20:55 -07:00
Benjamin Kaylor
bb481fe21a fix for nix flake 2024-11-17 16:21:56 -07:00
benex
0d27b8f652 chore: bump version (v2.7.7) 2024-11-17 20:08:37 +03:00
benex
bdd3aae399 chore: update lockfile 2024-11-17 20:08:08 +03:00
benex
af94cd7eb5 fix: recent anime 2024-11-17 20:07:46 +03:00
benex
54044f9527 chore: bump version (v2.7.6) 2024-11-17 18:46:42 +03:00
benex
1e5c039ece chore: update flake.nix 2024-11-17 18:45:32 +03:00
benex
15555759dc feat: ask user if they want to update on new release 2024-11-17 18:45:17 +03:00
benex
0ed51e05cc chore: fix error is in flake.nix 2024-11-17 17:23:41 +03:00
benex
634ef6febf chore: update lock file 2024-11-17 14:28:02 +03:00
benex
bda4b2dbe1 chore: add flake.nix 2024-11-17 14:27:41 +03:00
benex
f015305e7c chore: add shell.nix 2024-11-17 14:27:32 +03:00
benex
d32b7e917f chore: bump version (v2.7.5) 2024-11-16 22:47:31 +03:00
benex
3b35e80199 chore: update lock file 2024-11-16 22:47:19 +03:00
benex
c65a1a2815 feat: by default check for updates when any command is ran 2024-11-16 22:47:04 +03:00
benex
0b3615c9f5 feat: add default rofi themes 2024-11-16 22:46:19 +03:00
Benex254
966301bce8 feat: register fastanime anilist download(s) 2024-10-20 10:39:53 +03:00
Benex254
d776880306 feat: init fastanime anilist download(s) 2024-10-20 10:14:03 +03:00
70 changed files with 4205 additions and 2179 deletions

.gitignore (1 change)

@@ -176,3 +176,4 @@ app/View/SearchScreen/.search_screen.py.un~
app/View/SearchScreen/search_screen.py~
app/user_data.json
.buildozer
result

.pre-commit-config.yaml

@@ -1,5 +1,5 @@
 default_language_version:
-  python: python3.10
+  python: python3.12
 repos:
   - repo: https://github.com/pycqa/isort
@@ -7,7 +7,7 @@ repos:
     hooks:
       - id: isort
         name: isort (python)
-        args: ["--profile", "black"] # Ensure compatibility with Black
+        args: ["--profile", "black"]
   - repo: https://github.com/PyCQA/autoflake
     rev: v2.2.1
@@ -19,17 +19,15 @@ repos:
           "--remove-unused-variables",
           "--remove-all-unused-imports",
         ]
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    # Ruff version.
-    rev: v0.4.10
-    hooks:
-      # Run the linter.
-      - id: ruff
-        args: [--fix]
+  # - repo: https://github.com/astral-sh/ruff-pre-commit
+  #   rev: v0.4.10
+  #   hooks:
+  #     - id: ruff
+  #       args: [--fix]
   - repo: https://github.com/psf/black-pre-commit-mirror
     rev: 24.4.2
     hooks:
       - id: black
         name: black
-        language_version: python3.10 # to ensure compatibilty
+        #language_version: python3.10

.vscode/settings.json (new file, 3 lines)

@@ -0,0 +1,3 @@
{
"python.analysis.autoImportCompletions": true
}

DISCLAIMER.md (new file, 40 lines)

@@ -0,0 +1,40 @@
<h1 align="center">Disclaimer</h1>
<div align="center">
<h2>This project: fastanime</h2>
<br>
The core aim of this project is to co-relate automation and efficiency to extract what is provided to a user on the internet. All content available through the project is hosted by external non-affiliated sources.
<br>
<b>All content served through this project is publicly accessible. If your site is listed in this project, the code is pretty much public. Take necessary measures to counter the exploits used to extract content in your site.</b>
Think of this project as your normal browser, but a bit more straight-forward and specific. While an average browser makes hundreds of requests to get everything from a site, this project goes on to only make requests associated with getting the content served by the sites.
<b>
This project is to be used at the user's own risk, based on their government and laws.
This project has no control on the content it is serving, using copyrighted content from the providers is not going to be accounted for by the developer. It is the user's own risk.
</b>
<br>
<h2>DMCA and Copyright Infringements</h2>
<br>
<b>
A browser is a tool, and the maliciousness of the tool is directly based on the user.
</b>
This project uses client-side content access mechanisms. Hence, copyright infringement or DMCA claims regarding this project are to be forwarded to the associated site by the notifier of any such claims. This is one of the main reasons the sites are listed in this project.
<b>Do not harass the developer. Any personal information about the developer is intentionally not made public. Exploiting such information without consent in regards to this topic will lead to legal actions by the developer themselves.</b>

README.md (805 changes)

@@ -1,76 +1,48 @@
# **FastAnime**
<p align="center">
<h1 align="center">FastAnime</h1>
</p>
<p align="center">
<sup>
Browse anime from the terminal
</sup>
</p>
<div align="center">
![PyPI - Downloads](https://img.shields.io/pypi/dm/fastanime) ![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/FastAnime/FastAnime/test.yml?label=Tests)
![Discord](https://img.shields.io/discord/1250887070906323096?label=Discord)
![GitHub Issues or Pull Requests](https://img.shields.io/github/issues/FastAnime/FastAnime)
![GitHub deployments](https://img.shields.io/github/deployments/FastAnime/fastanime/pypi?label=PyPi%20Publish)
![PyPI - License](https://img.shields.io/pypi/l/fastanime)
![Static Badge](https://img.shields.io/badge/lines%20of%20code-13k%2B-green)
</div>
Welcome to **FastAnime**, anime site experience from the terminal.
<p align="center">
<a href="https://discord.gg/HBEmAwvbHV">
<img src="https://invidget.switchblade.xyz/C4rhMA4mmK">
</a>
</p>
![fastanime](https://github.com/user-attachments/assets/9ab09f26-e4a8-4b70-a315-7def998cec63)
<details>
<summary><b>fzf mode</b></summary>
[fastanime-fzf.webm](https://github.com/user-attachments/assets/90875a57-198b-4c78-98d5-10a459001edd)
<summary>
<b>How it looks</b>
</summary>
**Anilist results menu:**
![image](https://github.com/user-attachments/assets/240023a7-7e4e-47dd-80ff-017d65081ee1)
**Episodes menu preview:**
![image](https://github.com/user-attachments/assets/580f86ef-326f-4ab3-9bd8-c1cb312fbfa6)
**Without preview images enabled:**
![image](https://github.com/user-attachments/assets/e1248a85-438f-4758-ae34-b0e0b224addd)
**Desktop notifications + episodes menu without image preview:**
![image](https://github.com/user-attachments/assets/b7802ef1-ca0d-45f5-a13a-e39c96a5d499)
</details>
<details>
<summary><b>rofi mode</b></summary>
[fa_rofi_mode.webm](https://github.com/user-attachments/assets/2ce669bf-b62f-4c44-bd79-cf0dcaddf37a)
</details>
<details>
<summary><b>Default mode</b></summary>
[fa_default_mode.webm](https://github.com/user-attachments/assets/1ce3a23d-f4a0-4bc1-8518-426ec7b3b69e)
</details>
Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [jerry](https://github.com/justchokingaround/jerry/tree/main),[magic-tape](https://gitlab.com/christosangel/magic-tape/-/tree/main?ref_type=heads) and [ani-cli](https://github.com/pystardust/ani-cli).
<!--toc:start-->
- [**FastAnime**](#fastanime)
- [Installation](#installation)
- [Installation using your favourite package manager](#installation-using-your-favourite-package-manager)
- [Using uv](#using-uv)
- [Using pipx](#using-pipx)
- [Using pip](#using-pip)
- [Installing the bleeding edge version](#installing-the-bleeding-edge-version)
- [Building from the source](#building-from-the-source)
- [External Dependencies](#external-dependencies)
- [Usage](#usage)
- [The Commandline interface :fire:](#the-commandline-interface-fire)
- [The anilist command :fire: :fire: :fire:](#the-anilist-command-fire-fire-fire)
- [Running without any subcommand](#running-without-any-subcommand)
- [Subcommands](#subcommands)
- [download subcommand](#download-subcommand)
- [search subcommand](#search-subcommand)
- [grab subcommand](#grab-subcommand)
- [downloads subcommand](#downloads-subcommand)
- [config subcommand](#config-subcommand)
- [cache subcommand](#cache-subcommand)
- [update subcommand](#update-subcommand)
- [completions subcommand](#completions-subcommand)
- [fastanime serve](#fastanime-serve)
- [MPV specific commands](#mpv-specific-commands)
- [Key Bindings](#key-bindings)
- [Script Messages](#script-messages)
- [styling the default interface](#styling-the-default-interface)
- [Configuration](#configuration)
- [Contributing](#contributing)
- [Receiving Support](#receiving-support)
- [Supporting the Project](#supporting-the-project)
<!--toc:end-->
> [!IMPORTANT]
>
> This project currently scrapes allanime, hianime, animepahe, and nyaa. These sites are publicly available and can be accessed by anyone with a browser.
## Installation
@@ -84,6 +56,14 @@ The app can run wherever python can run. So all you need to have is python insta
On android you can use [termux](https://github.com/termux/termux-app).
If you have any difficulty consult for help on the [discord channel](https://discord.gg/HBEmAwvbHV)
### Installation on nixos
![Static Badge](https://img.shields.io/badge/NixOs-black?style=flat&logo=nixos)
```bash
nix profile install github:Benexl/fastanime
```
### Installation using your favourite package manager
Currently the app is only published on [pypi](https://pypi.org/project/fastanime/).
@@ -93,20 +73,20 @@ With the following extras available:
- api - which installs dependencies required to use `fastanime serve`
- mpv - which installs python mpv
- notifications - which installs plyer required for desktop notifications
#### Using uv
Recommended method of installation is using [uv](https://docs.astral.sh/uv/).
Recommended method of installation is using [uv](https://docs.astral.sh/uv/).
```bash
# generally:
uv tool install fastanime[standard]
uv tool install "fastanime[standard]"
# or stripped down installations:
uv tool install fastanime
uv tool install fastanime[api]
uv tool install fastanime[mpv]
uv tool install fastanime[notifications]
uv tool install "fastanime[api]"
uv tool install "fastanime[mpv]"
uv tool install "fastanime[notifications]"
```
@@ -159,7 +139,7 @@ Requirements:
To build from the source, follow these steps:
1. Clone the repository: `git clone https://github.com/FastAnime/FastAnime.git --depth 1`
1. Clone the repository: `git clone https://github.com/Benexl/FastAnime.git --depth 1`
2. Navigate into the folder: `cd FastAnime`
3. Then build and Install the app:
@@ -201,7 +181,7 @@ The only required external dependency, unless you won't be streaming, is [MPV](h
- [webtorrent-cli](https://github.com/webtorrent/webtorrent-cli) used when the provider is nyaa
- [ffmpeg](https://www.ffmpeg.org/) is required to be in your path environment variables to properly download [hls](https://www.cloudflare.com/en-gb/learning/video/what-is-http-live-streaming/) streams.
- [fzf](https://github.com/junegunn/fzf) 🔥 which is used as a better alternative to the ui.
- [rofi](https://github.com/davatorium/rofi) 🔥 which is used as another alternative ui + the the desktop entry ui
- [rofi](https://github.com/davatorium/rofi) 🔥 which is used as another alternative ui + the desktop entry ui
- [chafa](https://github.com/hpjansson/chafa) currently the best cross platform and cross terminal image viewer for the terminal.
- [icat](https://sw.kovidgoyal.net/kitty/kittens/icat/) an image viewer that only works in [kitty terminal](https://sw.kovidgoyal.net/kitty/), which is currently the best terminal in my opinion, and by far the best image renderer for the terminal thanks to kitty's terminal graphics protocol. Its terminal graphics is so op that you can [run a browser on it](https://github.com/chase/awrit?tab=readme-ov-file)!!
- [bash](https://www.gnu.org/software/bash/) is used as the preview script language.
@@ -210,6 +190,7 @@ The only required external dependency, unless you won't be streaming, is [MPV](h
- [syncplay](https://syncplay.pl/) to enable watch together.
- [feh](https://github.com/derf/feh) used in manga mode
## Usage
The project offers a featureful command-line interface and an MPV interface through the use of python-mpv.
@@ -300,7 +281,7 @@ fastanime --manga search -t <manga-title>
#### The anilist command :fire: :fire: :fire:
Stream, browse, and discover anime efficiently from the terminal using the [AniList API](https://github.com/AniList/ApiV2-GraphQL-Docs).
Uses the [AniList API](https://github.com/AniList/ApiV2-GraphQL-Docs) to create a terminal AniList client, which is then integrated with the scraping capabilities of the project.
##### Running without any subcommand
@@ -362,6 +343,33 @@ For more details visit the anilist docs or just get the completions which will i
Like seriously **[get the completions](https://github.com/FastAnime/FastAnime#completions-subcommand)** and the experience will be a 💯 💯 better.
**Fastanime anilist download:**
Supports all the options of the search subcommand, except it is used for downloading.
It also supports all the options of `fastanime download`.
Example:
```bash
# get anime with the tag of isekai
fastanime anilist download -T isekai
# get anime of 2024 and sort by popularity
# that has already finished airing or is releasing
# and is not in your anime lists
fastanime anilist download -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
# get anime of 2024 season WINTER
fastanime anilist download -y 2024 --season WINTER
# get anime genre action and tag isekai,magic
fastanime anilist download -g Action -T Isekai -T Magic
# get anime of 2024 thats finished airing
fastanime anilist download -y 2024 -S FINISHED
# get the most favourite anime movies
fastanime anilist download -f MOVIE -s FAVOURITES_DESC
```
The following are commands you can only run if you are signed in to your AniList account:
- `fastanime anilist watching`
@@ -600,7 +608,7 @@ fastanime config --view
> [!Note]
>
> If it opens [vim](https://www.vim.org/download.php) you can exit by typing `:q` .
> If it opens [vim](https://www.vim.org/download.php) you can exit by typing `:q` 😉.
#### cache subcommand
@@ -666,632 +674,6 @@ fastanime serve
fastanime serve --host <host> --port <port>
```
An example instance is hosted by [render](https://fastanime.onrender.com/)
Examples:
**search for anime by title:**
```bash
curl 'https://fastanime.onrender.com/search?title=dragon&translation_type=sub'
```
<details>
<summary>
Result
</summary>
```json
{
"pageInfo": {
"total": 22839
},
"results": [
{
"id": "ju2pgynxn9o9DZvse",
"title": "Dragon Ball Daima",
"type": "Show",
"availableEpisodes": {
"sub": 5,
"dub": 0,
"raw": 0
}
},
{
"id": "qpnhxfarTHfP7kjgR",
"title": "My WeChat connects to the Dragon Palace",
"type": "Show",
"availableEpisodes": {
"sub": 26,
"dub": 0,
"raw": 0
}
},
{
"id": "8aM5BBoEGLvjG3MZm",
"title": "Sayounara Ryuusei, Konnichiwa Jinsei",
"type": "Show",
"availableEpisodes": {
"sub": 6,
"dub": 0,
"raw": 0
}
},
{
"id": "Sg9Q9FyqBnJ9qtv5n",
"title": "Yarinaoshi Reijou wa Ryuutei Heika wo Kouryakuchuu",
"type": "Show",
"availableEpisodes": {
"sub": 5,
"dub": 0,
"raw": 0
}
},
{
"id": "gF2mKbWBatQudcF6A",
"title": "Throne of the Dragon King",
"type": "Show",
"availableEpisodes": {
"sub": 3,
"dub": 0,
"raw": 0
}
},
{
"id": "SXLNNoorPifT5ZStw",
"title": "Shi Cao Lao Long Bei Guan Yi E Long Zhi Ming Season 2",
"type": "Show",
"availableEpisodes": {
"sub": 7,
"dub": 0,
"raw": 0
}
},
{
"id": "v4ZkjtyftscNzYF2A",
"title": "I Have a Dragon in My Body Episode122-133",
"type": "Show",
"availableEpisodes": {
"sub": 77,
"dub": 0,
"raw": 0
}
},
{
"id": "9RSQCRJ3d554sBzoz",
"title": "City Immortal Emperor: Dragon King Temple",
"type": "Show",
"availableEpisodes": {
"sub": 20,
"dub": 0,
"raw": 0
}
},
{
"id": "t8C6zvsdJE5JJKDLE",
"title": "It Turns Out I Am the Peerless Dragon God",
"type": "Show",
"availableEpisodes": {
"sub": 2,
"dub": 0,
"raw": 0
}
},
{
"id": "xyDt3mJieZkD76P7S",
"title": "Urban Hidden Dragon",
"type": "Show",
"availableEpisodes": {
"sub": 13,
"dub": 0,
"raw": 0
}
},
{
"id": "8PoJiTEDAswkw8b3u",
"title": "The Collected Animations of ICAF (2001-2006)",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "KZeMmRSsyJgz37EmH",
"title": "Dragon Master",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "7a33i9m26poonyNLg",
"title": "I Have a Dragon in My Body",
"type": "Show",
"availableEpisodes": {
"sub": 79,
"dub": 0,
"raw": 0
}
},
{
"id": "uwwvBujGRsjCQ8kKM",
"title": "Cong Gu Huo Niao Kaishi: Long Cheng Fengyun",
"type": "Show",
"availableEpisodes": {
"sub": 16,
"dub": 0,
"raw": 0
}
},
{
"id": "RoexdZwHSTDwyzEzd",
"title": "Super Dragon Ball Heroes Meteor Mission",
"type": "Show",
"availableEpisodes": {
"sub": 6,
"dub": 0,
"raw": 0
}
},
{
"id": "gAcGCcMENjbWhBnR9",
"title": "Dungeon Meshi",
"type": "Show",
"availableEpisodes": {
"sub": 24,
"dub": 24,
"raw": 0
}
},
{
"id": "ZGh2QHiaCY5T5Mhi4",
"title": "Long Shidai",
"type": "Show",
"availableEpisodes": {
"sub": 9,
"dub": 0,
"raw": 1
}
},
{
"id": "gZSHt98fQpHRfJJXw",
"title": "Xanadu Dragonslayer Densetsu",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "wo8pX4Sba97mFCAkc",
"title": "Vanguard Dragon God",
"type": "Show",
"availableEpisodes": {
"sub": 86,
"dub": 0,
"raw": 0
}
},
{
"id": "rrbCftmca3Y2TEiBX",
"title": "Super Dragon Ball Heroes Ultra God Mission",
"type": "Show",
"availableEpisodes": {
"sub": 10,
"dub": 0,
"raw": 0
}
},
{
"id": "JzSeXC2WtBBhn3guN",
"title": "Dragon King's Son-In-Law",
"type": "Show",
"availableEpisodes": {
"sub": 11,
"dub": 0,
"raw": 0
}
},
{
"id": "eE3txJGGk9atw7k2v",
"title": "Majutsushi Orphen Hagure Tabi: Seiiki-hen",
"type": "Show",
"availableEpisodes": {
"sub": 12,
"dub": 0,
"raw": 0
}
},
{
"id": "4X2JbZgiQrb2PTzex",
"title": "Yowai 5000-nen no Soushoku Dragon, Iwarenaki Jaryuu Nintei (Japanese Dub)",
"type": "Show",
"availableEpisodes": {
"sub": 12,
"dub": 0,
"raw": 0
}
},
{
"id": "SHp5NFDakKjPT5nJE",
"title": "Starting from Gu Huoniao: Dragon City Hegemony",
"type": "Show",
"availableEpisodes": {
"sub": 22,
"dub": 0,
"raw": 0
}
},
{
"id": "8LgaCGrz7Gz35LRpk",
"title": "Yuan Zun",
"type": "Show",
"availableEpisodes": {
"sub": 5,
"dub": 0,
"raw": 0
}
},
{
"id": "4GKHyjFC7Dyc7fBpT",
"title": "Shen Ji Long Wei",
"type": "Show",
"availableEpisodes": {
"sub": 26,
"dub": 0,
"raw": 0
}
},
{
"id": "2PQiuXiuJoTQTdgy4",
"title": "Long Zu",
"type": "Show",
"availableEpisodes": {
"sub": 15,
"dub": 0,
"raw": 0
}
},
{
"id": "rE47AepmBFRvZ6cne",
"title": "Jidao Long Shen",
"type": "Show",
"availableEpisodes": {
"sub": 40,
"dub": 0,
"raw": 0
}
},
{
"id": "c4JcjPbRfiuoJPB4F",
"title": "Dragon Quest: Dai no Daibouken (2020)",
"type": "Show",
"availableEpisodes": {
"sub": 101,
"dub": 100,
"raw": 0
}
},
{
"id": "nGRTwG7kj5rCPiAX4",
"title": "Dragon Quest: Dai no Daibouken Tachiagare!! Aban no Shito",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "6LJBjT4RzJaucdmX3",
"title": "Dragon Slayer Eiyuu Densetsu: Ouji no Tabidachi",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 1,
"raw": 0
}
},
{
"id": "JKbtxdw2cRqqmZgnS",
"title": "Dragon Quest: Dai no Daibouken Buchiyabure!! Shinsei 6 Daishougun",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "pn32RijEHPfuTYt4h",
"title": "Dragon Quest Retsuden: Roto no Monshou",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "xHwk6oo7jaDrMG9to",
"title": "Dragon Fist",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "ugFXPFQW8kvLocZgx",
"title": "Yowai 5000-nen no Soushoku Dragon, Iwarenaki Jaryuu Nintei",
"type": "Show",
"availableEpisodes": {
"sub": 12,
"dub": 0,
"raw": 0
}
},
{
"id": "qSFMEcT4SufEhLZnq",
"title": "Doraemon Movie 8: Nobita to Ryuu no Kishi",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
},
{
"id": "LTzXFSmQR878MdJaS",
"title": "Dragon Ball Specials",
"type": "Show",
"availableEpisodes": {
"sub": 2,
"dub": 0,
"raw": 0
}
},
{
"id": "XuTNNzF7DfapLFMFJ",
"title": "Dragon Ball Super: Super Hero",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 1,
"raw": 0
}
},
{
"id": "n4S2spjyTHXHNAMDW",
"title": "Shin Ikkitousen",
"type": "Show",
"availableEpisodes": {
"sub": 3,
"dub": 3,
"raw": 0
}
},
{
"id": "srMRCkMEJA9Rmt7do",
"title": "Dragon Ball Z: Atsumare! Goku World",
"type": "Show",
"availableEpisodes": {
"sub": 1,
"dub": 0,
"raw": 0
}
}
]
}
```
</details>
**Get anime by id:**
```bash
curl 'https://fastanime.onrender.com/anime/8aM5BBoEGLvjG3MZm'
```
<details>
<summary>
Result
</summary>
```json
{
"id": "8aM5BBoEGLvjG3MZm",
"title": "Sayounara Ryuusei, Konnichiwa Jinsei",
"availableEpisodesDetail": {
"sub": ["6", "5", "4", "3", "2", "1"],
"dub": [],
"raw": []
},
"type": null
}
```
</details>
**Get episode streams by translation_type:**
```bash
curl 'https://fastanime.onrender.com/anime/8aM5BBoEGLvjG3MZm/watch?episode=3&translation_type=sub'
```
<details>
<summary>
Result
</summary>
```json
[
{
"server": "Yt",
"episode_title": "Sayounara Ryuusei, Konnichiwa Jinsei; Episode 3",
"headers": {
"Referer": "https://allanime.day/"
},
"subtitles": [],
"links": [
{
"link": "https://tools.fast4speed.rsvp//media9/videos/8aM5BBoEGLvjG3MZm/sub/3",
"quality": "1080"
}
]
},
{
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": "Sayounara Ryuusei, Konnichiwa Jinsei; Episode 3",
"links": [
{
"link": "https://myanime.sharepoint.com/sites/chartlousty/_layouts/15/download.aspx?share=ERpIT0CTmOVHmO8386bNGZMBf7Emtoda_3bUMzCleWhp4g",
"mp4": true,
"resolutionStr": "Mp4",
"src": "https://myanime.sharepoint.com/sites/chartlousty/_layouts/15/download.aspx?share=ERpIT0CTmOVHmO8386bNGZMBf7Emtoda_3bUMzCleWhp4g",
"quality": "1080"
}
]
},
{
"server": "gogoanime",
"headers": {},
"subtitles": [],
"episode_title": "Sayounara Ryuusei, Konnichiwa Jinsei; Episode 3",
"links": [
{
"link": "https://www114.anzeat.pro/streamhls/6454b50a557e9fa52a60cfdee0b0906e/ep.3.1729188150.m3u8",
"hls": true,
"mp4": false,
"resolutionStr": "hls P",
"priority": 3,
"quality": "1080"
},
{
"link": "https://www114.anicdnstream.info/videos/hls/h1IUtAefmoWTc8hJhtr8OQ/1731106912/235294/6454b50a557e9fa52a60cfdee0b0906e/ep.3.1729188150.m3u8",
"hls": true,
"mp4": false,
"resolutionStr": "HLS1",
"priority": 2,
"quality": "720"
},
{
"link": "https://workfields.maverickki.lol/7d2473746a243c246e727276753c29297171713737322867686f65626875727463676b286f68606929706f62636975296e6a75296e374f53724763606b695152653e6e4c6e72743e495729373135373736303f373429343533343f32293032333264333667333331633f6067333467303665606263633664363f3630632963762835283731343f373e3e373336286b35733e242a2476677475634e6a75243c727473632a2462677263243c373135373634363236363636367b",
"hls": true,
"resolutionStr": "Alt",
"src": "https://workfields.maverickki.lol/7d2473746a243c246e727276753c29297171713737322867686f65626875727463676b286f68606929706f62636975296e6a75296e374f53724763606b695152653e6e4c6e72743e495729373135373736303f373429343533343f32293032333264333667333331633f6067333467303665606263633664363f3630632963762835283731343f373e3e373336286b35733e242a2476677475634e6a75243c727473632a2462677263243c373135373634363236363636367b",
"priority": 1,
"quality": "480"
}
]
}
]
```
</details>
**Get Episode Streams by AniList Id:**
```bash
curl 'https://fastanime.onrender.com/watch/269?episode=1&translation_type=dub'
```
<details>
<summary>
Results
</summary>
```json
[
{
"server": "gogoanime",
"headers": {},
"subtitles": [],
"episode_title": "Bleach; Episode 1",
"links": [
{
"link": "",
"hls": true,
"mp4": false,
"resolutionStr": "hls P",
"priority": 3,
"quality": "1080"
},
{
"link": "",
"hls": true,
"mp4": false,
"resolutionStr": "HLS1",
"priority": 2,
"quality": "720"
},
{
"link": "",
"hls": true,
"resolutionStr": "Alt",
"src": "",
"priority": 1,
"quality": "480"
}
]
},
{
"server": "Yt",
"episode_title": "Bleach; Episode 1",
"headers": {
"Referer": "https://allanime.day/"
},
"subtitles": [],
"links": [
{
"link": "",
"quality": "1080"
}
]
},
{
"server": "wixmp",
"headers": {},
"subtitles": [],
"episode_title": "Bleach; Episode 1",
"links": [
{
"link": "",
"hls": true,
"resolutionStr": "Hls",
"quality": "1080"
}
]
},
{
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": "Bleach; Episode 1",
"links": [
{
"link": "",
"mp4": true,
"resolutionStr": "Mp4",
"src": "",
"quality": "1080"
}
]
}
]
```
</details>
### MPV specific commands
The project now allows on-the-fly media controls directly from mpv. This means you can go to the next or previous episode without the window ever closing, offering a seamless experience.
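For a sense of how this kind of control works, here is a minimal, hypothetical sketch using python-mpv (this is not FastAnime's actual mpv plugin; the key binding, URLs, and episode-selection logic are placeholders), showing how a controller can swap the playing stream inside the same mpv window:

```python
import mpv  # python-mpv

# Hypothetical illustration only; FastAnime's real mpv plugin defines its own
# key bindings and episode resolution logic.
player = mpv.MPV(input_default_bindings=True, input_vo_keyboard=True, osc=True)

episodes = ["https://example.com/ep1.m3u8", "https://example.com/ep2.m3u8"]  # placeholders
current = 0

@player.on_key_press("n")
def next_episode():
    """Load the next episode into the same mpv window instead of closing it."""
    global current
    if current + 1 < len(episodes):
        current += 1
        player.loadfile(episodes[current])

player.play(episodes[current])
player.wait_for_playback()
```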
@@ -1412,24 +794,25 @@ format = best[height<=1080]/bestvideo[height<=1080]+bestaudio/best
player = mpv
```
## Contributing
We welcome your issues and feature requests. However, due to time constraints, we currently do not plan to add another provider.
PRs are highly welcome.
If you wish to contribute directly, please first open an issue describing your proposed changes so they can be discussed, or, if you are in a rush to get the feature merged, just open a PR.
If you find an anime title that does not correspond with a provider or is just weird just [edit the data file](https://github.com/FastAnime/FastAnime/blob/master/fastanime/Utility/data.py) and open a pr or if you don't want to do that open an issue.
## Receiving Support
For inquiries, join our [Discord Server](https://discord.gg/HBEmAwvbHV).
<p align="center">
<a href="https://discord.gg/HBEmAwvbHV">
<img src="https://invidget.switchblade.xyz/C4rhMA4mmK">
</a>
</p>
If you find an anime title that does not correspond with a provider or is just weird just [edit the data file](https://github.com/Benexl/FastAnime/blob/master/fastanime/Utility/data.py) and open a pr, issues will be ignored 😝.
## Supporting the Project
Show your support by starring our GitHub repository or [buying us a coffee](https://ko-fi.com/benex254).
More PRs, fewer issues 🙃
Show your support by starring the GitHub repository.
[![ko-fi](https://ko-fi.com/img/githubbutton_sm.svg)](https://ko-fi.com/Y8Y8ZAA7N)
## Disclaimer
> [!IMPORTANT]
>
> This project currently scrapes allanime, hianime, nyaa, yugen and animepahe.
> The developer(s) of this application do not have any affiliation with the content providers available, and this application hosts zero content.
> [DISCLAIMER](https://github.com/Benexl/FastAnime/blob/master/DISCLAIMER.md)

fastanime/AnimeProvider.py

@@ -15,7 +15,7 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
-# TODO: improve performance of this class and add cool features like auto retry
+# TODO: add cool features like auto retry
 class AnimeProvider:
     """Class that manages all anime sources adding some extra functionality to them.
     Attributes:
@@ -62,11 +62,7 @@ class AnimeProvider:
         )
     def search_for_anime(
-        self,
-        user_query,
-        translation_type,
-        nsfw=True,
-        unknown=True,
+        self, search_keywords, translation_type, **kwargs
     ) -> "SearchResults | None":
         """core abstraction over all providers search functionality
@@ -82,7 +78,7 @@ class AnimeProvider:
         """
         anime_provider = self.anime_provider
         results = anime_provider.search_for_anime(
-            user_query, translation_type, nsfw, unknown
+            search_keywords, translation_type, **kwargs
         )
         return results
@@ -90,6 +86,7 @@ class AnimeProvider:
     def get_anime(
         self,
         anime_id: str,
+        **kwargs,
     ) -> "Anime | None":
         """core abstraction over getting info of an anime from all providers
@@ -101,7 +98,7 @@ class AnimeProvider:
         [TODO:return]
         """
         anime_provider = self.anime_provider
-        results = anime_provider.get_anime(anime_id)
+        results = anime_provider.get_anime(anime_id, **kwargs)
         return results
@@ -110,6 +107,7 @@ class AnimeProvider:
         anime_id,
         episode: str,
         translation_type: str,
+        **kwargs,
     ) -> "Iterator[Server] | None":
         """core abstractions for getting juicy streams from all providers
@@ -124,6 +122,6 @@ class AnimeProvider:
         """
         anime_provider = self.anime_provider
         results = anime_provider.get_episode_streams(
-            anime_id, episode, translation_type
+            anime_id, episode, translation_type, **kwargs
        )
        return results
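To illustrate the refactor above: callers now pass provider-specific options through `**kwargs` instead of the old fixed `nsfw`/`unknown` parameters. A hedged call-site sketch (the constructor argument and the result structure are assumptions, not taken from this diff):

```python
# Hypothetical call-site sketch for the refactored AnimeProvider API.
# The constructor argument and the shape of the returned results are assumptions;
# nsfw/unknown are simply carried over from the old fixed signature shown above.
from fastanime.AnimeProvider import AnimeProvider

provider = AnimeProvider("allanime")  # assumed constructor usage

results = provider.search_for_anime("dragon ball", "sub", nsfw=True, unknown=True)
if results:
    anime_id = results["results"][0]["id"]  # assumed result shape
    anime = provider.get_anime(anime_id)
    streams = provider.get_episode_streams(anime_id, "1", "sub")
```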

fastanime/Utility/data.py

@@ -12,7 +12,11 @@ anime_normalizer_raw = {
         "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
     },
     "hianime": {"My Star": "Oshi no Ko"},
-    "animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
+    "animepahe": {
+        "Azumanga Daiou The Animation": "Azumanga Daioh",
+        "Mairimashita! Iruma-kun 2nd Season": "Mairimashita! Iruma-kun 2",
+        "Mairimashita! Iruma-kun 3rd Season": "Mairimashita! Iruma-kun 3",
+    },
     "nyaa": {},
     "yugen": {},
 }
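The `anime_normalizer_raw` mapping above maps provider-specific titles back to canonical ones. A minimal sketch of how such a lookup could be applied (the `normalize_title` helper is hypothetical, not the project's actual function):

```python
# Hypothetical helper showing how the per-provider normalizer mapping could be used;
# the real project may expose a different function for this.
anime_normalizer_raw = {
    "animepahe": {
        "Azumanga Daiou The Animation": "Azumanga Daioh",
        "Mairimashita! Iruma-kun 2nd Season": "Mairimashita! Iruma-kun 2",
        "Mairimashita! Iruma-kun 3rd Season": "Mairimashita! Iruma-kun 3",
    },
}

def normalize_title(provider: str, title: str) -> str:
    """Return the canonical title for a provider-specific one, or the title unchanged."""
    return anime_normalizer_raw.get(provider, {}).get(title, title)

print(normalize_title("animepahe", "Mairimashita! Iruma-kun 2nd Season"))
# -> Mairimashita! Iruma-kun 2
```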


@@ -43,6 +43,8 @@ class YtDLPDownloader:
         merge=False,
         clean=False,
         prompt=True,
+        force_ffmpeg=False,
+        hls_use_mpegts=False,
     ):
         """Helper function that downloads anime given url and path details
@@ -91,7 +93,21 @@ class YtDLPDownloader:
         vid_path = ""
         sub_path = ""
         for i, url in enumerate(urls):
-            with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+            options = ydl_opts
+            if i == 0:
+                if force_ffmpeg:
+                    options = options | {
+                        "external_downloader": {"default": "ffmpeg"},
+                        "external_downloader_args": {
+                            "ffmpeg_i1": ["-v", "error", "-stats"],
+                        },
+                    }
+                if hls_use_mpegts:
+                    options = options | {
+                        "hls_use_mpegts": hls_use_mpegts,
+                    }
+            with yt_dlp.YoutubeDL(options) as ydl:
                 info = ydl.extract_info(url, download=True)
                 if not info:
                     continue
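For context, the two new parameters map onto standard yt-dlp options; a standalone sketch of the same toggles outside FastAnime's downloader class (the URL and output template are placeholders):

```python
import yt_dlp

# Standalone sketch of what force_ffmpeg and hls_use_mpegts toggle; the URL and
# output template below are placeholders, not values used by FastAnime.
url = "https://example.com/stream/playlist.m3u8"
ydl_opts = {"outtmpl": "episode.%(ext)s"}

force_ffmpeg = True
hls_use_mpegts = True

if force_ffmpeg:
    # Hand the actual download to ffmpeg instead of yt-dlp's native HLS downloader.
    ydl_opts |= {
        "external_downloader": {"default": "ffmpeg"},
        "external_downloader_args": {"ffmpeg_i1": ["-v", "error", "-stats"]},
    }

if hls_use_mpegts:
    # Write HLS output into an MPEG-TS container, which is more tolerant of
    # interrupted downloads than MP4.
    ydl_opts |= {"hls_use_mpegts": True}

with yt_dlp.YoutubeDL(ydl_opts) as ydl:
    ydl.download([url])
```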

fastanime/constants.py

@@ -6,10 +6,10 @@ if sys.version_info < (3, 10):
     )  # noqa: F541
-__version__ = "v2.7.4"
+__version__ = "v2.8.7"
 APP_NAME = "FastAnime"
-AUTHOR = "Benex254"
+AUTHOR = "Benexl"
 GIT_REPO = "github.com"
 REPO = f"{GIT_REPO}/{AUTHOR}/{APP_NAME}"


@@ -0,0 +1,80 @@
configuration {
font: "Sans 12";
line-margin: 10;
display-drun: "";
}
* {
background: #000000; /* Black background for everything */
background-alt: #000000; /* Ensures no alternation */
foreground: #CCCCCC;
selected: #3584E4;
active: #2E7D32;
urgent: #C62828;
}
window {
fullscreen: false;
background-color: rgba(0, 0, 0, 0.8); /* Solid black transparent background */
border-radius: 50px;
}
mainbox {
padding: 50px 50px;
background-color: transparent; /* Ensures black background fills entire main area */
children: [inputbar, listview];
spacing: 20px;
}
inputbar {
background-color: #333333; /* Dark gray background for input bar */
padding: 8px;
border-radius: 8px;
children: [prompt, entry];
}
prompt {
enabled: true;
padding: 8px;
background-color: @selected;
text-color: #000000;
border-radius: 4px;
}
entry {
padding: 8px;
background-color: transparent; /* Slightly lighter gray for visibility */
text-color: #FFFFFF; /* White text to make typing visible */
placeholder: "Search...";
placeholder-color: rgba(255, 255, 255, 0.5);
border-radius: 6px;
}
listview {
layout: vertical;
spacing: 8px;
lines: 9;
background-color: transparent; /* Consistent black background for list items */
}
element {
padding: 12px;
border-radius: 4px;
background-color: transparent; /* Uniform color for each list item */
text-color: @foreground;
}
element normal.normal {
background-color: transparent; /* Ensures no alternating color */
}
element selected.normal {
background-color: @selected;
text-color: #FFFFFF;
}
element-text {
background-color: transparent;
text-color: inherit;
vertical-align: 0.5;
}


@@ -0,0 +1,55 @@
configuration {
font: "Sans 12";
}
* {
background-color: rgba(0, 0, 0, 0.7);
text-color: #FFFFFF;
}
window {
fullscreen: true;
transparency: "real";
background-color: transparent;
}
mainbox {
children: [ message, listview, inputbar ];
padding: 40% 30%;
background-color: transparent;
}
message {
border: 0;
padding: 10px;
border-radius:20px;
margin: 0 0 20px 0;
font: "Sans Bold 24"; /* Increased font size and made it bold */
}
inputbar {
children: [ prompt, entry ];
background-color: rgba(255, 255, 255, 0.1);
padding: 8px;
border-radius: 4px;
}
prompt {
padding: 8px;
}
entry {
padding: 8px;
background-color: transparent;
}
listview {
lines: 0;
}
/* Style for the message text specifically */
textbox {
horizontal-align: 0.5; /* Center the text */
font: "Sans Bold 24"; /* Match message font */
background-color: transparent;
}


@@ -0,0 +1,55 @@
configuration {
font: "Sans 12";
}
* {
background-color: rgba(0, 0, 0, 0.7);
text-color: #FFFFFF;
}
window {
fullscreen: true;
transparency: "real";
background-color: transparent;
}
mainbox {
children: [ message, listview, inputbar ];
padding: 40% 30%;
background-color: transparent;
}
message {
border: 0;
padding: 10px;
border-radius:20px;
margin: 0 0 20px 0;
font: "Sans Bold 24"; /* Increased font size and made it bold */
}
inputbar {
children: [ prompt, entry ];
background-color: rgba(255, 255, 255, 0.1);
padding: 8px;
border-radius: 4px;
}
prompt {
padding: 8px;
}
entry {
padding: 8px;
background-color: transparent;
}
listview {
lines: 0;
}
/* Style for the message text specifically */
textbox {
horizontal-align: 0.5; /* Center the text */
font: "Sans Bold 24"; /* Match message font */
background-color: transparent;
}


@@ -0,0 +1,120 @@
// Colours
* {
background-color: transparent; /* Transparent background for the global UI */
background: #000000; /* Solid black background */
background-transparent: #1D2330A0; /* Semi-transparent background */
text-color: #BBBBBB; /* Default text color (light gray) */
text-color-selected: #FFFFFF; /* Text color when selected (white) */
primary: rgba(53, 132, 228, 0.75); /* Bluish primary color */
important: rgba(53, 132, 228, 0.75); /* Bluish primary color */
}
configuration {
font: "Roboto 14"; /* Sets the global font to Roboto, size 14 */
show-icons: true; /* Option to display icons in the UI */
}
window {
fullscreen: true; /* The window will open in fullscreen */
height: 100%; /* Full window height */
width: 100%; /* Full window width */
transparency: "real"; /* Real transparency effect */
background-color: @background-transparent; /* Transparent background */
border: 0px; /* No border around the window */
border-color: @primary; /* Border color set to the primary color */
}
mainbox {
children: [prompt, inputbar-box, listview]; /* Main box contains prompt, input bar, and list view */
padding: 0px; /* No padding around the main box */
}
prompt {
width: 100%; /* Prompt takes full width */
margin: 10px 0px 0px 30px; /* Margin around the prompt */
text-color: @important; /* Text color for prompt (important color) */
font: "Roboto Bold 27"; /* Bold Roboto font, size 27 */
}
listview {
layout: vertical; /* Vertical layout for list items */
padding: 10px; /* Padding inside the list view */
spacing: 20px; /* Space between items in the list */
columns: 8; /* Maximum 8 items per row */
dynamic: true; /* Allows the list to dynamically adjust */
orientation: horizontal; /* Horizontal orientation for list items */
}
inputbar-box {
children: [dummy, inputbar, dummy]; /* Input bar is centered with dummy placeholders */
orientation: horizontal; /* Horizontal layout for input bar */
expand: false; /* Does not expand to fill the space */
}
inputbar {
children: [textbox-prompt, entry]; /* Contains a prompt and an entry field */
margin: 0px; /* No margin around the input bar */
background-color: @primary; /* Background color set to the primary color */
border: 4px; /* Border thickness around the input bar */
border-color: @primary; /* Border color matches the primary color */
border-radius: 8px; /* Rounded corners for the input bar */
}
textbox-prompt {
text-color: @background; /* Text color inside prompt matches the background color */
horizontal-align: 0.5; /* Horizontally centered */
vertical-align: 0.5; /* Vertically centered */
expand: false; /* Does not expand to fill available space */
}
entry {
expand: false; /* Entry field does not expand */
padding: 8px; /* Padding inside the entry field */
margin: -6px; /* Negative margin to position entry properly */
horizontal-align: 0; /* Left-aligned text inside the entry field */
width: 300; /* Fixed width for the entry field */
background-color: @background; /* Entry background color matches the global background */
border: 6px; /* Border thickness around the entry field */
border-color: @primary; /* Border color matches the primary color */
border-radius: 8px; /* Rounded corners for the entry field */
cursor: text; /* Cursor changes to text input cursor inside the entry field */
}
element {
children: [dummy, element-box, dummy]; /* Contains an element box with dummy placeholders */
padding: 5px; /* Padding around the element */
orientation: vertical; /* Vertical layout for element content */
border: 0px; /* No border around the element */
border-radius: 16px; /* Rounded corners for the element */
background-color: transparent; /* Transparent background */
width: 100px; /* Width of each element */
height: 50px; /* Height of each element */
}
element selected {
background-color: @primary; /* Background color of the element when selected */
}
element-box {
children: [element-icon, element-text]; /* Element box contains an icon and text */
orientation: vertical; /* Vertical layout for icon and text */
expand: false; /* Does not expand to fill available space */
cursor: pointer; /* Cursor changes to a pointer when hovering over the element */
}
element-icon {
padding: 10px; /* Padding inside the icon */
cursor: inherit; /* Inherits cursor style from the parent */
size: 33%; /* Icon size is set to 33% of the parent element */
margin: 10px; /* Margin around the icon */
}
element-text {
horizontal-align: 0.5; /* Horizontally center-aligns the text */
cursor: inherit; /* Inherits cursor style from the parent */
text-color: @text-color; /* Text color for element text */
}
element-text selected {
text-color: @text-color-selected; /* Text color when the element is selected */
}


@@ -184,6 +184,7 @@ signal.signal(signal.SIGINT, handle_exit)
 @click.option(
     "--fresh-requests", is_flag=True, help="Force the requests cache to be updated"
 )
+@click.option("--no-config", is_flag=True, help="Don't load the user config")
 @click.pass_context
 def run_cli(
     ctx: click.Context,
@@ -220,12 +221,65 @@ def run_cli(
     sync_play,
     player,
     fresh_requests,
+    no_config,
 ):
     import os
     import sys
     from .config import Config
-    ctx.obj = Config()
+    ctx.obj = Config(no_config)
+    if (
+        ctx.obj.check_for_updates
+        and ctx.invoked_subcommand != "completions"
+        and "notifier" not in sys.argv
+    ):
+        import time
+        last_update = ctx.obj.user_data["meta"]["last_updated"]
+        now = time.time()
+        # checks after every 12 hours
+        if (now - last_update) > 43200:
+            ctx.obj.user_data["meta"]["last_updated"] = now
+            ctx.obj._update_user_data()
+            from .app_updater import check_for_updates
+            print("Checking for updates...", file=sys.stderr)
+            print("So you can enjoy the latest features and bug fixes", file=sys.stderr)
+            print(
+                "You can disable this by setting check_for_updates to False in the config",
+                file=sys.stderr,
+            )
+            is_latest, github_release_data = check_for_updates()
+            if not is_latest:
+                from rich.console import Console
+                from rich.markdown import Markdown
+                from rich.prompt import Confirm
+                from .app_updater import update_app
+                def _print_release(release_data):
+                    console = Console()
+                    body = Markdown(release_data["body"])
+                    tag = github_release_data["tag_name"]
+                    tag_title = release_data["name"]
+                    github_page_url = release_data["html_url"]
+                    console.print(f"Release Page: {github_page_url}")
+                    console.print(f"Tag: {tag}")
+                    console.print(f"Title: {tag_title}")
+                    console.print(body)
+                if Confirm.ask(
+                    "A new version of fastanime is available, would you like to update?"
+                ):
+                    _, release_json = update_app()
+                    print("Successfully updated")
+                    _print_release(release_json)
+                    exit(0)
+            else:
+                print("You are using the latest version of fastanime", file=sys.stderr)
     ctx.obj.manga = manga
     if log:
         import logging

fastanime/cli/app_updater.py

@@ -1,3 +1,4 @@
+import os
 import pathlib
 import re
 import shlex
@@ -15,14 +16,18 @@ API_URL = f"https://api.{GIT_REPO}/repos/{AUTHOR}/{APP_NAME}/releases/latest"
 def check_for_updates():
     USER_AGENT = f"{APP_NAME} user"
-    request = requests.get(
-        API_URL,
-        headers={
-            "User-Agent": USER_AGENT,
-            "X-GitHub-Api-Version": "2022-11-28",
-            "Accept": "application/vnd.github+json",
-        },
-    )
+    try:
+        request = requests.get(
+            API_URL,
+            headers={
+                "User-Agent": USER_AGENT,
+                "X-GitHub-Api-Version": "2022-11-28",
+                "Accept": "application/vnd.github+json",
+            },
+        )
+    except Exception:
+        print("You are not connected to the internet")
+        return True, {}
     if request.status_code == 200:
         release_json = request.json()
@@ -45,8 +50,9 @@ def check_for_updates():
         return (is_latest, release_json)
     else:
         print("Failed to check for updates")
         print(request.text)
-        return (False, {})
+        return (True, {})
 def is_git_repo(author, repository):
@@ -83,7 +89,14 @@ def update_app(force=False):
     tag_name = release_json["tag_name"]
     print("[cyan]Updating app to version %s[/]" % tag_name)
-    if is_git_repo(AUTHOR, APP_NAME):
+    if os.path.exists("/nix/store") and os.path.exists("/run/current-system"):
+        NIX = shutil.which("nix")
+        if not NIX:
+            print("[red]Cannot find nix, it looks like your system is broken.[/]")
+            return False, release_json
+        process = subprocess.run([NIX, "profile", "upgrade", APP_NAME.lower()])
+    elif is_git_repo(AUTHOR, APP_NAME):
         GIT_EXECUTABLE = shutil.which("git")
         args = [
             GIT_EXECUTABLE,
@@ -115,9 +128,13 @@ def update_app(force=False):
             "install",
             APP_NAME,
             "-U",
-            "--user",
             "--no-warn-script-location",
         ]
+        if sys.prefix == sys.base_prefix:
+            # ensure NOT in a venv, where --user flag can cause an error.
+            # TODO: Get value of 'include-system-site-packages' in pyenv.cfg.
+            args.append('--user')
         process = subprocess.run(args)
         if process.returncode == 0:
             return True, release_json
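The `sys.prefix == sys.base_prefix` comparison above is the standard way to detect that the interpreter is not running inside a virtual environment; a small self-contained sketch of the same guard:

```python
import sys

def in_virtualenv() -> bool:
    """True when running inside a venv, where pip's --user flag would fail."""
    # Inside a venv, sys.prefix points at the venv directory while sys.base_prefix
    # points at the interpreter the venv was created from; outside a venv they match.
    return sys.prefix != sys.base_prefix

pip_args = ["pip", "install", "FastAnime", "-U", "--no-warn-script-location"]
if not in_virtualenv():
    # Only request a per-user install when not inside a virtual environment.
    pip_args.append("--user")
print(pip_args)
```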


@@ -21,6 +21,8 @@ commands = {
     "planning": "planning.planning",
     "notifier": "notifier.notifier",
     "stats": "stats.stats",
+    "download": "download.download",
+    "downloads": "downloads.downloads",
 }
@@ -78,15 +80,13 @@ commands = {
     fastanime --log-file anilist notifier
     """,
 )
+@click.option("--resume", is_flag=True, help="Resume from the last session")
 @click.pass_context
-def anilist(ctx: click.Context):
+def anilist(ctx: click.Context, resume: bool):
     from typing import TYPE_CHECKING
     from ....anilist import AniList
     from ....AnimeProvider import AnimeProvider
-    from ...interfaces.anilist_interfaces import (
-        fastanime_main_menu as anilist_interface,
-    )
     if TYPE_CHECKING:
         from ...config import Config
@@ -96,4 +96,33 @@ def anilist(ctx: click.Context):
         AniList.update_login_info(user, user["token"])
     if ctx.invoked_subcommand is None:
         fastanime_runtime_state = FastAnimeRuntimeState()
-        anilist_interface(ctx.obj, fastanime_runtime_state)
+        if resume:
+            from ...interfaces.anilist_interfaces import (
+                anime_provider_search_results_menu,
+            )
+            if not config.user_data["recent_anime"]:
+                click.echo("No recent anime found", err=True, color=True)
+                return
+            fastanime_runtime_state.anilist_results_data = {
+                "data": {"Page": {"media": config.user_data["recent_anime"]}}
+            }
+            fastanime_runtime_state.selected_anime_anilist = config.user_data[
+                "recent_anime"
+            ][0]
+            fastanime_runtime_state.selected_anime_id_anilist = config.user_data[
+                "recent_anime"
+            ][0]["id"]
+            fastanime_runtime_state.selected_anime_title_anilist = (
+                config.user_data["recent_anime"][0]["title"]["romaji"]
+                or config.user_data["recent_anime"][0]["title"]["english"]
+            )
+            anime_provider_search_results_menu(config, fastanime_runtime_state)
+        else:
+            from ...interfaces.anilist_interfaces import (
+                fastanime_main_menu as anilist_interface,
+            )
+            anilist_interface(ctx.obj, fastanime_runtime_state)


@@ -42,5 +42,12 @@ def completed(config: "Config", dump_json):
from ...interfaces import anilist_interfaces
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda config, **kwargs: anilist_interfaces._handle_animelist(
config, fastanime_runtime_state, "Completed", **kwargs
)
)
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
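
Each of these list commands now stores a current_data_loader on the runtime state alongside current_page; presumably the results menu calls it again to fetch the next page. The exact signature varies between commands (some loaders take the config, the search one takes page directly), so the sketch below is only an assumption about how a pager might re-invoke it:

def load_next_page(config, state):
    # Hypothetical pager: advance the page counter and ask the stored loader
    # for fresh AniList results. The keyword it expects (page=...) is assumed.
    state.current_page += 1
    result = state.current_data_loader(config, page=state.current_page)
    # AniList helpers in this codebase tend to return (success, data) tuples
    state.anilist_results_data = result[1] if isinstance(result, tuple) else result
    return state.anilist_results_data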

View File

@@ -0,0 +1,476 @@
sorts_available = [
"ID",
"ID_DESC",
"TITLE_ROMAJI",
"TITLE_ROMAJI_DESC",
"TITLE_ENGLISH",
"TITLE_ENGLISH_DESC",
"TITLE_NATIVE",
"TITLE_NATIVE_DESC",
"TYPE",
"TYPE_DESC",
"FORMAT",
"FORMAT_DESC",
"START_DATE",
"START_DATE_DESC",
"END_DATE",
"END_DATE_DESC",
"SCORE",
"SCORE_DESC",
"POPULARITY",
"POPULARITY_DESC",
"TRENDING",
"TRENDING_DESC",
"EPISODES",
"EPISODES_DESC",
"DURATION",
"DURATION_DESC",
"STATUS",
"STATUS_DESC",
"CHAPTERS",
"CHAPTERS_DESC",
"VOLUMES",
"VOLUMES_DESC",
"UPDATED_AT",
"UPDATED_AT_DESC",
"SEARCH_MATCH",
"FAVOURITES",
"FAVOURITES_DESC",
]
media_statuses_available = [
"FINISHED",
"RELEASING",
"NOT_YET_RELEASED",
"CANCELLED",
"HIATUS",
]
seasons_available = ["WINTER", "SPRING", "SUMMER", "FALL"]
genres_available = [
"Action",
"Adventure",
"Comedy",
"Drama",
"Ecchi",
"Fantasy",
"Horror",
"Mahou Shoujo",
"Mecha",
"Music",
"Mystery",
"Psychological",
"Romance",
"Sci-Fi",
"Slice of Life",
"Sports",
"Supernatural",
"Thriller",
"Hentai",
]
media_formats_available = [
"TV",
"TV_SHORT",
"MOVIE",
"SPECIAL",
"OVA",
"MUSIC",
"NOVEL",
"ONE_SHOT",
]
years_available = [
"1900",
"1910",
"1920",
"1930",
"1940",
"1950",
"1960",
"1970",
"1980",
"1990",
"2000",
"2004",
"2005",
"2006",
"2007",
"2008",
"2009",
"2010",
"2011",
"2012",
"2013",
"2014",
"2015",
"2016",
"2017",
"2018",
"2019",
"2020",
"2021",
"2022",
"2023",
"2024",
]
tags_available = {
"Cast": ["Polyamorous"],
"Cast Main Cast": [
"Anti-Hero",
"Elderly Protagonist",
"Ensemble Cast",
"Estranged Family",
"Female Protagonist",
"Male Protagonist",
"Primarily Adult Cast",
"Primarily Animal Cast",
"Primarily Child Cast",
"Primarily Female Cast",
"Primarily Male Cast",
"Primarily Teen Cast",
],
"Cast Traits": [
"Age Regression",
"Agender",
"Aliens",
"Amnesia",
"Angels",
"Anthropomorphism",
"Aromantic",
"Arranged Marriage",
"Artificial Intelligence",
"Asexual",
"Butler",
"Centaur",
"Chimera",
"Chuunibyou",
"Clone",
"Cosplay",
"Cowboys",
"Crossdressing",
"Cyborg",
"Delinquents",
"Demons",
"Detective",
"Dinosaurs",
"Disability",
"Dissociative Identities",
"Dragons",
"Dullahan",
"Elf",
"Fairy",
"Femboy",
"Ghost",
"Goblin",
"Gods",
"Gyaru",
"Hikikomori",
"Homeless",
"Idol",
"Kemonomimi",
"Kuudere",
"Maids",
"Mermaid",
"Monster Boy",
"Monster Girl",
"Nekomimi",
"Ninja",
"Nudity",
"Nun",
"Office Lady",
"Oiran",
"Ojou-sama",
"Orphan",
"Pirates",
"Robots",
"Samurai",
"Shrine Maiden",
"Skeleton",
"Succubus",
"Tanned Skin",
"Teacher",
"Tomboy",
"Transgender",
"Tsundere",
"Twins",
"Vampire",
"Veterinarian",
"Vikings",
"Villainess",
"VTuber",
"Werewolf",
"Witch",
"Yandere",
"Zombie",
],
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
"Setting": ["Matriarchy"],
"Setting Scene": [
"Bar",
"Boarding School",
"Circus",
"Coastal",
"College",
"Desert",
"Dungeon",
"Foreign",
"Inn",
"Konbini",
"Natural Disaster",
"Office",
"Outdoor",
"Prison",
"Restaurant",
"Rural",
"School",
"School Club",
"Snowscape",
"Urban",
"Work",
],
"Setting Time": [
"Achronological Order",
"Anachronism",
"Ancient China",
"Dystopian",
"Historical",
"Time Skip",
],
"Setting Universe": [
"Afterlife",
"Alternate Universe",
"Augmented Reality",
"Omegaverse",
"Post-Apocalyptic",
"Space",
"Urban Fantasy",
"Virtual World",
],
"Technical": [
"4-koma",
"Achromatic",
"Advertisement",
"Anthology",
"CGI",
"Episodic",
"Flash",
"Full CGI",
"Full Color",
"No Dialogue",
"Non-fiction",
"POV",
"Puppetry",
"Rotoscoping",
"Stop Motion",
],
"Theme Action": [
"Archery",
"Battle Royale",
"Espionage",
"Fugitive",
"Guns",
"Martial Arts",
"Spearplay",
"Swordplay",
],
"Theme Arts": [
"Acting",
"Calligraphy",
"Classic Literature",
"Drawing",
"Fashion",
"Food",
"Makeup",
"Photography",
"Rakugo",
"Writing",
],
"Theme Arts-Music": [
"Band",
"Classical Music",
"Dancing",
"Hip-hop Music",
"Jazz Music",
"Metal Music",
"Musical Theater",
"Rock Music",
],
"Theme Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
"Theme Drama": [
"Bullying",
"Class Struggle",
"Coming of Age",
"Conspiracy",
"Eco-Horror",
"Fake Relationship",
"Kingdom Management",
"Rehabilitation",
"Revenge",
"Suicide",
"Tragedy",
],
"Theme Fantasy": [
"Alchemy",
"Body Swapping",
"Cultivation",
"Fairy Tale",
"Henshin",
"Isekai",
"Kaiju",
"Magic",
"Mythology",
"Necromancy",
"Shapeshifting",
"Steampunk",
"Super Power",
"Superhero",
"Wuxia",
"Youkai",
],
"Theme Game": ["Board Game", "E-Sports", "Video Games"],
"Theme Game-Card & Board Game": [
"Card Battle",
"Go",
"Karuta",
"Mahjong",
"Poker",
"Shogi",
],
"Theme Game-Sport": [
"Acrobatics",
"Airsoft",
"American Football",
"Athletics",
"Badminton",
"Baseball",
"Basketball",
"Bowling",
"Boxing",
"Cheerleading",
"Cycling",
"Fencing",
"Fishing",
"Fitness",
"Football",
"Golf",
"Handball",
"Ice Skating",
"Judo",
"Lacrosse",
"Parkour",
"Rugby",
"Scuba Diving",
"Skateboarding",
"Sumo",
"Surfing",
"Swimming",
"Table Tennis",
"Tennis",
"Volleyball",
"Wrestling",
],
"Theme Other": [
"Adoption",
"Animals",
"Astronomy",
"Autobiographical",
"Biographical",
"Body Horror",
"Cannibalism",
"Chibi",
"Cosmic Horror",
"Crime",
"Crossover",
"Death Game",
"Denpa",
"Drugs",
"Economics",
"Educational",
"Environmental",
"Ero Guro",
"Filmmaking",
"Found Family",
"Gambling",
"Gender Bending",
"Gore",
"Language Barrier",
"LGBTQ+ Themes",
"Lost Civilization",
"Marriage",
"Medicine",
"Memory Manipulation",
"Meta",
"Mountaineering",
"Noir",
"Otaku Culture",
"Pandemic",
"Philosophy",
"Politics",
"Proxy Battle",
"Psychosexual",
"Reincarnation",
"Religion",
"Royal Affairs",
"Slavery",
"Software Development",
"Survival",
"Terrorism",
"Torture",
"Travel",
"War",
],
"Theme Other-Organisations": [
"Assassins",
"Criminal Organization",
"Cult",
"Firefighters",
"Gangs",
"Mafia",
"Military",
"Police",
"Triads",
"Yakuza",
],
"Theme Other-Vehicle": [
"Aviation",
"Cars",
"Mopeds",
"Motorcycles",
"Ships",
"Tanks",
"Trains",
],
"Theme Romance": [
"Age Gap",
"Bisexual",
"Boys' Love",
"Female Harem",
"Heterosexual",
"Love Triangle",
"Male Harem",
"Matchmaking",
"Mixed Gender Harem",
"Teens' Love",
"Unrequited Love",
"Yuri",
],
"Theme Sci Fi": [
"Cyberpunk",
"Space Opera",
"Time Loop",
"Time Manipulation",
"Tokusatsu",
],
"Theme Sci Fi-Mecha": ["Real Robot", "Super Robot"],
"Theme Slice of Life": [
"Agriculture",
"Cute Boys Doing Cute Things",
"Cute Girls Doing Cute Things",
"Family Life",
"Horticulture",
"Iyashikei",
"Parenthood",
],
}
tags_available_list = []
for tag_category, tags_in_category in tags_available.items():
tags_available_list.extend(tags_in_category)

View File

@@ -0,0 +1,394 @@
import click
from ...completion_functions import anime_titles_shell_complete
from .data import (
genres_available,
media_formats_available,
media_statuses_available,
seasons_available,
sorts_available,
tags_available_list,
years_available,
)
@click.command(
help="download anime using anilists api to get the titles",
short_help="download anime with anilist intergration",
)
@click.option("--title", "-t", shell_complete=anime_titles_shell_complete)
@click.option(
"--season",
help="The season the media was released",
type=click.Choice(seasons_available),
)
@click.option(
"--status",
"-S",
help="The media status of the anime",
multiple=True,
type=click.Choice(media_statuses_available),
)
@click.option(
"--sort",
"-s",
help="What to sort the search results on",
type=click.Choice(sorts_available),
)
@click.option(
"--genres",
"-g",
multiple=True,
help="the genres to filter by",
type=click.Choice(genres_available),
)
@click.option(
"--tags",
"-T",
multiple=True,
help="the tags to filter by",
type=click.Choice(tags_available_list),
)
@click.option(
"--media-format",
"-f",
multiple=True,
help="Media format",
type=click.Choice(media_formats_available),
)
@click.option(
"--year",
"-y",
type=click.Choice(years_available),
help="the year the media was released",
)
@click.option(
"--on-list/--not-on-list",
"-L/-no-L",
help="Whether the anime should be in your list or not",
type=bool,
)
@click.option(
"--episode-range",
"-r",
help="A range of episodes to download (start-end)",
)
@click.option(
"--force-unknown-ext",
"-F",
help="This option forces yt-dlp to download extensions its not aware of",
is_flag=True,
)
@click.option(
"--silent/--no-silent",
"-q/-V",
type=bool,
help="Download silently (during download)",
default=True,
)
@click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)")
@click.option(
"--merge", "-m", is_flag=True, help="Merge the subfile with video using ffmpeg"
)
@click.option(
"--clean",
"-c",
is_flag=True,
help="After merging delete the original files",
)
@click.option(
"--wait-time",
"-w",
type=int,
help="The amount of time to wait after downloading is complete before the screen is completely cleared",
default=60,
)
@click.option(
"--prompt/--no-prompt",
help="Whether to prompt for anything instead just do the best thing",
default=True,
)
@click.option(
"--force-ffmpeg",
is_flag=True,
help="Force the use of FFmpeg for downloading (supports large variety of streams but slower)",
)
@click.option(
"--hls-use-mpegts",
is_flag=True,
help="Use mpegts for hls streams (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
)
@click.option(
"--max-results", "-M", type=int, help="The maximum number of results to show"
)
@click.pass_obj
def download(
config,
title,
season,
status,
sort,
genres,
tags,
media_format,
year,
on_list,
episode_range,
force_unknown_ext,
silent,
verbose,
merge,
clean,
wait_time,
prompt,
force_ffmpeg,
hls_use_mpegts,
max_results,
):
from rich import print
from ....anilist import AniList
force_ffmpeg |= hls_use_mpegts
success, anilist_search_results = AniList.search(
query=title,
sort=sort,
status_in=list(status),
genre_in=list(genres),
season=season,
tag_in=list(tags),
seasonYear=year,
format_in=list(media_format),
on_list=on_list,
max_results=max_results,
)
if success:
import time
from rich.progress import Progress
from thefuzz import fuzz
from ....AnimeProvider import AnimeProvider
from ....libs.anime_provider.types import Anime
from ....libs.fzf import fzf
from ....Utility.data import anime_normalizer
from ....Utility.downloader.downloader import downloader
from ...utils.tools import exit_app
from ...utils.utils import (
filter_by_quality,
fuzzy_inquirer,
move_preferred_subtitle_lang_to_top,
)
anime_provider = AnimeProvider(config.provider)
anilist_anime_info = None
translation_type = config.translation_type
download_dir = config.downloads_dir
anime_titles = [
(anime["title"]["romaji"] or anime["title"]["english"])
for anime in anilist_search_results["data"]["Page"]["media"]
]
print(f"[green bold]Queued:[/] {anime_titles}")
for i, anime_title in enumerate(anime_titles):
print(f"[green bold]Now Downloading: [/] {anime_title}")
# ---- search for anime ----
with Progress() as progress:
progress.add_task("Fetching Search Results...", total=None)
search_results = anime_provider.search_for_anime(
anime_title, translation_type=translation_type
)
if not search_results:
print(
"No search results found from provider for {}".format(anime_title)
)
continue
search_results = search_results["results"]
if not search_results:
print("Nothing muches your search term")
continue
search_results_ = {
search_result["title"]: search_result
for search_result in search_results
}
if config.auto_select:
selected_anime_title = max(
search_results_.keys(),
key=lambda title: fuzz.ratio(
anime_normalizer.get(title, title), anime_title
),
)
print("[cyan]Auto selecting:[/] ", selected_anime_title)
else:
choices = list(search_results_.keys())
if config.use_fzf:
selected_anime_title = fzf.run(
choices, "Please Select title", "FastAnime"
)
else:
selected_anime_title = fuzzy_inquirer(
choices,
"Please Select title",
)
# ---- fetch anime ----
with Progress() as progress:
progress.add_task("Fetching Anime...", total=None)
anime: Anime | None = anime_provider.get_anime(
search_results_[selected_anime_title]["id"]
)
if not anime:
print("Failed to fetch anime {}".format(selected_anime_title))
continue
episodes = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
)
# where the magic happens
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end)
]
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
print(f"[green bold]Downloading: [/] {episodes_range}")
else:
episodes_range = sorted(episodes, key=float)
if config.normalize_titles:
anilist_anime_info = anilist_search_results["data"]["Page"]["media"][i]
# let's download them
for episode in episodes_range:
try:
episode = str(episode)
if episode not in episodes:
print(
f"[cyan]Warning[/]: Episode {episode} not found, skipping"
)
continue
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams(
anime["id"], episode, config.translation_type
)
if not streams:
print("No streams skipping")
continue
# ---- fetch servers ----
if config.server == "top":
with Progress() as progress:
progress.add_task("Fetching top server...", total=None)
server_name = next(streams, None)
if not server_name:
print("Sth went wrong when fetching the server")
continue
stream_link = filter_by_quality(
config.quality, server_name["links"]
)
if not stream_link:
print("[yellow bold]WARNING:[/] No streams found")
time.sleep(1)
print("Continuing...")
continue
link = stream_link["link"]
provider_headers = server_name["headers"]
episode_title = server_name["episode_title"]
subtitles = server_name["subtitles"]
else:
with Progress() as progress:
progress.add_task("Fetching servers", total=None)
# prompt for server selection
servers = {server["server"]: server for server in streams}
servers_names = list(servers.keys())
if config.server in servers_names:
server_name = config.server
else:
if config.use_fzf:
server_name = fzf.run(servers_names, "Select an link")
else:
server_name = fuzzy_inquirer(
servers_names,
"Select link",
)
stream_link = filter_by_quality(
config.quality, servers[server_name]["links"]
)
if not stream_link:
print("[yellow bold]WARNING:[/] No streams found")
time.sleep(1)
print("Continuing...")
continue
link = stream_link["link"]
provider_headers = servers[server_name]["headers"]
subtitles = servers[server_name]["subtitles"]
episode_title = servers[server_name]["episode_title"]
if anilist_anime_info:
selected_anime_title = (
anilist_anime_info["title"][config.preferred_language]
or anilist_anime_info["title"]["romaji"]
or anilist_anime_info["title"]["english"]
)
import re
for episode_detail in anilist_anime_info["streamingEpisodes"]:
if re.match(
f".*Episode {episode} .*", episode_detail["title"]
):
episode_title = episode_detail["title"]
break
print(f"[purple]Now Downloading:[/] {episode_title}")
subtitles = move_preferred_subtitle_lang_to_top(
subtitles, config.sub_lang
)
downloader._download_file(
link,
selected_anime_title,
episode_title,
download_dir,
silent,
vid_format=config.format,
force_unknown_ext=force_unknown_ext,
verbose=verbose,
headers=provider_headers,
sub=subtitles[0]["url"] if subtitles else "",
merge=merge,
clean=clean,
prompt=prompt,
force_ffmpeg=force_ffmpeg,
hls_use_mpegts=hls_use_mpegts,
)
except Exception as e:
print(e)
time.sleep(1)
print("Continuing...")
print("Done Downloading")
time.sleep(wait_time)
exit_app()
else:
from sys import exit
print("Failed to search for anime", anilist_search_results)
exit(1)
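
The --episode-range handling above treats the value as colon-separated slice indices into the sorted episode list. A simplified, standalone sketch of that mapping (the original unpacks its fallback branch slightly differently):

def slice_episodes(episodes: list[str], episode_range: str | None) -> list[str]:
    # Sketch of the range parsing shown above; indices are 0-based positions
    # in the sorted episode list, not episode numbers.
    if not episode_range:
        return episodes
    if ":" not in episode_range:
        return episodes[int(episode_range):]                         # "5"      -> from index 5 onwards
    parts = episode_range.split(":")
    if len(parts) == 2 and all(parts):
        return episodes[int(parts[0]):int(parts[1])]                 # "2:10"   -> indices 2..9
    if len(parts) == 3 and all(parts):
        return episodes[int(parts[0]):int(parts[1]):int(parts[2])]  # "2:10:2" -> every 2nd of those
    start, end = parts[0], parts[-1]
    if start.strip():
        return episodes[int(start):]                                 # "2:"     -> open-ended
    if end.strip():
        return episodes[:int(end)]                                   # ":10"    -> up to index 10
    return episodes


# e.g. slice_episodes([str(n) for n in range(1, 13)], "2:10:2") -> ['3', '5', '7', '9']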

View File

@@ -0,0 +1,358 @@
import logging
from typing import TYPE_CHECKING
import click
from ...completion_functions import downloaded_anime_titles
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from ..config import Config
@click.command(
help="View and watch your downloads using mpv",
short_help="Watch downloads",
epilog="""
\b
\b\bExamples:
fastanime downloads
\b
# view individual episodes
fastanime downloads --view-episodes
# --- or ---
fastanime downloads -v
\b
# to set seek time when using ffmpegthumbnailer for local previews
# -1 means random and is the default
fastanime downloads --time-to-seek <intRange(-1,100)>
# --- or ---
fastanime downloads -t <intRange(-1,100)>
\b
# to watch a specific title
# be sure to get the completions for the best experience
fastanime downloads --title <title>
\b
# to get the path of the configured downloads folder
fastanime downloads --path
# useful when you want to use the value for other programs
""",
)
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
@click.option(
"--title",
"-T",
shell_complete=downloaded_anime_titles,
help="watch a specific title",
)
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
@click.option(
"--ffmpegthumbnailer-seek-time",
"--time-to-seek",
"-t",
type=click.IntRange(-1, 100),
help="ffmpegthumbnailer seek time",
)
@click.pass_obj
def downloads(
config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
):
import os
from ....cli.utils.mpv import run_mpv
from ....libs.fzf import fzf
from ....libs.rofi import Rofi
from ....Utility.utils import sort_by_episode_number
from ...utils.tools import exit_app
from ...utils.utils import fuzzy_inquirer
if not ffmpegthumbnailer_seek_time:
ffmpegthumbnailer_seek_time = config.ffmpegthumbnailer_seek_time
USER_VIDEOS_DIR = config.downloads_dir
if path:
print(USER_VIDEOS_DIR)
return
if not os.path.exists(USER_VIDEOS_DIR):
print("Downloads directory specified does not exist")
return
anime_downloads = sorted(
os.listdir(USER_VIDEOS_DIR),
)
anime_downloads.append("Exit")
def create_thumbnails(video_path, anime_title, downloads_thumbnail_cache_dir):
import os
import shutil
import subprocess
FFMPEG_THUMBNAILER = shutil.which("ffmpegthumbnailer")
if not FFMPEG_THUMBNAILER:
return
out = os.path.join(downloads_thumbnail_cache_dir, anime_title)
if ffmpegthumbnailer_seek_time == -1:
import random
seektime = str(random.randrange(0, 100))
else:
seektime = str(ffmpegthumbnailer_seek_time)
_ = subprocess.run(
[
FFMPEG_THUMBNAILER,
"-i",
video_path,
"-o",
out,
"-s",
"0",
"-t",
seektime,
],
stderr=subprocess.PIPE,
stdout=subprocess.PIPE,
)
def get_previews_anime(workers=None, bg=True):
import concurrent.futures
import random
import shutil
from pathlib import Path
if not shutil.which("ffmpegthumbnailer"):
print("ffmpegthumbnailer not found")
logger.error("ffmpegthumbnailer not found")
return
from ....constants import APP_CACHE_DIR
from ...utils.scripts import fzf_preview
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
def _worker():
# use concurrency to generate the thumbnails as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
# load the jobs
future_to_url = {}
for anime_title in anime_downloads:
anime_path = os.path.join(USER_VIDEOS_DIR, anime_title)
if not os.path.isdir(anime_path):
continue
playlist = [
anime
for anime in sorted(
os.listdir(anime_path),
)
if "mp4" in anime
]
if playlist:
# pick a random episode file to generate the playlist thumbnail from
video_path = os.path.join(anime_path, random.choice(playlist))
future_to_url[
executor.submit(
create_thumbnails,
video_path,
anime_title,
downloads_thumbnail_cache_dir,
)
] = anime_title
# execute the jobs
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
future.result()
except Exception as e:
logger.error("%r generated an exception: %s" % (url, e))
if bg:
from threading import Thread
worker = Thread(target=_worker)
worker.daemon = True
worker.start()
else:
_worker()
os.environ["SHELL"] = shutil.which("bash") or "bash"
preview = """
%s
if [ -s %s/{} ]; then
if ! fzf-preview %s/{} 2>/dev/null; then
echo Loading...
fi
else echo Loading...
fi
""" % (
fzf_preview,
downloads_thumbnail_cache_dir,
downloads_thumbnail_cache_dir,
)
return preview
def get_previews_episodes(anime_playlist_path, workers=None, bg=True):
import shutil
from pathlib import Path
from ....constants import APP_CACHE_DIR
from ...utils.scripts import fzf_preview
if not shutil.which("ffmpegthumbnailer"):
print("ffmpegthumbnailer not found")
logger.error("ffmpegthumbnailer not found")
return
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
def _worker():
import concurrent.futures
# use concurrency to generate the thumbnails as fast as possible
# anime_playlist_path = os.path.join(USER_VIDEOS_DIR, anime_playlist_path)
if not os.path.isdir(anime_playlist_path):
return
anime_episodes = sorted(
os.listdir(anime_playlist_path), key=sort_by_episode_number
)
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
# load the jobs
future_to_url = {}
for episode_title in anime_episodes:
episode_path = os.path.join(anime_playlist_path, episode_title)
# the episode file to generate a thumbnail from
future_to_url[
executor.submit(
create_thumbnails,
episode_path,
episode_title,
downloads_thumbnail_cache_dir,
)
] = episode_title
# execute the jobs
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
future.result()
except Exception as e:
logger.error("%r generated an exception: %s" % (url, e))
if bg:
from threading import Thread
worker = Thread(target=_worker)
worker.daemon = True
worker.start()
else:
_worker()
os.environ["SHELL"] = shutil.which("bash") or "bash"
preview = """
%s
if [ -s %s/{} ]; then
if ! fzf-preview %s/{} 2>/dev/null; then
echo Loading...
fi
else echo Loading...
fi
""" % (
fzf_preview,
downloads_thumbnail_cache_dir,
downloads_thumbnail_cache_dir,
)
return preview
def stream_episode(
anime_playlist_path,
):
if view_episodes:
if not os.path.isdir(anime_playlist_path):
print(anime_playlist_path, "is not dir")
exit_app(1)
return
episodes = sorted(
os.listdir(anime_playlist_path), key=sort_by_episode_number
)
downloaded_episodes = [*episodes, "Back"]
if config.use_fzf:
if not config.preview:
episode_title = fzf.run(
downloaded_episodes,
"Enter Episode ",
)
else:
preview = get_previews_episodes(anime_playlist_path)
episode_title = fzf.run(
downloaded_episodes,
"Enter Episode ",
preview=preview,
)
elif config.use_rofi:
episode_title = Rofi.run(downloaded_episodes, "Enter Episode")
else:
episode_title = fuzzy_inquirer(
downloaded_episodes,
"Enter Playlist Name",
)
if episode_title == "Back":
stream_anime()
return
episode_path = os.path.join(anime_playlist_path, episode_title)
if config.sync_play:
from ...utils.syncplay import SyncPlayer
SyncPlayer(episode_path)
else:
run_mpv(
episode_path,
player=config.player,
)
stream_episode(anime_playlist_path)
def stream_anime(title=None):
if title:
from thefuzz import fuzz
playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
elif config.use_fzf:
if not config.preview:
playlist_name = fzf.run(
anime_downloads,
"Enter Playlist Name",
)
else:
preview = get_previews_anime()
playlist_name = fzf.run(
anime_downloads,
"Enter Playlist Name",
preview=preview,
)
elif config.use_rofi:
playlist_name = Rofi.run(anime_downloads, "Enter Playlist Name")
else:
playlist_name = fuzzy_inquirer(
anime_downloads,
"Enter Playlist Name",
)
if playlist_name == "Exit":
exit_app()
return
playlist = os.path.join(USER_VIDEOS_DIR, playlist_name)
if view_episodes:
stream_episode(
playlist,
)
else:
if config.sync_play:
from ...utils.syncplay import SyncPlayer
SyncPlayer(playlist)
else:
run_mpv(
playlist,
player=config.player,
)
stream_anime()
stream_anime(title)
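
The preview strings built above rely on fzf's preview mechanism: the {} placeholder is replaced by fzf with the currently highlighted entry, and the cached thumbnail (if present) is rendered with fzf-preview. Roughly, the wrapper ends up doing something like the following; this is a sketch, not the actual fzf.run implementation:

import subprocess


def pick_with_preview(entries: list[str], preview_script: str) -> str:
    # Sketch: feed the entries to fzf on stdin and let it run preview_script
    # for the highlighted line, substituting {} with that line.
    result = subprocess.run(
        ["fzf", "--preview", preview_script],
        input="\n".join(entries),
        text=True,
        capture_output=True,
    )
    return result.stdout.strip()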

View File

@@ -42,5 +42,12 @@ def dropped(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda config, **kwargs: anilist_interfaces._handle_animelist(
config, fastanime_runtime_state, "Dropped", **kwargs
)
)
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -26,6 +26,9 @@ def favourites(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = AniList.get_most_favourite
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -41,6 +41,12 @@ def paused(config: "Config", dump_json):
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState
anilist_config = FastAnimeRuntimeState()
anilist_config.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, anilist_config)
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda config, **kwargs: anilist_interfaces._handle_animelist(
config, fastanime_runtime_state, "Paused", **kwargs
)
)
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -42,5 +42,12 @@ def planning(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda config, **kwargs: anilist_interfaces._handle_animelist(
config, fastanime_runtime_state, "Planned", **kwargs
)
)
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -25,6 +25,9 @@ def popular(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = AniList.get_most_popular
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -26,6 +26,11 @@ def recent(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
AniList.get_most_recently_updated
)
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -42,5 +42,12 @@ def rewatching(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda config, **kwargs: anilist_interfaces._handle_animelist(
config, fastanime_runtime_state, "Rewatching", **kwargs
)
)
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -25,6 +25,9 @@ def scores(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = AniList.get_most_scored
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -1,369 +1,15 @@
import click
from ...completion_functions import anime_titles_shell_complete
tags_available = {
"Cast": ["Polyamorous"],
"Cast Main Cast": [
"Anti-Hero",
"Elderly Protagonist",
"Ensemble Cast",
"Estranged Family",
"Female Protagonist",
"Male Protagonist",
"Primarily Adult Cast",
"Primarily Animal Cast",
"Primarily Child Cast",
"Primarily Female Cast",
"Primarily Male Cast",
"Primarily Teen Cast",
],
"Cast Traits": [
"Age Regression",
"Agender",
"Aliens",
"Amnesia",
"Angels",
"Anthropomorphism",
"Aromantic",
"Arranged Marriage",
"Artificial Intelligence",
"Asexual",
"Butler",
"Centaur",
"Chimera",
"Chuunibyou",
"Clone",
"Cosplay",
"Cowboys",
"Crossdressing",
"Cyborg",
"Delinquents",
"Demons",
"Detective",
"Dinosaurs",
"Disability",
"Dissociative Identities",
"Dragons",
"Dullahan",
"Elf",
"Fairy",
"Femboy",
"Ghost",
"Goblin",
"Gods",
"Gyaru",
"Hikikomori",
"Homeless",
"Idol",
"Kemonomimi",
"Kuudere",
"Maids",
"Mermaid",
"Monster Boy",
"Monster Girl",
"Nekomimi",
"Ninja",
"Nudity",
"Nun",
"Office Lady",
"Oiran",
"Ojou-sama",
"Orphan",
"Pirates",
"Robots",
"Samurai",
"Shrine Maiden",
"Skeleton",
"Succubus",
"Tanned Skin",
"Teacher",
"Tomboy",
"Transgender",
"Tsundere",
"Twins",
"Vampire",
"Veterinarian",
"Vikings",
"Villainess",
"VTuber",
"Werewolf",
"Witch",
"Yandere",
"Zombie",
],
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
"Setting": ["Matriarchy"],
"Setting Scene": [
"Bar",
"Boarding School",
"Circus",
"Coastal",
"College",
"Desert",
"Dungeon",
"Foreign",
"Inn",
"Konbini",
"Natural Disaster",
"Office",
"Outdoor",
"Prison",
"Restaurant",
"Rural",
"School",
"School Club",
"Snowscape",
"Urban",
"Work",
],
"Setting Time": [
"Achronological Order",
"Anachronism",
"Ancient China",
"Dystopian",
"Historical",
"Time Skip",
],
"Setting Universe": [
"Afterlife",
"Alternate Universe",
"Augmented Reality",
"Omegaverse",
"Post-Apocalyptic",
"Space",
"Urban Fantasy",
"Virtual World",
],
"Technical": [
"4-koma",
"Achromatic",
"Advertisement",
"Anthology",
"CGI",
"Episodic",
"Flash",
"Full CGI",
"Full Color",
"No Dialogue",
"Non-fiction",
"POV",
"Puppetry",
"Rotoscoping",
"Stop Motion",
],
"Theme Action": [
"Archery",
"Battle Royale",
"Espionage",
"Fugitive",
"Guns",
"Martial Arts",
"Spearplay",
"Swordplay",
],
"Theme Arts": [
"Acting",
"Calligraphy",
"Classic Literature",
"Drawing",
"Fashion",
"Food",
"Makeup",
"Photography",
"Rakugo",
"Writing",
],
"Theme Arts-Music": [
"Band",
"Classical Music",
"Dancing",
"Hip-hop Music",
"Jazz Music",
"Metal Music",
"Musical Theater",
"Rock Music",
],
"Theme Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
"Theme Drama": [
"Bullying",
"Class Struggle",
"Coming of Age",
"Conspiracy",
"Eco-Horror",
"Fake Relationship",
"Kingdom Management",
"Rehabilitation",
"Revenge",
"Suicide",
"Tragedy",
],
"Theme Fantasy": [
"Alchemy",
"Body Swapping",
"Cultivation",
"Fairy Tale",
"Henshin",
"Isekai",
"Kaiju",
"Magic",
"Mythology",
"Necromancy",
"Shapeshifting",
"Steampunk",
"Super Power",
"Superhero",
"Wuxia",
"Youkai",
],
"Theme Game": ["Board Game", "E-Sports", "Video Games"],
"Theme Game-Card & Board Game": [
"Card Battle",
"Go",
"Karuta",
"Mahjong",
"Poker",
"Shogi",
],
"Theme Game-Sport": [
"Acrobatics",
"Airsoft",
"American Football",
"Athletics",
"Badminton",
"Baseball",
"Basketball",
"Bowling",
"Boxing",
"Cheerleading",
"Cycling",
"Fencing",
"Fishing",
"Fitness",
"Football",
"Golf",
"Handball",
"Ice Skating",
"Judo",
"Lacrosse",
"Parkour",
"Rugby",
"Scuba Diving",
"Skateboarding",
"Sumo",
"Surfing",
"Swimming",
"Table Tennis",
"Tennis",
"Volleyball",
"Wrestling",
],
"Theme Other": [
"Adoption",
"Animals",
"Astronomy",
"Autobiographical",
"Biographical",
"Body Horror",
"Cannibalism",
"Chibi",
"Cosmic Horror",
"Crime",
"Crossover",
"Death Game",
"Denpa",
"Drugs",
"Economics",
"Educational",
"Environmental",
"Ero Guro",
"Filmmaking",
"Found Family",
"Gambling",
"Gender Bending",
"Gore",
"Language Barrier",
"LGBTQ+ Themes",
"Lost Civilization",
"Marriage",
"Medicine",
"Memory Manipulation",
"Meta",
"Mountaineering",
"Noir",
"Otaku Culture",
"Pandemic",
"Philosophy",
"Politics",
"Proxy Battle",
"Psychosexual",
"Reincarnation",
"Religion",
"Royal Affairs",
"Slavery",
"Software Development",
"Survival",
"Terrorism",
"Torture",
"Travel",
"War",
],
"Theme Other-Organisations": [
"Assassins",
"Criminal Organization",
"Cult",
"Firefighters",
"Gangs",
"Mafia",
"Military",
"Police",
"Triads",
"Yakuza",
],
"Theme Other-Vehicle": [
"Aviation",
"Cars",
"Mopeds",
"Motorcycles",
"Ships",
"Tanks",
"Trains",
],
"Theme Romance": [
"Age Gap",
"Bisexual",
"Boys' Love",
"Female Harem",
"Heterosexual",
"Love Triangle",
"Male Harem",
"Matchmaking",
"Mixed Gender Harem",
"Teens' Love",
"Unrequited Love",
"Yuri",
],
"Theme Sci Fi": [
"Cyberpunk",
"Space Opera",
"Time Loop",
"Time Manipulation",
"Tokusatsu",
],
"Theme Sci Fi-Mecha": ["Real Robot", "Super Robot"],
"Theme Slice of Life": [
"Agriculture",
"Cute Boys Doing Cute Things",
"Cute Girls Doing Cute Things",
"Family Life",
"Horticulture",
"Iyashikei",
"Parenthood",
],
}
tags_available_list = []
for tag_category, tags_in_category in tags_available.items():
tags_available_list.extend(tags_in_category)
from .data import (
genres_available,
media_formats_available,
media_statuses_available,
seasons_available,
sorts_available,
tags_available_list,
years_available,
)
@click.command(
@@ -380,91 +26,27 @@ for tag_category, tags_in_category in tags_available.items():
@click.option(
"--season",
help="The season the media was released",
type=click.Choice(["WINTER", "SPRING", "SUMMER", "FALL"]),
type=click.Choice(seasons_available),
)
@click.option(
"--status",
"-S",
help="The media status of the anime",
multiple=True,
type=click.Choice(
["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]
),
type=click.Choice(media_statuses_available),
)
@click.option(
"--sort",
"-s",
help="What to sort the search results on",
type=click.Choice(
[
"ID",
"ID_DESC",
"TITLE_ROMAJI",
"TITLE_ROMAJI_DESC",
"TITLE_ENGLISH",
"TITLE_ENGLISH_DESC",
"TITLE_NATIVE",
"TITLE_NATIVE_DESC",
"TYPE",
"TYPE_DESC",
"FORMAT",
"FORMAT_DESC",
"START_DATE",
"START_DATE_DESC",
"END_DATE",
"END_DATE_DESC",
"SCORE",
"SCORE_DESC",
"POPULARITY",
"POPULARITY_DESC",
"TRENDING",
"TRENDING_DESC",
"EPISODES",
"EPISODES_DESC",
"DURATION",
"DURATION_DESC",
"STATUS",
"STATUS_DESC",
"CHAPTERS",
"CHAPTERS_DESC",
"VOLUMES",
"VOLUMES_DESC",
"UPDATED_AT",
"UPDATED_AT_DESC",
"SEARCH_MATCH",
"FAVOURITES",
"FAVOURITES_DESC",
]
),
type=click.Choice(sorts_available),
)
@click.option(
"--genres",
"-g",
multiple=True,
help="the genres to filter by",
type=click.Choice(
[
"Action",
"Adventure",
"Comedy",
"Drama",
"Ecchi",
"Fantasy",
"Horror",
"Mahou Shoujo",
"Mecha",
"Music",
"Mystery",
"Psychological",
"Romance",
"Sci-Fi",
"Slice of Life",
"Sports",
"Supernatural",
"Thriller",
"Hentai",
]
),
type=click.Choice(genres_available),
)
@click.option(
"--tags",
@@ -478,49 +60,12 @@ for tag_category, tags_in_category in tags_available.items():
"-f",
multiple=True,
help="Media format",
type=click.Choice(
["TV", "TV_SHORT", "MOVIE", "SPECIAL", "OVA", "MUSIC", "NOVEL", "ONE_SHOT"]
),
type=click.Choice(media_formats_available),
)
@click.option(
"--year",
"-y",
type=click.Choice(
[
"1900",
"1910",
"1920",
"1930",
"1940",
"1950",
"1960",
"1970",
"1980",
"1990",
"2000",
"2004",
"2005",
"2006",
"2007",
"2008",
"2009",
"2010",
"2011",
"2012",
"2013",
"2014",
"2015",
"2016",
"2017",
"2018",
"2019",
"2020",
"2021",
"2022",
"2023",
"2024",
]
),
type=click.Choice(years_available),
help="the year the media was released",
)
@click.option(
@@ -566,6 +111,22 @@ def search(
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda page=1, **kwargs: AniList.search(
query=title,
sort=sort,
status_in=list(status),
genre_in=list(genres),
season=season,
tag_in=list(tags),
seasonYear=year,
format_in=list(media_format),
on_list=on_list,
page=page,
)
)
fastanime_runtime_state.anilist_results_data = search_results
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -26,6 +26,9 @@ def trending(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = AniList.get_trending
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -25,6 +25,9 @@ def upcoming(config, dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = AniList.get_upcoming_anime
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -42,5 +42,12 @@ def watching(config: "Config", dump_json):
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.current_page = 1
fastanime_runtime_state.current_data_loader = (
lambda config, **kwargs: anilist_interfaces._handle_animelist(
config, fastanime_runtime_state, "Watching", **kwargs
)
)
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -114,6 +114,16 @@ if TYPE_CHECKING:
help="Whether to prompt for anything instead just do the best thing",
default=True,
)
@click.option(
"--force-ffmpeg",
is_flag=True,
help="Force the use of FFmpeg for downloading (supports large variety of streams but slower)",
)
@click.option(
"--hls-use-mpegts",
is_flag=True,
help="Use mpegts for hls streams (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
)
@click.pass_obj
def download(
config: "Config",
@@ -127,6 +137,8 @@ def download(
clean,
wait_time,
prompt,
force_ffmpeg,
hls_use_mpegts,
):
import time
@@ -146,6 +158,8 @@ def download(
move_preferred_subtitle_lang_to_top,
)
force_ffmpeg |= hls_use_mpegts
anime_provider = AnimeProvider(config.provider)
anilist_anime_info = None
@@ -185,6 +199,8 @@ def download(
clean,
wait_time,
prompt,
force_ffmpeg,
hls_use_mpegts,
)
return
search_results = search_results["results"]
@@ -236,6 +252,8 @@ def download(
clean,
wait_time,
prompt,
force_ffmpeg,
hls_use_mpegts,
)
return
@@ -369,6 +387,8 @@ def download(
merge=merge,
clean=clean,
prompt=prompt,
force_ffmpeg=force_ffmpeg,
hls_use_mpegts=hls_use_mpegts,
)
except Exception as e:
print(e)

View File

@@ -213,25 +213,21 @@ def grab(
# let's download them
for episode in episodes_range:
try:
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime["id"], episode, config.translation_type
)
if not streams:
continue
episode_streams = {server["server"]: server for server in streams}
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime["id"], episode, config.translation_type
)
if not streams:
continue
episode_streams = {server["server"]: server for server in streams}
if episode_streams_only:
grabbed_anime[episode] = episode_streams
else:
grabbed_anime["episodes_streams"][ # pyright:ignore
episode
] = episode_streams
except Exception as e:
logger.error(e)
if episode_streams_only:
grabbed_anime[episode] = episode_streams
else:
grabbed_anime["episodes_streams"][ # pyright:ignore
episode
] = episode_streams
# grab the full data for a single title and append it to the final result or episode streams
grabbed_animes.append(grabbed_anime)

View File

@@ -5,12 +5,14 @@ from configparser import ConfigParser
from typing import TYPE_CHECKING
from ..constants import (
ASSETS_DIR,
S_PLATFORM,
USER_CONFIG_PATH,
USER_DATA_PATH,
USER_VIDEOS_DIR,
USER_WATCH_HISTORY_PATH,
S_PLATFORM,
)
from ..libs.fzf import FZF_DEFAULT_OPTS, HEADER
from ..libs.rofi import Rofi
logger = logging.getLogger(__name__)
@@ -27,35 +29,50 @@ class Config(object):
"https://anilist.co/api/v2/oauth/authorize?client_id=20148&response_type=token"
)
anime_provider: "AnimeProvider"
user_data = {"recent_anime": [], "animelist": [], "user": {}}
user_data = {
"recent_anime": [],
"animelist": [],
"user": {},
"meta": {"last_updated": 0},
}
default_config = {
"auto_next": "False",
"menu_order": "",
"auto_select": "True",
"cache_requests": "true",
"check_for_updates": "True",
"continue_from_history": "True",
"default_media_list_tracking": "None",
"downloads_dir": USER_VIDEOS_DIR,
"disable_mpv_popen": "True",
"discord": "False",
"episode_complete_at": "80",
"ffmpegthumbnailer_seek_time": "-1",
"force_forward_tracking": "true",
"force_window": "immediate",
"fzf_opts": FZF_DEFAULT_OPTS,
"header_color": "95,135,175",
"header_ascii_art": HEADER,
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"icons": "false",
"image_previews": "True" if S_PLATFORM != "win32" else "False",
"normalize_titles": "True",
"notification_duration": "2",
"max_cache_lifetime": "03:00:00",
"per_page": "15",
"player": "mpv",
"preferred_history": "local",
"preferred_language": "english",
"preview": "False",
"preview_header_color": "215,0,95",
"preview_separator_color": "208,208,208",
"provider": "allanime",
"quality": "1080",
"recent": "50",
"rofi_theme": "",
"rofi_theme_preview": "",
"rofi_theme_confirm": "",
"rofi_theme_input": "",
"rofi_theme": os.path.join(ASSETS_DIR, "rofi_theme.rasi"),
"rofi_theme_preview": os.path.join(ASSETS_DIR, "rofi_theme_preview.rasi"),
"rofi_theme_confirm": os.path.join(ASSETS_DIR, "rofi_theme_confirm.rasi"),
"rofi_theme_input": os.path.join(ASSETS_DIR, "rofi_theme_input.rasi"),
"server": "top",
"skip": "false",
"sort_by": "search match",
@@ -67,68 +84,106 @@ class Config(object):
"use_rofi": "false",
}
def __init__(self) -> None:
def __init__(self, no_config) -> None:
self.initialize_user_data_and_watch_history_recent_anime()
self.load_config()
self.load_config(no_config)
def load_config(self):
def load_config(self, no_config=False):
self.configparser = ConfigParser(self.default_config)
self.configparser.add_section("stream")
self.configparser.add_section("general")
self.configparser.add_section("anilist")
# --- set config values from file or using defaults ---
if os.path.exists(USER_CONFIG_PATH):
if os.path.exists(USER_CONFIG_PATH) and not no_config:
self.configparser.read(USER_CONFIG_PATH, encoding="utf-8")
# TODO: rewrite all this removing the useless functions
# hate technical debt
# why did i do this lol
self.auto_next = self.get_auto_next()
self.auto_select = self.get_auto_select()
self.cache_requests = self.get_cache_requests()
self.continue_from_history = self.get_continue_from_history()
self.default_media_list_tracking = self.get_default_media_list_tracking()
# get the configuration
self.auto_next = self.configparser.getboolean("stream", "auto_next")
self.auto_select = self.configparser.getboolean("stream", "auto_select")
self.cache_requests = self.configparser.getboolean("general", "cache_requests")
self.check_for_updates = self.configparser.getboolean(
"general", "check_for_updates"
)
self.continue_from_history = self.configparser.getboolean(
"stream", "continue_from_history"
)
self.default_media_list_tracking = self.configparser.get(
"general", "default_media_list_tracking"
)
self.disable_mpv_popen = self.configparser.getboolean(
"stream", "disable_mpv_popen"
)
self.downloads_dir = self.get_downloads_dir()
self.episode_complete_at = self.get_episode_complete_at()
self.ffmpegthumbnailer_seek_time = self.get_ffmpegthumnailer_seek_time()
self.force_forward_tracking = self.get_force_forward_tracking()
self.force_window = self.get_force_window()
self.format = self.get_format()
self.icons = self.get_icons()
self.image_previews = self.get_image_previews()
self.normalize_titles = self.get_normalize_titles()
self.notification_duration = self.get_notification_duration()
self.player = self.get_player()
self.preferred_history = self.get_preferred_history()
self.preferred_language = self.get_preferred_language()
self.preview = self.get_preview()
self.provider = self.get_provider()
self.quality = self.get_quality()
self.discord = self.configparser.getboolean("general", "discord")
self.downloads_dir = self.configparser.get("general", "downloads_dir")
self.episode_complete_at = self.configparser.getint(
"stream", "episode_complete_at"
)
self.ffmpegthumbnailer_seek_time = self.configparser.getint(
"general", "ffmpegthumbnailer_seek_time"
)
self.force_forward_tracking = self.configparser.getboolean(
"general", "force_forward_tracking"
)
self.force_window = self.configparser.get("stream", "force_window")
self.format = self.configparser.get("stream", "format")
self.fzf_opts = self.configparser.get("general", "fzf_opts")
self.header_color = self.configparser.get("general", "header_color")
self.header_ascii_art = self.configparser.get("general", "header_ascii_art")
self.icons = self.configparser.getboolean("general", "icons")
self.image_previews = self.configparser.getboolean("general", "image_previews")
self.normalize_titles = self.configparser.getboolean(
"general", "normalize_titles"
)
self.notification_duration = self.configparser.getint(
"general", "notification_duration"
)
self._max_cache_lifetime = self.configparser.get(
"general", "max_cache_lifetime"
)
max_cache_lifetime = list(map(int, self._max_cache_lifetime.split(":")))
self.max_cache_lifetime = (
max_cache_lifetime[0] * 86400
+ max_cache_lifetime[1] * 3600
+ max_cache_lifetime[2] * 60
)
self.per_page = self.configparser.get("anilist", "per_page")
self.player = self.configparser.get("stream", "player")
self.preferred_history = self.configparser.get("stream", "preferred_history")
self.preferred_language = self.configparser.get("general", "preferred_language")
self.preview = self.configparser.getboolean("general", "preview")
self.preview_separator_color = self.configparser.get(
"general", "preview_separator_color"
)
self.preview_header_color = self.configparser.get(
"general", "preview_header_color"
)
self.provider = self.configparser.get("general", "provider")
self.quality = self.configparser.get("stream", "quality")
self.recent = self.configparser.getint("general", "recent")
self.rofi_theme_confirm = self.configparser.get("general", "rofi_theme_confirm")
self.rofi_theme_input = self.configparser.get("general", "rofi_theme_input")
self.rofi_theme = self.configparser.get("general", "rofi_theme")
self.rofi_theme_preview = self.configparser.get("general", "rofi_theme_preview")
self.server = self.configparser.get("stream", "server")
self.skip = self.configparser.getboolean("stream", "skip")
self.sort_by = self.configparser.get("anilist", "sort_by")
self.menu_order = self.configparser.get("general", "menu_order")
self.sub_lang = self.configparser.get("general", "sub_lang")
self.translation_type = self.configparser.get("stream", "translation_type")
self.use_fzf = self.configparser.getboolean("general", "use_fzf")
self.use_python_mpv = self.configparser.getboolean("stream", "use_python_mpv")
self.use_rofi = self.configparser.getboolean("general", "use_rofi")
self.use_persistent_provider_store = self.configparser.getboolean(
"general", "use_persistent_provider_store"
)
self.recent = self.get_recent()
self.rofi_theme_confirm = self.get_rofi_theme_confirm()
self.rofi_theme_input = self.get_rofi_theme_input()
self.rofi_theme = self.get_rofi_theme()
self.rofi_theme_preview = self.get_rofi_theme_preview()
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
Rofi.rofi_theme_input = self.rofi_theme_input
Rofi.rofi_theme = self.rofi_theme
Rofi.rofi_theme_input = self.rofi_theme_input
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
Rofi.rofi_theme_preview = self.rofi_theme_preview
self.server = self.get_server()
self.skip = self.get_skip()
self.sort_by = self.get_sort_by()
self.sub_lang = self.get_sub_lang()
self.translation_type = self.get_translation_type()
self.use_fzf = self.get_use_fzf()
self.use_python_mpv = self.get_use_mpv_mod()
self.use_rofi = self.get_use_rofi()
self.use_persistent_provider_store = self.get_use_persistent_provider_store()
os.environ["FZF_DEFAULT_OPTS"] = self.fzf_opts
# ---- setup user data ------
self.anime_list: list = self.user_data.get("animelist", [])
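
Judging by the multipliers used above (86400, 3600, 60), max_cache_lifetime is read as a days:hours:minutes string, so the default of 03:00:00 comes out to three days of cache validity. The same conversion as a tiny standalone sketch:

def parse_max_cache_lifetime(value: str) -> int:
    # "days:hours:minutes" -> seconds, mirroring the arithmetic shown above
    days, hours, minutes = (int(part) for part in value.split(":"))
    return days * 86400 + hours * 3600 + minutes * 60


assert parse_max_cache_lifetime("03:00:00") == 259200  # 3 days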
@@ -152,7 +207,7 @@ class Config(object):
def update_recent(self, recent_anime: list):
recent_anime_ids = []
_recent_anime = []
for anime in recent_anime[::-1]:
for anime in recent_anime:
if (
anime["id"] not in recent_anime_ids
and len(recent_anime_ids) <= self.recent
@@ -205,122 +260,14 @@ class Config(object):
with open(USER_DATA_PATH, "w") as f:
json.dump(self.user_data, f)
# getters for user configuration
# --- general section ---
def get_provider(self):
return self.configparser.get("general", "provider")
def get_ffmpegthumnailer_seek_time(self):
return self.configparser.getint("general", "ffmpegthumbnailer_seek_time")
def get_preferred_language(self):
return self.configparser.get("general", "preferred_language")
def get_sub_lang(self):
return self.configparser.get("general", "sub_lang")
def get_downloads_dir(self):
return self.configparser.get("general", "downloads_dir")
def get_icons(self):
return self.configparser.getboolean("general", "icons")
def get_image_previews(self):
return self.configparser.getboolean("general", "image_previews")
def get_preview(self):
return self.configparser.getboolean("general", "preview")
def get_use_fzf(self):
return self.configparser.getboolean("general", "use_fzf")
def get_use_persistent_provider_store(self):
return self.configparser.getboolean("general", "use_persistent_provider_store")
# rofi conifiguration
def get_use_rofi(self):
return self.configparser.getboolean("general", "use_rofi")
def get_rofi_theme(self):
return self.configparser.get("general", "rofi_theme")
def get_rofi_theme_preview(self):
return self.configparser.get("general", "rofi_theme_preview")
def get_rofi_theme_input(self):
return self.configparser.get("general", "rofi_theme_input")
def get_rofi_theme_confirm(self):
return self.configparser.get("general", "rofi_theme_confirm")
def get_force_forward_tracking(self):
return self.configparser.getboolean("general", "force_forward_tracking")
def get_cache_requests(self):
return self.configparser.getboolean("general", "cache_requests")
def get_default_media_list_tracking(self):
return self.configparser.get("general", "default_media_list_tracking")
def get_normalize_titles(self):
return self.configparser.getboolean("general", "normalize_titles")
def get_recent(self):
return self.configparser.getint("general", "recent")
# --- stream section ---
def get_skip(self):
return self.configparser.getboolean("stream", "skip")
def get_auto_next(self):
return self.configparser.getboolean("stream", "auto_next")
def get_auto_select(self):
return self.configparser.getboolean("stream", "auto_select")
def get_continue_from_history(self):
return self.configparser.getboolean("stream", "continue_from_history")
def get_use_mpv_mod(self):
return self.configparser.getboolean("stream", "use_python_mpv")
def get_notification_duration(self):
return self.configparser.getint("general", "notification_duration")
def get_episode_complete_at(self):
return self.configparser.getint("stream", "episode_complete_at")
def get_force_window(self):
return self.configparser.get("stream", "force_window")
def get_translation_type(self):
return self.configparser.get("stream", "translation_type")
def get_preferred_history(self):
return self.configparser.get("stream", "preferred_history")
def get_quality(self):
return self.configparser.get("stream", "quality")
def get_server(self):
return self.configparser.get("stream", "server")
def get_format(self):
return self.configparser.get("stream", "format")
def get_player(self):
return self.configparser.get("stream", "player")
def get_sort_by(self):
return self.configparser.get("anilist", "sort_by")
def update_config(self, section: str, key: str, value: str):
self.configparser.set(section, key, value)
with open(USER_CONFIG_PATH, "w") as config:
self.configparser.write(config)
def __repr__(self):
new_line = "\n"
tab = "\t"
current_config_state = f"""\
#
# ███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░
@@ -331,115 +278,171 @@ class Config(object):
# ╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝ ░╚════╝░░╚════╝░╚═╝░░╚══╝╚═╝░░░░░╚═╝░╚═════╝░
#
[general]
# whether to show the icons in the tui [True/False]
# more like emojis
# by the way if you have any recommendations to which should be used where please
# Can you rice it?
# For the preview pane
preview_separator_color = {self.preview_separator_color}
preview_header_color = {self.preview_header_color}
# For the header
# Be sure to indent
header_ascii_art = {new_line.join([tab + line for line in self.header_ascii_art.split(new_line)])}
header_color = {self.header_color}
# To be passed to fzf
# Be sure to indent
fzf_opts = {new_line.join([tab + line for line in self.fzf_opts.split(new_line)])}
# Whether to show the icons in the TUI [True/False]
# More like emojis
# By the way, if you have any recommendations
# for which should be used where, please
# don't hesitate to share your opinion
# cause it's a lot of work to look for the right one for each menu option
# be sure to also give the replacement emoji
# because it's a lot of work
# to look for the right one for each menu option
# Be sure to also give the replacement emoji
icons = {self.icons}
# whether to normalize provider titles [True/False]
# basically takes the provider titles and finds the corresponding anilist title then changes the title to that
# useful for uniformity especially when downloading from different providers
# this also applies to episode titles
# Whether to normalize provider titles [True/False]
# Basically takes the provider titles and finds the corresponding Anilist title, then changes the title to that
# Useful for uniformity, especially when downloading from different providers
# This also applies to episode titles
normalize_titles = {self.normalize_titles}
# can be [allanime, animepahe, hianime]
# allanime is the most realible
# animepahe provides different links to streams of different quality so a quality can be selected reliably with --quality option
# hianime which is now hianime usually provides subs in different languuages and its servers are generally faster
# Whether to check for updates every time you run the script [True/False]
# This is useful for keeping your script up to date
# because there are always new features being added 😄
check_for_updates = {self.check_for_updates}
# Can be [allanime, animepahe, hianime, nyaa, yugen]
# Allanime is the most reliable
# Animepahe provides different links to streams of different quality, so a quality can be selected reliably with the --quality option
# Hianime usually provides subs in different languages, and its servers are generally faster
# NOTE: Currently, they are encrypting the video links
# though I'm working on it
# However, you can still get the links to the subs
# with ```fastanime grab``` command
# Yugen meh
# Nyaa for those who prefer torrents, though not reliable due to auto-selection of results
# as most of the data in Nyaa is not structured
# though it works relatively well for new anime
# especially with SubsPlease and HorribleSubs
# Oh, and you should have the webtorrent CLI installed to use this
provider = {self.provider}
# Display language [english, romaji]
# this is passed to anilist directly and is used to set the language which the anime titles will be in
# when using the anilist interface
# This is passed to Anilist directly and is used to set the language for anime titles
# when using the Anilist interface
preferred_language = {self.preferred_language}
# Download directory
# where you will find your videos after downloading them with 'fastanime download' command
# Where you will find your videos after downloading them with 'fastanime download' command
downloads_dir = {self.downloads_dir}
# whether to show a preview window when using fzf or rofi [True/False]
# the preview requires you have a commandline image viewer as documented in the README
# this is only when usinf fzf
# if you dont care about image previews it doesnt matter
# though its awesome
# try it and you will see
# Whether to show a preview window when using fzf or rofi [True/False]
# The preview requires you to have a command-line image viewer as documented in the README
# This is only when using fzf or rofi
# If you don't care about image and text previews, it doesn't matter
# though it's awesome
# Try it, and you will see
preview = {self.preview}
# whether to show images in the preview [true/false]
# Whether to show images in the preview [True/False]
# Windows users: just switch to Linux 😄
# because even if you enable it
# it won't look pretty
# Just be satisfied with the text previews
# So forget it exists 🤣
image_previews = {self.image_previews}
# the time to seek when using ffmpegthumbnailer [-1 to 100]
# -1 means random and is the default
# ffmpegthumbnailer is used to generate previews and you can select at what time in the video to extract an image
# random makes things quite exciting cause you never no at what time it will extract the image from
# ffmpegthumbnailer is used to generate previews,
# allowing you to select the time in the video to extract an image.
# Random makes things quite exciting because you never know at what time it will extract the image.
# Used by the `fastanime downloads` command.
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
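As an aside, a minimal sketch of how such a seek value could be handed to the ffmpegthumbnailer CLI (the file names are illustrative and FastAnime's actual invocation may differ):

```python
import random
import subprocess

seek_time = -1  # -1 means pick a random position, as described above
t = random.randint(0, 100) if seek_time == -1 else seek_time
# -i/-o/-t are ffmpegthumbnailer's input/output/seek flags; percentage values like "37%" are accepted
subprocess.run(
    ["ffmpegthumbnailer", "-i", "episode.mkv", "-o", "thumb.png", "-t", f"{t}%"]
)
```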
# Specify the order of menu items as a comma-separated list.
# Only include the base names of menu options (e.g., "Trending", "Recent").
# The default value is 'Trending,Recent,Watching,Paused,Dropped,Planned,Completed,Rewatching,Recently Updated Anime,Search,Watch History,Random Anime,Most Popular Anime,Most Favourite Anime,Most Scored Anime,Upcoming Anime,Edit Config,Exit'.
# Leave blank to use the default menu order.
# You can also omit some options by not including them in the list.
menu_order = {self.menu_order}
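For illustration, a trimmed custom order (not a shipped default) using names from the list above might look like:

```python
# only these items would appear, in this order
menu_order = "Watching,Trending,Search,Exit"
```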
# whether to use fzf as the interface for the anilist command and others. [True/False]
use_fzf = {self.use_fzf}
# whether to use rofi for the ui [True/False]
# it's more useful if you want to create a desktop entry
# which can be setup with 'fastanime config --desktop-entry'
# though if you want it to be your sole interface even when fastanime is run directly from the terminal
# whether to use rofi for the UI [True/False]
# It's more useful if you want to create a desktop entry,
# which can be set up with 'fastanime config --desktop-entry'.
# If you want it to be your sole interface even when fastanime is run directly from the terminal, enable this.
use_rofi = {self.use_rofi}
# rofi themes to use
# the values of this option is the path to the rofi config files to use
# i choose to split it into three since it gives the best look and feel
# you can refer to the rofi demo on github to see for your self
# by the way i recommend getting the rofi themes from this project;
# rofi themes to use <path>
# The value of this option is the path to the rofi config files to use.
# I chose to split it into 4 since it gives the best look and feel.
# You can refer to the rofi demo on GitHub to see for yourself.
# I need help designing the default rofi themes.
# If you fancy yourself a rofi ricer, please contribute to improving
# the default theme.
rofi_theme = {self.rofi_theme}
rofi_theme = {self.rofi_theme_preview}
rofi_theme_preview = {self.rofi_theme_preview}
rofi_theme_input = {self.rofi_theme_input}
rofi_theme_confirm = {self.rofi_theme_confirm}
# the duration in minutes a notification will stay in the screen
# used by notifier command
# the duration in minutes a notification will stay on the screen.
# Used by the notifier command.
notification_duration = {self.notification_duration}
# used when the provider gives subs of different languages
# currently its the case for:
# hianime
# the values for this option are the short names for countries
# regex is used to determine what you selected
# used when the provider offers subtitles in different languages.
# Currently, this is the case for:
# hianime.
# The values for this option are the short names for languages.
# Regex is used to determine what you selected.
sub_lang = {self.sub_lang}
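A minimal sketch of the regex-based matching those comments describe, using an illustrative track list rather than hianime's actual schema:

```python
import re

sub_lang = "eng"
tracks = [{"label": "English", "url": "..."}, {"label": "Spanish", "url": "..."}]
# case-insensitive substring match, e.g. "eng" matches "English"
selected = [t for t in tracks if re.search(sub_lang, t["label"], re.IGNORECASE)]
print(selected)
```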
# what is your default media list tracking [track/disabled/prompt]
# only affects your anilist anime list
# track - means your progress will always be reflected in your anilist anime list
# disabled - means progress tracking will no longer be reflected in your anime list
# prompt - means for every anime you will be prompted whether you want your progress to be tracked or not
# This only affects your anilist anime list.
# track - means your progress will always be reflected in your anilist anime list.
# disabled - means progress tracking will no longer be reflected in your anime list.
# prompt - means you will be prompted for each anime whether you want your progress to be tracked or not.
default_media_list_tracking = {self.default_media_list_tracking}
# whether media list tracking should only be updated when the next episode is greater than the previous
# this affects only your anilist anime list
# whether media list tracking should only be updated when the next episode is greater than the previous.
# This only affects your anilist anime list.
force_forward_tracking = {self.force_forward_tracking}
# whether to cache requests [true/false]
# this makes the experience better and more faster
# as data need not always be fetched from web server
# and instead can be gotten locally
# from the cached_requests_db
# This improves the experience by making it faster,
# as data doesn't always need to be fetched from the web server
# and can instead be retrieved locally from the cached_requests_db.
cache_requests = {self.cache_requests}
# whether to use a persistent store (basically a sqlitedb) for storing some data the provider requires
# to enable a seamless experience [true/false]
# this option exists primarily because i think it may help in the optimization
# of fastanime as a library in a website project
# for now i don't recommend changing it
# leave it as is
# the max lifetime for a cached request <days:hours:minutes>
# Defaults to 3 days = 03:00:00.
# This is the time after which a cached request will be deleted (technically).
max_cache_lifetime = {self._max_cache_lifetime}
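A hedged sketch of parsing the `<days:hours:minutes>` format described above (FastAnime's own parser may differ):

```python
from datetime import timedelta

def parse_cache_lifetime(value: str) -> timedelta:
    days, hours, minutes = (int(part) for part in value.split(":"))
    return timedelta(days=days, hours=hours, minutes=minutes)

print(parse_cache_lifetime("03:00:00"))  # 3 days, 0:00:00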
# whether to use a persistent store (basically an SQLite DB) for storing some data the provider requires
# to enable a seamless experience. [true/false]
# This option exists primarily to optimize FastAnime as a library in a website project.
# For now, it's not recommended to change it. Leave it as is.
use_persistent_provider_store = {self.use_persistent_provider_store}
# no of recent anime to keep [0-50]
# 0 will disable recent anime tracking
# number of recent anime to keep [0-50].
# 0 will disable recent anime tracking.
recent = {self.recent}
# enable or disable Discord activity updater.
# If you want to enable it, please follow the link below to register the app with your Discord account:
# https://discord.com/oauth2/authorize?client_id=1292070065583165512
discord = {self.discord}
[stream]
# the quality of the stream [1080,720,480,360]
@@ -457,9 +460,12 @@ continue_from_history = {self.continue_from_history}
# which history to use [local/remote]
# local history means it will just use the watch history stored locally in your device
# the file that stores it is called watch_history.json and is stored next to your config file
# remote means it ignores the last episode stored locally and instead uses the one in your anilist anime list
# this config option is useful if you want to overwrite your local history or import history covered from another device or platform
# the file that stores it is called watch_history.json
# and is stored next to your config file
# remote means it ignores the last episode stored locally
# and instead uses the one in your anilist anime list
# this config option is useful if you want to overwrite your local history
# or import history from another device or platform
# since remote history will take precedence over what's available locally
preferred_history = {self.preferred_history}
@@ -469,7 +475,8 @@ translation_type = {self.translation_type}
# what server to use for a particular provider
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
# animepahe: [kwik]
# hianime: [HD1, HD2, StreamSB, StreamTape]
# hianime: [HD1, HD2, StreamSB, StreamTape] (only HD2 for now)
# yugen: [gogoanime]
# 'top' can also be used as a value for this option
# 'top' will cause fastanime to auto select the first server it sees
# this saves on resources and is faster since not all servers are being fetched
@@ -486,15 +493,21 @@ auto_next = {self.auto_next}
# this is because the providers sometimes use non-standard names
# that are their own preference rather than the official names
# but 99% of the time it will be accurate
# if this happens just turn of auto_select in the menus or from the commandline and manually select the correct anime title
# and then please open an issue at <> highlighting the normalized title and the title given by the provider for the anime you wished to watch
# or even better edit this file <> and open a pull request
# if this happens just turn off auto_select in the menus or from the commandline
# and manually select the correct anime title
# edit this file <https://github.com/Benexl/FastAnime/blob/master/fastanime/Utility/data.py>
# and add to the provider's dictionary
# the provider title (key) and its corresponding anilist name (value)
# and then please open a PR
# issues on the same will be ignored and then closed 😆
auto_select = {self.auto_select}
# whether to skip the opening and ending theme songs [True/False]
# NOTE: requires ani-skip to be in path
# for python-mpv users, I'm planning to create this functionality in Python without the use of an external script
# so it's disabled for now
# and anyway, Dan Da Dan
# taught us the importance of letting it flow 🙃
skip = {self.skip}
# at what percentage progress should the episode be considered as completed [0-100]
@@ -506,7 +519,8 @@ episode_complete_at = {self.episode_complete_at}
# whether to use python-mpv [True/False]
# to enable superior control over the player
# adding more options to it
# Enable this one and you will be wonder why you did not discover fastanime sooner
# Enable this option and you will ask yourself
# why you did not discover fastanime sooner 🙃
# Since you basically don't have to close the player window
# to go to the next or previous episode, switch servers,
# change translation type or change to a given episode x
@@ -518,13 +532,13 @@ episode_complete_at = {self.episode_complete_at}
# personally it took me quite some time to figure it out
# this is because of how windows handles shared libraries
# so just ask when you find yourself stuck
# or just switch to arch linux
# or just switch to nixos 😄
use_python_mpv = {self.use_python_mpv}
# whether to use popen to get the timestamps for continue_from_history
# implemented because popen does not work for some reason in nixos
# if you are on nixos and you have a solution to this problem please share
# implemented because popen does not work for some reason in nixos and apparently on mac as well
# if you are on nixos or mac and you have a solution to this problem please share
# i will be glad to hear it 😄
# So for now ignore this option
# and anyway the new method of getting timestamps is better
@@ -551,15 +565,16 @@ format = {self.format}
# since you will miss out on some features if you use the others
player = {self.player}
# NOTE:
# if you have any trouble setting up your config
# please don't be afraid to ask in our discord
# plus if there are any errors, improvements or suggestions please tell us in the discord
# or help us by contributing
# we appreciate all the help we can get
# since we may not always have the time to immediately implement the changes
[anilist]
per_page = {self.per_page}
#
# HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
# https://github.com/Benexl/FastAnime
#
# Also join the discord server
# where the anime tech community lives :)
# https://discord.gg/C4rhMA4mmK
#
"""
return current_config_state
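The generated text above looks like plain INI (sections such as [stream] and [anilist], # comments, key = value pairs), so it can be inspected with the standard library; the path below is only illustrative:

```python
import os
from configparser import ConfigParser

cfg = ConfigParser()
cfg.read(os.path.expanduser("~/.config/fastanime/config.ini"))  # illustrative path
print(cfg.get("anilist", "per_page", fallback="15"))
```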

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
import os
import random
import threading
from typing import TYPE_CHECKING
from click import clear
@@ -14,6 +15,7 @@ from yt_dlp.utils import sanitize_filename
from ...anilist import AniList
from ...constants import USER_CONFIG_PATH
from ...libs.discord import discord
from ...libs.fzf import fzf
from ...libs.rofi import Rofi
from ...Utility.data import anime_normalizer
@@ -45,12 +47,17 @@ def calculate_percentage_completion(start_time, end_time):
[TODO:return]
"""
start = start_time.split(":")
end = end_time.split(":")
start_secs = int(start[0]) * 3600 + int(start[1]) * 60 + int(start[2])
end_secs = int(end[0]) * 3600 + int(end[1]) * 60 + int(end[2])
return start_secs / end_secs * 100
try:
start = start_time.split(":")
end = end_time.split(":")
start_secs = int(start[0]) * 3600 + int(start[1]) * 60 + int(start[2])
end_secs = int(end[0]) * 3600 + int(end[1]) * 60 + int(end[2])
return start_secs / end_secs * 100
except Exception:
return 0
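With the hardened version above, a partially watched episode yields a plain percentage, while malformed input now falls back to 0 instead of raising:

```python
print(calculate_percentage_completion("00:05:30", "00:22:00"))  # 25.0
print(calculate_percentage_completion("0", "00:22:00"))         # 0 (caught by the except)
```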
def discord_updater(show, episode, switch):
discord.discord_connect(show, episode, switch)
def media_player_controls(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
@@ -507,6 +514,12 @@ def provider_anime_episode_servers_menu(
"[bold magenta] Episode: [/]",
current_episode_number,
)
# update discord activity for user
switch = threading.Event()
if config.discord:
discord_proc = threading.Thread(target=discord_updater, args=(provider_anime_title, current_episode_number, switch))
discord_proc.start()
# try to get the timestamp you left off from if available
start_time = config.watch_history.get(str(anime_id_anilist), {}).get(
"episode_stopped_at", "0"
@@ -542,8 +555,8 @@ def provider_anime_episode_servers_menu(
if config.recent:
config.update_recent(
[
*config.user_data["recent_anime"],
fastanime_runtime_state.selected_anime_anilist,
*config.user_data["recent_anime"],
]
)
print("Updating recent anime...")
@@ -589,6 +602,10 @@ def provider_anime_episode_servers_menu(
)
print("Finished at: ", stop_time)
# stop discord activity updater
if config.discord:
switch.set()
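The discord updater started above and stopped with switch.set() follows a standard start/stop pattern: a worker thread runs until a threading.Event is set. A self-contained sketch of that pattern (not the actual discord_connect implementation):

```python
import threading
import time

def presence_worker(title: str, episode: str, stop: threading.Event):
    # stand-in for discord.discord_connect: keep updating until asked to stop
    while not stop.is_set():
        print(f"presence: watching {title}, episode {episode}")
        stop.wait(timeout=5)  # wakes early when stop is set

stop = threading.Event()
worker = threading.Thread(target=presence_worker, args=("Some Anime", "7", stop))
worker.start()
time.sleep(1)
stop.set()   # mirrors switch.set() after playback ends
worker.join()
```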
# update_watch_history
# this will try to update the episode to be the next episode if delta has reached a specific threshold
# this update will only apply locally
@@ -701,7 +718,7 @@ def provider_anime_episodes_menu(
total_time = user_watch_history.get(str(anime_id_anilist), {}).get(
"episode_total_length", "0"
)
if stop_time != "0" or total_time != "0":
if stop_time != "0" and total_time != "0":
percentage_completion_of_episode = calculate_percentage_completion(
stop_time, total_time
)
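The switch from `or` to `and` above matters because both values must be present for the percentage to be meaningful; a tiny illustration:

```python
stop_time, total_time = "00:10:00", "0"
print(stop_time != "0" or total_time != "0")   # True  -> old check, the calculation runs anyway
print(stop_time != "0" and total_time != "0")  # False -> new check, the calculation is skipped
```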
@@ -1340,6 +1357,58 @@ def media_actions_menu(
set_prefered_progress_tracking(config, fastanime_runtime_state, update=True)
media_actions_menu(config, fastanime_runtime_state)
def _relations(config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"):
"""Helper function to get anime recommendations
Args:
config: [TODO:description]
fastanime_runtime_state: [TODO:description]
"""
relations = AniList.get_related_anime_for(
fastanime_runtime_state.selected_anime_id_anilist
)
if not relations[0]:
print("No recommendations found", relations[1])
input("Enter to continue...")
media_actions_menu(config, fastanime_runtime_state)
return
relations = relations[1]["data"]["Page"]["relations"] # pyright:ignore
fastanime_runtime_state.anilist_results_data = {
"data": {"Page": {"media": relations["nodes"]}} # pyright:ignore
}
anilist_results_menu(config, fastanime_runtime_state)
def _recommendations(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
"""Helper function to get anime recommendations
Args:
config: [TODO:description]
fastanime_runtime_state: [TODO:description]
"""
recommendations = AniList.get_recommended_anime_for(
fastanime_runtime_state.selected_anime_id_anilist
)
if not recommendations[0]:
print("No recommendations found", recommendations[1])
input("Enter to continue...")
media_actions_menu(config, fastanime_runtime_state)
return
fastanime_runtime_state.anilist_results_data = {
"data": {
"Page": {
"media": [
media["media"]
for media in recommendations[1]["data"]["Page"][
"recommendations" # pyright:ignore
]
]
}
}
}
anilist_results_menu(config, fastanime_runtime_state)
icons = config.icons
options = {
f"{'📽️ ' if icons else ''}Stream ({progress}/{episodes_total})": _stream_anime,
@@ -1349,6 +1418,8 @@ def media_actions_menu(
f"{'' if icons else ''}Progress Tracking": _set_progress_tracking,
f"{'📥 ' if icons else ''}Add to List": _add_to_list,
f"{'📤 ' if icons else ''}Remove from List": _remove_from_list,
f"{'📖 ' if icons else ''}Recommendations": _recommendations,
f"{'📖 ' if icons else ''}Relations": _relations,
f"{'📖 ' if icons else ''}View Info": _view_info,
f"{'🎧 ' if icons else ''}Change Translation Type": _change_translation_type,
f"{'💽 ' if icons else ''}Change Provider": _change_provider,
@@ -1421,7 +1492,7 @@ def anilist_results_menu(
anime_data[title] = anime
# prompt for the anime of choice
choices = [*anime_data.keys(), "Back"]
choices = [*anime_data.keys(), "Next Page", "Previous Page", "Back"]
if config.use_fzf:
if config.preview:
from .utils import get_fzf_anime_preview
@@ -1460,6 +1531,43 @@ def anilist_results_menu(
if selected_anime_title == "Back":
fastanime_main_menu(config, fastanime_runtime_state)
return
if selected_anime_title == "Next Page":
fastanime_runtime_state.current_page = page = (
fastanime_runtime_state.current_page + 1
)
success, data = fastanime_runtime_state.current_data_loader(
config=config, page=page
)
if success:
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:
print("Failed to get next page")
print(data)
input("Enter to continue...")
anilist_results_menu(config, fastanime_runtime_state)
return
if selected_anime_title == "Previous Page":
fastanime_runtime_state.current_page = page = (
(fastanime_runtime_state.current_page - 1)
if fastanime_runtime_state.current_page > 1
else 1
)
success, data = fastanime_runtime_state.current_data_loader(
config=config, page=page
)
if success:
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:
print("Failed to get previous page")
print(data)
input("Enter to continue...")
anilist_results_menu(config, fastanime_runtime_state)
return
selected_anime: "AnilistBaseMediaDataSchema" = anime_data[selected_anime_title]
fastanime_runtime_state.selected_anime_anilist = selected_anime
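The Next Page / Previous Page handling above leans on a simple contract: every main-menu loader accepts config and a page number and returns a (success, data) tuple, so paging just re-invokes the stored loader with a new page. A minimal sketch with illustrative names:

```python
def example_loader(config=None, page=1):
    # stand-in for an AniList query; returns (success, data) like the real loaders
    return True, {"data": {"Page": {"media": [f"result {page}-{i}" for i in range(3)]}}}

success, data = example_loader(page=2)
if success:
    print(data["data"]["Page"]["media"])  # ['result 2-0', 'result 2-1', 'result 2-2']
```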
@@ -1474,8 +1582,11 @@ def anilist_results_menu(
#
# ---- FASTANIME MAIN MENU ----
#
def handle_animelist(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState", list_type: str
def _handle_animelist(
config: "Config",
fastanime_runtime_state: "FastAnimeRuntimeState",
list_type: str,
page=1,
):
"""A helper function that handles user media lists
@@ -1508,13 +1619,13 @@ def handle_animelist(
status = "DROPPED"
case "Paused":
status = "PAUSED"
case "Repeating":
case "Rewatching":
status = "REPEATING"
case _:
return
# get the media list
anime_list = AniList.get_anime_list(status)
anime_list = AniList.get_anime_list(status, page=page)
# handle null
if not anime_list:
print("Sth went wrong", anime_list)
@@ -1545,6 +1656,56 @@ def handle_animelist(
return anime_list
def _anilist_search(config: "Config", page=1):
"""A function that enables seaching of an anime
Returns:
[TODO:return]
"""
# TODO: Add filters and other search features
if config.use_rofi:
search_term = str(Rofi.ask("Search for"))
else:
search_term = Prompt.ask("[cyan]Search for[/]")
return AniList.search(query=search_term, page=page)
def _anilist_random(config: "Config", page=1):
"""A function that generates random anilist ids enabling random discovery of anime
Returns:
[TODO:return]
"""
random_anime = range(1, 15000)
random_anime = random.sample(random_anime, k=50)
return AniList.search(id_in=list(random_anime))
def _watch_history(config: "Config", page=1):
"""Function that lets you see all the anime that has locally been saved to your watch history
Returns:
[TODO:return]
"""
watch_history = list(map(int, config.watch_history.keys()))
return AniList.search(id_in=watch_history, sort="TRENDING_DESC", page=page)
def _recent(config: "Config", page=1):
return (
True,
{"data": {"Page": {"media": config.user_data["recent_anime"]}}},
)
# WARNING: Will probably be deprecated
def _anime_list(config: "Config", page=1):
anime_list = config.anime_list
return AniList.search(id_in=anime_list, page=page)
def fastanime_main_menu(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
@@ -1555,49 +1716,7 @@ def fastanime_main_menu(
fastanime_runtime_state: A query dict used to store data during navigation of the ui # initially this was very messy
"""
def _anilist_search():
"""A function that enables seaching of an anime
Returns:
[TODO:return]
"""
# TODO: Add filters and other search features
if config.use_rofi:
search_term = str(Rofi.ask("Search for"))
else:
search_term = Prompt.ask("[cyan]Search for[/]")
return AniList.search(query=search_term)
def _anilist_random():
"""A function that generates random anilist ids enabling random discovery of anime
Returns:
[TODO:return]
"""
random_anime = range(1, 15000)
random_anime = random.sample(random_anime, k=50)
return AniList.search(id_in=list(random_anime))
def _watch_history():
"""Function that lets you see all the anime that has locally been saved to your watch history
Returns:
[TODO:return]
"""
watch_history = list(map(int, config.watch_history.keys()))
return AniList.search(id_in=watch_history, sort="TRENDING_DESC")
def _recent():
return (True, {"data": {"Page": {"media": config.user_data["recent_anime"]}}})
# WARNING: Will probably be depracated
def _anime_list():
anime_list = config.anime_list
return AniList.search(id_in=anime_list)
def _edit_config():
def _edit_config(*args, **kwargs):
"""Helper function to edit your config when the ui is still running"""
from click import edit
@@ -1622,23 +1741,23 @@ def fastanime_main_menu(
options = {
f"{'🔥 ' if icons else ''}Trending": AniList.get_trending,
f"{'🎞️ ' if icons else ''}Recent": _recent,
f"{'📺 ' if icons else ''}Watching": lambda media_list_type="Watching": handle_animelist(
config, fastanime_runtime_state, media_list_type
f"{'📺 ' if icons else ''}Watching": lambda config, media_list_type="Watching", page=1: _handle_animelist(
config, fastanime_runtime_state, media_list_type, page=page
),
f"{'' if icons else ''}Paused": lambda media_list_type="Paused": handle_animelist(
config, fastanime_runtime_state, media_list_type
f"{'' if icons else ''}Paused": lambda config, media_list_type="Paused", page=1: _handle_animelist(
config, fastanime_runtime_state, media_list_type, page=page
),
f"{'🚮 ' if icons else ''}Dropped": lambda media_list_type="Dropped": handle_animelist(
config, fastanime_runtime_state, media_list_type
f"{'🚮 ' if icons else ''}Dropped": lambda config, media_list_type="Dropped", page=1: _handle_animelist(
config, fastanime_runtime_state, media_list_type, page=page
),
f"{'📑 ' if icons else ''}Planned": lambda media_list_type="Planned": handle_animelist(
config, fastanime_runtime_state, media_list_type
f"{'📑 ' if icons else ''}Planned": lambda config, media_list_type="Planned", page=1: _handle_animelist(
config, fastanime_runtime_state, media_list_type, page=page
),
f"{'' if icons else ''}Completed": lambda media_list_type="Completed": handle_animelist(
config, fastanime_runtime_state, media_list_type
f"{'' if icons else ''}Completed": lambda config, media_list_type="Completed", page=1: _handle_animelist(
config, fastanime_runtime_state, media_list_type, page=page
),
f"{'🔁 ' if icons else ''}Rewatching": lambda media_list_type="Repeating": handle_animelist(
config, fastanime_runtime_state, media_list_type
f"{'🔁 ' if icons else ''}Rewatching": lambda config, media_list_type="Rewatching", page=1: _handle_animelist(
config, fastanime_runtime_state, media_list_type, page=page
),
f"{'🔔 ' if icons else ''}Recently Updated Anime": AniList.get_most_recently_updated,
f"{'🔎 ' if icons else ''}Search": _anilist_search,
@@ -1652,6 +1771,18 @@ def fastanime_main_menu(
f"{'📝 ' if icons else ''}Edit Config": _edit_config,
f"{'' if icons else ''}Exit": exit_app,
}
# Load main menu order if set in config file
if config.menu_order:
menu_order_list = config.menu_order.split(",")
lookup = {key.split(" ", 1)[-1]: key for key in options}
ordered_dict = {
lookup[key]: options[lookup[key]]
for key in menu_order_list
if key in lookup
}
options = ordered_dict
# prompt user to select an action
choices = list(options.keys())
if config.use_fzf:
@@ -1667,7 +1798,9 @@ def fastanime_main_menu(
choices,
"Select Action",
)
anilist_data = options[action]()
fastanime_runtime_state.current_data_loader = options[action]
fastanime_runtime_state.current_page = 1
anilist_data = options[action](config=config)
# anilist data is a (bool,data)
# the bool indicated success
if anilist_data[0]:

View File

@@ -46,8 +46,12 @@ def aniskip(mal_id: int, episode: str):
# NOTE: May change this to a temp dir but there were issues so later
WORKING_DIR = APP_CACHE_DIR # tempfile.gettempdir()
HEADER_COLOR = 215, 0, 95
SEPARATOR_COLOR = 208, 208, 208
_HEADER_COLOR = os.environ.get("FASTANIME_PREVIEW_HEADER_COLOR", "215,0,95").split(",")
HEADER_COLOR = _HEADER_COLOR[0], _HEADER_COLOR[1], _HEADER_COLOR[2]
_SEPARATOR_COLOR = os.environ.get(
"FASTANIME_PREVIEW_SEPARATOR_COLOR", "208,208,208"
).split(",")
SEPARATOR_COLOR = _SEPARATOR_COLOR[0], _SEPARATOR_COLOR[1], _SEPARATOR_COLOR[2]
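The two environment variables introduced above take "R,G,B" strings; a hedged usage sketch (the colour value is illustrative, and the escape sequence is the standard truecolor foreground, which works with string components now that get_true_fg no longer requires ints later in this diff):

```python
import os

os.environ.setdefault("FASTANIME_PREVIEW_HEADER_COLOR", "97,175,239")  # illustrative colour
r, g, b = os.environ["FASTANIME_PREVIEW_HEADER_COLOR"].split(",")
print(f"\x1b[38;2;{r};{g};{b}mheader sample\x1b[0m")
```

In practice the variables would be exported before launching FastAnime, since the module reads them at import time.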
SINGLE_QUOTE = "'"
IMAGES_CACHE_DIR = os.path.join(WORKING_DIR, "images")
if not os.path.exists(IMAGES_CACHE_DIR):
@@ -65,7 +69,7 @@ def save_image_from_url(url: str, file_name: str):
file_name: filename to use
"""
image = requests.get(url)
with open(os.path.join(IMAGES_CACHE_DIR,f"{file_name}.png"), "wb") as f:
with open(os.path.join(IMAGES_CACHE_DIR, f"{file_name}.png"), "wb") as f:
f.write(image.content)
@@ -76,7 +80,14 @@ def save_info_from_str(info: str, file_name: str):
info: the information anilist has on the anime
file_name: the filename to use
"""
with open(os.path.join(ANIME_INFO_CACHE_DIR,file_name,), "w",encoding="utf-8") as f:
with open(
os.path.join(
ANIME_INFO_CACHE_DIR,
file_name,
),
"w",
encoding="utf-8",
) as f:
f.write(info)
@@ -92,7 +103,6 @@ def write_search_results(
titles: sanitized anime titles
workers: number of threads to use; defaults to as many as possible
"""
# NOTE: Will probably make this a configuraable option
# use concurency to download and write as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
future_to_task = {}
@@ -274,7 +284,7 @@ def get_fzf_episode_preview(
anilist_results: the anilist results from an anilist action
"""
HEADER_COLOR = 215, 0, 95
# HEADER_COLOR = 215, 0, 95
import re
def _worker():
@@ -282,18 +292,16 @@ def get_fzf_episode_preview(
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
# load the jobs
future_to_url = {}
for episode in episodes:
episode_title = ""
image_url = ""
for episode_detail in anilist_result["streamingEpisodes"]:
if re.match(f"Episode {episode} ", episode_detail["title"]):
if re.match(f".*Episode {episode} .*", episode_detail["title"]):
episode_title = episode_detail["title"]
image_url = episode_detail["thumbnail"]
if episode_title and image_url:
# actual link to download image from
if not image_url:
continue
future_to_url[
executor.submit(save_image_from_url, image_url, episode)
] = image_url
@@ -304,13 +312,25 @@ def get_fzf_episode_preview(
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo "{get_true_fg('Anime Title:',*HEADER_COLOR)} {(anilist_result['title']['romaji'] or anilist_result['title']['english']).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Episode Title:',*HEADER_COLOR)} {str(episode_title).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Anime Title(eng):',*HEADER_COLOR)} {('' or anilist_result['title']['english']).replace('"',SINGLE_QUOTE)}"
echo "{get_true_fg('Anime Title(jp):',*HEADER_COLOR)} {(anilist_result['title']['romaji'] or '').replace('"',SINGLE_QUOTE)}"
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
echo "{str(episode_title).replace('"',SINGLE_QUOTE)}"
ll=2
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
echo -n -e "{get_true_fg("",*SEPARATOR_COLOR,bold=False)}"
((ll++))
done
"""
)
future_to_url[
executor.submit(save_info_from_str, template, episode)
] = episode_title
executor.submit(save_info_from_str, template, str(episode))
] = str(episode)
# execute the jobs
for future in concurrent.futures.as_completed(future_to_url):
@@ -360,14 +380,15 @@ def get_fzf_episode_preview(
)
else:
preview = """
title={}
%s
show_image_previews="%s"
if [ $show_image_previews = "true" ];then
if [ -s %s/{} ]; then fzf-preview %s/{}
if [ -s %s/${title}.png ]; then fzf-preview %s/${title}.png
else echo Loading...
fi
fi
if [ -s %s/{} ]; then source %s/{}
if [ -f %s/${title} ]; then source %s/${title}
else echo Loading...
fi
""" % (

View File

@@ -1,8 +1,8 @@
import re
import logging
import os
import re
import shutil
import subprocess
import logging
import time
from ...constants import S_PLATFORM
@@ -59,7 +59,10 @@ def stream_video(MPV, url, mpv_args, custom_args):
process.wait()
else:
proc = subprocess.run(
[MPV, url, *mpv_args, *custom_args], capture_output=True, text=True
[MPV, url, *mpv_args, *custom_args],
capture_output=True,
text=True,
encoding="utf-8",
)
if proc.stdout:
for line in reversed(proc.stdout.split("\n")):
@@ -97,7 +100,7 @@ def run_mpv(
time.sleep(120)
return "0", "0"
cmd = [WEBTORRENT_CLI, link, f"--{player}"]
subprocess.run(cmd)
subprocess.run(cmd, encoding="utf-8")
return "0", "0"
if player == "vlc":
VLC = shutil.which("vlc")
@@ -148,7 +151,7 @@ def run_mpv(
if title:
args.append("--video-title")
args.append(title)
subprocess.run(args)
subprocess.run(args, encoding="utf-8")
return "0", "0"
else:
# Determine if mpv is available

View File

@@ -24,4 +24,10 @@ def print_img(url: str):
print("Error fetching image")
return
img_bytes = res.content
"""
Change made in call to chafa. The chafa devs dropped the ability
to pull from URLs. Keeping the old line here just in case.
subprocess.run([EXECUTABLE, url, "--size=15x15"], input=img_bytes)
"""
subprocess.run([EXECUTABLE, "--size=15x15"], input=img_bytes)

View File

@@ -1,7 +1,7 @@
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Any
from typing import Any, Callable
from ...libs.anilist.types import AnilistBaseMediaDataSchema
from ...libs.anime_provider.types import Anime, EpisodeStream, SearchResult, Server
@@ -26,9 +26,11 @@ class FastAnimeRuntimeState(object):
selected_anime_title_anilist: str
# current_anilist_data: "AnilistDataSchema | AnilistMediaList"
anilist_results_data: "Any"
current_page: int
current_data_loader: "Callable"
def exit_app(exit_code=0, *args):
def exit_app(exit_code=0, *args, **kwargs):
import sys
from rich.console import Console

View File

@@ -108,7 +108,7 @@ def format_bytes_to_human(num_of_bytes: float, suffix: str = "B"):
return f"{num_of_bytes:.1f}Yi{suffix}"
def get_true_fg(string: str, r: int, g: int, b: int, bold: bool = True) -> str:
def get_true_fg(string: str, r, g, b, bold: bool = True) -> str:
"""Custom helper function that enables colored text in the terminal
Args:

View File

@@ -3,6 +3,7 @@ This is the core module availing all the abstractions of the anilist api
"""
import logging
import os
from typing import TYPE_CHECKING
import requests
@@ -142,6 +143,9 @@ class AniListApi:
self,
status: "AnilistMediaListStatus",
type="ANIME",
page=1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
**kwargs,
) -> tuple[bool, "AnilistMediaLists"] | tuple[bool, None]:
"""gets an anime list from your media list given the list status
@@ -151,7 +155,13 @@ class AniListApi:
Returns:
a media list
"""
variables = {"status": status, "userId": self.user_id, "type": type}
variables = {
"status": status,
"userId": self.user_id,
"type": type,
"page": page,
"perPage": int(perPage),
}
return self._make_authenticated_request(media_list_query, variables)
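A hedged usage sketch of the now-paginated call, where `anilist` stands in for an authenticated AniListApi instance; perPage falls back to FASTANIME_PER_PAGE (read at import time) or 15 if unset, and the response shape follows the AniList GraphQL envelope:

```python
success, media_list = anilist.get_anime_list("PAUSED", page=2, perPage=25)
if success:
    print(media_list["data"]["Page"]["pageInfo"])
```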
def get_medialist_entry(
@@ -306,6 +316,7 @@ class AniListApi:
def search(
self,
max_results=50,
query: str | None = None,
sort: str | None = None,
genre_in: list[str] | None = None,
@@ -350,60 +361,98 @@ class AniListApi:
variables = {"id": id}
return self.get_data(anime_query, variables)
def get_trending(self, type="ANIME", *_, **kwargs):
def get_trending(
self,
type="ANIME",
page=1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
*_,
**kwargs,
):
"""
Gets the currently trending anime
"""
variables = {"type": type}
variables = {"type": type, "page": page, "perPage": int(perPage)}
trending = self.get_data(trending_query, variables)
return trending
def get_most_favourite(self, type="ANIME", *_, **kwargs):
def get_most_favourite(
self,
type="ANIME",
page=1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
*_,
**kwargs,
):
"""
Gets the most favoured anime on anilist
"""
variables = {"type": type}
variables = {"type": type, "page": page, "perPage": int(perPage)}
most_favourite = self.get_data(most_favourite_query, variables)
return most_favourite
def get_most_scored(self, type="ANIME", *_, **kwargs):
def get_most_scored(
self,
type="ANIME",
page=1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
*_,
**kwargs,
):
"""
Gets most scored anime on anilist
"""
variables = {"type": type}
variables = {"type": type, "page": page, "perPage": int(perPage)}
most_scored = self.get_data(most_scored_query, variables)
return most_scored
def get_most_recently_updated(self, type="ANIME", *_, **kwargs):
def get_most_recently_updated(
self,
type="ANIME",
page=1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
*_,
**kwargs,
):
"""
Gets most recently updated anime from anilist
"""
variables = {"type": type}
variables = {"type": type, "page": page, "perPage": int(perPage)}
most_recently_updated = self.get_data(most_recently_updated_query, variables)
return most_recently_updated
def get_most_popular(
self,
type="ANIME",
page=1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
**kwargs,
):
"""
Gets most popular anime on anilist
"""
variables = {"type": type}
variables = {"type": type, "page": page, "perPage": int(perPage)}
most_popular = self.get_data(most_popular_query, variables)
return most_popular
def get_upcoming_anime(self, type="ANIME", page: int = 1, *_, **kwargs):
def get_upcoming_anime(
self,
type="ANIME",
page: int = 1,
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
*_,
**kwargs,
):
"""
Gets upcoming anime from anilist
"""
variables = {"page": page, "type": type}
variables = {"page": page, "type": type, "perPage": int(perPage)}
upcoming_anime = self.get_data(upcoming_anime_query, variables)
return upcoming_anime
# NOTE: THe following methods will probably be scraped soon
def get_recommended_anime_for(self, id: int, type="ANIME", *_, **kwargs):
variables = {"type": type}
def get_recommended_anime_for(self, mediaRecommendationId, page=1, *_, **kwargs):
variables = {"mediaRecommendationId": mediaRecommendationId, "page": page}
recommended_anime = self.get_data(recommended_query, variables)
return recommended_anime
@@ -412,7 +461,7 @@ class AniListApi:
characters = self.get_data(anime_characters_query, variables)
return characters
def get_related_anime_for(self, id: int, type="ANIME", *_, **kwargs):
def get_related_anime_for(self, id: int, *_, **kwargs):
variables = {"id": id}
related_anime = self.get_data(anime_relations_query, variables)
return related_anime

View File

@@ -193,8 +193,8 @@ mutation (
"""
media_list_query = """
query ($userId: Int, $status: MediaListStatus, $type: MediaType) {
Page {
query ($userId: Int, $status: MediaListStatus, $type: MediaType, $page: Int, $perPage: Int) {
Page(perPage: $perPage, page: $page) {
pageInfo {
currentPage
total
@@ -281,6 +281,7 @@ query ($userId: Int, $status: MediaListStatus, $type: MediaType) {
optional_variables = "\
$max_results:Int,\
$page:Int,\
$sort:[MediaSort],\
$id_in:[Int],\
@@ -310,7 +311,7 @@ $on_list:Boolean\
search_query = (
"""
query($query:String,%s){
Page(perPage: 50, page: $page) {
Page(perPage: $max_results, page: $page) {
pageInfo {
total
currentPage
@@ -405,8 +406,8 @@ query($query:String,%s){
)
trending_query = """
query ($type: MediaType) {
Page(perPage: 15) {
query ($type: MediaType, $page: Int,$perPage:Int) {
Page(perPage: $perPage, page: $page) {
media(sort: TRENDING_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
@@ -470,8 +471,8 @@ query ($type: MediaType) {
# mosts
most_favourite_query = """
query ($type: MediaType) {
Page(perPage: 15) {
query ($type: MediaType, $page: Int,$perPage:Int) {
Page(perPage: $perPage, page: $page) {
media(sort: FAVOURITES_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
@@ -538,8 +539,8 @@ query ($type: MediaType) {
"""
most_scored_query = """
query ($type: MediaType) {
Page(perPage: 15) {
query ($type: MediaType, $page: Int,$perPage:Int) {
Page(perPage: $perPage, page: $page) {
media(sort: SCORE_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
@@ -602,8 +603,8 @@ query ($type: MediaType) {
"""
most_popular_query = """
query ($type: MediaType) {
Page(perPage: 15) {
query ($type: MediaType, $page: Int,$perPage:Int) {
Page(perPage: $perPage, page: $page) {
media(sort: POPULARITY_DESC, type: $type, genre_not_in: ["hentai"]) {
id
idMal
@@ -666,8 +667,8 @@ query ($type: MediaType) {
"""
most_recently_updated_query = """
query ($type: MediaType) {
Page(perPage: 15) {
query ($type: MediaType, $page: Int,$perPage:Int) {
Page(perPage: $perPage, page: $page) {
media(
sort: UPDATED_AT_DESC
type: $type
@@ -737,63 +738,64 @@ query ($type: MediaType) {
"""
recommended_query = """
query ($type: MediaType) {
Page(perPage: 15) {
media(type: $type, genre_not_in: ["hentai"]) {
recommendations(sort: RATING_DESC) {
nodes {
media {
id
idMal
title {
english
romaji
native
}
coverImage {
medium
large
}
mediaListEntry {
status
id
progress
}
description
episodes
trailer {
site
id
}
genres
synonyms
averageScore
popularity
streamingEpisodes {
title
thumbnail
}
favourites
tags {
name
}
startDate {
year
month
day
}
endDate {
year
month
day
}
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
query ($mediaRecommendationId: Int, $page: Int) {
Page(perPage: 50, page: $page) {
recommendations(mediaRecommendationId: $mediaRecommendationId) {
media {
id
idMal
mediaListEntry {
status
id
progress
}
title {
english
romaji
native
}
coverImage {
medium
large
}
mediaListEntry {
status
id
progress
}
description
episodes
trailer {
site
id
}
genres
synonyms
averageScore
popularity
streamingEpisodes {
title
thumbnail
}
favourites
tags {
name
}
startDate {
year
month
day
}
endDate {
year
month
day
}
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
@@ -801,6 +803,7 @@ query ($type: MediaType) {
}
"""
anime_characters_query = """
query ($id: Int, $type: MediaType) {
Page {
@@ -837,66 +840,59 @@ query ($id: Int, $type: MediaType) {
anime_relations_query = """
query ($id: Int, $type: MediaType) {
Page(perPage: 20) {
media(
id: $id
sort: POPULARITY_DESC
type: $type
genre_not_in: ["hentai"]
) {
relations {
nodes {
id
idMal
title {
english
romaji
native
}
coverImage {
medium
large
}
mediaListEntry {
status
id
progress
}
description
episodes
trailer {
site
id
}
genres
synonyms
averageScore
popularity
streamingEpisodes {
title
thumbnail
}
favourites
tags {
name
}
startDate {
year
month
day
}
endDate {
year
month
day
}
query ($id: Int) {
Media(id: $id) {
relations {
nodes {
id
idMal
title {
english
romaji
native
}
coverImage {
medium
large
}
mediaListEntry {
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
id
progress
}
description
episodes
trailer {
site
id
}
genres
synonyms
averageScore
popularity
streamingEpisodes {
title
thumbnail
}
favourites
tags {
name
}
startDate {
year
month
day
}
endDate {
year
month
day
}
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
@@ -922,8 +918,8 @@ query ($id: Int,$type:MediaType) {
"""
upcoming_anime_query = """
query ($page: Int, $type: MediaType) {
Page(page: $page) {
query ($page: Int, $type: MediaType,$perPage:Int) {
Page(perPage: $perPage, page: $page) {
pageInfo {
total
perPage

View File

@@ -3,10 +3,10 @@ from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHE_SERVERS
from .hianime.constants import SERVERS_AVAILABLE as HIANIME_SERVERS
anime_sources = {
"allanime": "api.AllAnimeAPI",
"animepahe": "api.AnimePaheApi",
"hianime": "api.HiAnimeApi",
"nyaa": "api.NyaaApi",
"yugen": "api.YugenApi"
"allanime": "api.AllAnime",
"animepahe": "api.AnimePahe",
"hianime": "api.HiAnime",
"nyaa": "api.Nyaa",
"yugen": "api.Yugen",
}
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHE_SERVERS, *HIANIME_SERVERS]
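The renamed classes above are referenced by dotted strings, which suggests dynamic loading; a hedged sketch of how such a mapping could be resolved (the package prefix is an assumption based on import paths elsewhere in this diff, and FastAnime's actual loader may differ):

```python
import importlib

PROVIDERS_PACKAGE = "fastanime.libs.anime_provider"  # assumed package prefix

def load_provider(name: str):
    module_path, class_name = anime_sources[name].rsplit(".", 1)  # e.g. "api", "AllAnime"
    module = importlib.import_module(f"{PROVIDERS_PACKAGE}.{name}.{module_path}")
    return getattr(module, class_name)
```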

View File

@@ -1,8 +1,3 @@
"""a module that handles the scraping of allanime
abstraction over allanime api
"""
import json
import logging
from typing import TYPE_CHECKING
@@ -10,207 +5,215 @@ from typing import TYPE_CHECKING
from ...anime_provider.base_provider import AnimeProvider
from ..decorators import debug_provider
from ..utils import give_random_quality, one_digit_symmetric_xor
from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL
from .constants import (
API_BASE_URL,
API_ENDPOINT,
API_REFERER,
DEFAULT_COUNTRY_OF_ORIGIN,
DEFAULT_NSFW,
DEFAULT_PAGE,
DEFAULT_PER_PAGE,
DEFAULT_UNKNOWN,
MP4_SERVER_JUICY_STREAM_REGEX,
)
from .gql_queries import EPISODES_GQL, SEARCH_GQL, SHOW_GQL
if TYPE_CHECKING:
from .types import AllAnimeEpisode
logger = logging.getLogger(__name__)
# TODO: create tests for the api
#
# ** Based on ani-cli **
class AllAnimeAPI(AnimeProvider):
class AllAnime(AnimeProvider):
"""
Provides a fast and effective interface to AllAnime site.
AllAnime is a provider class for fetching anime data from the AllAnime API.
Attributes:
HEADERS (dict): Default headers for API requests.
Methods:
_execute_graphql_query(query: str, variables: dict) -> dict:
Executes a GraphQL query and returns the response data.
search_for_anime(
**kwargs
) -> dict:
Searches for anime based on the provided keywords and other parameters.
get_anime(show_id: str) -> dict:
Retrieves detailed information about a specific anime by its ID.
_get_anime_episode(
show_id: str, episode, translation_type: str = "sub"
Retrieves information about a specific episode of an anime.
get_episode_streams(
) -> generator:
Retrieves streaming links for a specific episode of an anime.
"""
PROVIDER = "allanime"
api_endpoint = ALLANIME_API_ENDPOINT
HEADERS = {
"Referer": ALLANIME_REFERER,
"Referer": API_REFERER,
}
def _fetch_gql(self, query: str, variables: dict):
"""main abstraction over all requests to the allanime api
def _execute_graphql_query(self, query: str, variables: dict):
"""
Executes a GraphQL query using the provided query string and variables.
Args:
query: [TODO:description]
variables: [TODO:description]
query (str): The GraphQL query string to be executed.
variables (dict): A dictionary of variables to be used in the query.
Returns:
[TODO:return]
dict: The JSON response data from the GraphQL API.
Raises:
requests.exceptions.HTTPError: If the HTTP request returned an unsuccessful status code.
"""
response = self.session.get(
self.api_endpoint,
API_ENDPOINT,
params={
"variables": json.dumps(variables),
"query": query,
},
timeout=10,
)
if response.ok:
return response.json()["data"]
else:
logger.error("[ALLANIME-ERROR]: ", response.text)
return {}
response.raise_for_status()
return response.json()["data"]
@debug_provider(PROVIDER.upper())
@debug_provider
def search_for_anime(
self,
user_query: str,
translation_type: str = "sub",
nsfw=True,
unknown=True,
search_keywords: str,
translation_type: str,
*,
nsfw=DEFAULT_NSFW,
unknown=DEFAULT_UNKNOWN,
limit=DEFAULT_PER_PAGE,
page=DEFAULT_PAGE,
country_of_origin=DEFAULT_COUNTRY_OF_ORIGIN,
**kwargs,
):
"""search for an anime title using allanime provider
"""
Search for anime based on given keywords and filters.
Args:
nsfw ([TODO:parameter]): [TODO:description]
unknown ([TODO:parameter]): [TODO:description]
user_query: [TODO:description]
translation_type: [TODO:description]
**kwargs: [TODO:args]
search_keywords (str): The keywords to search for.
translation_type (str): The type of translation to search for (e.g., "sub" or "dub").
limit (int, optional): The maximum number of results to return. Defaults to 40.
page (int, optional): The page number to return. Defaults to 1.
country_of_origin (str, optional): The country of origin filter. Defaults to "all".
nsfw (bool, optional): Whether to include adult content in the search results. Defaults to True.
unknown (bool, optional): Whether to include unknown content in the search results. Defaults to True.
**kwargs: Additional keyword arguments.
Returns:
[TODO:return]
dict: A dictionary containing the page information and a list of search results. Each result includes:
- id (str): The ID of the anime.
- title (str): The title of the anime.
- type (str): The type of the anime.
- availableEpisodes (int): The number of available episodes.
"""
search = {"allowAdult": nsfw, "allowUnknown": unknown, "query": user_query}
limit = 40
translationtype = translation_type
countryorigin = "all"
page = 1
variables = {
"search": search,
"limit": limit,
"page": page,
"translationtype": translationtype,
"countryorigin": countryorigin,
}
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
page_info = search_results["shows"]["pageInfo"]
results = []
for result in search_results["shows"]["edges"]:
normalized_result = {
"id": result["_id"],
"title": result["name"],
"type": result["__typename"],
"availableEpisodes": result["availableEpisodes"],
}
results.append(normalized_result)
normalized_search_results = {
"pageInfo": page_info,
"results": results,
}
return normalized_search_results
@debug_provider(PROVIDER.upper())
def get_anime(self, allanime_show_id: str):
"""get an anime details given its id
Args:
allanime_show_id: [TODO:description]
Returns:
[TODO:return]
"""
variables = {"showId": allanime_show_id}
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
id: str = anime["show"]["_id"]
title: str = anime["show"]["name"]
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
self.store.set(allanime_show_id, "anime_info", {"title": title})
type = anime.get("__typename")
normalized_anime = {
"id": id,
"title": title,
"availableEpisodesDetail": availableEpisodesDetail,
"type": type,
}
return normalized_anime
@debug_provider(PROVIDER.upper())
def _get_anime_episode(
self, allanime_show_id: str, episode, translation_type: str = "sub"
) -> "AllAnimeEpisode | dict":
"""get the episode details and sources info
Args:
allanime_show_id: [TODO:description]
episode_string: [TODO:description]
translation_type: [TODO:description]
Returns:
[TODO:return]
"""
variables = {
"showId": allanime_show_id,
"translationType": translation_type,
"episodeString": episode,
}
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
return episode["episode"]
@debug_provider(PROVIDER.upper())
def get_episode_streams(
self, anime_id, episode_number: str, translation_type="sub"
):
"""get the streams of an episode
Args:
translation_type ([TODO:parameter]): [TODO:description]
anime: [TODO:description]
episode_number: [TODO:description]
Yields:
[TODO:description]
"""
anime_title = (self.store.get(anime_id, "anime_info", "") or {"title": ""})[
"title"
]
allanime_episode = self._get_anime_episode(
anime_id, episode_number, translation_type
search_results = self._execute_graphql_query(
SEARCH_GQL,
variables={
"search": {
"allowAdult": nsfw,
"allowUnknown": unknown,
"query": search_keywords,
},
"limit": limit,
"page": page,
"translationtype": translation_type,
"countryorigin": country_of_origin,
},
)
if not allanime_episode:
return []
return {
"pageInfo": search_results["shows"]["pageInfo"],
"results": [
{
"id": result["_id"],
"title": result["name"],
"type": result["__typename"],
"availableEpisodes": result["availableEpisodes"],
}
for result in search_results["shows"]["edges"]
],
}
embeds = allanime_episode["sourceUrls"]
@debug_provider
def get_anime(self, id: str, **kwargs):
"""
Fetches anime details using the provided show ID.
Args:
id (str): The ID of the anime show to fetch details for.
Returns:
dict: A dictionary containing the anime details, including:
- id (str): The unique identifier of the anime show.
- title (str): The title of the anime show.
- availableEpisodesDetail (list): A list of available episodes details.
- type (str, optional): The type of the anime show.
"""
@debug_provider(self.PROVIDER.upper())
def _get_server(embed):
# filter the working streams no need to get all since the others are mostly hsl
# TODO: should i just get all the servers and handle the hsl??
if embed.get("sourceName", "") not in (
# priorities based on death note
"Sak", # 7
"S-mp4", # 7.9
"Luf-mp4", # 7.7
"Default", # 8.5
"Yt-mp4", # 7.9
"Kir", # NA
# "Vid-mp4" # 4
# "Ok", # 3.5
# "Ss-Hls", # 5.5
# "Mp4", # 4
):
return
url = embed.get("sourceUrl")
#
if not url:
return
if url.startswith("--"):
url = url[2:]
url = one_digit_symmetric_xor(56, url)
anime = self._execute_graphql_query(SHOW_GQL, variables={"showId": id})
self.store.set(id, "anime_info", {"title": anime["show"]["name"]})
return {
"id": anime["show"]["_id"],
"title": anime["show"]["name"],
"availableEpisodesDetail": anime["show"]["availableEpisodesDetail"],
"type": anime.get("__typename"),
}
if "tools.fast4speed.rsvp" in url:
@debug_provider
def _get_anime_episode(
self, anime_id: str, episode, translation_type: str = "sub"
) -> "AllAnimeEpisode":
"""
Fetches a specific episode of an anime by its ID and episode number.
Args:
anime_id (str): The unique identifier of the anime.
episode (str): The episode number or string identifier.
translation_type (str, optional): The type of translation for the episode. Defaults to "sub".
Returns:
AllAnimeEpisode: The episode details retrieved from the GraphQL query.
"""
return self._execute_graphql_query(
EPISODES_GQL,
variables={
"showId": anime_id,
"translationType": translation_type,
"episodeString": episode,
},
)["episode"]
@debug_provider
def _get_server(
self,
embed,
anime_title: str,
allanime_episode: "AllAnimeEpisode",
episode_number,
):
"""
Retrieves the streaming server information for a given anime episode based on the provided embed data.
Args:
embed (dict): A dictionary containing the embed data, including the source URL and source name.
anime_title (str): The title of the anime.
allanime_episode (AllAnimeEpisode): An object representing the episode details.
Returns:
dict: A dictionary containing server information, headers, subtitles, episode title, and links to the stream.
Returns None if no valid URL or stream is found.
Raises:
requests.exceptions.RequestException: If there is an issue with the HTTP request.
"""
url = embed.get("sourceUrl")
#
if not url:
return
if url.startswith("--"):
url = one_digit_symmetric_xor(56, url[2:])
# FIRST CASE
match embed["sourceName"]:
case "Yt-mp4":
logger.debug("Found streams from Yt")
return {
"server": "Yt",
"episode_title": f"{anime_title}; Episode {episode_number}",
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"links": [
{
@@ -219,77 +222,280 @@ class AllAnimeAPI(AnimeProvider):
}
],
}
case "Mp4":
logger.debug("Found streams from Mp4")
response = self.session.get(
url,
fresh=1, # pyright: ignore
timeout=10,
)
response.raise_for_status()
embed_html = response.text.replace(" ", "").replace("\n", "")
vid = MP4_SERVER_JUICY_STREAM_REGEX.search(embed_html)
if not vid:
return
return {
"server": "mp4-upload",
"headers": {"Referer": "https://www.mp4upload.com/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": [{"link": vid.group(1), "quality": "1080"}],
}
case "Fm-Hls":
# TODO: requires decoding obfuscated js (filemoon)
logger.debug("Found streams from Fm-Hls")
response = self.session.get(
url,
timeout=10,
)
response.raise_for_status()
embed_html = response.text.replace(" ", "").replace("\n", "")
vid = MP4_SERVER_JUICY_STREAM_REGEX.search(embed_html)
if not vid:
return
return {
"server": "filemoon",
"headers": {"Referer": "https://www.mp4upload.com/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": [{"link": vid.group(1), "quality": "1080"}],
}
case "Ok":
# TODO: requires decoding the obfuscated js (filemoon)
response = self.session.get(
url,
timeout=10,
)
response.raise_for_status()
embed_html = response.text.replace(" ", "").replace("\n", "")
vid = MP4_SERVER_JUICY_STREAM_REGEX.search(embed_html)
logger.debug("Found streams from Ok")
return {
"server": "filemoon",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "Vid-mp4":
# TODO: requires some serious work i think : )
response = self.session.get(
url,
timeout=10,
)
response.raise_for_status()
embed_html = response.text.replace(" ", "").replace("\n", "")
logger.debug("Found streams from vid-mp4")
return {
"server": "Vid-mp4",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "Ss-Hls":
# TODO: requires some serious work i think : )
response = self.session.get(
url,
timeout=10,
)
response.raise_for_status()
embed_html = response.text.replace(" ", "").replace("\n", "")
logger.debug("Found streams from Ss-Hls")
return {
"server": "StreamSb",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
# get the stream url for an episode of the defined source names
embed_url = f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}"
resp = self.session.get(
embed_url,
timeout=10,
)
# get the stream url for an episode of the defined source names
response = self.session.get(
f"https://{API_BASE_URL}{url.replace('clock', 'clock.json')}",
timeout=10,
)
if resp.ok:
match embed["sourceName"]:
case "Luf-mp4":
logger.debug("allanime:Found streams from gogoanime")
return {
"server": "gogoanime",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Kir":
logger.debug("allanime:Found streams from wetransfer")
return {
"server": "wetransfer",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "S-mp4":
logger.debug("allanime:Found streams from sharepoint")
return {
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Sak":
logger.debug("allanime:Found streams from dropbox")
return {
"server": "dropbox",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Default":
logger.debug("allanime:Found streams from wixmp")
return {
"server": "wixmp",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
response.raise_for_status()
for embed in embeds:
if server := _get_server(embed):
# SECOND CASE
match embed["sourceName"]:
case "Luf-mp4":
logger.debug("Found streams from gogoanime")
return {
"server": "gogoanime",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "Kir":
logger.debug("Found streams from wetransfer")
return {
"server": "weTransfer",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "S-mp4":
logger.debug("Found streams from sharepoint")
return {
"server": "sharepoint",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "Sak":
logger.debug("Found streams from dropbox")
return {
"server": "dropbox",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "Default":
logger.debug("Found streams from wixmp")
return {
"server": "wixmp",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
case "Ak":
# TODO: works but needs further probing
logger.debug("Found streams from Ak")
return {
"server": "Ak",
"headers": {"Referer": f"https://{API_BASE_URL}/"},
"subtitles": [],
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
+ f"; Episode {episode_number}",
"links": give_random_quality(response.json()["links"]),
}
@debug_provider
def get_episode_streams(
self, anime_id, episode_number: str, translation_type="sub", **kwargs
):
"""
Retrieve streaming information for a specific episode of an anime.
Args:
anime_id (str): The unique identifier for the anime.
episode_number (str): The episode number to retrieve streams for.
translation_type (str, optional): The type of translation for the episode (e.g., "sub" for subtitles). Defaults to "sub".
Yields:
dict: A dictionary containing streaming information for the episode, including:
- server (str): The name of the streaming server.
- episode_title (str): The title of the episode.
- headers (dict): HTTP headers required for accessing the stream.
- subtitles (list): A list of subtitles available for the episode.
- links (list): A list of dictionaries containing streaming links and their quality.
"""
anime_title = (self.store.get(anime_id, "anime_info", "") or {"title": ""})[
"title"
]
allanime_episode = self._get_anime_episode(
anime_id, episode_number, translation_type
)
for embed in allanime_episode["sourceUrls"]:
if embed.get("sourceName", "") not in (
# priorities based on death note
"Sak", # 7
"S-mp4", # 7.9
"Luf-mp4", # 7.7
"Default", # 8.5
"Yt-mp4", # 7.9
"Kir", # NA
"Mp4", # 4
# "Ak",#
# "Vid-mp4", # 4
# "Ok", # 3.5
# "Ss-Hls", # 5.5
# "Fm-Hls",#
):
logger.debug(f"Found {embed['sourceName']} but ignoring")
continue
if server := self._get_server(
embed, anime_title, allanime_episode, episode_number
):
yield server
if __name__ == "__main__":
import subprocess
allanime = AllAnime(cache_requests="True", use_persistent_provider_store="False")
search_term = input("Enter the search term for the anime: ")
translation_type = input("Enter the translation type (sub/dub): ")
search_results = allanime.search_for_anime(
search_keywords=search_term, translation_type=translation_type
)
if not search_results["results"]:
print("No results found.")
exit()
print("Search Results:")
for idx, result in enumerate(search_results["results"], start=1):
print(f"{idx}. {result['title']} (ID: {result['id']})")
anime_choice = int(input("Enter the number of the anime you want to watch: ")) - 1
anime_id = search_results["results"][anime_choice]["id"]
anime_details = allanime.get_anime(anime_id)
print(f"Selected Anime: {anime_details['title']}")
print("Available Episodes:")
for idx, episode in enumerate(
sorted(anime_details["availableEpisodesDetail"][translation_type], key=float),
start=1,
):
print(f"{idx}. Episode {episode}")
episode_choice = (
int(input("Enter the number of the episode you want to watch: ")) - 1
)
episode_number = anime_details["availableEpisodesDetail"][translation_type][
episode_choice
]
streams = list(
allanime.get_episode_streams(anime_id, episode_number, translation_type)
)
if not streams:
print("No streams available.")
exit()
print("Available Streams:")
for idx, stream in enumerate(streams, start=1):
print(f"{idx}. Server: {stream['server']}")
server_choice = int(input("Enter the number of the server you want to use: ")) - 1
selected_stream = streams[server_choice]
stream_link = selected_stream["links"][0]["link"]
mpv_args = ["mpv", stream_link]
headers = selected_stream["headers"]
if headers:
mpv_headers = "--http-header-fields="
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_args.append(mpv_headers)
subprocess.run(mpv_args)


@@ -1,4 +1,27 @@
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]
ALLANIME_BASE = "allanime.day"
ALLANIME_REFERER = "https://allanime.to/"
ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE)
import re
SERVERS_AVAILABLE = [
"sharepoint",
"dropbox",
"gogoanime",
"weTransfer",
"wixmp",
"Yt",
"mp4-upload",
]
API_BASE_URL = "allanime.day"
API_REFERER = "https://allanime.to/"
API_ENDPOINT = f"https://api.{API_BASE_URL}/api/"
# search constants
DEFAULT_COUNTRY_OF_ORIGIN = "all"
DEFAULT_NSFW = True
DEFAULT_UNKNOWN = True
DEFAULT_PER_PAGE = 40
DEFAULT_PAGE = 1
# regex stuff
MP4_SERVER_JUICY_STREAM_REGEX = re.compile(
r"video/mp4\",src:\"(https?://.*/video\.mp4)\""
)
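For reference, a quick sketch of what the new mp4-upload regex is meant to capture; the embed snippet below is made up purely for illustration:

import re

MP4_SERVER_JUICY_STREAM_REGEX = re.compile(
    r"video/mp4\",src:\"(https?://.*/video\.mp4)\""
)
# hypothetical fragment of an mp4-upload player page
sample = 'player.src({type:"video/mp4",src:"https://s1.example.invalid/files/abc/video.mp4"});'
match = MP4_SERVER_JUICY_STREAM_REGEX.search(sample)
if match:
    print(match.group(1))  # https://s1.example.invalid/files/abc/video.mp4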


@@ -1,4 +1,4 @@
ALLANIME_SEARCH_GQL = """
SEARCH_GQL = """
query (
$search: SearchInput
$limit: Int
@@ -27,7 +27,7 @@ query (
"""
ALLANIME_EPISODES_GQL = """\
EPISODES_GQL = """\
query (
$showId: String!
$translationType: VaildTranslationTypeEnumType!
@@ -45,7 +45,7 @@ query (
}
"""
ALLANIME_SHOW_GQL = """
SHOW_GQL = """
query ($showId: String!) {
show(_id: $showId) {
_id


@@ -1,6 +1,5 @@
import logging
import random
import re
import time
from typing import TYPE_CHECKING
@@ -15,49 +14,33 @@ from ..decorators import debug_provider
from .constants import (
ANIMEPAHE_BASE,
ANIMEPAHE_ENDPOINT,
JUICY_STREAM_REGEX,
REQUEST_HEADERS,
SERVER_HEADERS,
)
from .utils import process_animepahe_embed_page
from .extractors import process_animepahe_embed_page
if TYPE_CHECKING:
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimePaheSearchResult
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
logger = logging.getLogger(__name__)
KWIK_RE = re.compile(r"Player\|(.+?)'")
class AnimePaheApi(AnimeProvider):
class AnimePahe(AnimeProvider):
search_page: "AnimePaheSearchPage"
anime: "AnimePaheAnimePage"
HEADERS = REQUEST_HEADERS
PROVIDER = "animepahe"
@debug_provider(PROVIDER.upper())
def search_for_anime(self, user_query: str, *args):
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
@debug_provider
def search_for_anime(self, search_keywords: str, translation_type, **kwargs):
response = self.session.get(
url,
ANIMEPAHE_ENDPOINT, params={"m": "search", "q": search_keywords}
)
if not response.ok:
return
response.raise_for_status()
data: "AnimePaheSearchPage" = response.json()
self.search_page = data
for animepahe_search_result in data["data"]:
self.store.set(
str(animepahe_search_result["session"]),
"search_result",
animepahe_search_result,
)
return {
"pageInfo": {
"total": data["total"],
"perPage": data["per_page"],
"currentPage": data["current_page"],
},
"results": [
results = []
for result in data["data"]:
results.append(
{
"availableEpisodes": list(range(result["episodes"])),
"id": result["session"],
@@ -69,55 +52,81 @@ class AnimePaheApi(AnimeProvider):
"season": result["season"],
"poster": result["poster"],
}
for result in data["data"]
],
)
self.store.set(
str(result["session"]),
"search_result",
result,
)
return {
"pageInfo": {
"total": data["total"],
"perPage": data["per_page"],
"currentPage": data["current_page"],
},
"results": results,
}
@debug_provider(PROVIDER.upper())
def get_anime(self, session_id: str, *args):
@debug_provider
def _pages_loader(
self,
data,
session_id,
params,
page,
):
response = self.session.get(ANIMEPAHE_ENDPOINT, params=params)
response.raise_for_status()
if not data:
data.update(response.json())
else:
if ep_data := response.json().get("data"):
data["data"].extend(ep_data)
if response.json()["next_page_url"]:
# TODO: Refine this
time.sleep(
random.choice(
[
0.25,
0.1,
0.5,
0.75,
1,
]
)
)
page += 1
self._pages_loader(
data,
session_id,
params={
"m": "release",
"page": page,
"id": session_id,
"sort": "episode_asc",
},
page=page,
)
return data
@debug_provider
def get_anime(self, session_id: str, **kwargs):
page = 1
if d := self.store.get(str(session_id), "search_result"):
anime_result: "AnimePaheSearchResult" = d
data: "AnimePaheAnimePage" = {} # pyright:ignore
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
def _pages_loader(
url,
page,
):
response = self.session.get(
url,
)
if response.ok:
if not data:
data.update(response.json())
else:
if ep_data := response.json().get("data"):
data["data"].extend(ep_data)
if response.json()["next_page_url"]:
# TODO: Refine this
time.sleep(
random.choice(
[
0.25,
0.1,
0.5,
0.75,
1,
]
)
)
page += 1
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
_pages_loader(
url,
page,
)
_pages_loader(
url,
page,
data = self._pages_loader(
data,
session_id,
params={
"m": "release",
"id": session_id,
"sort": "episode_asc",
"page": page,
},
page=page,
)
if not data:
@@ -151,47 +160,13 @@ class AnimePaheApi(AnimeProvider):
],
}
@debug_provider(PROVIDER.upper())
def get_episode_streams(
self, anime_id, episode_number: str, translation_type, *args
):
anime_title = ""
episode = None
# extract episode details from memory
if d := self.store.get(str(anime_id), "anime_info"):
anime_title = d["title"]
episode = [
episode
for episode in d["data"]
if float(episode["episode"]) == float(episode_number)
]
if not episode:
logger.error(f"[ANIMEPAHE-ERROR]: episode {episode_number} doesn't exist")
return []
episode = episode[0]
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url)
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
# convert the elements containing embed links to a neat dict containing:
# data-src
# data-audio
# data-resolution
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
# get the episode title
episode_title = (
f"{episode['title'] or anime_title}; Episode {episode['episode']}"
)
@debug_provider
def _get_server(self, episode, res_dicts, anime_title, translation_type):
# get all links
streams = {
"server": "kwik",
"links": [],
"episode_title": episode_title,
"episode_title": f"{episode['title'] or anime_title}; Episode {episode['episode']}",
"subtitles": [],
"headers": {},
}
@@ -207,23 +182,22 @@ class AnimePaheApi(AnimeProvider):
logger.warning(
"[ANIMEPAHE-WARN]: embed url not found please report to the developers"
)
return []
continue
# get embed page
embed_response = self.session.get(
embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
)
if not response.ok:
continue
embed_response.raise_for_status()
embed_page = embed_response.text
decoded_js = process_animepahe_embed_page(embed_page)
if not decoded_js:
logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
return
continue
juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
if not juicy_stream:
logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
return
continue
juicy_stream = juicy_stream.group(1)
# add the link
streams["links"].append(
@@ -233,4 +207,119 @@ class AnimePaheApi(AnimeProvider):
"link": juicy_stream,
}
)
yield streams
return streams
@debug_provider
def get_episode_streams(
self, anime_id, episode_number: str, translation_type, **kwargs
):
anime_title = ""
# extract episode details from memory
anime_info = self.store.get(str(anime_id), "anime_info")
if not anime_info:
logger.error(
f"[ANIMEPAHE-ERROR]: Anime with ID {anime_id} not found in store"
)
return
anime_title = anime_info["title"]
episode = next(
(
ep
for ep in anime_info["data"]
if float(ep["episode"]) == float(episode_number)
),
None,
)
if not episode:
logger.error(
f"[ANIMEPAHE-ERROR]: Episode {episode_number} doesn't exist for anime {anime_title}"
)
return
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url)
response.raise_for_status()
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
# convert the elements containing embed links to a neat dict containing:
# data-src
# data-audio
# data-resolution
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
if _server := self._get_server(
episode, res_dicts, anime_title, translation_type
):
yield _server
if __name__ == "__main__":
import subprocess
animepahe = AnimePahe(cache_requests="True", use_persistent_provider_store="False")
search_term = input("Enter the search term for the anime: ")
translation_type = input("Enter the translation type (sub/dub): ")
search_results = animepahe.search_for_anime(
search_keywords=search_term, translation_type=translation_type
)
if not search_results or not search_results["results"]:
print("No results found.")
exit()
print("Search Results:")
for idx, result in enumerate(search_results["results"], start=1):
print(f"{idx}. {result['title']} (ID: {result['id']})")
anime_choice = int(input("Enter the number of the anime you want to watch: ")) - 1
anime_id = search_results["results"][anime_choice]["id"]
anime_details = animepahe.get_anime(anime_id)
if anime_details is None:
print("Failed to get anime details.")
exit()
print(f"Selected Anime: {anime_details['title']}")
print("Available Episodes:")
for idx, episode in enumerate(
sorted(anime_details["availableEpisodesDetail"][translation_type], key=float),
start=1,
):
print(f"{idx}. Episode {episode}")
episode_choice = (
int(input("Enter the number of the episode you want to watch: ")) - 1
)
episode_number = anime_details["availableEpisodesDetail"][translation_type][
episode_choice
]
streams = list(
animepahe.get_episode_streams(anime_id, episode_number, translation_type)
)
if not streams:
print("No streams available.")
exit()
print("Available Streams:")
for idx, stream in enumerate(streams, start=1):
print(f"{idx}. Server: {stream['server']}")
server_choice = int(input("Enter the number of the server you want to use: ")) - 1
selected_stream = streams[server_choice]
stream_link = selected_stream["links"][0]["link"]
mpv_args = ["mpv", stream_link]
headers = selected_stream["headers"]
if headers:
mpv_headers = "--http-header-fields="
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_args.append(mpv_headers)
subprocess.run(mpv_args)


@@ -1,6 +1,8 @@
import re
ANIMEPAHE = "animepahe.ru"
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api"
SERVERS_AVAILABLE = ["kwik"]
REQUEST_HEADERS = {
@@ -31,3 +33,5 @@ SERVER_HEADERS = {
"Priority": "u=4",
"TE": "trailers",
}
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
KWIK_RE = re.compile(r"Player\|(.+?)'")


@@ -10,7 +10,6 @@ from .providers_store import ProviderStore
class AnimeProvider:
session: requests.Session
PROVIDER = ""
USER_AGENT = random_user_agent()
HEADERS = {}
@@ -19,7 +18,10 @@ class AnimeProvider:
from ..common.requests_cacher import CachedRequestsSession
self.session = CachedRequestsSession(
os.path.join(APP_CACHE_DIR, "cached_requests.db")
os.path.join(APP_CACHE_DIR, "cached_requests.db"),
max_lifetime=int(
os.environ.get("FASTANIME_MAX_CACHE_LIFETIME", 259200)
),
)
else:
self.session = requests.session()
@@ -27,7 +29,7 @@ class AnimeProvider:
if use_persistent_provider_store.lower() == "true":
self.store = ProviderStore(
"persistent",
self.PROVIDER,
self.__class__.__name__,
os.path.join(APP_CACHE_DIR, "anime_providers_store.db"),
)
else:
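The cached session now honours a FASTANIME_MAX_CACHE_LIFETIME environment variable (default 259200 s, i.e. three days). A minimal sketch of overriding it before a provider is constructed, reusing the AllAnime constructor call from the demo above (the import path for AllAnime is not shown in this diff):

import os

# shorten the request cache to one day; must be set before the provider
# builds its CachedRequestsSession
os.environ["FASTANIME_MAX_CACHE_LIFETIME"] = "86400"
allanime = AllAnime(cache_requests="True", use_persistent_provider_store="False")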


@@ -5,21 +5,19 @@ import os
logger = logging.getLogger(__name__)
def debug_provider(provider_name: str):
def _provider_function_decorator(provider_function):
@functools.wraps(provider_function)
def _provider_function_wrapper(*args, **kwargs):
if not os.environ.get("FASTANIME_DEBUG"):
try:
return provider_function(*args, **kwargs)
except Exception as e:
logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
else:
return provider_function(*args, **kwargs)
def debug_provider(provider_function):
@functools.wraps(provider_function)
def _provider_function_wrapper(self, *args, **kwargs):
provider_name = self.__class__.__name__.upper()
if not os.environ.get("FASTANIME_DEBUG"):
try:
return provider_function(self, *args, **kwargs)
except Exception as e:
logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
else:
return provider_function(self, *args, **kwargs)
return _provider_function_wrapper
return _provider_function_decorator
return _provider_function_wrapper
def ensure_internet_connection(provider_function):


@@ -17,6 +17,7 @@ from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from ..utils import give_random_quality
from .constants import SERVERS_AVAILABLE
from .extractors import MegaCloud
from .types import HiAnimeStream
logger = logging.getLogger(__name__)
@@ -38,13 +39,11 @@ class ParseAnchorAndImgTag(HTMLParser):
self.a_tag = {attr[0]: attr[1] for attr in attrs}
class HiAnimeApi(AnimeProvider):
class HiAnime(AnimeProvider):
# HEADERS = {"Referer": "https://hianime.to/home"}
PROVIDER = "hianime"
@debug_provider(PROVIDER.upper())
def search_for_anime(self, anime_title: str, *args):
@debug_provider
def search_for_anime(self, anime_title: str, translation_type, **kwargs):
query = quote_plus(anime_title)
url = f"https://hianime.to/search?keyword={query}"
response = self.session.get(url)
@@ -91,8 +90,8 @@ class HiAnimeApi(AnimeProvider):
self.store.set(result["id"], "search_result", result)
return {"pageInfo": {}, "results": results}
@debug_provider(PROVIDER.upper())
def get_anime(self, hianime_id, *args):
@debug_provider
def get_anime(self, hianime_id, **kwargs):
anime_result = {}
if d := self.store.get(str(hianime_id), "search_result"):
anime_result = d
@@ -144,8 +143,8 @@ class HiAnimeApi(AnimeProvider):
"episodes_info": episodes_info,
}
@debug_provider(PROVIDER.upper())
def get_episode_streams(self, anime_id, episode, translation_type, *args):
@debug_provider
def get_episode_streams(self, anime_id, episode, translation_type, **kwargs):
if d := self.store.get(str(anime_id), "anime_info"):
episodes_info = d
episode_details = [
@@ -191,53 +190,85 @@ class HiAnimeApi(AnimeProvider):
if not servers_html:
return
@debug_provider(self.PROVIDER.upper())
@debug_provider
def _get_server(server_name, server_html):
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
servers_info = extract_attributes(server_html)
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
server_id = servers_info["data-id"]
embed_url = (
f"https://hianime.to/ajax/v2/episode/sources?id={server_id}"
)
embed_response = self.session.get(embed_url)
if embed_response.ok:
embed_json = embed_response.json()
raw_link_to_streams = embed_json["link"]
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if not match:
return
provider_domain = match.group(1)
embed_type = match.group(2)
episode_number = match.group(3)
source_id = match.group(4)
match server_name:
# TODO: Finish the other servers
case "HD2":
data = MegaCloud(self.session).extract(
raw_link_to_streams
)
return {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
}
for track in data["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["url"]}
for link in data["sources"]
]
),
}
case _:
# NOTE: THIS METHOD DOESN'T WORK; will get the other servers later
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if not match:
return
provider_domain = match.group(1)
embed_type = match.group(2)
episode_number = match.group(3)
source_id = match.group(4)
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
link_to_streams_response = self.session.get(link_to_streams)
if link_to_streams_response.ok:
juicy_streams_json: "HiAnimeStream" = (
link_to_streams_response.json()
)
# TODO: Hianime decided to fucking encrypt shit
# so got to fix it later
return {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
link_to_streams_response = self.session.get(
link_to_streams
)
if link_to_streams_response.ok:
juicy_streams_json: "HiAnimeStream" = (
link_to_streams_response.json()
)
return {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
}
for track in juicy_streams_json["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["file"]}
for link in juicy_streams_json["tracks"]
]
),
}
for track in juicy_streams_json["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["file"]}
for link in juicy_streams_json["tracks"]
]
),
}
for server_name, server_html in zip(
cycle(SERVERS_AVAILABLE), servers_html
):
if server := _get_server(server_name, server_html):
yield server
if server_name == "HD2":
if server := _get_server(server_name, server_html):
yield server
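As a side note, the zip(cycle(SERVERS_AVAILABLE), servers_html) pairing above simply recycles server names over however many server blocks the page returns; a tiny standalone illustration with made-up elements:

from itertools import cycle

SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]
servers_html = ["<div a>", "<div b>", "<div c>", "<div d>", "<div e>"]  # hypothetical
for server_name, server_html in zip(cycle(SERVERS_AVAILABLE), servers_html):
    print(server_name, server_html)
# HD1 <div a>, HD2 <div b>, StreamSB <div c>, StreamTape <div d>, HD1 <div e>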


@@ -1 +1,26 @@
SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]
""""
| "hd-1"
| "hd-2"
| "megacloud"
| "streamsb"
| "streamtape";
"""
"""
VidStreaming = "hd-1",
MegaCloud = "megacloud",
StreamSB = "streamsb",
StreamTape = "streamtape",
VidCloud = "hd-2",
AsianLoad = "asianload",
GogoCDN = "gogocdn",
MixDrop = "mixdrop",
UpCloud = "upcloud",
VizCloud = "vizcloud",
MyCloud = "mycloud",
Filemoon = "filemoon",
"""


@@ -0,0 +1,191 @@
import hashlib
import json
import re
import time
from base64 import b64decode
from typing import TYPE_CHECKING, Dict, List
from Crypto.Cipher import AES
if TYPE_CHECKING:
from ...common.requests_cacher import CachedRequestsSession
# Constants
megacloud = {
"script": "https://megacloud.tv/js/player/a/prod/e1-player.min.js?v=",
"sources": "https://megacloud.tv/embed-2/ajax/e-1/getSources?id=",
}
class HiAnimeError(Exception):
def __init__(self, message, context, status_code):
super().__init__(f"{context}: {message} (Status: {status_code})")
self.context = context
self.status_code = status_code
# Adapted from https://github.com/ghoshRitesh12/aniwatch
class MegaCloud:
def __init__(self, session):
self.session: "CachedRequestsSession" = session
def extract(self, video_url: str) -> Dict:
try:
extracted_data = {
"tracks": [],
"intro": {"start": 0, "end": 0},
"outro": {"start": 0, "end": 0},
"sources": [],
}
video_id = video_url.split("/")[-1].split("?")[0]
response = self.session.get(
megacloud["sources"] + video_id,
headers={
"Accept": "*/*",
"X-Requested-With": "XMLHttpRequest",
"Referer": video_url,
},
fresh=1, # pyright: ignore
)
srcs_data = response.json()
if not srcs_data:
raise HiAnimeError(
"Url may have an invalid video id", "getAnimeEpisodeSources", 400
)
encrypted_string = srcs_data["sources"]
if not srcs_data["encrypted"] and isinstance(encrypted_string, list):
extracted_data.update(
{
"intro": srcs_data["intro"],
"outro": srcs_data["outro"],
"tracks": srcs_data["tracks"],
"sources": [
{"url": s["file"], "type": s["type"]}
for s in encrypted_string
],
}
)
return extracted_data
# Fetch decryption script
script_response = self.session.get(
megacloud["script"] + str(int(time.time() * 1000)),
fresh=1, # pyright: ignore
)
script_text = script_response.text
if not script_text:
raise HiAnimeError(
"Couldn't fetch script to decrypt resource",
"getAnimeEpisodeSources",
500,
)
vars_ = self.extract_variables(script_text)
if not vars_:
raise Exception(
"Can't find variables. Perhaps the extractor is outdated."
)
secret, encrypted_source = self.get_secret(encrypted_string, vars_)
decrypted = self.decrypt(encrypted_source, secret)
try:
sources = json.loads(decrypted)
extracted_data.update(
{
"intro": srcs_data["intro"],
"outro": srcs_data["outro"],
"tracks": srcs_data["tracks"],
"sources": [
{"url": s["file"], "type": s["type"]} for s in sources
],
}
)
return extracted_data
except Exception:
raise HiAnimeError(
"Failed to decrypt resource", "getAnimeEpisodeSources", 500
)
except Exception as err:
raise err
def extract_variables(self, text: str) -> List[List[int]]:
regex = r"case\s*0x[0-9a-f]+:(?![^;]*=partKey)\s*\w+\s*=\s*(\w+)\s*,\s*\w+\s*=\s*(\w+);"
matches = re.finditer(regex, text)
vars_ = []
for match in matches:
key1 = self.matching_key(match[1], text)
key2 = self.matching_key(match[2], text)
try:
vars_.append([int(key1, 16), int(key2, 16)])
except ValueError:
continue
return vars_
def get_secret(
self, encrypted_string: str, values: List[List[int]]
) -> tuple[str, str]:
secret = []
encrypted_source_array = list(encrypted_string)
current_index = 0
for start, length in values:
start += current_index
end = start + length
secret.extend(encrypted_string[start:end])
encrypted_source_array[start:end] = [""] * length
current_index += length
encrypted_source = "".join(encrypted_source_array) # .replace("\x00", "")
return ("".join(secret), encrypted_source)
def decrypt(self, encrypted: str, key_or_secret: str, maybe_iv: str = "") -> str:
if maybe_iv:
key = key_or_secret.encode()
iv = maybe_iv.encode()
contents = encrypted
else:
# Decode the Base64 string
cypher = b64decode(encrypted)
# Extract the salt from the cypher text
salt = cypher[8:16]
# Combine the key_or_secret with the salt
password = key_or_secret.encode() + salt
# Generate MD5 hashes
md5_hashes = []
digest = password
for _ in range(3):
md5 = hashlib.md5()
md5.update(digest)
md5_hashes.append(md5.digest())
digest = md5_hashes[-1] + password
# Derive the key and IV
key = md5_hashes[0] + md5_hashes[1]
iv = md5_hashes[2]
# Extract the encrypted contents
contents = cypher[16:]
# Initialize the AES decipher
decipher = AES.new(key, AES.MODE_CBC, iv)
# Decrypt and decode
decrypted = decipher.decrypt(contents).decode("utf-8") # pyright: ignore
# Remove any padding (PKCS#7)
pad = ord(decrypted[-1])
return decrypted[:-pad]
def matching_key(self, value: str, script: str) -> str:
match = re.search(rf",{value}=((?:0x)?[0-9a-fA-F]+)", script)
if match:
return match.group(1).replace("0x", "")
raise Exception("Failed to match the key")


@@ -27,11 +27,10 @@ EXTRACT_USEFUL_INFO_PATTERN_2 = re.compile(
)
class NyaaApi(AnimeProvider):
class Nyaa(AnimeProvider):
search_results: SearchResults
PROVIDER = "nyaa"
@debug_provider(PROVIDER.upper())
@debug_provider
def search_for_anime(self, user_query: str, *args, **_):
self.search_results = search_for_anime_with_anilist(
user_query, True
@@ -39,7 +38,7 @@ class NyaaApi(AnimeProvider):
self.user_query = user_query
return self.search_results
@debug_provider(PROVIDER.upper())
@debug_provider
def get_anime(self, anilist_id: str, *_):
for anime in self.search_results["results"]:
if anime["id"] == anilist_id:
@@ -55,7 +54,7 @@ class NyaaApi(AnimeProvider):
},
}
@debug_provider(PROVIDER.upper())
@debug_provider
def get_episode_streams(
self,
anime_id: str,


@@ -1,32 +1,32 @@
import base64
import re
from itertools import cycle
from yt_dlp.utils import (
get_element_text_and_html_by_tag,
get_elements_text_and_html_by_attribute,
extract_attributes,
get_element_by_attribute,
get_element_text_and_html_by_tag,
get_elements_text_and_html_by_attribute,
)
import re
from yt_dlp.utils.traversal import get_element_html_by_attribute
from .constants import YUGEN_ENDPOINT, SEARCH_URL
from ..decorators import debug_provider
from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from .constants import SEARCH_URL, YUGEN_ENDPOINT
# ** Adapted from anipy-cli **
class YugenApi(AnimeProvider):
class Yugen(AnimeProvider):
"""
Provides a fast and effective interface to YugenApi site.
"""
PROVIDER = "yugen"
api_endpoint = YUGEN_ENDPOINT
# HEADERS = {
# "Referer": ALLANIME_REFERER,
# }
@debug_provider(PROVIDER.upper())
@debug_provider
def search_for_anime(
self,
user_query: str,
@@ -94,7 +94,7 @@ class YugenApi(AnimeProvider):
"results": results,
}
@debug_provider(PROVIDER.upper())
@debug_provider
def get_anime(self, anime_id: str, **kwargs):
identifier = base64.b64decode(anime_id).decode()
response = self.session.get(f"{YUGEN_ENDPOINT}/anime/{identifier}")
@@ -118,7 +118,9 @@ class YugenApi(AnimeProvider):
if sub_match:
eps = int(sub_match.group(1))
data_map["availableEpisodesDetail"]["sub"] = list(map(str,range(1, eps + 1)))
data_map["availableEpisodesDetail"]["sub"] = list(
map(str, range(1, eps + 1))
)
dub_match = re.search(
r'<div class="ap-.+?">Episodes \(Dub\)</div><span class="description" .+?>(\d+)</span></div>',
@@ -127,7 +129,9 @@ class YugenApi(AnimeProvider):
if dub_match:
eps = int(dub_match.group(1))
data_map["availableEpisodesDetail"]["dub"] = list(map(str,range(1, eps + 1)))
data_map["availableEpisodesDetail"]["dub"] = list(
map(str, range(1, eps + 1))
)
name = get_element_text_and_html_by_tag("h1", html_page)
if name is not None:
@@ -174,7 +178,7 @@ class YugenApi(AnimeProvider):
return data_map
@debug_provider(PROVIDER.upper())
@debug_provider
def get_episode_streams(
self, anime_id, episode_number: str, translation_type="sub"
):
@@ -212,5 +216,10 @@ class YugenApi(AnimeProvider):
"episode_title": f"{anime_title}; Episode {episode_number}",
"headers": {},
"subtitles": [],
"links": [{"quality": quality, "link": link} for quality,link in zip(cycle(["1080","720","480","360"]),res["hls"])],
"links": [
{"quality": quality, "link": link}
for quality, link in zip(
cycle(["1080", "720", "480", "360"]), res["hls"]
)
],
}


@@ -1,4 +1,3 @@
YUGEN_ENDPOINT: str = "https://yugenanime.tv"
SEARCH_URL = YUGEN_ENDPOINT + "/api/discover/"


@@ -80,7 +80,8 @@ class CachedRequestsSession(requests.Session):
response_headers TEXT,
data BLOB,
redirection_policy INT,
cache_expiry INTEGER
cache_expiry INTEGER,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)"""
)
@@ -117,6 +118,8 @@ class CachedRequestsSession(requests.Session):
url = ?
AND redirection_policy = ?
AND cache_expiry > ?
ORDER BY created_at DESC
LIMIT 1
""",
(url, redirection_policy, int(time.time())),
)
@@ -162,8 +165,15 @@ class CachedRequestsSession(requests.Session):
logger.debug("Caching the current request")
cursor.execute(
f"""
INSERT INTO {self.table_name}
VALUES (?, ?, ?, ?, ?, ?, ?)
INSERT INTO {self.table_name} (
url,
status_code,
request_headers,
response_headers,
data,
redirection_policy,
cache_expiry
) VALUES (?, ?, ?, ?, ?, ?, ?)
""",
(
url,


@@ -0,0 +1,11 @@
from pypresence import Presence
import time
def discord_connect(show, episode, switch):
presence = Presence(client_id = '1292070065583165512')
presence.connect()
if not switch.is_set():
presence.update(details = show, state = "Watching episode "+episode)
time.sleep(10)
else:
presence.close()
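A minimal sketch of how the switch argument might be wired up; this assumes switch is a threading.Event owned by the player code and that discord_connect from the new discord module above is importable (the actual call site is not shown in this diff):

import threading

# hypothetical wiring: run the presence update on a worker thread and set the
# event when playback ends so the next discord_connect call closes the presence
switch = threading.Event()
threading.Thread(
    target=discord_connect,
    args=("Death Note", "25", switch),
    daemon=True,
).start()
# ... episode plays ...
switch.set()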


@@ -41,8 +41,8 @@ class FZF:
stdout: [TODO:attribute]
"""
if not os.getenv("FZF_DEFAULT_OPTS"):
os.environ["FZF_DEFAULT_OPTS"] = FZF_DEFAULT_OPTS
# if not os.getenv("FZF_DEFAULT_OPTS"):
# os.environ["FZF_DEFAULT_OPTS"] = FZF_DEFAULT_OPTS
FZF_EXECUTABLE = shutil.which("fzf")
default_options = [
"--cycle",
@@ -157,10 +157,18 @@ class FZF:
Returns:
[TODO:return]
"""
_HEADER_COLOR = os.environ.get("FASTANIME_HEADER_COLOR", "215,0,95").split(",")
header = os.environ.get("FASTANIME_HEADER_ASCII_ART", HEADER)
header = "\n".join(
[
f"\033[38;2;{_HEADER_COLOR[0]};{_HEADER_COLOR[1]};{_HEADER_COLOR[2]};m{line}\033[0m"
for line in header.split("\n")
]
)
_commands = [
*self.default_options,
"--header",
HEADER,
header,
"--header-first",
"--prompt",
f"{prompt.title()}: ",
@@ -182,6 +190,7 @@ class FZF:
print(info)
input("Enter to try again")
return self.run(fzf_input, prompt, header, preview, expect, validator)
# os.environ["FZF_DEFAULT_OPTS"] = ""
return result

flake.lock (new file, generated, 61 lines)

@@ -0,0 +1,61 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1731676054,
"narHash": "sha256-OZiZ3m8SCMfh3B6bfGC/Bm4x3qc1m2SVEAlkV6iY7Yg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "5e4fbfb6b3de1aa2872b76d49fafc942626e2add",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

flake.nix (new file, 63 lines)

@@ -0,0 +1,63 @@
{
description = "FastAnime Project Flake";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
flake-utils.url = "github:numtide/flake-utils";
};
outputs = { self, nixpkgs, flake-utils }: flake-utils.lib.eachDefaultSystem (system:
let
pkgs = import nixpkgs { inherit system; };
python = pkgs.python312;
pythonPackages = python.pkgs;
fastanimeEnv = pythonPackages.buildPythonApplication {
pname = "fastanime";
version = "2.8.7";
src = ./.;
preBuild = ''
sed -i 's/rich>=13.9.2/rich>=13.8.1/' pyproject.toml
sed -i 's/pycryptodome>=3.21.0/pycryptodome>=3.20.0/' pyproject.toml
'';
# Add runtime dependencies
propagatedBuildInputs = with pythonPackages; [
click
inquirerpy
requests
rich
thefuzz
yt-dlp
dbus-python
hatchling
plyer
mpv
fastapi
pycryptodome
pypresence
];
# Ensure compatibility with the pyproject.toml
format = "pyproject";
};
in
{
packages.default = fastanimeEnv;
# DevShell for development
devShells.default = pkgs.mkShell {
buildInputs = [
fastanimeEnv
pythonPackages.hatchling
pkgs.mpv
pkgs.libmpv
pkgs.fzf
pkgs.rofi
];
};
});
}


@@ -5,7 +5,12 @@ VERSION=$1
[ "$VERSION" = "current" ] && fastanime --version && exit 0
sed -i "s/^version.*/version = \"$VERSION\"/" "$CLI_DIR/pyproject.toml" &&
sed -i "s/__version__.*/__version__ = \"v$VERSION\"/" "$CLI_DIR/fastanime/__init__.py" &&
git stage "$CLI_DIR/pyproject.toml" "$CLI_DIR/fastanime/__init__.py" &&
sed -i "s/version = .*/version = \"$VERSION\";/" "$CLI_DIR/flake.nix" &&
git stage "$CLI_DIR/pyproject.toml" "$CLI_DIR/fastanime/__init__.py" "$CLI_DIR/flake.nix" &&
git commit -m "chore: bump version (v$VERSION)" &&
# nix flake lock &&
uv lock &&
git stage "$CLI_DIR/flake.lock" "$CLI_DIR/uv.lock" &&
git commit -m "chore: update lock files" &&
git push &&
gh release create "v$VERSION"


@@ -1,6 +1,6 @@
[project]
name = "fastanime"
version = "2.7.4"
version = "2.8.7"
description = "A browser anime site experience from the terminal"
license = "UNLICENSE"
readme = "README.md"
@@ -8,10 +8,12 @@ requires-python = ">=3.10"
dependencies = [
"click>=8.1.7",
"inquirerpy>=0.3.4",
"pycryptodome>=3.21.0",
"pypresence>=4.3.0",
"requests>=2.32.3",
"rich>=13.9.2",
"thefuzz>=0.22.1",
"yt-dlp>=2024.10.7",
"yt-dlp[default]>=2024.10.7",
]
[project.scripts]
@@ -29,6 +31,7 @@ build-backend = "hatchling.build"
[tool.uv]
dev-dependencies = [
"pre-commit>=4.0.1",
"pyinstaller>=6.11.1",
"pyright>=1.1.384",
"pytest>=8.3.3",

shell.nix (new file, 18 lines)

@@ -0,0 +1,18 @@
let
pkgs = import <nixpkgs> {};
in pkgs.mkShell {
packages = [
(pkgs.python3.withPackages (python-pkgs: [
python-pkgs.yt-dlp
python-pkgs.dbus-python
python-pkgs.requests
python-pkgs.rich
python-pkgs.click
python-pkgs.inquirerpy
python-pkgs.mpv
python-pkgs.fastapi
python-pkgs.thefuzz
python-pkgs.plyer
]))
];
}


@@ -1,3 +1,5 @@
from unittest.mock import patch
import pytest
from click.testing import CliRunner
@@ -147,3 +149,10 @@ def test_anilist_upcoming_help(runner: CliRunner):
def test_anilist_watching_help(runner: CliRunner):
result = runner.invoke(run_cli, ["anilist", "watching", "--help"])
assert result.exit_code == 0
def test_check_for_updates_not_called_on_completions(runner):
with patch("fastanime.cli.app_updater.check_for_updates") as mock_check_for_updates:
result = runner.invoke(run_cli, ["completions"])
assert result.exit_code == 0
mock_check_for_updates.assert_not_called()

uv.lock (generated, 325 lines changed)

@@ -38,6 +38,98 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/f5/f2b75d2fc6f1a260f340f0e7c6a060f4dd2961cc16884ed851b0d18da06a/anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d", size = 90377 },
]
[[package]]
name = "brotli"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/c2/f9e977608bdf958650638c3f1e28f85a1b075f075ebbe77db8555463787b/Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724", size = 7372270 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6d/3a/dbf4fb970c1019a57b5e492e1e0eae745d32e59ba4d6161ab5422b08eefe/Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752", size = 873045 },
{ url = "https://files.pythonhosted.org/packages/dd/11/afc14026ea7f44bd6eb9316d800d439d092c8d508752055ce8d03086079a/Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9", size = 446218 },
{ url = "https://files.pythonhosted.org/packages/36/83/7545a6e7729db43cb36c4287ae388d6885c85a86dd251768a47015dfde32/Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3", size = 2903872 },
{ url = "https://files.pythonhosted.org/packages/32/23/35331c4d9391fcc0f29fd9bec2c76e4b4eeab769afbc4b11dd2e1098fb13/Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d", size = 2941254 },
{ url = "https://files.pythonhosted.org/packages/3b/24/1671acb450c902edb64bd765d73603797c6c7280a9ada85a195f6b78c6e5/Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e", size = 2857293 },
{ url = "https://files.pythonhosted.org/packages/d5/00/40f760cc27007912b327fe15bf6bfd8eaecbe451687f72a8abc587d503b3/Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da", size = 3002385 },
{ url = "https://files.pythonhosted.org/packages/b8/cb/8aaa83f7a4caa131757668c0fb0c4b6384b09ffa77f2fba9570d87ab587d/Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80", size = 2911104 },
{ url = "https://files.pythonhosted.org/packages/bc/c4/65456561d89d3c49f46b7fbeb8fe6e449f13bdc8ea7791832c5d476b2faf/Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d", size = 2809981 },
{ url = "https://files.pythonhosted.org/packages/05/1b/cf49528437bae28abce5f6e059f0d0be6fecdcc1d3e33e7c54b3ca498425/Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0", size = 2935297 },
{ url = "https://files.pythonhosted.org/packages/81/ff/190d4af610680bf0c5a09eb5d1eac6e99c7c8e216440f9c7cfd42b7adab5/Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e", size = 2930735 },
{ url = "https://files.pythonhosted.org/packages/80/7d/f1abbc0c98f6e09abd3cad63ec34af17abc4c44f308a7a539010f79aae7a/Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c", size = 2933107 },
{ url = "https://files.pythonhosted.org/packages/34/ce/5a5020ba48f2b5a4ad1c0522d095ad5847a0be508e7d7569c8630ce25062/Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1", size = 2845400 },
{ url = "https://files.pythonhosted.org/packages/44/89/fa2c4355ab1eecf3994e5a0a7f5492c6ff81dfcb5f9ba7859bd534bb5c1a/Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2", size = 3031985 },
{ url = "https://files.pythonhosted.org/packages/af/a4/79196b4a1674143d19dca400866b1a4d1a089040df7b93b88ebae81f3447/Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec", size = 2927099 },
{ url = "https://files.pythonhosted.org/packages/e9/54/1c0278556a097f9651e657b873ab08f01b9a9ae4cac128ceb66427d7cd20/Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2", size = 333172 },
{ url = "https://files.pythonhosted.org/packages/f7/65/b785722e941193fd8b571afd9edbec2a9b838ddec4375d8af33a50b8dab9/Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128", size = 357255 },
{ url = "https://files.pythonhosted.org/packages/96/12/ad41e7fadd5db55459c4c401842b47f7fee51068f86dd2894dd0dcfc2d2a/Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc", size = 873068 },
{ url = "https://files.pythonhosted.org/packages/95/4e/5afab7b2b4b61a84e9c75b17814198ce515343a44e2ed4488fac314cd0a9/Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6", size = 446244 },
{ url = "https://files.pythonhosted.org/packages/9d/e6/f305eb61fb9a8580c525478a4a34c5ae1a9bcb12c3aee619114940bc513d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd", size = 2906500 },
{ url = "https://files.pythonhosted.org/packages/3e/4f/af6846cfbc1550a3024e5d3775ede1e00474c40882c7bf5b37a43ca35e91/Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf", size = 2943950 },
{ url = "https://files.pythonhosted.org/packages/b3/e7/ca2993c7682d8629b62630ebf0d1f3bb3d579e667ce8e7ca03a0a0576a2d/Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61", size = 2918527 },
{ url = "https://files.pythonhosted.org/packages/b3/96/da98e7bedc4c51104d29cc61e5f449a502dd3dbc211944546a4cc65500d3/Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327", size = 2845489 },
{ url = "https://files.pythonhosted.org/packages/e8/ef/ccbc16947d6ce943a7f57e1a40596c75859eeb6d279c6994eddd69615265/Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd", size = 2914080 },
{ url = "https://files.pythonhosted.org/packages/80/d6/0bd38d758d1afa62a5524172f0b18626bb2392d717ff94806f741fcd5ee9/Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9", size = 2813051 },
{ url = "https://files.pythonhosted.org/packages/14/56/48859dd5d129d7519e001f06dcfbb6e2cf6db92b2702c0c2ce7d97e086c1/Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265", size = 2938172 },
{ url = "https://files.pythonhosted.org/packages/3d/77/a236d5f8cd9e9f4348da5acc75ab032ab1ab2c03cc8f430d24eea2672888/Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8", size = 2933023 },
{ url = "https://files.pythonhosted.org/packages/f1/87/3b283efc0f5cb35f7f84c0c240b1e1a1003a5e47141a4881bf87c86d0ce2/Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f", size = 2935871 },
{ url = "https://files.pythonhosted.org/packages/f3/eb/2be4cc3e2141dc1a43ad4ca1875a72088229de38c68e842746b342667b2a/Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757", size = 2847784 },
{ url = "https://files.pythonhosted.org/packages/66/13/b58ddebfd35edde572ccefe6890cf7c493f0c319aad2a5badee134b4d8ec/Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0", size = 3034905 },
{ url = "https://files.pythonhosted.org/packages/84/9c/bc96b6c7db824998a49ed3b38e441a2cae9234da6fa11f6ed17e8cf4f147/Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b", size = 2929467 },
{ url = "https://files.pythonhosted.org/packages/e7/71/8f161dee223c7ff7fea9d44893fba953ce97cf2c3c33f78ba260a91bcff5/Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50", size = 333169 },
{ url = "https://files.pythonhosted.org/packages/02/8a/fece0ee1057643cb2a5bbf59682de13f1725f8482b2c057d4e799d7ade75/Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1", size = 357253 },
{ url = "https://files.pythonhosted.org/packages/5c/d0/5373ae13b93fe00095a58efcbce837fd470ca39f703a235d2a999baadfbc/Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28", size = 815693 },
{ url = "https://files.pythonhosted.org/packages/8e/48/f6e1cdf86751300c288c1459724bfa6917a80e30dbfc326f92cea5d3683a/Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f", size = 422489 },
{ url = "https://files.pythonhosted.org/packages/06/88/564958cedce636d0f1bed313381dfc4b4e3d3f6015a63dae6146e1b8c65c/Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409", size = 873081 },
{ url = "https://files.pythonhosted.org/packages/58/79/b7026a8bb65da9a6bb7d14329fd2bd48d2b7f86d7329d5cc8ddc6a90526f/Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2", size = 446244 },
{ url = "https://files.pythonhosted.org/packages/e5/18/c18c32ecea41b6c0004e15606e274006366fe19436b6adccc1ae7b2e50c2/Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451", size = 2906505 },
{ url = "https://files.pythonhosted.org/packages/08/c8/69ec0496b1ada7569b62d85893d928e865df29b90736558d6c98c2031208/Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91", size = 2944152 },
{ url = "https://files.pythonhosted.org/packages/ab/fb/0517cea182219d6768113a38167ef6d4eb157a033178cc938033a552ed6d/Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408", size = 2919252 },
{ url = "https://files.pythonhosted.org/packages/c7/53/73a3431662e33ae61a5c80b1b9d2d18f58dfa910ae8dd696e57d39f1a2f5/Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0", size = 2845955 },
{ url = "https://files.pythonhosted.org/packages/55/ac/bd280708d9c5ebdbf9de01459e625a3e3803cce0784f47d633562cf40e83/Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc", size = 2914304 },
{ url = "https://files.pythonhosted.org/packages/76/58/5c391b41ecfc4527d2cc3350719b02e87cb424ef8ba2023fb662f9bf743c/Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180", size = 2814452 },
{ url = "https://files.pythonhosted.org/packages/c7/4e/91b8256dfe99c407f174924b65a01f5305e303f486cc7a2e8a5d43c8bec3/Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248", size = 2938751 },
{ url = "https://files.pythonhosted.org/packages/5a/a6/e2a39a5d3b412938362bbbeba5af904092bf3f95b867b4a3eb856104074e/Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966", size = 2933757 },
{ url = "https://files.pythonhosted.org/packages/13/f0/358354786280a509482e0e77c1a5459e439766597d280f28cb097642fc26/Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9", size = 2936146 },
{ url = "https://files.pythonhosted.org/packages/80/f7/daf538c1060d3a88266b80ecc1d1c98b79553b3f117a485653f17070ea2a/Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb", size = 2848055 },
{ url = "https://files.pythonhosted.org/packages/ad/cf/0eaa0585c4077d3c2d1edf322d8e97aabf317941d3a72d7b3ad8bce004b0/Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111", size = 3035102 },
{ url = "https://files.pythonhosted.org/packages/d8/63/1c1585b2aa554fe6dbce30f0c18bdbc877fa9a1bf5ff17677d9cca0ac122/Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839", size = 2930029 },
{ url = "https://files.pythonhosted.org/packages/5f/3b/4e3fd1893eb3bbfef8e5a80d4508bec17a57bb92d586c85c12d28666bb13/Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0", size = 333276 },
{ url = "https://files.pythonhosted.org/packages/3d/d5/942051b45a9e883b5b6e98c041698b1eb2012d25e5948c58d6bf85b1bb43/Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951", size = 357255 },
{ url = "https://files.pythonhosted.org/packages/0a/9f/fb37bb8ffc52a8da37b1c03c459a8cd55df7a57bdccd8831d500e994a0ca/Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5", size = 815681 },
{ url = "https://files.pythonhosted.org/packages/06/b3/dbd332a988586fefb0aa49c779f59f47cae76855c2d00f450364bb574cac/Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8", size = 422475 },
{ url = "https://files.pythonhosted.org/packages/bb/80/6aaddc2f63dbcf2d93c2d204e49c11a9ec93a8c7c63261e2b4bd35198283/Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f", size = 2906173 },
{ url = "https://files.pythonhosted.org/packages/ea/1d/e6ca79c96ff5b641df6097d299347507d39a9604bde8915e76bf026d6c77/Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648", size = 2943803 },
{ url = "https://files.pythonhosted.org/packages/ac/a3/d98d2472e0130b7dd3acdbb7f390d478123dbf62b7d32bda5c830a96116d/Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0", size = 2918946 },
{ url = "https://files.pythonhosted.org/packages/c4/a5/c69e6d272aee3e1423ed005d8915a7eaa0384c7de503da987f2d224d0721/Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089", size = 2845707 },
{ url = "https://files.pythonhosted.org/packages/58/9f/4149d38b52725afa39067350696c09526de0125ebfbaab5acc5af28b42ea/Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368", size = 2936231 },
{ url = "https://files.pythonhosted.org/packages/5a/5a/145de884285611838a16bebfdb060c231c52b8f84dfbe52b852a15780386/Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c", size = 2848157 },
{ url = "https://files.pythonhosted.org/packages/50/ae/408b6bfb8525dadebd3b3dd5b19d631da4f7d46420321db44cd99dcf2f2c/Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284", size = 3035122 },
{ url = "https://files.pythonhosted.org/packages/af/85/a94e5cfaa0ca449d8f91c3d6f78313ebf919a0dbd55a100c711c6e9655bc/Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7", size = 2930206 },
{ url = "https://files.pythonhosted.org/packages/c2/f0/a61d9262cd01351df22e57ad7c34f66794709acab13f34be2675f45bf89d/Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0", size = 333804 },
{ url = "https://files.pythonhosted.org/packages/7e/c1/ec214e9c94000d1c1974ec67ced1c970c148aa6b8d8373066123fc3dbf06/Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b", size = 358517 },
]
[[package]]
name = "brotlicffi"
version = "1.1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/9d/70caa61192f570fcf0352766331b735afa931b4c6bc9a348a0925cc13288/brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13", size = 465192 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/11/7b96009d3dcc2c931e828ce1e157f03824a69fb728d06bfd7b2fc6f93718/brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851", size = 453786 },
{ url = "https://files.pythonhosted.org/packages/d6/e6/a8f46f4a4ee7856fbd6ac0c6fb0dc65ed181ba46cd77875b8d9bbe494d9e/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b", size = 2911165 },
{ url = "https://files.pythonhosted.org/packages/be/20/201559dff14e83ba345a5ec03335607e47467b6633c210607e693aefac40/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814", size = 2927895 },
{ url = "https://files.pythonhosted.org/packages/cd/15/695b1409264143be3c933f708a3f81d53c4a1e1ebbc06f46331decbf6563/brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820", size = 2851834 },
{ url = "https://files.pythonhosted.org/packages/b4/40/b961a702463b6005baf952794c2e9e0099bde657d0d7e007f923883b907f/brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb", size = 341731 },
{ url = "https://files.pythonhosted.org/packages/1c/fa/5408a03c041114ceab628ce21766a4ea882aa6f6f0a800e04ee3a30ec6b9/brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613", size = 366783 },
{ url = "https://files.pythonhosted.org/packages/e5/3b/bd4f3d2bcf2306ae66b0346f5b42af1962480b200096ffc7abc3bd130eca/brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca", size = 397397 },
{ url = "https://files.pythonhosted.org/packages/54/10/1fd57864449360852c535c2381ee7120ba8f390aa3869df967c44ca7eba1/brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391", size = 379698 },
{ url = "https://files.pythonhosted.org/packages/e5/95/15aa422aa6450e6556e54a5fd1650ff59f470aed77ac739aa90ab63dc611/brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8", size = 378635 },
{ url = "https://files.pythonhosted.org/packages/6c/a7/f254e13b2cb43337d6d99a4ec10394c134e41bfda8a2eff15b75627f4a3d/brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35", size = 385719 },
{ url = "https://files.pythonhosted.org/packages/72/a9/0971251c4427c14b2a827dba3d910d4d3330dabf23d4278bf6d06a978847/brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d", size = 361760 },
]
[[package]]
name = "certifi"
version = "2024.8.30"
@@ -47,6 +139,72 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 },
]
[[package]]
name = "cffi"
version = "1.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pycparser" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 },
{ url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 },
{ url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 },
{ url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 },
{ url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 },
{ url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 },
{ url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 },
{ url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 },
{ url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 },
{ url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 },
{ url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 },
{ url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", size = 181299 },
{ url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 },
{ url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 },
{ url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 },
{ url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 },
{ url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 },
{ url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 },
{ url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 },
{ url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 },
{ url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 },
{ url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 },
{ url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 },
{ url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 },
{ url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 },
{ url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 },
{ url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 },
{ url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 },
{ url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 },
{ url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 },
{ url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 },
{ url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 },
{ url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 },
{ url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 },
{ url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 },
{ url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 },
{ url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 },
{ url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 },
{ url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 },
{ url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 },
{ url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 },
{ url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 },
{ url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 },
{ url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 },
{ url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 },
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 },
]
[[package]]
name = "cfgv"
version = "3.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 },
]
[[package]]
name = "charset-normalizer"
version = "3.4.0"
@@ -121,7 +279,7 @@ name = "click"
version = "8.1.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "platform_system == 'Windows'" },
{ name = "colorama", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 }
wheels = [
@@ -137,6 +295,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
]
[[package]]
name = "distlib"
version = "0.3.9"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 },
]
[[package]]
name = "dnspython"
version = "2.7.0"
@@ -170,15 +337,17 @@ wheels = [
[[package]]
name = "fastanime"
version = "2.7.3"
version = "2.8.7"
source = { editable = "." }
dependencies = [
{ name = "click" },
{ name = "inquirerpy" },
{ name = "pycryptodome" },
{ name = "pypresence" },
{ name = "requests" },
{ name = "rich" },
{ name = "thefuzz" },
{ name = "yt-dlp" },
{ name = "yt-dlp", extra = ["default"] },
]
[package.optional-dependencies]
@@ -199,6 +368,7 @@ standard = [
[package.dev-dependencies]
dev = [
{ name = "pre-commit" },
{ name = "pyinstaller" },
{ name = "pyright" },
{ name = "pytest" },
@@ -215,14 +385,17 @@ requires-dist = [
{ name = "mpv", marker = "extra == 'standard'", specifier = ">=1.0.7" },
{ name = "plyer", marker = "extra == 'notifications'", specifier = ">=2.1.0" },
{ name = "plyer", marker = "extra == 'standard'", specifier = ">=2.1.0" },
{ name = "pycryptodome", specifier = ">=3.21.0" },
{ name = "pypresence", specifier = ">=4.3.0" },
{ name = "requests", specifier = ">=2.32.3" },
{ name = "rich", specifier = ">=13.9.2" },
{ name = "thefuzz", specifier = ">=0.22.1" },
{ name = "yt-dlp", specifier = ">=2024.10.7" },
{ name = "yt-dlp", extras = ["default"], specifier = ">=2024.10.7" },
]
[package.metadata.requires-dev]
dev = [
{ name = "pre-commit", specifier = ">=4.0.1" },
{ name = "pyinstaller", specifier = ">=6.11.1" },
{ name = "pyright", specifier = ">=1.1.384" },
{ name = "pytest", specifier = ">=8.3.3" },
@@ -271,6 +444,15 @@ standard = [
{ name = "uvicorn", extra = ["standard"] },
]
[[package]]
name = "filelock"
version = "3.16.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 },
]
[[package]]
name = "h11"
version = "0.14.0"
@@ -345,6 +527,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 },
]
[[package]]
name = "identify"
version = "2.6.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1a/5f/05f0d167be94585d502b4adf8c7af31f1dc0b1c7e14f9938a88fdbbcf4a7/identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02", size = 99179 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/f5/09644a3ad803fae9eca8efa17e1f2aef380c7f0b02f7ec4e8d446e51d64a/identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd", size = 99049 },
]
[[package]]
name = "idna"
version = "3.10"
@@ -488,6 +679,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/aa/3f/d835556e34804cd0078507ed0f8a550f15d2861b875656193dd3451b720b/mpv-1.0.7-py3-none-any.whl", hash = "sha256:520fb134c18185b69c7fce4aa3514f14371028022d92eb193818e9fefb1e9fe8", size = 45257 },
]
[[package]]
name = "mutagen"
version = "1.47.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/81/e6/64bc71b74eef4b68e61eb921dcf72dabd9e4ec4af1e11891bbd312ccbb77/mutagen-1.47.0.tar.gz", hash = "sha256:719fadef0a978c31b4cf3c956261b3c58b6948b32023078a2117b1de09f0fc99", size = 1274186 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b0/7a/620f945b96be1f6ee357d211d5bf74ab1b7fe72a9f1525aafbfe3aee6875/mutagen-1.47.0-py3-none-any.whl", hash = "sha256:edd96f50c5907a9539d8e5bba7245f62c9f520aef333d13392a79a4f70aca719", size = 194391 },
]
[[package]]
name = "nodeenv"
version = "1.9.1"
@@ -524,6 +724,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/d7/8ff98376b1acc4503253b685ea09981697385ce344d4e3935c2af49e044d/pfzy-0.3.4-py3-none-any.whl", hash = "sha256:5f50d5b2b3207fa72e7ec0ef08372ef652685470974a107d0d4999fc5a903a96", size = 8537 },
]
[[package]]
name = "platformdirs"
version = "4.3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 },
]
[[package]]
name = "pluggy"
version = "1.5.0"
@@ -542,6 +751,22 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d3/89/a41c2643fc8eabeb84791acb9d0e4d139b1e4b53473cc4dae947b5fa33ed/plyer-2.1.0-py2.py3-none-any.whl", hash = "sha256:1b1772060df8b3045ed4f08231690ec8f7de30f5a004aa1724665a9074eed113", size = 142266 },
]
[[package]]
name = "pre-commit"
version = "4.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cfgv" },
{ name = "identify" },
{ name = "nodeenv" },
{ name = "pyyaml" },
{ name = "virtualenv" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 },
]
[[package]]
name = "prompt-toolkit"
version = "3.0.48"
@@ -554,6 +779,63 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/6a/fd08d94654f7e67c52ca30523a178b3f8ccc4237fce4be90d39c938a831a/prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e", size = 386595 },
]
[[package]]
name = "pycparser"
version = "2.22"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 },
]
[[package]]
name = "pycryptodome"
version = "3.21.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/13/52/13b9db4a913eee948152a079fe58d035bd3d1a519584155da8e786f767e6/pycryptodome-3.21.0.tar.gz", hash = "sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297", size = 4818071 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/88/5e83de10450027c96c79dc65ac45e9d0d7a7fef334f39d3789a191f33602/pycryptodome-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4", size = 2495937 },
{ url = "https://files.pythonhosted.org/packages/66/e1/8f28cd8cf7f7563319819d1e172879ccce2333781ae38da61c28fe22d6ff/pycryptodome-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b", size = 1634629 },
{ url = "https://files.pythonhosted.org/packages/6a/c1/f75a1aaff0c20c11df8dc8e2bf8057e7f73296af7dfd8cbb40077d1c930d/pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e", size = 2168708 },
{ url = "https://files.pythonhosted.org/packages/ea/66/6f2b7ddb457b19f73b82053ecc83ba768680609d56dd457dbc7e902c41aa/pycryptodome-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8", size = 2254555 },
{ url = "https://files.pythonhosted.org/packages/2c/2b/152c330732a887a86cbf591ed69bd1b489439b5464806adb270f169ec139/pycryptodome-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1", size = 2294143 },
{ url = "https://files.pythonhosted.org/packages/55/92/517c5c498c2980c1b6d6b9965dffbe31f3cd7f20f40d00ec4069559c5902/pycryptodome-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a", size = 2160509 },
{ url = "https://files.pythonhosted.org/packages/39/1f/c74288f54d80a20a78da87df1818c6464ac1041d10988bb7d982c4153fbc/pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2", size = 2329480 },
{ url = "https://files.pythonhosted.org/packages/39/1b/d0b013bf7d1af7cf0a6a4fce13f5fe5813ab225313755367b36e714a63f8/pycryptodome-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93", size = 2254397 },
{ url = "https://files.pythonhosted.org/packages/14/71/4cbd3870d3e926c34706f705d6793159ac49d9a213e3ababcdade5864663/pycryptodome-3.21.0-cp36-abi3-win32.whl", hash = "sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764", size = 1775641 },
{ url = "https://files.pythonhosted.org/packages/43/1d/81d59d228381576b92ecede5cd7239762c14001a828bdba30d64896e9778/pycryptodome-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53", size = 1812863 },
{ url = "https://files.pythonhosted.org/packages/25/b3/09ff7072e6d96c9939c24cf51d3c389d7c345bf675420355c22402f71b68/pycryptodome-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca", size = 1691593 },
{ url = "https://files.pythonhosted.org/packages/a8/91/38e43628148f68ba9b68dedbc323cf409e537fd11264031961fd7c744034/pycryptodome-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd", size = 1765997 },
{ url = "https://files.pythonhosted.org/packages/08/16/ae464d4ac338c1dd41f89c41f9488e54f7d2a3acf93bb920bb193b99f8e3/pycryptodome-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8", size = 1615855 },
{ url = "https://files.pythonhosted.org/packages/1e/8c/b0cee957eee1950ce7655006b26a8894cee1dc4b8747ae913684352786eb/pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6", size = 1650018 },
{ url = "https://files.pythonhosted.org/packages/93/4d/d7138068089b99f6b0368622e60f97a577c936d75f533552a82613060c58/pycryptodome-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0", size = 1687977 },
{ url = "https://files.pythonhosted.org/packages/96/02/90ae1ac9f28be4df0ed88c127bf4acc1b102b40053e172759d4d1c54d937/pycryptodome-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6", size = 1788273 },
]
[[package]]
name = "pycryptodomex"
version = "3.21.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/11/dc/e66551683ade663b5f07d7b3bc46434bf703491dbd22ee12d1f979ca828f/pycryptodomex-3.21.0.tar.gz", hash = "sha256:222d0bd05381dd25c32dd6065c071ebf084212ab79bab4599ba9e6a3e0009e6c", size = 4818543 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/22/5e/99f217d9881eead69607a2248dd7bbdf610837d7f5ad53f45a6cb71bbbfb/pycryptodomex-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:34325b84c8b380675fd2320d0649cdcbc9cf1e0d1526edbe8fce43ed858cdc7e", size = 2499490 },
{ url = "https://files.pythonhosted.org/packages/ce/8f/4d0e2a859a6470289d64e39b419f01d2494dfa2e4995342d50f6c2834237/pycryptodomex-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:103c133d6cd832ae7266feb0a65b69e3a5e4dbbd6f3a3ae3211a557fd653f516", size = 1638037 },
{ url = "https://files.pythonhosted.org/packages/0c/9e/6e748c1fa814c956d356f93cf7192b19487ca56fc9e2a0bcde2bbc057601/pycryptodomex-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77ac2ea80bcb4b4e1c6a596734c775a1615d23e31794967416afc14852a639d3", size = 2172279 },
{ url = "https://files.pythonhosted.org/packages/46/3f/f5bef92b11750af9e3516d4e69736eeeff20a2818d34611508bef5a7b381/pycryptodomex-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aa0cf13a1a1128b3e964dc667e5fe5c6235f7d7cfb0277213f0e2a783837cc2", size = 2258130 },
{ url = "https://files.pythonhosted.org/packages/de/4d/f0c65afd64ce435fd0547187ce6f99dfb37cdde16b05b57bca9f5c06966e/pycryptodomex-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46eb1f0c8d309da63a2064c28de54e5e614ad17b7e2f88df0faef58ce192fc7b", size = 2297719 },
{ url = "https://files.pythonhosted.org/packages/1c/6a/2a1a101b0345ee70376ba93df8de6c8c01aac8341fda02970800873456a7/pycryptodomex-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:cc7e111e66c274b0df5f4efa679eb31e23c7545d702333dfd2df10ab02c2a2ce", size = 2164079 },
{ url = "https://files.pythonhosted.org/packages/3d/00/90a15f16c234815b660303c2d7266b41b401ea2605f3a90373e9d425e39f/pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:770d630a5c46605ec83393feaa73a9635a60e55b112e1fb0c3cea84c2897aa0a", size = 2333060 },
{ url = "https://files.pythonhosted.org/packages/61/74/49f5d20c514ccc631b940cc9dfec45dcce418dc84a98463a2e2ebec33904/pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:52e23a0a6e61691134aa8c8beba89de420602541afaae70f66e16060fdcd677e", size = 2257982 },
{ url = "https://files.pythonhosted.org/packages/92/4b/d33ef74e2cc0025a259936661bb53432c5bbbadc561c5f2e023bcd73ce4c/pycryptodomex-3.21.0-cp36-abi3-win32.whl", hash = "sha256:a3d77919e6ff56d89aada1bd009b727b874d464cb0e2e3f00a49f7d2e709d76e", size = 1779052 },
{ url = "https://files.pythonhosted.org/packages/5b/be/7c991840af1184009fc86267160948350d1bf875f153c97bb471ad944e40/pycryptodomex-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b0e9765f93fe4890f39875e6c90c96cb341767833cfa767f41b490b506fa9ec0", size = 1816307 },
{ url = "https://files.pythonhosted.org/packages/af/ac/24125ad36778914a36f08d61ba5338cb9159382c638d9761ee19c8de822c/pycryptodomex-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:feaecdce4e5c0045e7a287de0c4351284391fe170729aa9182f6bd967631b3a8", size = 1694999 },
{ url = "https://files.pythonhosted.org/packages/93/73/be7a54a5903508070e5508925ba94493a1f326cfeecfff750e3eb250ea28/pycryptodomex-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:365aa5a66d52fd1f9e0530ea97f392c48c409c2f01ff8b9a39c73ed6f527d36c", size = 1769437 },
{ url = "https://files.pythonhosted.org/packages/e5/9f/39a6187f3986841fa6a9f35c6fdca5030ef73ff708b45a993813a51d7d10/pycryptodomex-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3efddfc50ac0ca143364042324046800c126a1d63816d532f2e19e6f2d8c0c31", size = 1619607 },
{ url = "https://files.pythonhosted.org/packages/f8/70/60bb08e9e9841b18d4669fb69d84b64ce900aacd7eb0ebebd4c7b9bdecd3/pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df2608682db8279a9ebbaf05a72f62a321433522ed0e499bc486a6889b96bf3", size = 1653571 },
{ url = "https://files.pythonhosted.org/packages/c9/6f/191b73509291c5ff0dddec9cc54797b1d73303c12b2e4017b24678e57099/pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5823d03e904ea3e53aebd6799d6b8ec63b7675b5d2f4a4bd5e3adcb512d03b37", size = 1691548 },
{ url = "https://files.pythonhosted.org/packages/2d/c7/a0d3356f3074ac548afefa515ff46f3bea011deca607faf1c09b26dd5330/pycryptodomex-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:27e84eeff24250ffec32722334749ac2a57a5fd60332cd6a0680090e7c42877e", size = 1792099 },
]
[[package]]
name = "pydantic"
version = "2.9.2"
@@ -685,6 +967,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a9/64/445861ee7a5fd32874c0f6cfe8222aacc8feda22539332e0d8ff50dadec6/pyinstaller_hooks_contrib-2024.10-py3-none-any.whl", hash = "sha256:ad47db0e153683b4151e10d231cb91f2d93c85079e78d76d9e0f57ac6c8a5e10", size = 338417 },
]
[[package]]
name = "pypresence"
version = "4.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f4/2e/d110f862720b5e3ba1b0b719657385fc4151929befa2c6981f48360aa480/pypresence-4.3.0.tar.gz", hash = "sha256:a6191a3af33a9667f2a4ef0185577c86b962ee70aa82643c472768a6fed1fbf3", size = 10696 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/40/1d30b30e18f81eb71365681223971a9822a89b3d6ee5269dd2aa955bc228/pypresence-4.3.0-py2.py3-none-any.whl", hash = "sha256:af878c6d49315084f1b108aec86b31915080614d9421d6dd3a44737aba9ff13f", size = 11778 },
]
[[package]]
name = "pyright"
version = "1.1.388"
@@ -1064,6 +1355,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018 },
]
[[package]]
name = "virtualenv"
version = "20.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "distlib" },
{ name = "filelock" },
{ name = "platformdirs" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bf/75/53316a5a8050069228a2f6d11f32046cfa94fbb6cc3f08703f59b873de2e/virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa", size = 7650368 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/10/f9/0919cf6f1432a8c4baa62511f8f8da8225432d22e83e3476f5be1a1edc6e/virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0", size = 4276702 },
]
[[package]]
name = "watchfiles"
version = "0.24.0"
@@ -1205,3 +1510,15 @@ sdist = { url = "https://files.pythonhosted.org/packages/52/50/0014e9099a9dc3dec
wheels = [
{ url = "https://files.pythonhosted.org/packages/6e/e4/e45c5067a79780954b905db4a42aa83d7aaefd91e32b18ab91c77600e668/yt_dlp-2024.11.4-py3-none-any.whl", hash = "sha256:589d51ed9f154624a45c1f0ceb3d68d0d1e2031460e8dbc62139be631c20b388", size = 3165645 },
]
[package.optional-dependencies]
default = [
{ name = "brotli", marker = "implementation_name == 'cpython'" },
{ name = "brotlicffi", marker = "implementation_name != 'cpython'" },
{ name = "certifi" },
{ name = "mutagen" },
{ name = "pycryptodomex" },
{ name = "requests" },
{ name = "urllib3" },
{ name = "websockets" },
]