Compare commits

...

102 Commits

Author SHA1 Message Date
Benex254
0ad16fee53 fix: typing issue in player 2024-09-22 22:34:07 +03:00
Benex254
249243aeb4 chore: use --all-extras flag in poetry install 2024-09-22 22:31:35 +03:00
Benex254
c208dc3579 chore: bump version 2024-09-22 22:27:04 +03:00
Benex254
ea93f2ba23 chore: make some dependencies optional 2024-09-22 22:26:37 +03:00
Benex254
d910a0bb6a chore: update depenedencies 2024-09-22 22:25:52 +03:00
Benex254
550fcfeddc feat: make plyer an optional dependency 2024-09-22 22:13:12 +03:00
Benex254
c6910e5a1c feat: improve prompt text 2024-09-22 22:13:12 +03:00
Benex254
8555edb521 feat: dont pass obj to providers 2024-09-22 22:13:12 +03:00
Benex254
139193ce29 chore: remove aniwave as a provider; you shall forever live in our hearts 2024-09-22 22:13:12 +03:00
Benex254
1a87375ccd feat: add debug mode for providers 2024-09-22 22:13:12 +03:00
BeneX254
83cbef40f6 Update README.md 2024-09-21 18:06:09 +03:00
Benex254
85b4fc75a1 docs: update the readme 2024-09-20 17:58:29 +03:00
Benex254
f2e2da378f feat: improved medi list tracking 2024-09-20 17:58:06 +03:00
Benex254
7c34bc9120 feat: restrict some genres in mini_anilist 2024-09-19 19:12:10 +03:00
Benex254
6f153f2acb feat: immprove help messages for all cli commands 2024-09-19 19:11:15 +03:00
Benex254
8171083978 chore: update deps 2024-09-18 20:10:35 +03:00
Benex254
db5b9a59b4 fix: fastanime update not working with pip installs 2024-09-18 20:09:34 +03:00
Benex254
6fa656ba11 chore: bump version 2024-09-18 19:59:54 +03:00
Benex254
de0682c1bb fix: invalid cmd 2024-09-18 19:59:40 +03:00
Benex254
a6a32d8de4 chore: bump version 2024-09-18 19:44:10 +03:00
Benex254
bb14b269de feat: add --player option 2024-09-18 19:42:47 +03:00
Benex254
14331d8bc2 feat: workaround image previews on android 2024-09-18 19:42:47 +03:00
BeneX254
1729464844 Update README.md 2024-09-17 21:44:35 +03:00
benex
5fb9747285 fix: default ui not persisting when using config --update 2024-09-15 14:59:01 +03:00
benex
394228d391 chore: bump version 2024-09-15 14:42:54 +03:00
benex
5d3c0cc6ec fix: unicode error on windows when writing the config file 2024-09-15 14:38:50 +03:00
BeneX254
3ef7c5248c Update README.md 2024-09-15 13:46:50 +03:00
Benex254
8bebc401fd fix: rename use_mpv_mod to use_python_mpv in config 2024-09-15 13:40:20 +03:00
Benex254
215b28457b docs: update readme 2024-09-15 13:39:58 +03:00
Benex254
dfd2bfc857 docs: update readme 2024-09-15 13:29:32 +03:00
Benex254
f991292e94 chore: bump version 2024-09-15 13:29:20 +03:00
Benex254
d837457f80 feat: improve config file docs 2024-09-15 13:22:14 +03:00
Benex254
343bdba31b feat: add the --update option to the config command which causes all config options passed to fastanime to be persisted to your config file 2024-09-15 13:22:14 +03:00
benex
1c1c2457e8 feat: improve the preview with a workaround 2024-09-15 10:05:20 +03:00
benex
b083bfb074 fix: previews not working on windows 2024-09-15 09:36:15 +03:00
benex
ea1abcb2ae feat: dont use roaming folder for the config file 2024-09-15 08:54:02 +03:00
benex
001030ba2b fix: unicode error when running fzf on wndows 2024-09-15 08:53:22 +03:00
BeneX254
eda8984781 Update README.md 2024-09-13 21:57:15 +03:00
Benex254
d8dc6f0a34 chore: bump version 2024-09-10 19:15:43 +03:00
Benex254
2d711a7a7f docs: update readme 2024-09-10 19:15:25 +03:00
Benex254
30ca25626a feat: add --titles option to downloads 2024-09-10 19:11:52 +03:00
Benex254
b1f5a558c8 feat: improve animepahe utils 2024-09-10 19:11:19 +03:00
Benex254
8062c8dc83 feat: stat command ?? 2024-08-23 20:51:53 +03:00
Benex254
cb7eed46bc docs: update readme 2024-08-23 17:44:23 +03:00
Benex254
4626eca89e feat: improvements on media list intergration 2024-08-23 17:44:10 +03:00
Benex254
0d549c5915 docs: update readme 2024-08-23 17:18:49 +03:00
Benex254
33c518ed4c chore: cleanup codebase 2024-08-23 17:18:36 +03:00
Benex254
8e155dcc74 chore: bump version 2024-08-23 16:05:45 +03:00
Benex254
7743b0423e chore: clean up codebase 2024-08-23 16:05:26 +03:00
Benex254
6346ea7343 docs: update readme 2024-08-23 11:40:08 +03:00
Benex254
32de01047f chore:bump version 2024-08-23 11:39:57 +03:00
Benex254
35c7f81afb fix: no chapter title 2024-08-23 11:39:45 +03:00
Benex254
2dbbb1c4df feat: add experimental manga support 2024-08-23 11:19:25 +03:00
Benex254
6a6efa9d56 chore: bump version 2024-08-22 20:39:44 +03:00
Benex254
e510dc3a11 docs: update readme 2024-08-22 20:39:15 +03:00
Benex254
9639fd8c05 feat: improve normalizing of titles 2024-08-22 20:35:43 +03:00
Benex254
add35ce682 chore: bump version 2024-08-22 19:09:31 +03:00
Benex254
6bcc77ea44 fix: incorrect episode regex 2024-08-22 19:09:00 +03:00
Benex254
1a72f88be3 docs: updaate readme 2024-08-22 18:31:23 +03:00
Benex254
1a9f1120b8 chore: bump version 2024-08-22 18:31:11 +03:00
Benex254
c2fc807688 feat: episode preview 2024-08-22 18:25:41 +03:00
Benex254
2b0ade093c feat: normalize anime titles 2024-08-22 17:32:53 +03:00
BeneX254
a26193706e Update README.md 2024-08-22 13:37:18 +03:00
BeneX254
ff3c57ef9b Update README.md 2024-08-22 13:31:53 +03:00
BeneX254
3b987bd07a Update README.md 2024-08-22 12:43:58 +03:00
BeneX254
e8474c0428 Update README.md 2024-08-22 12:37:43 +03:00
BeneX254
c78a759aa1 Update README.md 2024-08-22 00:38:46 +03:00
Benex254
d1aad70c48 feat: add awesome completions to search command 2024-08-21 23:49:39 +03:00
Benex254
62b36f3e58 fix: workaround over typing issue 2024-08-21 23:20:45 +03:00
Benex254
c5b905fb0d chore: update deps 2024-08-21 23:18:12 +03:00
Benex254
7d3dc671ed fix: workaround typing issue 2024-08-21 23:07:01 +03:00
Benex254
0ec3c7a5bb docs: update docs 2024-08-21 22:53:30 +03:00
Benex254
8e0619863a feat: search command 2024-08-21 22:53:18 +03:00
Benex254
e8a05ec4b8 feat: add dump json to anilist commands 2024-08-21 20:48:01 +03:00
Benex254
34e8b2abd1 feat: update download command 2024-08-21 19:45:57 +03:00
Benex254
161b6eb961 chore: bump version 2024-08-21 19:41:35 +03:00
Benex254
dd2090f85d docs: update 2024-08-21 19:41:01 +03:00
Benex254
8b1595a5da feat:update 2024-08-21 19:40:45 +03:00
Benex254
77ffa27ed8 chore: bump version 2024-08-21 17:37:09 +03:00
Benex254
15f79b65c9 feat: aniwave?? 2024-08-21 17:18:30 +03:00
Benex254
33c3af0241 chore: remove print and input statements 2024-08-21 16:00:52 +03:00
Benex254
9badde62fb feat: improve providers 2024-08-21 15:58:01 +03:00
Benex254
4e401dca40 fix: logging issue 2024-08-21 14:53:30 +03:00
Benex254
25422b1b7d feat: improve aniwatch provider api 2024-08-21 14:52:56 +03:00
Benex254
e8463f13b4 chore: reconfigure pyright 2024-08-21 11:42:48 +03:00
Benex254
556f42e41f fix: clean option of download command 2024-08-21 11:41:55 +03:00
Benex254
b99a4f7efc chore: bump version 2024-08-19 23:44:05 +03:00
Benex254
f6f45cf322 docs: update readme 2024-08-19 23:43:50 +03:00
Benex254
ae6db1847a feat: improve download functionality 2024-08-19 23:43:34 +03:00
Benex254
20d04ea07b feat(utils): add m3u8 quality selector 2024-08-19 17:27:52 +03:00
Benex254
8f3834453c chore: bump version 2024-08-19 15:28:04 +03:00
Benex254
7ad8b8a0e3 fix: return values 2024-08-19 15:25:36 +03:00
Benex254
80b41f06da feat:add new ui command 2024-08-19 15:25:05 +03:00
Benex254
e79321ed50 chore: bump version 2024-08-19 13:05:03 +03:00
Benex254
f7b5898dfa fix: some stuff 2024-08-19 13:04:30 +03:00
Benex254
144bf53081 chore: bump version 2024-08-19 11:01:13 +03:00
Benex254
16dded9724 fix: inability to properly detect terminal 2024-08-19 10:51:39 +03:00
Benex254
c47b158bff fix: logging issue 2024-08-19 10:51:11 +03:00
Benex254
9a36e15d9d feat: intergrate subs to python-mpv based player 2024-08-19 10:37:04 +03:00
Benex254
d6b2bd7761 fix: ep title 2024-08-19 10:36:20 +03:00
Benex254
2346552dc4 fix: logging issue 2024-08-19 00:38:51 +03:00
Benex254
ba275055db fix: logging issue 2024-08-19 00:38:29 +03:00
82 changed files with 5238 additions and 2463 deletions

View File

@@ -24,7 +24,7 @@ jobs:
path: ./.venv path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }} key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies - name: Install the project dependencies
run: poetry install run: poetry install --all-extras
- name: build app - name: build app
run: poetry build run: poetry build
- name: Archive production artifacts - name: Archive production artifacts

View File

@@ -30,7 +30,7 @@ jobs:
path: ./.venv path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }} key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies - name: Install the project dependencies
run: poetry install run: poetry install --all-extras
- name: run linter, formatters and sort imports - name: run linter, formatters and sort imports
run: | run: |
poetry run black . poetry run black .
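The `--all-extras` flag above pairs with the commits that make some dependencies (such as plyer) optional extras: CI installs everything, while a plain `pip install fastanime` may omit them. A minimal sketch of how an optional dependency like plyer is typically guarded at import time (an illustrative pattern, not necessarily FastAnime's exact code):

```python
# Illustrative sketch: guard an optional extra (plyer) so the CLI still works without it.
try:
    from plyer import notification  # optional extra; may be absent on minimal installs
except ImportError:
    notification = None


def notify(title: str, message: str) -> None:
    """Send a desktop notification, falling back to stdout when plyer is missing."""
    if notification is None:
        print(f"{title}: {message}")
        return
    notification.notify(title=title, message=message)
```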

346
README.md
View File

@@ -1,12 +1,21 @@
# FastAnime # **FastAnime**
![PyPI - Downloads](https://img.shields.io/pypi/dm/fastanime) ![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Benex254/FastAnime/test.yml?label=Tests)
![Discord](https://img.shields.io/discord/1250887070906323096?label=Discord)
![GitHub Issues or Pull Requests](https://img.shields.io/github/issues/Benex254/FastAnime)
![GitHub deployments](https://img.shields.io/github/deployments/Benex254/fastanime/pypi?label=PyPi%20Publish)
![PyPI - License](https://img.shields.io/pypi/l/fastanime)
Welcome to **FastAnime**, anime site experience from the terminal. Welcome to **FastAnime**, anime site experience from the terminal.
**fzf mode** ![fastanime-demo](https://github.com/user-attachments/assets/16e29f54-e9fa-48c7-b944-bfacb31ae1b5)
<details>
<summary><b>fzf mode</b></summary>
[fa_fzf_demo.webm](https://github.com/user-attachments/assets/b1fecf25-e358-4e8b-a144-bcb7947210cf) [fa_fzf_demo.webm](https://github.com/user-attachments/assets/b1fecf25-e358-4e8b-a144-bcb7947210cf)
**other modes:** </details>
<details> <details>
<summary><b>rofi mode</b></summary> <summary><b>rofi mode</b></summary>
@@ -22,7 +31,7 @@ Welcome to **FastAnime**, anime site experience from the terminal.
</details> </details>
Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magic-tape](https://gitlab.com/christosangel/magic-tape/-/tree/main?ref_type=heads) and [ani-cli](https://github.com/pystardust/ani-cli). Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [jerry](https://github.com/justchokingaround/jerry/tree/main),[magic-tape](https://gitlab.com/christosangel/magic-tape/-/tree/main?ref_type=heads) and [ani-cli](https://github.com/pystardust/ani-cli).
<!--toc:start--> <!--toc:start-->
@@ -50,6 +59,7 @@ Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magi
- [MPV specific commands](#mpv-specific-commands) - [MPV specific commands](#mpv-specific-commands)
- [Key Bindings](#key-bindings) - [Key Bindings](#key-bindings)
- [Script Messages](#script-messages) - [Script Messages](#script-messages)
- [styling the default interface](#styling-the-default-interface)
- [Configuration](#configuration) - [Configuration](#configuration)
- [Contributing](#contributing) - [Contributing](#contributing)
- [Receiving Support](#receiving-support) - [Receiving Support](#receiving-support)
@@ -62,9 +72,15 @@ Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magi
## Installation ## Installation
![Windows](https://img.shields.io/badge/-Windows_x64-blue.svg?style=for-the-badge&logo=windows)
![Linux/BSD](https://img.shields.io/badge/-Linux/BSD-red.svg?style=for-the-badge&logo=linux)
![Arch Linux](https://img.shields.io/badge/-Arch_Linux-black.svg?style=for-the-badge&logo=archlinux)
![MacOS](https://img.shields.io/badge/-MacOS-lightblue.svg?style=for-the-badge&logo=apple)
![Android](https://img.shields.io/badge/-Android-green.svg?style=for-the-badge&logo=android)
The app can run wherever python can run. So all you need to have is python installed on your device. The app can run wherever python can run. So all you need to have is python installed on your device.
On android you can use [termux](https://github.com/termux/termux-app). On android you can use [termux](https://github.com/termux/termux-app).
If you have any difficulty consult for help on the [discord channel](https://discord.gg/HRjySFjQ) If you have any difficulty consult for help on the [discord channel](https://discord.gg/HBEmAwvbHV)
### Installation using your favourite package manager ### Installation using your favourite package manager
@@ -162,6 +178,7 @@ The only required external dependency, unless you won't be streaming, is [MPV](h
> player because we believe nothing beats **MPV** and it provides > player because we believe nothing beats **MPV** and it provides
> everything you could ever need with a small footprint. > everything you could ever need with a small footprint.
> But if you have a reason feel free to encourage us to do so. > But if you have a reason feel free to encourage us to do so.
> However, on android this is not the case, so vlc is also supported.
**Other external dependencies that will just make your experience better:** **Other external dependencies that will just make your experience better:**
@@ -174,14 +191,17 @@ The only required external dependency, unless you won't be streaming, is [MPV](h
- [ani-skip](https://github.com/synacktraa/ani-skip) used for skipping the opening and ending theme songs - [ani-skip](https://github.com/synacktraa/ani-skip) used for skipping the opening and ending theme songs
- [ffmpegthumbnailer](https://github.com/dirkvdb/ffmpegthumbnailer) used for local previews of downloaded anime - [ffmpegthumbnailer](https://github.com/dirkvdb/ffmpegthumbnailer) used for local previews of downloaded anime
- [syncplay](https://syncplay.pl/) to enable watch together. - [syncplay](https://syncplay.pl/) to enable watch together.
- [feh](https://github.com/derf/feh) used in manga mode
## Usage ## Usage
The project offers a featureful command-line interface and MPV interface through the use of python-mpv. The project offers a featureful command-line interface and MPV interface through the use of python-mpv.
The project also offers subs in different languages thanks to the aniwatch provider.
### The Commandline interface :fire: ### The Commandline interface :fire:
Designed for efficiency and automation. Plus has a beautiful pseudo-TUI in some of the commands. Designed for efficiency and automation. Plus has a beautiful pseudo-TUI in some of the commands.
If you are stuck anywhere just use `--help` before the command you would like to get help on
**Overview of main commands:** **Overview of main commands:**
@@ -220,7 +240,7 @@ Available options for the fastanime include:
- `--default` use the default ui - `--default` use the default ui
- `--preview` show a preview when using fzf - `--preview` show a preview when using fzf
- `--no-preview` dont show a preview when using fzf - `--no-preview` dont show a preview when using fzf
- `--format <yt-dlp format string>` or `-f <yt-dlp format string>` set the format of anime downloaded and streamed based on yt-dlp format. Works when `--server gogoanime` - `--format <yt-dlp format string>` or `-f <yt-dlp format string>` set the format of anime downloaded and streamed based on [yt-dlp format](https://github.com/yt-dlp/yt-dlp#format-selection). Works when `--server gogoanime` or on providers that provide multi quality streams eg aniwatch
- `--icons/--no-icons` toggle the visibility of the icons - `--icons/--no-icons` toggle the visibility of the icons
- `--skip/--no-skip` whether to skip the opening and ending theme songs. - `--skip/--no-skip` whether to skip the opening and ending theme songs.
- `--rofi` use rofi for the ui - `--rofi` use rofi for the ui
@@ -233,7 +253,9 @@ Available options for the fastanime include:
- `--use-mpv-mod/--use-default-player` whether to use python-mpv - `--use-mpv-mod/--use-default-player` whether to use python-mpv
- `--provider <allanime/animepahe>` anime site of choice to scrape from - `--provider <allanime/animepahe>` anime site of choice to scrape from
- `--sync-play` or `-sp` use syncplay for streaming anime so you can watch with your friends - `--sync-play` or `-sp` use syncplay for streaming anime so you can watch with your friends
- `--sub_lang <en/or any other common shortform for country>` regex is used to determine the appropriate. Only works when provider is aniwatch. - `--sub-lang <en/or any other common shortform for country>` regex is used to determine the appropriate. Only works when provider is aniwatch.
- `--normalize-titles/--no-normalize-titles` whether to normalize provider titles
- `--manga` toggle experimental manga mode
Example usage of the above options Example usage of the above options
@@ -247,13 +269,16 @@ fastanime --sync-play --server sharepoint search -t <anime-title>
fastanime --sync-play --server sharepoint anilist fastanime --sync-play --server sharepoint anilist
# downloading dubbed anime # downloading dubbed anime
fastanime --dub download <anime> fastanime --dub download -t <anime>
# use icons and fzf for a more elegant ui with preview # use icons and fzf for a more elegant ui with preview
fastanime --icons --preview --fzf anilist fastanime --icons --preview --fzf anilist
# use icons with default ui # use icons with default ui
fastanime --icons --default anilist fastanime --icons --default anilist
# viewing manga
fastanime --manga search -t <manga-title>
``` ```
#### The anilist command :fire: :fire: :fire: #### The anilist command :fire: :fire: :fire:
@@ -267,6 +292,7 @@ Run `fastanime anilist` to access the main interface.
##### Subcommands ##### Subcommands
The subcommands are mainly there as a convenience, since all the features already exist in the main interface. The subcommands are mainly there as a convenience, since all the features already exist in the main interface.
Most of the subcommands share the common option `--dump-json` or `-d` which will print only the json data and suppress the ui.
- `fastanime anilist trending`: Top 15 trending anime. - `fastanime anilist trending`: Top 15 trending anime.
- `fastanime anilist recent`: Top 15 recently updated anime. - `fastanime anilist recent`: Top 15 recently updated anime.
@@ -276,6 +302,49 @@ The subcommands are mainly their as convenience. Since all the features already
- `fastanime anilist favourites`: Top 15 favorite anime. - `fastanime anilist favourites`: Top 15 favorite anime.
- `fastanime anilist random`: get random anime - `fastanime anilist random`: get random anime
**FastAnime Anilist Search subcommand** 🔥 🔥 🔥
It is by far one of the most powerful commands.
It offers the following options:
- `--sort <MediaSort>` or `-s <MediaSort>`
- `--title <anime-title>` or `-t <anime-title>`
- `--tags <tag>` or `-T <tag>` can be specified multiple times for different tags to filter by.
- `--year <year>` or `-y <year>`
- `--status <MediaStatus>` or `-S <MediaStatus>` can be specified multiple times
- `--media-format <MediaFormat>` or `-f <MediaFormat>`
- `--season <MediaSeason>`
- `--genres <genre>` or `-g <genre>` can be specified multiple times.
- `--on-list/--not-on-list`
Example:
```bash
# get anime with the tag of isekai
fastanime anilist search -T isekai
# get anime of 2024 and sort by popularity
# that has already finished airing or is releasing
# and is not in your anime lists
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
# get anime of 2024 season WINTER
fastanime anilist search -y 2024 --season WINTER
# get anime genre action and tag isekai,magic
fastanime anilist search -g Action -T Isekai -T Magic
# get anime of 2024 thats finished airing
fastanime anilist search -y 2024 -S FINISHED
# get the most favourite anime movies
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
```
For more details visit the anilist docs or just get the completions which will improve the experience.
Like seriously, **[get the completions](https://github.com/Benex254/FastAnime#completions-subcommand)** and the experience will be 💯 better.
The following are commands you can only run if you are signed in to your AniList account: The following are commands you can only run if you are signed in to your AniList account:
- `fastanime anilist watching` - `fastanime anilist watching`
@@ -285,7 +354,7 @@ The following are commands you can only run if you are signed in to your AniList
- `fastanime anilist paused` - `fastanime anilist paused`
- `fastanime anilist completed` - `fastanime anilist completed`
Plus: `fastanime anilist notifier` :fire: Plus: `fastanime anilist notifier` 🔥
```bash ```bash
# basic form # basic form
@@ -362,6 +431,24 @@ fastanime download -t <anime-title> -r ':<episodes-end>'
# remember python indexing starts at 0 # remember python indexing starts at 0
fastanime download -t <anime-title> -r '<episode-1>:<episode>' fastanime download -t <anime-title> -r '<episode-1>:<episode>'
# merge subtitles with ffmpeg into mkv format; aniwatch tends to give subs as separate files
# and don't prompt for anything
# e.g. if a file already exists in the destination, remove it instead of asking
# and clean up,
# i.e. remove the original files (sub file and video file)
# and only keep the merged file
fastanime download -t <anime-title> --merge --clean --no-prompt
# EOF is used since -t always expects a title
# you can supply anime titles from file or -t at the same time
#
# from stdin
echo -e "<anime-title>\n<anime-title>\n<anime-title>" | fastanime download -t "EOF" -r <range> -f -
# from file
fastanime download -t "EOF" -r <range> -f <file-path>
``` ```
#### search subcommand #### search subcommand
@@ -465,6 +552,10 @@ fastanime downloads --time-to-seek <intRange(-1,100)>
# --- or --- # --- or ---
fastanime downloads -t <intRange(-1,100)> fastanime downloads -t <intRange(-1,100)>
# to watch a specific title
# be sure to get the completions for the best experience
fastanime downloads --title <title>
# to get the path to the downloads folder set # to get the path to the downloads folder set
fastanime downloads --path fastanime downloads --path
# useful when you want to use the value for other programs # useful when you want to use the value for other programs
@@ -579,79 +670,210 @@ script-message select-server <server-name>
script-message select-quality <1080/720/480/360> script-message select-quality <1080/720/480/360>
``` ```
## styling the default interface
The default interface uses InquirerPy, which is customizable. Read here to find out more: <https://inquirerpy.readthedocs.io/en/latest/pages/env.html>
## Configuration ## Configuration
The app includes sensible defaults but can be customized extensively. Configuration is stored in `.ini` format at `~/.config/FastAnime/config.ini` on arch linux; for the other operating systems you can check by running `fastanime config --path`. The app includes sensible defaults but can be customized extensively. Configuration is stored in `.ini` format at `~/.config/FastAnime/config.ini` on arch linux; for the other operating systems you can check by running `fastanime config --path`.
> [!TIP]
> You can now use the option `--update` to update your config file from the command-line
> For Example:
> `fastanime --icons --fzf --preview config --update`
> the above will set icons to true, use_fzf to true and preview to true in your config file
The default config:
```ini ```ini
#
# ███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░
# ██╔════╝██╔══██╗██╔════╝╚══██╔══╝██╔══██╗████╗░██║██║████╗░████║██╔════╝ ██╔══██╗██╔══██╗████╗░██║██╔════╝██║██╔════╝░
# █████╗░░███████║╚█████╗░░░░██║░░░███████║██╔██╗██║██║██╔████╔██║█████╗░░ ██║░░╚═╝██║░░██║██╔██╗██║█████╗░░██║██║░░██╗░
# ██╔══╝░░██╔══██║░╚═══██╗░░░██║░░░██╔══██║██║╚████║██║██║╚██╔╝██║██╔══╝░░ ██║░░██╗██║░░██║██║╚████║██╔══╝░░██║██║░░╚██╗
# ██║░░░░░██║░░██║██████╔╝░░░██║░░░██║░░██║██║░╚███║██║██║░╚═╝░██║███████╗ ╚█████╔╝╚█████╔╝██║░╚███║██║░░░░░██║╚██████╔╝
# ╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝ ░╚════╝░░╚════╝░╚═╝░░╚══╝╚═╝░░░░░╚═╝░╚═════╝░
#
[general]
# whether to show the icons in the tui [True/False]
# more like emojis
# by the way if you have any recommendations to which should be used where please
# don't hesitate to share your opinion
# cause it's a lot of work to look for the right one for each menu option
# be sure to also give the replacement emoji
icons = False
# the quality of the stream [1080,720,480,360]
# this option is usually only reliable when:
# provider=animepahe
# since it provides links that actually point to streams of different qualities
# while the rest just point to another link that can provide the anime from the same server
quality = 1080
# whether to normalize provider titles [True/False]
# basically takes the provider titles and finds the corresponding anilist title then changes the title to that
# useful for uniformity especially when downloading from different providers
# this also applies to episode titles
normalize_titles = True
# can be [allanime, animepahe, aniwatch]
# allanime is the most reliable
# animepahe provides different links to streams of different quality so a quality can be selected reliably with --quality option
# aniwatch, which is now hianime, usually provides subs in different languages and its servers are generally faster
provider = allanime
# Display language [english, romaji]
# this is passed to anilist directly and is used to set the language which the anime titles will be in
# when using the anilist interface
preferred_language = english
# Download directory
# where you will find your videos after downloading them with 'fastanime download' command
downloads_dir = ~/Videos/FastAnime
# whether to show a preview window when using fzf or rofi [True/False]
# the preview requires you have a commandline image viewer as documented in the README
# this only applies when using fzf
# if you don't care about image previews it doesn't matter
# though its awesome
# try it and you will see
preview = False
# the time to seek when using ffmpegthumbnailer [-1 to 100]
# -1 means random and is the default
# ffmpegthumbnailer is used to generate previews and you can select at what time in the video to extract an image
# random makes things quite exciting because you never know at what time it will extract the image
ffmpegthumbnailer_seek_time = -1
# whether to use fzf as the interface for the anilist command and others. [True/False]
use_fzf = False
# whether to use rofi for the ui [True/False]
# it's more useful if you want to create a desktop entry
# which can be setup with 'fastanime config --desktop-entry'
# though if you want it to be your sole interface even when fastanime is run directly from the terminal
use_rofi = False
# rofi themes to use
# the values of this option is the path to the rofi config files to use
# i choose to split it into three since it gives the best look and feel
# you can refer to the rofi demo on github to see for yourself
# by the way i recommend getting the rofi themes from this project;
rofi_theme =
rofi_theme_input =
rofi_theme_confirm =
# the duration in minutes a notification will stay in the screen
# used by notifier command
notification_duration = 2
# used when the provider gives subs of different languages
# currently its the case for:
# aniwatch
# the values for this option are the short names for countries
# regex is used to determine what you selected
sub_lang = eng
[stream] [stream]
continue_from_history = True # Auto continue from watch history # Auto continue from watch history [True/False]
# this will make fastanime to choose the episode that you last watched to completion
# and increment it by one
# and use that to auto select the episode you want to watch
continue_from_history = True
# which history to use [local/remote] # which history to use [local/remote]
# local history means it will just use the watch history stored locally in your device
# the file that stores it is called watch_history.json and is stored next to your config file
# remote means it ignores the last episode stored locally and instead uses the one in your anilist anime list
# this config option is useful if you want to overwrite your local history or import history from another device or platform
# since remote history will take precedence over what's available locally
preferred_history = local preferred_history = local
# force mpv window # Preferred language for anime [dub/sub]
# passed directly to mpv so values are same translation_type = sub
force_window = immediate
translation_type = sub # Preferred language for anime (options: dub, sub) # what server to use for a particular provider
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
# animepahe: [kwik]
# aniwatch: [HD1, HD2, StreamSB, StreamTape]
# 'top' can also be used as a value for this option
# 'top' will cause fastanime to auto select the first server it sees
# this saves on resources and is faster since not all servers are being fetched
server = top
server = top # Default server (options: dropbox, sharepoint, wetransfer.gogoanime, top, wixmp) # Auto select next episode [True/False]
# this makes fastanime increment the current episode number
# then after using that value to fetch the next episode instead of prompting
# this option is useful for binging
auto_next = False
auto_next = False # Auto-select next episode # Auto select the anime provider results with fuzzy find. [True/False]
# Note this won't always be correct
# this is because the providers sometimes use non-standard names
# that are their own preference rather than the official names
# But 99% of the time it will be accurate
# if this happens just turn off auto_select in the menus or from the commandline and manually select the correct anime title
# and then please open an issue at <> highlighting the normalized title and the title given by the provider for the anime you wished to watch
# or even better edit this file <> and open a pull request
auto_select = True
# Auto select the anime provider results with fuzzy find. # whether to skip the opening and ending theme songs [True/False]
# Note this wont always be correct.But 99% of the time will be. # NOTE: requires ani-skip to be in path
auto_select=True # for python-mpv users I am planning to create this functionality in python without the use of an external script
# so its disabled for now
# whether to skip the opening and ending theme songs skip = False
# note requires ani-skip to be in path
skip=false
# the maximum delta time in minutes after which the episode should be considered as completed # the maximum delta time in minutes after which the episode should be considered as completed
# used in the continue from time stamp # used in the continue from time stamp
error=3 error = 3
use_mpv_mod=False # whether to use python-mpv [True/False]
# to enable superior control over the player
# adding more options to it
# Enable this one and you will wonder why you did not discover fastanime sooner
# Since you basically don't have to close the player window to go to the next or previous episode, switch servers, change translation type or
# change to a given episode x
# so try it if you haven't already
# if you have any issues setting it up
# don't be afraid to ask
# especially on windows
# honestly it can be a pain to set it up there
# personally it took me quite sometime to figure it out
# this is because of how windows handles shared libraries
# so just ask when you find yourself stuck
# or just switch to arch linux
use_python_mpv = False
# force mpv window
# the default 'immediate' just makes mpv open the window even if the video has not yet loaded
# done for aesthetics
# passed directly to mpv so values are same
force_window = immediate
# the format of downloaded anime and trailer # the format of downloaded anime and trailer
# based on yt-dlp format and passed directly to it # based on yt-dlp format and passed directly to it
# learn more by looking it up on their site # learn more by looking it up on their site
# only works for downloaded anime if server=gogoanime # only works for downloaded anime if:
# since its the only one that offers different formats # provider=allanime, server=gogoanime
# the others tend not to # provider=allanime, server=wixmp
format=best[height<=1080]/bestvideo[height<=1080]+bestaudio/best # default # provider=aniwatch
# this is because they provide an m3u8 file that contains multiple quality streams
format = best[height<=1080]/bestvideo[height<=1080]+bestaudio/best
[general] # NOTE:
# can be [allanime,animepahe] # if you have any trouble setting up your config
provider = allanime # please don't be afraid to ask in our discord
# plus if there are any errors, improvements or suggestions please tell us in the discord
preferred_language = romaji # Display language (options: english, romaji) # or help us by contributing
# we appreciate all the help we can get
downloads_dir = <Default-videos-dir>/FastAnime # Download directory # since we may not always have the time to immediately implement the changes
#
preview=false # whether to show a preview window when using fzf or rofi # HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
#
use_fzf=False # whether to use fzf as the interface for the anilist command and others.
use_rofi=false # whether to use rofi for the ui
rofi_theme=<path-to-rofi-theme-file>
rofi_theme_input=<path-to-rofi-theme-file>
rofi_theme_confirm=<path-to-rofi-theme-file>
# whether to show the icons
icons=false
# the duration in minutes a notification will stay in the screen
# used by notifier command
notification_duration=2
[anilist]
# Not implemented yet
``` ```
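For reference, a config in this `.ini` format can be read back with the standard library. A minimal sketch (FastAnime's own loader may differ; the path below is the one quoted earlier in this README):

```python
# Minimal sketch: read a FastAnime-style config.ini with the standard library.
from configparser import ConfigParser
from pathlib import Path

config_path = Path.home() / ".config" / "FastAnime" / "config.ini"  # path quoted in the docs above

parser = ConfigParser()
parser.read(config_path)

provider = parser.get("general", "provider", fallback="allanime")
use_python_mpv = parser.getboolean("stream", "use_python_mpv", fallback=False)
print(f"provider={provider}, use_python_mpv={use_python_mpv}")
```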
## Contributing ## Contributing
@@ -660,12 +882,14 @@ We welcome your issues and feature requests. However, due to time constraints, w
If you wish to contribute directly, please first open an issue describing your proposed changes so it can be discussed or if you are in a rush for the feature to be merged just open a pr. If you wish to contribute directly, please first open an issue describing your proposed changes so it can be discussed or if you are in a rush for the feature to be merged just open a pr.
If you find an anime title that does not correspond with a provider or is just weird just [edit the data file](https://github.com/Benex254/FastAnime/blob/master/fastanime/Utility/data.py) and open a pr or if you don't want to do that open an issue.
## Receiving Support ## Receiving Support
For inquiries, join our [Discord Server](https://discord.gg/C4rhMA4mmK). For inquiries, join our [Discord Server](https://discord.gg/HBEmAwvbHV).
<p align="center"> <p align="center">
<a href="https://discord.gg/C4rhMA4mmK"> <a href="https://discord.gg/HBEmAwvbHV">
<img src="https://invidget.switchblade.xyz/C4rhMA4mmK"> <img src="https://invidget.switchblade.xyz/C4rhMA4mmK">
</a> </a>
</p> </p>

View File

@@ -12,7 +12,6 @@ from .libs.anime_provider import anime_sources
if TYPE_CHECKING: if TYPE_CHECKING:
from typing import Iterator from typing import Iterator
from .libs.anilist.types import AnilistBaseMediaDataSchema
from .libs.anime_provider.types import Anime, SearchResults, Server from .libs.anime_provider.types import Anime, SearchResults, Server
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -37,12 +36,12 @@ class AnimeProvider:
self.provider = provider self.provider = provider
self.dynamic = dynamic self.dynamic = dynamic
self.retries = retries self.retries = retries
self.lazyload_provider() self.lazyload_provider(self.provider)
def lazyload_provider(self): def lazyload_provider(self, provider):
"""updates the current provider being used""" """updates the current provider being used"""
_, anime_provider_cls_name = anime_sources[self.provider].split(".", 1) _, anime_provider_cls_name = anime_sources[provider].split(".", 1)
package = f"fastanime.libs.anime_provider.{self.provider}" package = f"fastanime.libs.anime_provider.{provider}"
provider_api = importlib.import_module(".api", package) provider_api = importlib.import_module(".api", package)
anime_provider = getattr(provider_api, anime_provider_cls_name) anime_provider = getattr(provider_api, anime_provider_cls_name)
self.anime_provider = anime_provider() self.anime_provider = anime_provider()
@@ -51,7 +50,6 @@ class AnimeProvider:
self, self,
user_query, user_query,
translation_type, translation_type,
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
nsfw=True, nsfw=True,
unknown=True, unknown=True,
) -> "SearchResults | None": ) -> "SearchResults | None":
@@ -68,19 +66,15 @@ class AnimeProvider:
[TODO:return] [TODO:return]
""" """
anime_provider = self.anime_provider anime_provider = self.anime_provider
try: results = anime_provider.search_for_anime(
results = anime_provider.search_for_anime( user_query, translation_type, nsfw, unknown
user_query, translation_type, nsfw, unknown )
)
except Exception as e:
logging.error(e)
results = None
return results return results
def get_anime( def get_anime(
self, self,
anime_id: str, anime_id: str,
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
) -> "Anime | None": ) -> "Anime | None":
"""core abstraction over getting info of an anime from all providers """core abstraction over getting info of an anime from all providers
@@ -92,19 +86,16 @@ class AnimeProvider:
[TODO:return] [TODO:return]
""" """
anime_provider = self.anime_provider anime_provider = self.anime_provider
try: results = anime_provider.get_anime(anime_id)
results = anime_provider.get_anime(anime_id)
except Exception as e:
logging.error(e)
results = None
return results return results
def get_episode_streams( def get_episode_streams(
self, self,
anime, anime_id,
anime_title,
episode: str, episode: str,
translation_type: str, translation_type: str,
anilist_obj: "AnilistBaseMediaDataSchema|None" = None,
) -> "Iterator[Server] | None": ) -> "Iterator[Server] | None":
"""core abstractions for getting juicy streams from all providers """core abstractions for getting juicy streams from all providers
@@ -118,11 +109,7 @@ class AnimeProvider:
[TODO:return] [TODO:return]
""" """
anime_provider = self.anime_provider anime_provider = self.anime_provider
try: results = anime_provider.get_episode_streams(
results = anime_provider.get_episode_streams( anime_id, anime_title, episode, translation_type
anime, episode, translation_type )
) return results
except Exception as e:
logging.error(e)
results = None
return results # pyright:ignore
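The refactor above threads the provider name through `lazyload_provider` explicitly and drops the per-call `try/except`, so errors now surface to the caller. A minimal sketch of the lazy-loading pattern as a standalone function, with a hypothetical registry entry standing in for the real `anime_sources` values:

```python
# Minimal sketch of the lazy-loading pattern used above.
# The registry value is "module.ClassName"; the entry below is hypothetical.
import importlib

anime_sources = {"allanime": "api.AllAnimeApi"}  # hypothetical value for illustration


def lazyload_provider(provider: str):
    """Import the selected provider's api module only when it is first needed."""
    _, cls_name = anime_sources[provider].split(".", 1)
    package = f"fastanime.libs.anime_provider.{provider}"
    provider_api = importlib.import_module(".api", package)
    return getattr(provider_api, cls_name)()
```

Deferring the import this way keeps startup cheap when only one of the providers is ever used in a session.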

105
fastanime/MangaProvider.py Normal file
View File

@@ -0,0 +1,105 @@
"""An abstraction over all providers offering added features with a simple and well typed api
[TODO:description]
"""
import importlib
import logging
from typing import TYPE_CHECKING
from .libs.manga_provider import manga_sources
if TYPE_CHECKING:
pass
logger = logging.getLogger(__name__)
class MangaProvider:
"""Class that manages all anime sources adding some extra functionality to them.
Attributes:
PROVIDERS: [TODO:attribute]
provider: [TODO:attribute]
provider: [TODO:attribute]
dynamic: [TODO:attribute]
retries: [TODO:attribute]
manga_provider: [TODO:attribute]
"""
PROVIDERS = list(manga_sources.keys())
provider = PROVIDERS[0]
def __init__(self, provider="mangadex", dynamic=False, retries=0) -> None:
self.provider = provider
self.dynamic = dynamic
self.retries = retries
self.lazyload_provider(self.provider)
def lazyload_provider(self, provider):
"""updates the current provider being used"""
_, anime_provider_cls_name = manga_sources[provider].split(".", 1)
package = f"fastanime.libs.manga_provider.{provider}"
provider_api = importlib.import_module(".api", package)
manga_provider = getattr(provider_api, anime_provider_cls_name)
self.manga_provider = manga_provider()
def search_for_manga(
self,
user_query,
nsfw=True,
unknown=True,
):
"""core abstraction over all providers search functionality
Args:
user_query ([TODO:parameter]): [TODO:description]
translation_type ([TODO:parameter]): [TODO:description]
nsfw ([TODO:parameter]): [TODO:description]
manga_provider ([TODO:parameter]): [TODO:description]
anilist_obj: [TODO:description]
Returns:
[TODO:return]
"""
manga_provider = self.manga_provider
try:
results = manga_provider.search_for_manga(user_query, nsfw, unknown)
except Exception as e:
logger.error(e)
results = None
return results
def get_manga(
self,
anime_id: str,
):
"""core abstraction over getting info of an anime from all providers
Args:
anime_id: [TODO:description]
anilist_obj: [TODO:description]
Returns:
[TODO:return]
"""
manga_provider = self.manga_provider
try:
results = manga_provider.get_manga(anime_id)
except Exception as e:
logger.error(e)
results = None
return results
def get_chapter_thumbnails(
self,
manga_id: str,
chapter: str,
):
manga_provider = self.manga_provider
try:
results = manga_provider.get_chapter_thumbnails(manga_id, chapter)
except Exception as e:
logger.error(e)
results = None
return results # pyright:ignore
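A minimal usage sketch of the new class, based only on the signatures shown above; the result shapes are provider-specific and not asserted here:

```python
# Minimal usage sketch based on the signatures above; result shapes are provider-specific.
from fastanime.MangaProvider import MangaProvider

manga_provider = MangaProvider(provider="mangadex")

results = manga_provider.search_for_manga("one piece")
if results is None:
    raise SystemExit("search failed (errors are logged by the provider wrapper)")

# the ids/fields below depend on the provider's response shape, so they are left commented out
# manga = manga_provider.get_manga("<manga-id>")
# thumbnails = manga_provider.get_chapter_thumbnails("<manga-id>", "1")
print(results)
```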

View File

@@ -3,12 +3,24 @@ Just contains some useful data used across the codebase
""" """
# useful incases where the anilist title is too different from the provider title # useful incases where the anilist title is too different from the provider title
anime_normalizer = { anime_normalizer_raw = {
"1P": "one piece", "allanime": {
"Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica", "1P": "one piece",
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka", "Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made", "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
},
"aniwatch": {"My Star": "Oshi no Ko"},
"animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
} }
anilist_sort_normalizer = {"search match": "SEARCH_MATCH"} def get_anime_normalizer():
"""Used because there are different providers"""
import os
current_provider = os.environ["CURRENT_FASTANIME_PROVIDER"]
return anime_normalizer_raw[current_provider]
anime_normalizer = get_anime_normalizer()
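The normalizer is now keyed per provider and resolved through the `CURRENT_FASTANIME_PROVIDER` environment variable, which the CLI diff further down sets when `--provider` is passed. A minimal sketch of the lookup in isolation:

```python
# Minimal sketch of the provider-keyed lookup shown above.
import os

anime_normalizer_raw = {
    "allanime": {"1P": "one piece"},
    "aniwatch": {"My Star": "Oshi no Ko"},
    "animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
}

os.environ.setdefault("CURRENT_FASTANIME_PROVIDER", "allanime")  # set by the CLI in the real code
current_provider = os.environ["CURRENT_FASTANIME_PROVIDER"]
anime_normalizer = anime_normalizer_raw[current_provider]

# unknown titles fall through unchanged when .get() is used with the title as its own default
print(anime_normalizer.get("1P", "1P"))  # -> "one piece"
```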

View File

@@ -1,8 +1,14 @@
import logging import logging
import os
import shutil
import subprocess
import tempfile
from queue import Queue from queue import Queue
from threading import Thread from threading import Thread
import yt_dlp import yt_dlp
from rich import print
from rich.prompt import Confirm
from yt_dlp.utils import sanitize_filename from yt_dlp.utils import sanitize_filename
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -25,8 +31,6 @@ class YtDLPDownloader:
self._thread.daemon = True self._thread.daemon = True
self._thread.start() self._thread.start()
# Function to download the file
# TODO: untpack the title to its actual values episode_title and anime_title
def _download_file( def _download_file(
self, self,
url: str, url: str,
@@ -39,6 +43,9 @@ class YtDLPDownloader:
verbose=False, verbose=False,
headers={}, headers={},
sub="", sub="",
merge=False,
clean=False,
prompt=True,
): ):
"""Helper function that downloads anime given url and path details """Helper function that downloads anime given url and path details
@@ -64,8 +71,82 @@ class YtDLPDownloader:
urls = [url] urls = [url]
if sub: if sub:
urls.append(sub) urls.append(sub)
with yt_dlp.YoutubeDL(ydl_opts) as ydl: vid_path = ""
ydl.download(urls) sub_path = ""
for i, url in enumerate(urls):
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
info = ydl.extract_info(url, download=True)
if not info:
continue
if i == 0:
vid_path = info["requested_downloads"][0]["filepath"]
else:
sub_path = info["requested_downloads"][0]["filepath"]
if sub_path and vid_path and merge:
self.merge_subtitles(vid_path, sub_path, clean, prompt)
def merge_subtitles(self, video_path, sub_path, clean, prompt):
# Extract the directory and filename
video_dir = os.path.dirname(video_path)
video_name = os.path.basename(video_path)
video_name, _ = os.path.splitext(video_name)
video_name += ".mkv"
FFMPEG_EXECUTABLE = shutil.which("ffmpeg")
if not FFMPEG_EXECUTABLE:
print("[yellow bold]WARNING: [/]FFmpeg not found")
return
# Create a temporary directory
with tempfile.TemporaryDirectory() as temp_dir:
# Temporary output path in the temporary directory
temp_output_path = os.path.join(temp_dir, video_name)
# FFmpeg command to merge subtitles
command = [
FFMPEG_EXECUTABLE,
"-hide_banner",
"-i",
video_path,
"-i",
sub_path,
"-c",
"copy",
"-map",
"0",
"-map",
"1",
temp_output_path,
]
# Run the command
try:
subprocess.run(command, check=True)
# Move the file back to the original directory with the original name
final_output_path = os.path.join(video_dir, video_name)
if os.path.exists(final_output_path):
if not prompt or Confirm.ask(
f"File exists({final_output_path}) would you like to overwrite it",
default=True,
):
# move file to dest
os.remove(final_output_path)
shutil.move(temp_output_path, final_output_path)
else:
shutil.move(temp_output_path, final_output_path)
# clean up
if clean:
print("[cyan]Cleaning original files...[/]")
os.remove(video_path)
os.remove(sub_path)
print(
f"[green bold]Subtitles merged successfully.[/] Output file: {final_output_path}"
)
except subprocess.CalledProcessError as e:
print(f"[red bold]Error[/] during merging subtitles: {e}")
except Exception as e:
print(f"[red bold]An error[/] occurred: {e}")
# WARN: May remove this legacy functionality # WARN: May remove this legacy functionality
def download_file(self, url: str, title, silent=True): def download_file(self, url: str, title, silent=True):

View File

@@ -30,10 +30,9 @@ def anime_title_percentage_match(
Returns: Returns:
int: the percentage match int: the percentage match
""" """
if normalized_anime_title := anime_normalizer.get( possible_user_requested_anime_title = anime_normalizer.get(
possible_user_requested_anime_title possible_user_requested_anime_title, possible_user_requested_anime_title
): )
possible_user_requested_anime_title = normalized_anime_title
# compares both the romaji and english names and gets highest Score # compares both the romaji and english names and gets highest Score
title_a = str(anime["title"]["romaji"]) title_a = str(anime["title"]["romaji"])
title_b = str(anime["title"]["english"]) title_b = str(anime["title"]["english"])
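The change above swaps the walrus-operator branch for a plain `dict.get` with the original title as its own default. For context, a minimal sketch of the overall matching idea, assuming a fuzzy-matching helper such as `thefuzz` (the project's actual scorer may differ):

```python
# Minimal sketch of percentage matching against both AniList titles (assumes thefuzz).
from thefuzz import fuzz


def title_match_percentage(provider_title: str, anilist_media: dict, normalizer: dict) -> int:
    """Normalize the provider title, then keep the best score of romaji vs english."""
    provider_title = normalizer.get(provider_title, provider_title)
    romaji = str(anilist_media["title"]["romaji"])
    english = str(anilist_media["title"]["english"])
    return max(
        fuzz.ratio(provider_title.lower(), romaji.lower()),
        fuzz.ratio(provider_title.lower(), english.lower()),
    )
```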

View File

@@ -6,7 +6,7 @@ if sys.version_info < (3, 10):
) # noqa: F541 ) # noqa: F541
__version__ = "v2.3.1" __version__ = "v2.5.4"
APP_NAME = "FastAnime" APP_NAME = "FastAnime"
AUTHOR = "Benex254" AUTHOR = "Benex254"

View File

@@ -4,7 +4,6 @@ import click
from .. import __version__ from .. import __version__
from ..libs.anime_provider import SERVERS_AVAILABLE, anime_sources from ..libs.anime_provider import SERVERS_AVAILABLE, anime_sources
from ..Utility.data import anilist_sort_normalizer
from .commands import LazyGroup from .commands import LazyGroup
commands = { commands = {
@@ -39,8 +38,32 @@ signal.signal(signal.SIGINT, handle_exit)
cls=LazyGroup, cls=LazyGroup,
help="A command line application for streaming anime that provides a complete and featureful interface", help="A command line application for streaming anime that provides a complete and featureful interface",
short_help="Stream Anime", short_help="Stream Anime",
epilog="""
\b
\b\bExamples:
# example of syncplay intergration
fastanime --sync-play --server sharepoint search -t <anime-title>
\b
# --- or ---
\b
# to watch with anilist intergration
fastanime --sync-play --server sharepoint anilist
\b
# downloading dubbed anime
fastanime --dub download -t <anime>
\b
# use icons and fzf for a more elegant ui with preview
fastanime --icons --preview --fzf anilist
\b
# use icons with default ui
fastanime --icons --default anilist
\b
# viewing manga
fastanime --manga search -t <manga-title>
""",
) )
@click.version_option(__version__, "--version") @click.version_option(__version__, "--version")
@click.option("--manga", "-m", help="Enable manga mode", is_flag=True)
@click.option("--log", help="Allow logging to stdout", is_flag=True) @click.option("--log", help="Allow logging to stdout", is_flag=True)
@click.option("--log-file", help="Allow logging to a file", is_flag=True) @click.option("--log-file", help="Allow logging to a file", is_flag=True)
@click.option("--rich-traceback", help="Use rich to output tracebacks", is_flag=True) @click.option("--rich-traceback", help="Use rich to output tracebacks", is_flag=True)
@@ -116,9 +139,9 @@ signal.signal(signal.SIGINT, handle_exit)
help="Auto select anime title?", help="Auto select anime title?",
) )
@click.option( @click.option(
"-S", "--normalize-titles/--no-normalize-titles",
"--sort-by", type=bool,
type=click.Choice(anilist_sort_normalizer.keys()), # pyright: ignore help="whether to normalize anime and episode titls given by providers",
) )
@click.option("-d", "--downloads-dir", type=click.Path(), help="Downloads location") @click.option("-d", "--downloads-dir", type=click.Path(), help="Downloads location")
@click.option("--fzf", is_flag=True, help="Use fzf for the ui") @click.option("--fzf", is_flag=True, help="Use fzf for the ui")
@@ -145,12 +168,19 @@ signal.signal(signal.SIGINT, handle_exit)
type=click.Path(), type=click.Path(),
) )
@click.option( @click.option(
"--use-mpv-mod/--use-default-player", help="Whether to use python-mpv", type=bool "--use-python-mpv/--use-default-player", help="Whether to use python-mpv", type=bool
) )
@click.option("--sync-play", "-sp", help="Use sync play", is_flag=True) @click.option("--sync-play", "-sp", help="Use sync play", is_flag=True)
@click.option(
"--player",
"-P",
help="the player to use when streaming",
type=click.Choice(["mpv", "vlc"]),
)
@click.pass_context @click.pass_context
def run_cli( def run_cli(
ctx: click.Context, ctx: click.Context,
manga,
log, log,
log_file, log_file,
rich_traceback, rich_traceback,
@@ -165,7 +195,7 @@ def run_cli(
quality, quality,
auto_next, auto_next,
auto_select, auto_select,
sort_by, normalize_titles,
downloads_dir, downloads_dir,
fzf, fzf,
default, default,
@@ -178,12 +208,14 @@ def run_cli(
rofi_theme, rofi_theme,
rofi_theme_confirm, rofi_theme_confirm,
rofi_theme_input, rofi_theme_input,
use_mpv_mod, use_python_mpv,
sync_play, sync_play,
player,
): ):
from .config import Config from .config import Config
ctx.obj = Config() ctx.obj = Config()
ctx.obj.manga = manga
if log: if log:
import logging import logging
@@ -192,7 +224,7 @@ def run_cli(
FORMAT = "%(message)s" FORMAT = "%(message)s"
logging.basicConfig( logging.basicConfig(
level="NOTSET", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()] level=logging.DEBUG, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]
) )
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.info("logging has been initialized") logger.info("logging has been initialized")
@@ -209,6 +241,10 @@ def run_cli(
datefmt="[%d/%m/%Y@%H:%M:%S]", datefmt="[%d/%m/%Y@%H:%M:%S]",
filemode="w", filemode="w",
) )
else:
import logging
logging.basicConfig(level=logging.CRITICAL)
if rich_traceback: if rich_traceback:
from rich.traceback import install from rich.traceback import install
@@ -217,7 +253,10 @@ def run_cli(
if sync_play: if sync_play:
ctx.obj.sync_play = sync_play ctx.obj.sync_play = sync_play
if provider: if provider:
import os
ctx.obj.provider = provider ctx.obj.provider = provider
os.environ["CURRENT_FASTANIME_PROVIDER"] = provider
if server: if server:
ctx.obj.server = server ctx.obj.server = server
if format: if format:
@@ -226,8 +265,15 @@ def run_cli(
ctx.obj.sub_lang = sub_lang ctx.obj.sub_lang = sub_lang
if ctx.get_parameter_source("continue_") == click.core.ParameterSource.COMMANDLINE: if ctx.get_parameter_source("continue_") == click.core.ParameterSource.COMMANDLINE:
ctx.obj.continue_from_history = continue_ ctx.obj.continue_from_history = continue_
if ctx.get_parameter_source("player") == click.core.ParameterSource.COMMANDLINE:
ctx.obj.player = player
if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE: if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE:
ctx.obj.skip = skip ctx.obj.skip = skip
if (
ctx.get_parameter_source("normalize_titles")
== click.core.ParameterSource.COMMANDLINE
):
ctx.obj.normalize_titles = normalize_titles
if quality: if quality:
ctx.obj.quality = quality ctx.obj.quality = quality
@@ -246,20 +292,19 @@ def run_cli(
): ):
ctx.obj.auto_select = auto_select ctx.obj.auto_select = auto_select
if ( if (
ctx.get_parameter_source("use_mpv_mod") ctx.get_parameter_source("use_python_mpv")
== click.core.ParameterSource.COMMANDLINE == click.core.ParameterSource.COMMANDLINE
): ):
ctx.obj.use_mpv_mod = use_mpv_mod ctx.obj.use_python_mpv = use_python_mpv
if sort_by:
ctx.obj.sort_by = sort_by
if downloads_dir: if downloads_dir:
ctx.obj.downloads_dir = downloads_dir ctx.obj.downloads_dir = downloads_dir
if translation_type: if translation_type:
ctx.obj.translation_type = translation_type ctx.obj.translation_type = translation_type
if fzf:
ctx.obj.use_fzf = True
if default: if default:
ctx.obj.use_fzf = False ctx.obj.use_fzf = False
ctx.obj.use_rofi = False
if fzf:
ctx.obj.use_fzf = True
if preview: if preview:
ctx.obj.preview = True ctx.obj.preview = True
if no_preview: if no_preview:
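Several of the branches above use `ctx.get_parameter_source(...)` so that boolean flags only override the config when they were actually typed on the command line; otherwise Click's default value would silently clobber the value from `config.ini`. A minimal self-contained sketch of the pattern (Click 8):

```python
# Minimal sketch: only honour a flag when it was explicitly passed on the command line.
import click


@click.command()
@click.option("--skip/--no-skip", default=False, help="Skip openings/endings")
@click.pass_context
def demo(ctx: click.Context, skip: bool):
    config_skip = True  # pretend this value came from config.ini
    if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE:
        config_skip = skip  # the user explicitly passed --skip or --no-skip
    click.echo(f"skip={config_skip}")


if __name__ == "__main__":
    demo()
```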

View File

@@ -112,6 +112,7 @@ def update_app():
"pip", "pip",
"install", "install",
APP_NAME, APP_NAME,
"-U",
"--user", "--user",
"--no-warn-script-location", "--no-warn-script-location",
] ]

View File

@@ -20,6 +20,7 @@ commands = {
"completed": "completed.completed", "completed": "completed.completed",
"planning": "planning.planning", "planning": "planning.planning",
"notifier": "notifier.notifier", "notifier": "notifier.notifier",
"stats": "stats.stats",
} }
@@ -29,6 +30,53 @@ commands = {
invoke_without_command=True, invoke_without_command=True,
help="A beautiful interface that gives you access to a commplete streaming experience", help="A beautiful interface that gives you access to a commplete streaming experience",
short_help="Access all streaming options", short_help="Access all streaming options",
epilog="""
\b
\b\bExamples:
# ---- search ----
\b
# get anime with the tag of isekai
fastanime anilist search -T isekai
\b
# get anime of 2024 and sort by popularity
# that has already finished airing or is releasing
# and is not in your anime lists
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
\b
# get anime of 2024 season WINTER
fastanime anilist search -y 2024 --season WINTER
\b
# get anime genre action and tag isekai,magic
fastanime anilist search -g Action -T Isekai -T Magic
\b
# get anime of 2024 thats finished airing
fastanime anilist search -y 2024 -S FINISHED
\b
# get the most favourite anime movies
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
\b
# ---- login ----
\b
# To sign in just run
fastanime anilist login
\b
# To view your login status
fastanime anilist login --status
\b
# To erase login data
fastanime anilist login --erase
\b
# ---- notifier ----
\b
# basic form
fastanime anilist notifier
\b
# with logging to stdout
fastanime --log anilist notifier
\b
# with logging to a file. stored in the same place as your config
fastanime --log-file anilist notifier
""",
) )
@click.pass_context @click.pass_context
def anilist(ctx: click.Context): def anilist(ctx: click.Context):
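Both this `commands = {...}` mapping and the earlier `cls=LazyGroup` group defer importing each subcommand module until it is actually invoked, which keeps CLI startup fast. A minimal sketch of how such a group can work, following the lazy-loading recipe from the Click documentation (FastAnime's real `LazyGroup` may be implemented differently; `import_prefix` below is a hypothetical parameter):

```python
# Minimal sketch of a lazily importing click group; FastAnime's LazyGroup may differ.
import importlib

import click


class LazyGroup(click.Group):
    def __init__(self, *args, lazy_subcommands=None, import_prefix="", **kwargs):
        super().__init__(*args, **kwargs)
        # maps command name -> "module.attribute", like the commands dict above
        self.lazy_subcommands = lazy_subcommands or {}
        self.import_prefix = import_prefix  # package that holds the subcommand modules

    def list_commands(self, ctx):
        return sorted(super().list_commands(ctx) + list(self.lazy_subcommands))

    def get_command(self, ctx, cmd_name):
        if cmd_name in self.lazy_subcommands:
            module_name, attr = self.lazy_subcommands[cmd_name].split(".", 1)
            module = importlib.import_module(f"{self.import_prefix}.{module_name}")
            return getattr(module, attr)  # the click.Command object, imported on demand
        return super().get_command(ctx, cmd_name)
```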

View File

@@ -7,16 +7,23 @@ if TYPE_CHECKING:
@click.command(help="View anime you completed") @click.command(help="View anime you completed")
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def completed(config: "Config"): def completed(config: "Config", dump_json):
from sys import exit
from ....anilist import AniList from ....anilist import AniList
from ...interfaces import anilist_interfaces from ...utils.tools import FastAnimeRuntimeState
from ...utils.tools import FastAnimeRuntimeState, exit_app
if not config.user: if not config.user:
print("Not authenticated") print("Not authenticated")
print("Please run: fastanime anilist loggin") print("Please run: fastanime anilist loggin")
exit_app() exit(1)
anime_list = AniList.get_anime_list("COMPLETED") anime_list = AniList.get_anime_list("COMPLETED")
if not anime_list or not anime_list[1]: if not anime_list or not anime_list[1]:
return return
@@ -27,6 +34,13 @@ def completed(config: "Config"):
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"] for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
] # pyright:ignore ] # pyright:ignore
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_list[1] import json
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_list[1]))
else:
from ...interfaces import anilist_interfaces
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
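The `--dump-json` flag added to these list commands prints the raw AniList payload instead of opening the interactive menu, which makes the output scriptable. Below is a minimal sketch of consuming it from another Python program, assuming the `fastanime anilist completed --dump-json` invocation shown above and AniList's usual title fields (romaji/english); adjust the field access if the payload differs.

import json
import subprocess

# Run the command added in this diff and capture its JSON output.
result = subprocess.run(
    ["fastanime", "anilist", "completed", "--dump-json"],
    capture_output=True,
    text=True,
    check=True,
)
payload = json.loads(result.stdout)

# The command rewrites Page["media"] from the media list before dumping,
# so the watched entries live under data.Page.media.
for media in payload["data"]["Page"]["media"]:
    title = media.get("title", {})
    print(title.get("romaji") or title.get("english"))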

View File

@@ -7,26 +7,40 @@ if TYPE_CHECKING:
@click.command(help="View anime you dropped") @click.command(help="View anime you dropped")
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def dropped(config: "Config"): def dropped(config: "Config", dump_json):
from sys import exit
from ....anilist import AniList from ....anilist import AniList
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState, exit_app
if not config.user: if not config.user:
print("Not authenticated") print("Not authenticated")
print("Please run: fastanime anilist loggin") print("Please run: fastanime anilist loggin")
exit_app() exit(1)
anime_list = AniList.get_anime_list("DROPPED") anime_list = AniList.get_anime_list("DROPPED")
if not anime_list: if not anime_list:
return exit(1)
if not anime_list[0] or not anime_list[1]: if not anime_list[0] or not anime_list[1]:
return exit(1)
media = [ media = [
mediaListItem["media"] mediaListItem["media"]
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"] for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
] # pyright:ignore ] # pyright:ignore
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_list[1] import json
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_list[1]))
else:
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -5,14 +5,30 @@ import click
help="Fetch the top 15 most favourited anime from anilist", help="Fetch the top 15 most favourited anime from anilist",
short_help="View most favourited anime", short_help="View most favourited anime",
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def favourites(config): def favourites(config, dump_json):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
anime_data = AniList.get_most_favourite() anime_data = AniList.get_most_favourite()
if anime_data[0]: if anime_data[0]:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_data[1] import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_data[1]))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)

View File

@@ -11,11 +11,11 @@ if TYPE_CHECKING:
@click.option("--erase", "-e", help="Erase your login details", is_flag=True) @click.option("--erase", "-e", help="Erase your login details", is_flag=True)
@click.pass_obj @click.pass_obj
def login(config: "Config", status, erase): def login(config: "Config", status, erase):
from sys import exit
from rich import print from rich import print
from rich.prompt import Confirm, Prompt from rich.prompt import Confirm, Prompt
from ...utils.tools import exit_app
if status: if status:
is_logged_in = True if config.user else False is_logged_in = True if config.user else False
message = ( message = (
@@ -23,16 +23,16 @@ def login(config: "Config", status, erase):
) )
print(message) print(message)
print(config.user) print(config.user)
exit_app() exit(0)
elif erase: elif erase:
if Confirm.ask( if Confirm.ask(
"Are you sure you want to erase your login status", default=False "Are you sure you want to erase your login status", default=False
): ):
config.update_user({}) config.update_user({})
print("Success") print("Success")
exit_app(0) exit(0)
else: else:
exit_app(1) exit(1)
else: else:
from click import launch from click import launch
@@ -41,7 +41,7 @@ def login(config: "Config", status, erase):
if config.user: if config.user:
print("Already logged in :confused:") print("Already logged in :confused:")
if not Confirm.ask("or would you like to reloggin", default=True): if not Confirm.ask("or would you like to reloggin", default=True):
exit_app() exit(0)
# ---- new login ----- # ---- new login -----
print( print(
f"A browser session will be opened ( [link]{config.fastanime_anilist_app_login_url}[/link] )", f"A browser session will be opened ( [link]{config.fastanime_anilist_app_login_url}[/link] )",
@@ -52,10 +52,10 @@ def login(config: "Config", status, erase):
user = AniList.login_user(token) user = AniList.login_user(token)
if not user: if not user:
print("Sth went wrong", user) print("Sth went wrong", user)
exit_app() exit(1)
return return
user["token"] = token user["token"] = token
config.update_user(user) config.update_user(user)
print("Successfully saved credentials") print("Successfully saved credentials")
print(user) print(user)
exit_app() exit(0)

View File

@@ -13,9 +13,15 @@ def notifier(config: "Config"):
import logging import logging
import os import os
import time import time
from sys import exit
import requests import requests
from plyer import notification
try:
from plyer import notification
except ImportError:
print("Please install plyer to use this command")
exit(1)
from ....anilist import AniList from ....anilist import AniList
from ....constants import APP_CACHE_DIR, APP_DATA_DIR, APP_NAME, ICON_PATH, PLATFORM from ....constants import APP_CACHE_DIR, APP_DATA_DIR, APP_NAME, ICON_PATH, PLATFORM
@@ -30,7 +36,7 @@ def notifier(config: "Config"):
if not config.user: if not config.user:
print("Not Authenticated") print("Not Authenticated")
print("Run the following to get started: fastanime anilist loggin") print("Run the following to get started: fastanime anilist loggin")
return exit(1)
run = True run = True
# WARNING: Mess around with this value at your own risk # WARNING: Mess around with this value at your own risk
timeout = 2 # time is in minutes timeout = 2 # time is in minutes

View File

@@ -7,26 +7,40 @@ if TYPE_CHECKING:
@click.command(help="View anime you paused on watching") @click.command(help="View anime you paused on watching")
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def paused(config: "Config"): def paused(config: "Config", dump_json):
from sys import exit
from ....anilist import AniList from ....anilist import AniList
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState, exit_app
if not config.user: if not config.user:
print("Not authenticated") print("Not authenticated")
print("Please run: fastanime anilist loggin") print("Please run: fastanime anilist loggin")
exit_app() exit(1)
anime_list = AniList.get_anime_list("PAUSED") anime_list = AniList.get_anime_list("PAUSED")
if not anime_list: if not anime_list:
return exit(1)
if not anime_list[0] or not anime_list[1]: if not anime_list[0] or not anime_list[1]:
return exit(1)
media = [ media = [
mediaListItem["media"] mediaListItem["media"]
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"] for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
] # pyright:ignore ] # pyright:ignore
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
anilist_config = FastAnimeRuntimeState() if dump_json:
anilist_config.data = anime_list[1] import json
anilist_interfaces.anilist_results_menu(config, anilist_config)
print(json.dumps(anime_list[1]))
else:
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState
anilist_config = FastAnimeRuntimeState()
anilist_config.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, anilist_config)

View File

@@ -7,26 +7,40 @@ if TYPE_CHECKING:
@click.command(help="View anime you are planning on watching") @click.command(help="View anime you are planning on watching")
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def planning(config: "Config"): def planning(config: "Config", dump_json):
from sys import exit
from ....anilist import AniList from ....anilist import AniList
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState, exit_app
if not config.user: if not config.user:
print("Not authenticated") print("Not authenticated")
print("Please run: fastanime anilist loggin") print("Please run: fastanime anilist loggin")
exit_app() exit(1)
anime_list = AniList.get_anime_list("PLANNING") anime_list = AniList.get_anime_list("PLANNING")
if not anime_list: if not anime_list:
return exit(1)
if not anime_list[0] or not anime_list[1]: if not anime_list[0] or not anime_list[1]:
return exit(1)
media = [ media = [
mediaListItem["media"] mediaListItem["media"]
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"] for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
] # pyright:ignore ] # pyright:ignore
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_list[1] import json
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_list[1]))
else:
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -4,14 +4,30 @@ import click
@click.command( @click.command(
help="Fetch the top 15 most popular anime", short_help="View most popular anime" help="Fetch the top 15 most popular anime", short_help="View most popular anime"
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def popular(config): def popular(config, dump_json):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
anime_data = AniList.get_most_popular() anime_data = AniList.get_most_popular()
if anime_data[0]: if anime_data[0]:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_data[1] import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_data[1]))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)

View File

@@ -5,23 +5,35 @@ import click
help="Get random anime from anilist based on a range of anilist anime ids that are seected at random", help="Get random anime from anilist based on a range of anilist anime ids that are seected at random",
short_help="View random anime", short_help="View random anime",
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def random_anime(config): def random_anime(config, dump_json):
import random import random
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
random_anime = range(1, 15000) random_anime = range(1, 100000)
random_anime = random.sample(random_anime, k=50) random_anime = random.sample(random_anime, k=50)
anime_data = AniList.search(id_in=list(random_anime)) anime_data = AniList.search(id_in=list(random_anime))
if anime_data[0]: if anime_data[0]:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_data[1] import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_data[1]))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else: else:
print(anime_data[1]) exit(1)

View File

@@ -5,14 +5,30 @@ import click
help="Fetch the 15 most recently updated anime from anilist that are currently releasing", help="Fetch the 15 most recently updated anime from anilist that are currently releasing",
short_help="View recently updated anime", short_help="View recently updated anime",
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def recent(config): def recent(config, dump_json):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
anime_data = AniList.get_most_recently_updated() anime_data = AniList.get_most_recently_updated()
if anime_data[0]: if anime_data[0]:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_data[1] import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_data[1]))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)

View File

@@ -7,26 +7,40 @@ if TYPE_CHECKING:
@click.command(help="View anime you are rewatching") @click.command(help="View anime you are rewatching")
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def rewatching(config: "Config"): def rewatching(config: "Config", dump_json):
from sys import exit
from ....anilist import AniList from ....anilist import AniList
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState, exit_app
if not config.user: if not config.user:
print("Not authenticated") print("Not authenticated")
print("Please run: fastanime anilist loggin") print("Please run: fastanime anilist loggin")
exit_app() exit(1)
anime_list = AniList.get_anime_list("REPEATING") anime_list = AniList.get_anime_list("REPEATING")
if not anime_list: if not anime_list:
return exit(1)
if not anime_list[0] or not anime_list[1]: if not anime_list[0] or not anime_list[1]:
return exit(1)
media = [ media = [
mediaListItem["media"] mediaListItem["media"]
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"] for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
] # pyright:ignore ] # pyright:ignore
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_list[1] import json
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_list[1]))
else:
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -4,14 +4,30 @@ import click
@click.command( @click.command(
help="Fetch the 15 most scored anime", short_help="View most scored anime" help="Fetch the 15 most scored anime", short_help="View most scored anime"
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def scores(config): def scores(config, dump_json):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
anime_data = AniList.get_most_scored() anime_data = AniList.get_most_scored()
if anime_data[0]: if anime_data[0]:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.data = anime_data[1] import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_data[1]))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)

View File

@@ -2,20 +2,573 @@ import click
from ...completion_functions import anime_titles_shell_complete from ...completion_functions import anime_titles_shell_complete
tags_available = {
"Cast": ["Polyamorous"],
"Cast Main Cast": [
"Anti-Hero",
"Elderly Protagonist",
"Ensemble Cast",
"Estranged Family",
"Female Protagonist",
"Male Protagonist",
"Primarily Adult Cast",
"Primarily Animal Cast",
"Primarily Child Cast",
"Primarily Female Cast",
"Primarily Male Cast",
"Primarily Teen Cast",
],
"Cast Traits": [
"Age Regression",
"Agender",
"Aliens",
"Amnesia",
"Angels",
"Anthropomorphism",
"Aromantic",
"Arranged Marriage",
"Artificial Intelligence",
"Asexual",
"Butler",
"Centaur",
"Chimera",
"Chuunibyou",
"Clone",
"Cosplay",
"Cowboys",
"Crossdressing",
"Cyborg",
"Delinquents",
"Demons",
"Detective",
"Dinosaurs",
"Disability",
"Dissociative Identities",
"Dragons",
"Dullahan",
"Elf",
"Fairy",
"Femboy",
"Ghost",
"Goblin",
"Gods",
"Gyaru",
"Hikikomori",
"Homeless",
"Idol",
"Kemonomimi",
"Kuudere",
"Maids",
"Mermaid",
"Monster Boy",
"Monster Girl",
"Nekomimi",
"Ninja",
"Nudity",
"Nun",
"Office Lady",
"Oiran",
"Ojou-sama",
"Orphan",
"Pirates",
"Robots",
"Samurai",
"Shrine Maiden",
"Skeleton",
"Succubus",
"Tanned Skin",
"Teacher",
"Tomboy",
"Transgender",
"Tsundere",
"Twins",
"Vampire",
"Veterinarian",
"Vikings",
"Villainess",
"VTuber",
"Werewolf",
"Witch",
"Yandere",
"Zombie",
],
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
"Setting": ["Matriarchy"],
"Setting Scene": [
"Bar",
"Boarding School",
"Circus",
"Coastal",
"College",
"Desert",
"Dungeon",
"Foreign",
"Inn",
"Konbini",
"Natural Disaster",
"Office",
"Outdoor",
"Prison",
"Restaurant",
"Rural",
"School",
"School Club",
"Snowscape",
"Urban",
"Work",
],
"Setting Time": [
"Achronological Order",
"Anachronism",
"Ancient China",
"Dystopian",
"Historical",
"Time Skip",
],
"Setting Universe": [
"Afterlife",
"Alternate Universe",
"Augmented Reality",
"Omegaverse",
"Post-Apocalyptic",
"Space",
"Urban Fantasy",
"Virtual World",
],
"Technical": [
"4-koma",
"Achromatic",
"Advertisement",
"Anthology",
"CGI",
"Episodic",
"Flash",
"Full CGI",
"Full Color",
"No Dialogue",
"Non-fiction",
"POV",
"Puppetry",
"Rotoscoping",
"Stop Motion",
],
"Theme Action": [
"Archery",
"Battle Royale",
"Espionage",
"Fugitive",
"Guns",
"Martial Arts",
"Spearplay",
"Swordplay",
],
"Theme Arts": [
"Acting",
"Calligraphy",
"Classic Literature",
"Drawing",
"Fashion",
"Food",
"Makeup",
"Photography",
"Rakugo",
"Writing",
],
"Theme Arts-Music": [
"Band",
"Classical Music",
"Dancing",
"Hip-hop Music",
"Jazz Music",
"Metal Music",
"Musical Theater",
"Rock Music",
],
"Theme Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
"Theme Drama": [
"Bullying",
"Class Struggle",
"Coming of Age",
"Conspiracy",
"Eco-Horror",
"Fake Relationship",
"Kingdom Management",
"Rehabilitation",
"Revenge",
"Suicide",
"Tragedy",
],
"Theme Fantasy": [
"Alchemy",
"Body Swapping",
"Cultivation",
"Fairy Tale",
"Henshin",
"Isekai",
"Kaiju",
"Magic",
"Mythology",
"Necromancy",
"Shapeshifting",
"Steampunk",
"Super Power",
"Superhero",
"Wuxia",
"Youkai",
],
"Theme Game": ["Board Game", "E-Sports", "Video Games"],
"Theme Game-Card & Board Game": [
"Card Battle",
"Go",
"Karuta",
"Mahjong",
"Poker",
"Shogi",
],
"Theme Game-Sport": [
"Acrobatics",
"Airsoft",
"American Football",
"Athletics",
"Badminton",
"Baseball",
"Basketball",
"Bowling",
"Boxing",
"Cheerleading",
"Cycling",
"Fencing",
"Fishing",
"Fitness",
"Football",
"Golf",
"Handball",
"Ice Skating",
"Judo",
"Lacrosse",
"Parkour",
"Rugby",
"Scuba Diving",
"Skateboarding",
"Sumo",
"Surfing",
"Swimming",
"Table Tennis",
"Tennis",
"Volleyball",
"Wrestling",
],
"Theme Other": [
"Adoption",
"Animals",
"Astronomy",
"Autobiographical",
"Biographical",
"Body Horror",
"Cannibalism",
"Chibi",
"Cosmic Horror",
"Crime",
"Crossover",
"Death Game",
"Denpa",
"Drugs",
"Economics",
"Educational",
"Environmental",
"Ero Guro",
"Filmmaking",
"Found Family",
"Gambling",
"Gender Bending",
"Gore",
"Language Barrier",
"LGBTQ+ Themes",
"Lost Civilization",
"Marriage",
"Medicine",
"Memory Manipulation",
"Meta",
"Mountaineering",
"Noir",
"Otaku Culture",
"Pandemic",
"Philosophy",
"Politics",
"Proxy Battle",
"Psychosexual",
"Reincarnation",
"Religion",
"Royal Affairs",
"Slavery",
"Software Development",
"Survival",
"Terrorism",
"Torture",
"Travel",
"War",
],
"Theme Other-Organisations": [
"Assassins",
"Criminal Organization",
"Cult",
"Firefighters",
"Gangs",
"Mafia",
"Military",
"Police",
"Triads",
"Yakuza",
],
"Theme Other-Vehicle": [
"Aviation",
"Cars",
"Mopeds",
"Motorcycles",
"Ships",
"Tanks",
"Trains",
],
"Theme Romance": [
"Age Gap",
"Bisexual",
"Boys' Love",
"Female Harem",
"Heterosexual",
"Love Triangle",
"Male Harem",
"Matchmaking",
"Mixed Gender Harem",
"Teens' Love",
"Unrequited Love",
"Yuri",
],
"Theme Sci Fi": [
"Cyberpunk",
"Space Opera",
"Time Loop",
"Time Manipulation",
"Tokusatsu",
],
"Theme Sci Fi-Mecha": ["Real Robot", "Super Robot"],
"Theme Slice of Life": [
"Agriculture",
"Cute Boys Doing Cute Things",
"Cute Girls Doing Cute Things",
"Family Life",
"Horticulture",
"Iyashikei",
"Parenthood",
],
}
tags_available_list = []
for tag_category, tags_in_category in tags_available.items():
tags_available_list.extend(tags_in_category)
@click.command( @click.command(
help="Search for anime using anilists api and get top ~50 results", help="Search for anime using anilists api and get top ~50 results",
short_help="Search for anime", short_help="Search for anime",
) )
@click.argument("title", shell_complete=anime_titles_shell_complete) @click.option("--title", "-t", shell_complete=anime_titles_shell_complete)
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.option(
"--season",
help="The season the media was released",
type=click.Choice(["WINTER", "SPRING", "SUMMER", "FALL"]),
)
@click.option(
"--status",
"-S",
help="The media status of the anime",
multiple=True,
type=click.Choice(
["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]
),
)
@click.option(
"--sort",
"-s",
help="What to sort the search results on",
type=click.Choice(
[
"ID",
"ID_DESC",
"TITLE_ROMAJI",
"TITLE_ROMAJI_DESC",
"TITLE_ENGLISH",
"TITLE_ENGLISH_DESC",
"TITLE_NATIVE",
"TITLE_NATIVE_DESC",
"TYPE",
"TYPE_DESC",
"FORMAT",
"FORMAT_DESC",
"START_DATE",
"START_DATE_DESC",
"END_DATE",
"END_DATE_DESC",
"SCORE",
"SCORE_DESC",
"POPULARITY",
"POPULARITY_DESC",
"TRENDING",
"TRENDING_DESC",
"EPISODES",
"EPISODES_DESC",
"DURATION",
"DURATION_DESC",
"STATUS",
"STATUS_DESC",
"CHAPTERS",
"CHAPTERS_DESC",
"VOLUMES",
"VOLUMES_DESC",
"UPDATED_AT",
"UPDATED_AT_DESC",
"SEARCH_MATCH",
"FAVOURITES",
"FAVOURITES_DESC",
]
),
)
@click.option(
"--genres",
"-g",
multiple=True,
help="the genres to filter by",
type=click.Choice(
[
"Action",
"Adventure",
"Comedy",
"Drama",
"Ecchi",
"Fantasy",
"Horror",
"Mahou Shoujo",
"Mecha",
"Music",
"Mystery",
"Psychological",
"Romance",
"Sci-Fi",
"Slice of Life",
"Sports",
"Supernatural",
"Thriller",
"Hentai",
]
),
)
@click.option(
"--tags",
"-T",
multiple=True,
help="the tags to filter by",
type=click.Choice(tags_available_list),
)
@click.option(
"--media-format",
"-f",
multiple=True,
help="Media format",
type=click.Choice(
["TV", "TV_SHORT", "MOVIE", "SPECIAL", "OVA", "MUSIC", "NOVEL", "ONE_SHOT"]
),
)
@click.option(
"--year",
"-y",
type=click.Choice(
[
"1900",
"1910",
"1920",
"1930",
"1940",
"1950",
"1960",
"1970",
"1980",
"1990",
"2000",
"2004",
"2005",
"2006",
"2007",
"2008",
"2009",
"2010",
"2011",
"2012",
"2013",
"2014",
"2015",
"2016",
"2017",
"2018",
"2019",
"2020",
"2021",
"2022",
"2023",
"2024",
]
),
help="the year the media was released",
)
@click.option(
"--on-list/--not-on-list",
"-L/-no-L",
help="Whether the anime should be in your list or not",
type=bool,
)
@click.pass_obj @click.pass_obj
def search(config, title): def search(
config,
title,
dump_json,
season,
status,
sort,
genres,
tags,
media_format,
year,
on_list,
):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
success, search_results = AniList.search(title) success, search_results = AniList.search(
query=title,
sort=sort,
status_in=list(status),
genre_in=list(genres),
season=season,
tag_in=list(tags),
seasonYear=year,
format_in=list(media_format),
on_list=on_list,
)
if success: if success:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = search_results import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(search_results))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = search_results
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)
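Several of the new search filters (`--status`, `--genres`, `--tags`, `--media-format`) are repeatable, so anything driving the command programmatically has to emit one flag per value. Below is a rough sketch under that assumption, using only the options shown above; the `anilist_search` helper itself is hypothetical.

import json
import subprocess

def anilist_search(title=None, *, year=None, season=None, sort=None,
                   statuses=(), genres=(), tags=(), formats=()):
    # Build the argv for the new `fastanime anilist search` interface.
    argv = ["fastanime", "anilist", "search", "--dump-json"]
    if title:
        argv += ["-t", title]
    if year:
        argv += ["-y", str(year)]
    if season:
        argv += ["--season", season]
    if sort:
        argv += ["-s", sort]
    for status in statuses:  # repeatable options: one flag per value
        argv += ["-S", status]
    for genre in genres:
        argv += ["-g", genre]
    for tag in tags:
        argv += ["-T", tag]
    for media_format in formats:
        argv += ["-f", media_format]
    out = subprocess.run(argv, capture_output=True, text=True, check=True)
    return json.loads(out.stdout)

# e.g. the epilog's "2024, sorted by popularity" example, plus an Isekai tag:
results = anilist_search(year=2024, sort="POPULARITY_DESC", tags=("Isekai",))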

View File

@@ -0,0 +1,63 @@
from typing import TYPE_CHECKING
import click
if TYPE_CHECKING:
from ...config import Config
@click.command(help="Print out your anilist stats")
@click.pass_obj
def stats(
config: "Config",
):
import shutil
import subprocess
from sys import exit
from rich.console import Console
console = Console()
from rich.markdown import Markdown
from rich.panel import Panel
from ....anilist import AniList
user_data = AniList.get_user_info()
if not user_data[0] or not user_data[1]:
print("Failed to get user info")
print(user_data[1])
exit(1)
KITTEN_EXECUTABLE = shutil.which("kitten")
if not KITTEN_EXECUTABLE:
print("Kitten not found")
exit(1)
image_url = user_data[1]["data"]["User"]["avatar"]["medium"]
user_name = user_data[1]["data"]["User"]["name"]
about = user_data[1]["data"]["User"]["about"] or ""
console.clear()
image_x = int(console.size.width * 0.1)
image_y = int(console.size.height * 0.1)
img_w = console.size.width // 3
img_h = console.size.height // 3
image_process = subprocess.run(
[
KITTEN_EXECUTABLE,
"icat",
"--clear",
"--place",
f"{img_w}x{img_h}@{image_x}x{image_y}",
image_url,
],
)
if not image_process.returncode == 0:
print("failed to get image from icat")
exit(1)
console.print(
Panel(
Markdown(about),
title=user_name,
)
)

View File

@@ -5,14 +5,30 @@ import click
help="Fetch the top 15 anime that are currently trending", help="Fetch the top 15 anime that are currently trending",
short_help="Trending anime 🔥🔥🔥", short_help="Trending anime 🔥🔥🔥",
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def trending(config): def trending(config, dump_json):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
success, data = AniList.get_trending() success, data = AniList.get_trending()
if success: if success:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = data import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(data))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)

View File

@@ -4,14 +4,30 @@ import click
@click.command( @click.command(
help="Fetch the 15 most anticipited anime", short_help="View upcoming anime" help="Fetch the 15 most anticipited anime", short_help="View upcoming anime"
) )
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def upcoming(config): def upcoming(config, dump_json):
from ....anilist import AniList from ....anilist import AniList
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
success, data = AniList.get_upcoming_anime() success, data = AniList.get_upcoming_anime()
if success: if success:
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = data import json
anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(data))
else:
from ...interfaces.anilist_interfaces import anilist_results_menu
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = data
anilist_results_menu(config, fastanime_runtime_state)
else:
from sys import exit
exit(1)

View File

@@ -7,26 +7,40 @@ if TYPE_CHECKING:
@click.command(help="View anime you are watching") @click.command(help="View anime you are watching")
@click.option(
"--dump-json",
"-d",
is_flag=True,
help="Only print out the results dont open anilist menu",
)
@click.pass_obj @click.pass_obj
def watching(config: "Config"): def watching(config: "Config", dump_json):
from sys import exit
from ....anilist import AniList from ....anilist import AniList
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState, exit_app
if not config.user: if not config.user:
print("Not authenticated") print("Not authenticated")
print("Please run: fastanime anilist loggin") print("Please run: fastanime anilist loggin")
exit_app() exit(1)
anime_list = AniList.get_anime_list("CURRENT") anime_list = AniList.get_anime_list("CURRENT")
if not anime_list: if not anime_list:
return exit(1)
if not anime_list[0] or not anime_list[1]: if not anime_list[0] or not anime_list[1]:
return exit(1)
media = [ media = [
mediaListItem["media"] mediaListItem["media"]
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"] for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
] # pyright:ignore ] # pyright:ignore
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
fastanime_runtime_state = FastAnimeRuntimeState() if dump_json:
fastanime_runtime_state.anilist_data = anime_list[1] import json
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
print(json.dumps(anime_list[1]))
else:
from ...interfaces import anilist_interfaces
from ...utils.tools import FastAnimeRuntimeState
fastanime_runtime_state = FastAnimeRuntimeState()
fastanime_runtime_state.anilist_results_data = anime_list[1]
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)

View File

@@ -1,7 +1,24 @@
import click import click
@click.command(help="Helper command to manage cache") @click.command(
help="Helper command to manage cache",
epilog="""
\b
\b\bExamples:
# delete everything in the cache dir
fastanime cache --clean
\b
# print the path to the cache dir and exit
fastanime cache --path
\b
# print the current size of the cache dir and exit
fastanime cache --size
\b
# open the cache dir and exit
fastanime cache
""",
)
@click.option("--clean", help="Clean the cache dir", is_flag=True) @click.option("--clean", help="Clean the cache dir", is_flag=True)
@click.option("--path", help="The path to the cache dir", is_flag=True) @click.option("--path", help="The path to the cache dir", is_flag=True)
@click.option("--size", help="The size of the cache dir", is_flag=True) @click.option("--size", help="The size of the cache dir", is_flag=True)

View File

@@ -1,7 +1,24 @@
import click import click
@click.command(help="Helper command to get shell completions") @click.command(
help="Helper command to get shell completions",
epilog="""
\b
\b\bExamples:
# try to detect your shell and print completions
fastanime completions
\b
# print fish completions
fastanime completions --fish
\b
# print bash completions
fastanime completions --bash
\b
# print zsh completions
fastanime completions --zsh
""",
)
@click.option("--fish", is_flag=True, help="print fish completions") @click.option("--fish", is_flag=True, help="print fish completions")
@click.option("--zsh", is_flag=True, help="print zsh completions") @click.option("--zsh", is_flag=True, help="print zsh completions")
@click.option("--bash", is_flag=True, help="print bash completions") @click.option("--bash", is_flag=True, help="print bash completions")

View File

@@ -7,8 +7,27 @@ if TYPE_CHECKING:
@click.command( @click.command(
help="Opens up your fastanime config in your preferred editor", help="Manage your config with ease",
short_help="Edit your config", short_help="Edit your config",
epilog="""
\b
\b\bExamples:
# Edit your config in your default editor
# NB: If it opens vim or vi exit with `:q`
fastanime config
\b
# get the path of the config file
fastanime config --path
\b
# print desktop entry info
fastanime config --desktop-entry
\b
# update your config without opening an editor
fastanime --icons --fzf --preview config --update
\b
# view the current contents of your config
fastanime config --view
""",
) )
@click.option("--path", "-p", help="Print the config location and exit", is_flag=True) @click.option("--path", "-p", help="Print the config location and exit", is_flag=True)
@click.option( @click.option(
@@ -20,8 +39,14 @@ if TYPE_CHECKING:
help="Configure the desktop entry of fastanime", help="Configure the desktop entry of fastanime",
is_flag=True, is_flag=True,
) )
@click.option(
"--update",
"-u",
help="Persist all the config options passed to fastanime to your config file",
is_flag=True,
)
@click.pass_obj @click.pass_obj
def config(config: "Config", path, view, desktop_entry): def config(user_config: "Config", path, view, desktop_entry, update):
import sys import sys
from rich import print from rich import print
@@ -32,7 +57,7 @@ def config(config: "Config", path, view, desktop_entry):
if path: if path:
print(USER_CONFIG_PATH) print(USER_CONFIG_PATH)
elif view: elif view:
print(config) print(user_config)
elif desktop_entry: elif desktop_entry:
import os import os
import shutil import shutil
@@ -87,7 +112,9 @@ def config(config: "Config", path, view, desktop_entry):
with open(desktop_entry_path) as f: with open(desktop_entry_path) as f:
print(f"Successfully wrote \n{f.read()}") print(f"Successfully wrote \n{f.read()}")
exit_app(0) exit_app(0)
elif update:
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as file:
file.write(user_config.__str__())
print("update successfull")
else: else:
import click
click.edit(filename=USER_CONFIG_PATH) click.edit(filename=USER_CONFIG_PATH)

View File

@@ -1,4 +1,3 @@
import time
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
import click import click
@@ -12,6 +11,53 @@ if TYPE_CHECKING:
@click.command( @click.command(
help="Download anime using the anime provider for a specified range", help="Download anime using the anime provider for a specified range",
short_help="Download anime", short_help="Download anime",
epilog="""
\b
\b\bExamples:
# Download all available episodes
# multiple titles can be specified with -t option
fastanime download -t <anime-title> -t <anime-title>
# -- or --
fastanime download -t <anime-title> -t <anime-title> -r ':'
\b
# download latest episode for the two anime titles
# the number can be any number of latest episodes but a minus sign
# must be present
fastanime download -t <anime-title> -t <anime-title> -r '-1'
\b
# latest 5
fastanime download -t <anime-title> -t <anime-title> -r '-5'
\b
# Download specific episode range
# be sure to observe the range Syntax
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>:<step>'
\b
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>'
\b
fastanime download -t <anime-title> -r '<episodes-start>:'
\b
fastanime download -t <anime-title> -r ':<episodes-end>'
\b
# download specific episode
# remember python indexing starts at 0
fastanime download -t <anime-title> -r '<episode-1>:<episode>'
\b
# merge subtitles with ffmpeg to mkv format; aniwatch tends to give subs as separate files
# and don't prompt for anything
# e.g. if a file already exists in the destination, just remove it
# and clean up afterwards
# i.e. remove the original files (sub file and video file)
# and only keep the merged files
fastanime download -t <anime-title> --merge --clean --no-prompt
\b
# EOF is used since -t always expects a title
# you can supply anime titles from a file and with -t at the same time
# from stdin
echo -e "<anime-title>\\n<anime-title>\\n<anime-title>" | fastanime download -t "EOF" -r <range> -f -
\b
# from file
fastanime download -t "EOF" -r <range> -f <file-path>
""",
) )
@click.option( @click.option(
"--anime-titles", "--anime-titles",
@@ -28,8 +74,14 @@ if TYPE_CHECKING:
help="A range of episodes to download (start-end)", help="A range of episodes to download (start-end)",
) )
@click.option( @click.option(
"--force-unknown-ext", "--file",
"-f", "-f",
type=click.File(),
help="A file to read from all anime to download",
)
@click.option(
"--force-unknown-ext",
"-F",
help="This option forces yt-dlp to download extensions its not aware of", help="This option forces yt-dlp to download extensions its not aware of",
is_flag=True, is_flag=True,
) )
@@ -41,15 +93,43 @@ if TYPE_CHECKING:
default=True, default=True,
) )
@click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)") @click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)")
@click.option(
"--merge", "-m", is_flag=True, help="Merge the subfile with video using ffmpeg"
)
@click.option(
"--clean",
"-c",
is_flag=True,
help="After merging delete the original files",
)
@click.option(
"--wait-time",
"-w",
type=int,
help="The amount of time to wait after downloading is complete before the screen is completely cleared",
default=60,
)
@click.option(
"--prompt/--no-prompt",
help="Whether to prompt for anything instead just do the best thing",
default=True,
)
@click.pass_obj @click.pass_obj
def download( def download(
config: "Config", config: "Config",
anime_titles: list, anime_titles: tuple,
episode_range, episode_range,
file,
force_unknown_ext, force_unknown_ext,
silent, silent,
verbose, verbose,
merge,
clean,
wait_time,
prompt,
): ):
import time
from rich import print from rich import print
from rich.progress import Progress from rich.progress import Progress
from thefuzz import fuzz from thefuzz import fuzz
@@ -57,6 +137,7 @@ def download(
from ...AnimeProvider import AnimeProvider from ...AnimeProvider import AnimeProvider
from ...libs.anime_provider.types import Anime from ...libs.anime_provider.types import Anime
from ...libs.fzf import fzf from ...libs.fzf import fzf
from ...Utility.data import anime_normalizer
from ...Utility.downloader.downloader import downloader from ...Utility.downloader.downloader import downloader
from ..utils.tools import exit_app from ..utils.tools import exit_app
from ..utils.utils import ( from ..utils.utils import (
@@ -66,12 +147,22 @@ def download(
) )
anime_provider = AnimeProvider(config.provider) anime_provider = AnimeProvider(config.provider)
anilist_anime_info = None
translation_type = config.translation_type translation_type = config.translation_type
download_dir = config.downloads_dir download_dir = config.downloads_dir
if file:
contents = file.read()
anime_titles_from_file = tuple(
[title for title in contents.split("\n") if title]
)
file.close()
anime_titles = (*anime_titles_from_file, *anime_titles)
print(f"[green bold]Queued:[/] {anime_titles}") print(f"[green bold]Queued:[/] {anime_titles}")
for anime_title in anime_titles: for anime_title in anime_titles:
if anime_title == "EOF":
break
print(f"[green bold]Now Downloading: [/] {anime_title}") print(f"[green bold]Now Downloading: [/] {anime_title}")
# ---- search for anime ---- # ---- search for anime ----
with Progress() as progress: with Progress() as progress:
@@ -83,28 +174,43 @@ def download(
print("Search results failed") print("Search results failed")
input("Enter to retry") input("Enter to retry")
download( download(
config, anime_title, episode_range, force_unknown_ext, silent, verbose config,
anime_title,
episode_range,
file,
force_unknown_ext,
silent,
verbose,
merge,
clean,
wait_time,
prompt,
) )
return return
search_results = search_results["results"] search_results = search_results["results"]
if not search_results: if not search_results:
print("Nothing muches your search term") print("Nothing muches your search term")
exit_app(1) continue
search_results_ = { search_results_ = {
search_result["title"]: search_result for search_result in search_results search_result["title"]: search_result for search_result in search_results
} }
if config.auto_select: if config.auto_select:
search_result = max( selected_anime_title = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title) search_results_.keys(),
key=lambda title: fuzz.ratio(
anime_normalizer.get(title, title), anime_title
),
) )
print("[cyan]Auto selecting:[/] ", search_result) print("[cyan]Auto selecting:[/] ", selected_anime_title)
else: else:
choices = list(search_results_.keys()) choices = list(search_results_.keys())
if config.use_fzf: if config.use_fzf:
search_result = fzf.run(choices, "Please Select title: ", "FastAnime") selected_anime_title = fzf.run(
choices, "Please Select title", "FastAnime"
)
else: else:
search_result = fuzzy_inquirer( selected_anime_title = fuzzy_inquirer(
choices, choices,
"Please Select title", "Please Select title",
) )
@@ -113,13 +219,23 @@ def download(
with Progress() as progress: with Progress() as progress:
progress.add_task("Fetching Anime...", total=None) progress.add_task("Fetching Anime...", total=None)
anime: Anime | None = anime_provider.get_anime( anime: Anime | None = anime_provider.get_anime(
search_results_[search_result]["id"] search_results_[selected_anime_title]["id"]
) )
if not anime: if not anime:
print("Sth went wring anime no found") print("Sth went wring anime no found")
input("Enter to continue...") input("Enter to continue...")
download( download(
config, anime_title, episode_range, force_unknown_ext, silent, verbose config,
anime_title,
episode_range,
file,
force_unknown_ext,
silent,
verbose,
merge,
clean,
wait_time,
prompt,
) )
return return
@@ -153,6 +269,11 @@ def download(
else: else:
episodes_range = sorted(episodes, key=float) episodes_range = sorted(episodes, key=float)
if config.normalize_titles:
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
# lets download em # lets download em
for episode in episodes_range: for episode in episodes_range:
try: try:
@@ -163,7 +284,7 @@ def download(
with Progress() as progress: with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None) progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams( streams = anime_provider.get_episode_streams(
anime, episode, config.translation_type anime["id"], anime["title"], episode, config.translation_type
) )
if not streams: if not streams:
print("No streams skipping") print("No streams skipping")
@@ -198,7 +319,7 @@ def download(
server_name = config.server server_name = config.server
else: else:
if config.use_fzf: if config.use_fzf:
server_name = fzf.run(servers_names, "Select an link: ") server_name = fzf.run(servers_names, "Select an link")
else: else:
server_name = fuzzy_inquirer( server_name = fuzzy_inquirer(
servers_names, servers_names,
@@ -217,13 +338,26 @@ def download(
subtitles = servers[server_name]["subtitles"] subtitles = servers[server_name]["subtitles"]
episode_title = servers[server_name]["episode_title"] episode_title = servers[server_name]["episode_title"]
print(f"[purple]Now Downloading:[/] {search_result} Episode {episode}")
if anilist_anime_info:
selected_anime_title = (
anilist_anime_info["title"][config.preferred_language]
or anilist_anime_info["title"]["romaji"]
or anilist_anime_info["title"]["english"]
)
import re
for episode_detail in anilist_anime_info["episodes"]:
if re.match(f"Episode {episode} ", episode_detail["title"]):
episode_title = episode_detail["title"]
break
print(f"[purple]Now Downloading:[/] {episode_title}")
subtitles = move_preferred_subtitle_lang_to_top( subtitles = move_preferred_subtitle_lang_to_top(
subtitles, config.sub_lang subtitles, config.sub_lang
) )
downloader._download_file( downloader._download_file(
link, link,
anime["title"], selected_anime_title,
episode_title, episode_title,
download_dir, download_dir,
silent, silent,
@@ -232,10 +366,14 @@ def download(
verbose, verbose,
headers=provider_headers, headers=provider_headers,
sub=subtitles[0]["url"] if subtitles else "", sub=subtitles[0]["url"] if subtitles else "",
merge=merge,
clean=clean,
prompt=prompt,
) )
except Exception as e: except Exception as e:
print(e) print(e)
time.sleep(1) time.sleep(1)
print("Continuing...") print("Continuing...")
print("Done Downloading") print("Done Downloading")
time.sleep(wait_time)
exit_app() exit_app()
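The `-r/--episode-range` grammar documented in the epilog above is, in effect, ordinary slicing over the sorted episode list (the command inlines this logic; grab does the same). Below is a compact sketch of that mapping, with a hypothetical `parse_episode_range` helper.

# Hypothetical helper mirroring how download/grab interpret -r/--episode-range:
#   ':'              -> every available episode
#   '-1' / '-5'      -> the latest one / five (a bare number is a start index)
#   'start:end:step' -> a plain slice of the sorted episode list
def parse_episode_range(episodes, episode_range=None):
    episodes = sorted(episodes, key=float)
    if not episode_range:
        return episodes
    if ":" not in episode_range:
        return episodes[int(episode_range):]
    parts = episode_range.split(":")
    start = int(parts[0]) if parts[0].strip() else None
    end = int(parts[1]) if parts[1].strip() else None
    step = int(parts[2]) if len(parts) == 3 and parts[2].strip() else None
    return episodes[start:end:step]

episodes = ["1", "2", "3", "10", "11"]
print(parse_episode_range(episodes, "-1"))   # ['11']      (latest episode)
print(parse_episode_range(episodes, "1:3"))  # ['2', '3']  (zero-indexed, as the epilog notes)
print(parse_episode_range(episodes, ":"))    # all episodes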

View File

@@ -3,25 +3,60 @@ from typing import TYPE_CHECKING
import click import click
from ..completion_functions import downloaded_anime_titles
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
if TYPE_CHECKING: if TYPE_CHECKING:
from ..config import Config from ..config import Config
@click.command( @click.command(
help="View and watch your downloads using mpv", short_help="Watch downloads" help="View and watch your downloads using mpv",
short_help="Watch downloads",
epilog="""
\b
\b\bExamples:
fastanime downloads
\b
# view individual episodes
fastanime downloads --view-episodes
# --- or ---
fastanime downloads -v
\b
# to set seek time when using ffmpegthumbnailer for local previews
# -1 means random and is the default
fastanime downloads --time-to-seek <intRange(-1,100)>
# --- or ---
fastanime downloads -t <intRange(-1,100)>
\b
# to watch a specific title
# be sure to get the completions for the best experience
fastanime downloads --title <title>
\b
# to get the path to the configured downloads folder
fastanime downloads --path
# useful when you want to use the value for other programs
""",
) )
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True) @click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
@click.option(
"--title",
"-T",
shell_complete=downloaded_anime_titles,
help="watch a specific title",
)
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True) @click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
@click.option( @click.option(
"--ffmpegthumbnailer-seek-time", "--ffmpegthumbnailer-seek-time",
"--time-to-seek", "--time-to-seek",
"-t", "-t",
type=click.IntRange(-1, 100), type=click.IntRange(-1, 100),
help="ffmpegthumbnailer seek time [0-100]", help="ffmpegthumbnailer seek time",
) )
@click.pass_obj @click.pass_obj
def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_seek_time): def downloads(
config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
):
import os import os
from ...cli.utils.mpv import run_mpv from ...cli.utils.mpv import run_mpv
@@ -239,6 +274,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
os.listdir(anime_playlist_path), key=sort_by_episode_number os.listdir(anime_playlist_path), key=sort_by_episode_number
) )
downloaded_episodes = [*episodes, "Back"] downloaded_episodes = [*episodes, "Back"]
if config.use_fzf: if config.use_fzf:
if not config.preview: if not config.preview:
episode_title = fzf.run( episode_title = fzf.run(
@@ -257,7 +293,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
else: else:
episode_title = fuzzy_inquirer( episode_title = fuzzy_inquirer(
downloaded_episodes, downloaded_episodes,
"Enter Playlist Name: ", "Enter Playlist Name",
) )
if episode_title == "Back": if episode_title == "Back":
stream_anime() stream_anime()
@@ -268,11 +304,18 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
SyncPlayer(episode_path) SyncPlayer(episode_path)
else: else:
run_mpv(episode_path) run_mpv(
episode_path,
player=config.player,
)
stream_episode(anime_playlist_path) stream_episode(anime_playlist_path)
def stream_anime(): def stream_anime(title=None):
if config.use_fzf: if title:
from thefuzz import fuzz
playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
elif config.use_fzf:
if not config.preview: if not config.preview:
playlist_name = fzf.run( playlist_name = fzf.run(
anime_downloads, anime_downloads,
@@ -290,7 +333,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
else: else:
playlist_name = fuzzy_inquirer( playlist_name = fuzzy_inquirer(
anime_downloads, anime_downloads,
"Enter Playlist Name: ", "Enter Playlist Name",
) )
if playlist_name == "Exit": if playlist_name == "Exit":
exit_app() exit_app()
@@ -306,7 +349,10 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
SyncPlayer(playlist) SyncPlayer(playlist)
else: else:
run_mpv(playlist) run_mpv(
playlist,
player=config.player,
)
stream_anime() stream_anime()
stream_anime() stream_anime(title)
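The new `--title` option for `downloads` bypasses the picker by fuzzy-matching the supplied name against the playlist folders on disk. Below is a minimal sketch of that selection step, assuming thefuzz is installed and a flat downloads directory; the helper name and path handling are illustrative only.

import os

from thefuzz import fuzz  # same matcher the command uses

def best_playlist_match(downloads_dir: str, title: str) -> str:
    # Each downloaded anime lives in its own folder; pick the closest name.
    candidates = [
        name
        for name in os.listdir(downloads_dir)
        if os.path.isdir(os.path.join(downloads_dir, name))
    ]
    # fuzz.ratio scores similarity 0-100; max() keeps the best-scoring folder.
    return max(candidates, key=lambda name: fuzz.ratio(title, name))

# e.g. best_playlist_match(config.downloads_dir, "one piece")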

View File

@@ -11,6 +11,41 @@ if TYPE_CHECKING:
@click.command( @click.command(
help="Helper command to get streams for anime to use externally in a non-python application", help="Helper command to get streams for anime to use externally in a non-python application",
short_help="Print anime streams to standard out", short_help="Print anime streams to standard out",
epilog="""
\b
\b\bExamples:
# --- print anime info + episode streams ---
\b
# multiple titles can be specified with the -t option
fastanime grab -t <anime-title> -t <anime-title>
# -- or --
# print all available episodes
fastanime grab -t <anime-title> -r ':'
\b
# print the latest episode
fastanime grab -t <anime-title> -r '-1'
\b
# print a specific episode range
# be sure to observe the range Syntax
fastanime grab -t <anime-title> -r '<start>:<stop>'
\b
fastanime grab -t <anime-title> -r '<start>:<stop>:<step>'
\b
fastanime grab -t <anime-title> -r '<start>:'
\b
fastanime grab -t <anime-title> -r ':<end>'
\b
# --- grab options ---
\b
# print search results only
fastanime grab -t <anime-title> -r <range> --search-results-only
\b
# print anime info only
fastanime grab -t <anime-title> -r <range> --anime-info-only
\b
# print episode streams only
fastanime grab -t <anime-title> -r <range> --episode-streams-only
""",
) )
@click.option( @click.option(
"--anime-titles", "--anime-titles",
@@ -56,26 +91,19 @@ def grab(
from thefuzz import fuzz from thefuzz import fuzz
from ...AnimeProvider import AnimeProvider
logger = getLogger(__name__) logger = getLogger(__name__)
if config.manga:
manga_title = anime_titles[0]
from ...MangaProvider import MangaProvider
anime_provider = AnimeProvider(config.provider) manga_provider = MangaProvider()
search_data = manga_provider.search_for_manga(manga_title)
grabbed_animes = [] if not search_data:
for anime_title in anime_titles:
# ---- search for anime ----
search_results = anime_provider.search_for_anime(
anime_title, translation_type=config.translation_type
)
if not search_results:
exit(1) exit(1)
if search_results_only: if search_results_only:
# grab only search results skipping all lines after this print(json.dumps(search_data))
grabbed_animes.append(search_results) exit(0)
continue search_results = search_data["results"]
search_results = search_results["results"]
if not search_results: if not search_results:
logger.error("no results for your search") logger.error("no results for your search")
exit(1) exit(1)
@@ -83,83 +111,133 @@ def grab(
search_result["title"]: search_result for search_result in search_results search_result["title"]: search_result for search_result in search_results
} }
search_result = max( search_result_anime_title = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title) search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_titles[0])
) )
manga_info = manga_provider.get_manga(
# ---- fetch anime ---- search_results_[search_result_anime_title]["id"]
anime = anime_provider.get_anime(search_results_[search_result]["id"]) )
if not anime: if not manga_info:
exit(1) return
if anime_info_only: if anime_info_only:
# grab only the anime data skipping all lines after this print(json.dumps(manga_info))
grabbed_animes.append(anime) exit(0)
continue
episodes = sorted( chapter_info = manga_provider.get_chapter_thumbnails(
anime["availableEpisodesDetail"][config.translation_type], key=float manga_info["id"], str(episode_range)
) )
if not chapter_info:
exit(1)
print(json.dumps(chapter_info))
# where the magic happens
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
else:
episodes_range = sorted(episodes, key=float)
if not episode_streams_only:
grabbed_anime = dict(anime)
grabbed_anime["requested_episodes"] = episodes_range
grabbed_anime["translation_type"] = config.translation_type
grabbed_anime["episodes_streams"] = {}
else:
grabbed_anime = {}
# lets download em
for episode in episodes_range:
try:
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime, episode, config.translation_type
)
if not streams:
continue
episode_streams = {server["server"]: server for server in streams}
if episode_streams_only:
grabbed_anime[episode] = episode_streams
else:
grabbed_anime["episodes_streams"][ # pyright:ignore
episode
] = episode_streams
except Exception as e:
logger.error(e)
# grab the full data for a single title and append to the final result, or just the episode streams
grabbed_animes.append(grabbed_anime)
# print out the final result either {} or [] depending on whether more than one title is requested
if len(grabbed_animes) == 1:
print(json.dumps(grabbed_animes[0]))
else: else:
print(json.dumps(grabbed_animes)) from ...AnimeProvider import AnimeProvider
anime_provider = AnimeProvider(config.provider)
grabbed_animes = []
for anime_title in anime_titles:
# ---- search for anime ----
search_results = anime_provider.search_for_anime(
anime_title, translation_type=config.translation_type
)
if not search_results:
exit(1)
if search_results_only:
# grab only search results skipping all lines after this
grabbed_animes.append(search_results)
continue
search_results = search_results["results"]
if not search_results:
logger.error("no results for your search")
exit(1)
search_results_ = {
search_result["title"]: search_result
for search_result in search_results
}
search_result_anime_title = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
)
# ---- fetch anime ----
anime = anime_provider.get_anime(
search_results_[search_result_anime_title]["id"]
)
if not anime:
exit(1)
if anime_info_only:
# grab only the anime data skipping all lines after this
grabbed_animes.append(anime)
continue
episodes = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
)
# where the magic happens
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end)
]
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
else:
episodes_range = sorted(episodes, key=float)
if not episode_streams_only:
grabbed_anime = dict(anime)
grabbed_anime["requested_episodes"] = episodes_range
grabbed_anime["translation_type"] = config.translation_type
grabbed_anime["episodes_streams"] = {}
else:
grabbed_anime = {}
# lets download em
for episode in episodes_range:
try:
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime["id"], anime["title"], episode, config.translation_type
)
if not streams:
continue
episode_streams = {server["server"]: server for server in streams}
if episode_streams_only:
grabbed_anime[episode] = episode_streams
else:
grabbed_anime["episodes_streams"][ # pyright:ignore
episode
] = episode_streams
except Exception as e:
logger.error(e)
# grab the full data for a single title and append it to the final result or episode streams
grabbed_animes.append(grabbed_anime)
# print out the final result, either {} or [], depending on whether more than one title is requested
if len(grabbed_animes) == 1:
print(json.dumps(grabbed_animes[0]))
else:
print(json.dumps(grabbed_animes))
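For readers skimming the diff above: the --episode-range string accepted by grab (and by the search command further below) maps directly onto Python list slicing. The following is a minimal sketch of that mapping; parse_episode_range is a hypothetical helper written only to illustrate the behaviour, not a function from the codebase.

# a minimal sketch, assuming episodes is the list of episode numbers as strings;
# parse_episode_range is hypothetical and only mirrors the slicing logic above
def parse_episode_range(episodes: list[str], episode_range: str | None) -> list[str]:
    episodes = sorted(episodes, key=float)
    if not episode_range:
        return episodes
    if ":" not in episode_range:
        # a single value is treated as a start index, so "-1" yields the latest episode
        return episodes[int(episode_range):]
    parts = [p.strip() for p in episode_range.split(":")]
    start = int(parts[0]) if parts[0] else None
    stop = int(parts[1]) if len(parts) > 1 and parts[1] else None
    step = int(parts[2]) if len(parts) > 2 and parts[2] else None
    return episodes[start:stop:step]

# e.g. parse_episode_range(["1", "2", "3", "4", "5"], "1:4") -> ["2", "3", "4"]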

View File

@@ -1,12 +1,39 @@
+from typing import TYPE_CHECKING
import click
-from ...cli.config import Config
from ..completion_functions import anime_titles_shell_complete
+if TYPE_CHECKING:
+from ...cli.config import Config
@click.command(
help="This subcommand directly interacts with the provider to enable basic streaming. Useful for binging anime.",
short_help="Binge anime",
epilog="""
\b
\b\bExamples:
# basic form where you will still be prompted for the episode number
# multiple titles can be specified with the -t option
fastanime search -t <anime-title> -t <anime-title>
\b
# binge all episodes with this command
fastanime search -t <anime-title> -r ':'
\b
# watch latest episode
fastanime search -t <anime-title> -r '-1'
\b
# binge a specific episode range with this command
# be sure to observe the range Syntax
fastanime search -t <anime-title> -r '<start>:<stop>'
\b
fastanime search -t <anime-title> -r '<start>:<stop>:<step>'
\b
fastanime search -t <anime-title> -r '<start>:'
\b
fastanime search -t <anime-title> -r ':<end>'
""",
)
@click.option(
"--anime-titles",
@@ -23,215 +50,336 @@ from ..completion_functions import anime_titles_shell_complete
help="A range of episodes to binge (start-end)", help="A range of episodes to binge (start-end)",
) )
@click.pass_obj @click.pass_obj
def search(config: Config, anime_titles: str, episode_range: str): def search(config: "Config", anime_titles: str, episode_range: str):
from click import clear from click import clear
from rich import print from rich import print
from rich.progress import Progress from rich.progress import Progress
from thefuzz import fuzz from thefuzz import fuzz
from ...AnimeProvider import AnimeProvider
from ...libs.anime_provider.types import Anime
from ...libs.fzf import fzf from ...libs.fzf import fzf
from ...libs.rofi import Rofi from ...libs.rofi import Rofi
from ..utils.mpv import run_mpv
from ..utils.tools import exit_app from ..utils.tools import exit_app
from ..utils.utils import ( from ..utils.utils import fuzzy_inquirer
filter_by_quality,
fuzzy_inquirer,
move_preferred_subtitle_lang_to_top,
)
anime_provider = AnimeProvider(config.provider) if config.manga:
from InquirerPy.prompts.number import NumberPrompt
from yt_dlp.utils import sanitize_filename
from ...MangaProvider import MangaProvider
from ..utils.feh import feh_manga_viewer
manga_title = anime_titles[0]
manga_provider = MangaProvider()
search_data = manga_provider.search_for_manga(manga_title)
if not search_data:
print("No search results")
exit(1)
search_results = search_data["results"]
print(f"[green bold]Streaming:[/] {anime_titles}")
for anime_title in anime_titles:
# ---- search for anime ----
with Progress() as progress:
progress.add_task("Fetching Search Results...", total=None)
search_results = anime_provider.search_for_anime(
anime_title, config.translation_type
)
if not search_results:
print("Search results not found")
input("Enter to retry")
search(config, anime_title, episode_range)
return
search_results = search_results["results"]
if not search_results:
print("Anime not found :cry:")
exit_app()
search_results_ = { search_results_ = {
search_result["title"]: search_result for search_result in search_results sanitize_filename(search_result["title"]): search_result
for search_result in search_results
} }
if config.auto_select: if config.auto_select:
search_result = max( search_result_manga_title = max(
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title) search_results_.keys(),
key=lambda title: fuzz.ratio(title, manga_title),
) )
print("[cyan]Auto Selecting:[/] ", search_result) print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
else: else:
choices = list(search_results_.keys()) choices = list(search_results_.keys())
preview = None
if config.preview:
from ..interfaces.utils import get_fzf_manga_preview
preview = get_fzf_manga_preview(search_results)
if config.use_fzf: if config.use_fzf:
search_result = fzf.run(choices, "Please Select title: ", "FastAnime") search_result_manga_title = fzf.run(
choices, "Please Select title", preview=preview
)
elif config.use_rofi: elif config.use_rofi:
search_result = Rofi.run(choices, "Please Select Title") search_result_manga_title = Rofi.run(choices, "Please Select Title")
else: else:
search_result = fuzzy_inquirer( search_result_manga_title = fuzzy_inquirer(
choices, choices,
"Please Select Title", "Please Select Title",
) )
# ---- fetch selected anime ---- anilist_id = search_results_[search_result_manga_title]["id"]
with Progress() as progress: manga_info = manga_provider.get_manga(anilist_id)
progress.add_task("Fetching Anime...", total=None) if not manga_info:
anime: Anime | None = anime_provider.get_anime( print("No manga info")
search_results_[search_result]["id"] exit(1)
anilist_helper = None
if config.user:
from ...anilist import AniList
AniList.login_user(config.user["token"])
anilist_helper = AniList
def _manga_viewer():
chapter_number = NumberPrompt("Select a chapter number").execute()
chapter_info = manga_provider.get_chapter_thumbnails(
manga_info["id"], str(chapter_number)
) )
if not anime: if not chapter_info:
print("Sth went wring anime no found") print("No chapter info")
input("Enter to continue...") input("Enter to retry...")
search(config, anime_title, episode_range) _manga_viewer()
return return
episodes_range = [] print(
episodes: list[str] = sorted( f"[purple bold]Now Reading: [/] {search_result_manga_title} [cyan bold]Chapter:[/] {chapter_info['title']}"
anime["availableEpisodesDetail"][config.translation_type], key=float )
) feh_manga_viewer(chapter_info["thumbnails"], str(chapter_info["title"]))
if episode_range: if anilist_helper:
if ":" in episode_range: anilist_helper.update_anime_list(
ep_range_tuple = episode_range.split(":") {"mediaId": anilist_id, "progress": chapter_number}
if len(ep_range_tuple) == 3 and all(ep_range_tuple): )
episodes_start, episodes_end, step = ep_range_tuple _manga_viewer()
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step) _manga_viewer()
] else:
from ...AnimeProvider import AnimeProvider
from ...libs.anime_provider.types import Anime
from ...Utility.data import anime_normalizer
from ..utils.mpv import run_mpv
from ..utils.utils import filter_by_quality, move_preferred_subtitle_lang_to_top
anime_provider = AnimeProvider(config.provider)
anilist_anime_info = None
print(f"[green bold]Streaming:[/] {anime_titles}")
for anime_title in anime_titles:
# ---- search for anime ----
with Progress() as progress:
progress.add_task("Fetching Search Results...", total=None)
search_results = anime_provider.search_for_anime(
anime_title, config.translation_type
)
if not search_results:
print("Search results not found")
input("Enter to retry")
search(config, anime_title, episode_range)
return
search_results = search_results["results"]
if not search_results:
print("Anime not found :cry:")
exit_app()
search_results_ = {
search_result["title"]: search_result
for search_result in search_results
}
if config.auto_select:
search_result_manga_title = max(
search_results_.keys(),
key=lambda title: fuzz.ratio(
anime_normalizer.get(title, title), anime_title
),
)
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else: else:
episodes_range = episodes[int(episode_range) :] choices = list(search_results_.keys())
episodes_range = iter(episodes_range)
def stream_anime():
clear()
episode = None
if episodes_range:
try:
episode = next(episodes_range) # pyright:ignore
print(
f"[cyan]Auto selecting:[/] {search_result} [cyan]Episode:[/] {episode}"
)
except StopIteration:
print("[green]Completed binge sequence[/]:smile:")
return
if not episode or episode not in episodes:
choices = [*episodes, "end"]
if config.use_fzf: if config.use_fzf:
episode = fzf.run( search_result_manga_title = fzf.run(
choices, "Select an episode: ", header=search_result choices, "Please Select title", "FastAnime"
) )
elif config.use_rofi: elif config.use_rofi:
episode = Rofi.run(choices, "Select an episode") search_result_manga_title = Rofi.run(choices, "Please Select Title")
else: else:
episode = fuzzy_inquirer( search_result_manga_title = fuzzy_inquirer(
choices, choices,
"Select episode", "Please Select Title",
) )
if episode == "end":
return
# ---- fetch streams ---- # ---- fetch selected anime ----
with Progress() as progress: with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None) progress.add_task("Fetching Anime...", total=None)
streams = anime_provider.get_episode_streams( anime: Anime | None = anime_provider.get_anime(
anime, episode, config.translation_type search_results_[search_result_manga_title]["id"]
) )
if not streams:
print("Failed to get streams") if not anime:
print("Sth went wring anime no found")
input("Enter to continue...")
search(config, anime_title, episode_range)
return
episodes_range = []
episodes: list[str] = sorted(
anime["availableEpisodesDetail"][config.translation_type], key=float
)
if episode_range:
if ":" in episode_range:
ep_range_tuple = episode_range.split(":")
if len(ep_range_tuple) == 3 and all(ep_range_tuple):
episodes_start, episodes_end, step = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end) : int(step)
]
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
episodes_start, episodes_end = ep_range_tuple
episodes_range = episodes[
int(episodes_start) : int(episodes_end)
]
else:
episodes_start, episodes_end = ep_range_tuple
if episodes_start.strip():
episodes_range = episodes[int(episodes_start) :]
elif episodes_end.strip():
episodes_range = episodes[: int(episodes_end)]
else:
episodes_range = episodes
else:
episodes_range = episodes[int(episode_range) :]
episodes_range = iter(episodes_range)
if config.normalize_titles:
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
def stream_anime(anime: "Anime"):
clear()
episode = None
if episodes_range:
try:
episode = next(episodes_range) # pyright:ignore
print(
f"[cyan]Auto selecting:[/] {search_result_manga_title} [cyan]Episode:[/] {episode}"
)
except StopIteration:
print("[green]Completed binge sequence[/]:smile:")
return
if not episode or episode not in episodes:
choices = [*episodes, "end"]
if config.use_fzf:
episode = fzf.run(
choices,
"Select an episode",
header=search_result_manga_title,
)
elif config.use_rofi:
episode = Rofi.run(choices, "Select an episode")
else:
episode = fuzzy_inquirer(
choices,
"Select episode",
)
if episode == "end":
return return
try: # ---- fetch streams ----
# ---- fetch servers ---- with Progress() as progress:
if config.server == "top": progress.add_task("Fetching Episode Streams...", total=None)
with Progress() as progress: streams = anime_provider.get_episode_streams(
progress.add_task("Fetching top server...", total=None) anime["id"], anime["title"], episode, config.translation_type
server = next(streams, None) )
if not server: if not streams:
print("Sth went wrong when fetching the episode") print("Failed to get streams")
return
try:
# ---- fetch servers ----
if config.server == "top":
with Progress() as progress:
progress.add_task("Fetching top server...", total=None)
server = next(streams, None)
if not server:
print("Sth went wrong when fetching the episode")
input("Enter to continue")
stream_anime(anime)
return
stream_link = filter_by_quality(config.quality, server["links"])
if not stream_link:
print("Quality not found")
input("Enter to continue") input("Enter to continue")
stream_anime() stream_anime(anime)
return return
stream_link = filter_by_quality(config.quality, server["links"]) link = stream_link["link"]
if not stream_link: subtitles = server["subtitles"]
print("Quality not found") stream_headers = server["headers"]
input("Enter to continue") episode_title = server["episode_title"]
stream_anime()
return
link = stream_link["link"]
subtitles = server["subtitles"]
stream_headers = server["headers"]
episode_title = server["episode_title"]
else:
with Progress() as progress:
progress.add_task("Fetching servers", total=None)
# prompt for server selection
servers = {server["server"]: server for server in streams}
servers_names = list(servers.keys())
if config.server in servers_names:
server = config.server
else: else:
if config.use_fzf: with Progress() as progress:
server = fzf.run(servers_names, "Select an link: ") progress.add_task("Fetching servers", total=None)
elif config.use_rofi: # prompt for server selection
server = Rofi.run(servers_names, "Select an link") servers = {server["server"]: server for server in streams}
servers_names = list(servers.keys())
if config.server in servers_names:
server = config.server
else: else:
server = fuzzy_inquirer( if config.use_fzf:
servers_names, server = fzf.run(servers_names, "Select an link")
"Select link", elif config.use_rofi:
) server = Rofi.run(servers_names, "Select an link")
stream_link = filter_by_quality( else:
config.quality, servers[server]["links"] server = fuzzy_inquirer(
) servers_names,
if not stream_link: "Select link",
print("Quality not found") )
input("Enter to continue") stream_link = filter_by_quality(
stream_anime() config.quality, servers[server]["links"]
return )
link = stream_link["link"] if not stream_link:
stream_headers = servers[server]["headers"] print("Quality not found")
subtitles = servers[server]["subtitles"] input("Enter to continue")
episode_title = servers[server]["episode_title"] stream_anime(anime)
print(f"[purple]Now Playing:[/] {search_result} Episode {episode}") return
link = stream_link["link"]
stream_headers = servers[server]["headers"]
subtitles = servers[server]["subtitles"]
episode_title = servers[server]["episode_title"]
subtitles = move_preferred_subtitle_lang_to_top( selected_anime_title = search_result_manga_title
subtitles, config.sub_lang if anilist_anime_info:
) selected_anime_title = (
if config.sync_play: anilist_anime_info["title"][config.preferred_language]
from ..utils.syncplay import SyncPlayer or anilist_anime_info["title"]["romaji"]
or anilist_anime_info["title"]["english"]
)
import re
SyncPlayer( for episode_detail in anilist_anime_info["episodes"]:
link, episode_title, headers=stream_headers, subtitles=subtitles if re.match(f"Episode {episode} ", episode_detail["title"]):
episode_title = episode_detail["title"]
break
print(
f"[purple]Now Playing:[/] {selected_anime_title} Episode {episode}"
) )
else: subtitles = move_preferred_subtitle_lang_to_top(
run_mpv( subtitles, config.sub_lang
link, episode_title, headers=stream_headers, subtitles=subtitles
) )
except IndexError as e: if config.sync_play:
print(e) from ..utils.syncplay import SyncPlayer
input("Enter to continue")
stream_anime()
stream_anime() SyncPlayer(
link,
episode_title,
headers=stream_headers,
subtitles=subtitles,
)
else:
run_mpv(
link,
episode_title,
headers=stream_headers,
subtitles=subtitles,
player=config.player,
)
except IndexError as e:
print(e)
input("Enter to continue")
stream_anime(anime)
stream_anime(anime)
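The auto-select path above leans on thefuzz for fuzzy title matching: it builds a title-to-result mapping and keeps the entry whose title scores highest against the requested title with fuzz.ratio. A small self-contained sketch of that step follows; best_match is a hypothetical name, not project code.

# a minimal sketch of the fuzzy auto-select step, assuming results shaped like
# the provider search results above (dicts with a "title" key)
from thefuzz import fuzz

def best_match(search_results: list[dict], query: str) -> dict:
    by_title = {result["title"]: result for result in search_results}
    best_title = max(by_title, key=lambda title: fuzz.ratio(title, query))
    return by_title[best_title]

# e.g. best_match([{"title": "One Piece"}, {"title": "One Punch Man"}], "one piece")
# picks the "One Piece" entry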

View File

@@ -1,7 +1,18 @@
import click
-@click.command(help="Helper command to update fastanime to latest")
+@click.command(
help="Helper command to update fastanime to latest",
epilog="""
\b
\b\bExamples:
# update fastanime to latest
fastanime update
\b
# check for latest release
fastanime update --check
""",
)
@click.option("--check", "-c", help="Check for the latest release", is_flag=True) @click.option("--check", "-c", help="Check for the latest release", is_flag=True)
def update( def update(
check, check,

View File

@@ -6,22 +6,20 @@ ANILIST_ENDPOINT = "https://graphql.anilist.co"
anime_title_query = """
-query($query:String){
-Page(perPage:50){
-pageInfo{
-total
-currentPage
-hasNextPage
-}
-media(search:$query,type:ANIME){
-id
-idMal
-title{
-romaji
-english
-}
-}
-}
-}
+query ($query: String) {
+Page(perPage: 50) {
+pageInfo {
+total
+}
+media(search: $query, type: ANIME) {
+id
+idMal
+title {
+romaji
+english
+}
+}
+}
+}
"""
@@ -46,20 +44,6 @@ def get_anime_titles(query: str, variables: dict = {}):
)
anilist_data = response.json()
# ensuring you dont get blocked
if (
int(response.headers.get("X-RateLimit-Remaining", 0)) < 30
and not response.status_code == 500
):
print("Warning you are exceeding the allowed number of calls per minute")
logger.warning(
"You are exceeding the allowed number of calls per minute for the AniList api enforcing timeout"
)
print("Forced timeout will now be initiated")
import time
print("sleeping...")
time.sleep(1 * 60)
if response.status_code == 200:
eng_titles = [
anime["title"]["english"]
@@ -79,5 +63,33 @@ def get_anime_titles(query: str, variables: dict = {}):
return []
def downloaded_anime_titles(ctx, param, incomplete):
import os
from ..constants import USER_VIDEOS_DIR
try:
titles = [
title
for title in os.listdir(USER_VIDEOS_DIR)
if title.lower().startswith(incomplete.lower()) or not incomplete
]
return titles
except Exception:
return []
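For context on the completion helpers in this file: click (8.x) calls a shell_complete callback with (ctx, param, incomplete) and expects a list of candidate strings back, which is the shape downloaded_anime_titles and anime_titles_shell_complete follow. A tiny hypothetical example of wiring such a callback to an option:

# hypothetical example, assuming click >= 8.0; the command and values are made up
import click

def fruit_complete(ctx, param, incomplete):
    # return every candidate that starts with what the user has typed so far
    return [f for f in ("apple", "apricot", "banana") if f.startswith(incomplete)]

@click.command()
@click.option("--fruit", shell_complete=fruit_complete)
def pick(fruit):
    click.echo(fruit)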
def anime_titles_shell_complete(ctx, param, incomplete):
-return [name for name in get_anime_titles(anime_title_query, {"query": incomplete})]
+incomplete = incomplete.strip()
if not incomplete:
incomplete = None
variables = {}
else:
variables = {"query": incomplete}
return get_anime_titles(anime_title_query, variables)
if __name__ == "__main__":
t = input("Enter title")
results = get_anime_titles(anime_title_query, {"query": t})
print(results)
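The query above is sent to the AniList GraphQL endpoint as a plain POST with a JSON body of the form {"query": ..., "variables": ...}. Below is a hedged sketch of that request using requests; fetch_titles is illustrative only and is not the module's get_anime_titles.

# a minimal sketch of posting the title query above to AniList; no rate-limit handling
import requests

ANILIST_ENDPOINT = "https://graphql.anilist.co"

def fetch_titles(query: str, search_term: str) -> list[str]:
    response = requests.post(
        ANILIST_ENDPOINT,
        json={"query": query, "variables": {"query": search_term}},
        timeout=10,
    )
    if response.status_code != 200:
        return []
    media = response.json()["data"]["Page"]["media"]
    # prefer the romaji title and fall back to the english one
    return [m["title"]["romaji"] or m["title"]["english"] for m in media]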

View File

@@ -4,9 +4,12 @@ import os
from configparser import ConfigParser from configparser import ConfigParser
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
-from rich import print
-from ..constants import USER_CONFIG_PATH, USER_DATA_PATH, USER_VIDEOS_DIR
+from ..constants import (
+USER_CONFIG_PATH,
+USER_DATA_PATH,
+USER_VIDEOS_DIR,
+USER_WATCH_HISTORY_PATH,
+)
from ..libs.rofi import Rofi from ..libs.rofi import Rofi
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -15,100 +18,62 @@ if TYPE_CHECKING:
class Config(object):
-"""class that handles and manages configuration and user data throughout the clis lifespan
+manga = False
Attributes:
anime_list: [TODO:attribute]
watch_history: [TODO:attribute]
fastanime_anilist_app_login_url: [TODO:attribute]
anime_provider: [TODO:attribute]
user_data: [TODO:attribute]
configparser: [TODO:attribute]
downloads_dir: [TODO:attribute]
provider: [TODO:attribute]
use_fzf: [TODO:attribute]
use_rofi: [TODO:attribute]
skip: [TODO:attribute]
icons: [TODO:attribute]
preview: [TODO:attribute]
translation_type: [TODO:attribute]
sort_by: [TODO:attribute]
continue_from_history: [TODO:attribute]
auto_next: [TODO:attribute]
auto_select: [TODO:attribute]
use_mpv_mod: [TODO:attribute]
quality: [TODO:attribute]
notification_duration: [TODO:attribute]
error: [TODO:attribute]
server: [TODO:attribute]
format: [TODO:attribute]
force_window: [TODO:attribute]
preferred_language: [TODO:attribute]
rofi_theme: [TODO:attribute]
rofi_theme: [TODO:attribute]
rofi_theme_input: [TODO:attribute]
rofi_theme_input: [TODO:attribute]
rofi_theme_confirm: [TODO:attribute]
rofi_theme_confirm: [TODO:attribute]
watch_history: [TODO:attribute]
anime_list: [TODO:attribute]
user: [TODO:attribute]
"""
sync_play = False
anime_list: list
-watch_history: dict
+watch_history: dict = {}
fastanime_anilist_app_login_url = (
"https://anilist.co/api/v2/oauth/authorize?client_id=20148&response_type=token"
)
anime_provider: "AnimeProvider"
user_data = {"watch_history": {}, "animelist": [], "user": {}}
default_options = {
"quality": "1080",
"auto_next": "False",
"auto_select": "True",
"sort_by": "search match",
"downloads_dir": USER_VIDEOS_DIR,
"translation_type": "sub",
"server": "top",
"continue_from_history": "True",
"preferred_history": "local",
"use_python_mpv": "false",
"force_window": "immediate",
"preferred_language": "english",
"use_fzf": "False",
"preview": "False",
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"provider": "allanime",
"icons": "false",
"notification_duration": "2",
"skip": "false",
"use_rofi": "false",
"rofi_theme": "",
"rofi_theme_input": "",
"rofi_theme_confirm": "",
"ffmpegthumnailer_seek_time": "-1",
"sub_lang": "eng",
"normalize_titles": "true",
"player": "mpv",
"episode_complete_at": "80",
"force_forward_tracking": "true",
"default_media_list_tracking": "None",
}
def __init__(self) -> None:
-self.initialize_user_data()
+self.initialize_user_data_and_watch_history()
self.load_config()
def load_config(self):
-self.configparser = ConfigParser(
+self.configparser = ConfigParser(self.default_options)
{
"quality": "1080",
"auto_next": "False",
"auto_select": "True",
"sort_by": "search match",
"downloads_dir": USER_VIDEOS_DIR,
"translation_type": "sub",
"server": "top",
"continue_from_history": "True",
"preferred_history": "local",
"use_mpv_mod": "false",
"force_window": "immediate",
"preferred_language": "english",
"use_fzf": "False",
"preview": "False",
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"provider": "allanime",
"error": "3",
"icons": "false",
"notification_duration": "2",
"skip": "false",
"use_rofi": "false",
"rofi_theme": "",
"rofi_theme_input": "",
"rofi_theme_confirm": "",
"ffmpegthumnailer_seek_time": "-1",
"sub_lang": "eng",
}
)
self.configparser.add_section("stream") self.configparser.add_section("stream")
self.configparser.add_section("general") self.configparser.add_section("general")
self.configparser.add_section("anilist") self.configparser.add_section("anilist")
if not os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "w") as config:
self.configparser.write(config)
self.configparser.read(USER_CONFIG_PATH)
# --- set config values from file or using defaults ---
+if os.path.exists(USER_CONFIG_PATH):
+self.configparser.read(USER_CONFIG_PATH, encoding="utf-8")
self.downloads_dir = self.get_downloads_dir()
self.sub_lang = self.get_sub_lang()
self.provider = self.get_provider()
@@ -121,13 +86,17 @@ class Config(object):
self.sort_by = self.get_sort_by()
self.continue_from_history = self.get_continue_from_history()
self.auto_next = self.get_auto_next()
+self.normalize_titles = self.get_normalize_titles()
self.auto_select = self.get_auto_select()
-self.use_mpv_mod = self.get_use_mpv_mod()
+self.use_python_mpv = self.get_use_mpv_mod()
self.quality = self.get_quality()
self.notification_duration = self.get_notification_duration()
-self.error = self.get_error()
+self.episode_complete_at = self.get_episode_complete_at()
+self.default_media_list_tracking = self.get_default_media_list_tracking()
+self.force_forward_tracking = self.get_force_forward_tracking()
self.server = self.get_server()
self.format = self.get_format()
+self.player = self.get_player()
self.force_window = self.get_force_window()
self.preferred_language = self.get_preferred_language()
self.preferred_history = self.get_preferred_history()
@@ -139,31 +108,41 @@ class Config(object):
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
self.ffmpegthumbnailer_seek_time = self.get_ffmpegthumnailer_seek_time()
# ---- setup user data ------
-self.watch_history: dict = self.user_data.get("watch_history", {})
self.anime_list: list = self.user_data.get("animelist", [])
self.user: dict = self.user_data.get("user", {})
+os.environ["CURRENT_FASTANIME_PROVIDER"] = self.provider
+if not os.path.exists(USER_CONFIG_PATH):
+with open(USER_CONFIG_PATH, "w", encoding="utf-8") as config:
+config.write(self.__repr__())
def update_user(self, user):
self.user = user
self.user_data["user"] = user
self._update_user_data()
-def update_watch_history(
-self, anime_id: int, episode: str, start_time="0", total_time="0"
+def media_list_track(
+self,
+anime_id: int,
+episode_no: str,
+episode_stopped_at="0",
+episode_total_length="0",
+progress_tracking="prompt",
):
self.watch_history.update(
{
str(anime_id): {
-"episode": episode,
-"start_time": start_time,
-"total_time": total_time,
+"episode_no": episode_no,
+"episode_stopped_at": episode_stopped_at,
+"episode_total_length": episode_total_length,
+"progress_tracking": progress_tracking,
}
}
)
-self.user_data["watch_history"] = self.watch_history
-self._update_user_data()
+with open(USER_WATCH_HISTORY_PATH, "w") as f:
+json.dump(self.watch_history, f)
-def initialize_user_data(self):
+def initialize_user_data_and_watch_history(self):
try:
if os.path.isfile(USER_DATA_PATH):
with open(USER_DATA_PATH, "r") as f:
@@ -171,6 +150,13 @@ class Config(object):
self.user_data.update(user_data) self.user_data.update(user_data)
except Exception as e: except Exception as e:
logger.error(e) logger.error(e)
try:
if os.path.isfile(USER_WATCH_HISTORY_PATH):
with open(USER_WATCH_HISTORY_PATH, "r") as f:
watch_history = json.load(f)
self.watch_history.update(watch_history)
except Exception as e:
logger.error(e)
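With this change the watch history lives in its own JSON file (USER_WATCH_HISTORY_PATH) rather than inside user_data, keyed by the AniList media id. Based on the keys media_list_track writes above, one record plausibly looks like the following; the concrete values here are invented for illustration.

# illustrative only: values are made up, keys mirror what media_list_track writes above
example_watch_history = {
    "21": {  # AniList media id, stored as a string
        "episode_no": "1072",
        "episode_stopped_at": "00:12:45",
        "episode_total_length": "00:23:40",
        "progress_tracking": "prompt",
    }
}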
def _update_user_data(self): def _update_user_data(self):
"""method that updates the actual user data file""" """method that updates the actual user data file"""
@@ -217,6 +203,15 @@ class Config(object):
def get_rofi_theme_confirm(self): def get_rofi_theme_confirm(self):
return self.configparser.get("general", "rofi_theme_confirm") return self.configparser.get("general", "rofi_theme_confirm")
def get_force_forward_tracking(self):
return self.configparser.getboolean("general", "force_forward_tracking")
def get_default_media_list_tracking(self):
return self.configparser.get("general", "default_media_list_tracking")
def get_normalize_titles(self):
return self.configparser.getboolean("general", "normalize_titles")
# --- stream section --- # --- stream section ---
def get_skip(self): def get_skip(self):
return self.configparser.getboolean("stream", "skip") return self.configparser.getboolean("stream", "skip")
@@ -231,13 +226,13 @@ class Config(object):
return self.configparser.getboolean("stream", "continue_from_history") return self.configparser.getboolean("stream", "continue_from_history")
def get_use_mpv_mod(self): def get_use_mpv_mod(self):
return self.configparser.getboolean("stream", "use_mpv_mod") return self.configparser.getboolean("stream", "use_python_mpv")
def get_notification_duration(self): def get_notification_duration(self):
return self.configparser.getint("general", "notification_duration") return self.configparser.getint("general", "notification_duration")
def get_error(self): def get_episode_complete_at(self):
return self.configparser.getint("stream", "error") return self.configparser.getint("stream", "episode_complete_at")
def get_force_window(self): def get_force_window(self):
return self.configparser.get("stream", "force_window") return self.configparser.get("stream", "force_window")
@@ -257,6 +252,9 @@ class Config(object):
def get_format(self):
return self.configparser.get("stream", "format")
+def get_player(self):
+return self.configparser.get("stream", "player")
def get_sort_by(self):
return self.configparser.get("anilist", "sort_by")
@@ -266,108 +264,213 @@ class Config(object):
self.configparser.write(config) self.configparser.write(config)
def __repr__(self): def __repr__(self):
current_config_state = f""" current_config_state = f"""\
[stream] #
# Auto continue from watch history # ███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░
continue_from_history = {self.continue_from_history} # ██╔════╝██╔══██╗██╔════╝╚══██╔══╝██╔══██╗████╗░██║██║████╗░████║██╔════╝ ██╔══██╗██╔══██╗████╗░██║██╔════╝██║██╔════╝░
# █████╗░░███████║╚█████╗░░░░██║░░░███████║██╔██╗██║██║██╔████╔██║█████╗░░ ██║░░╚═╝██║░░██║██╔██╗██║█████╗░░██║██║░░██╗░
# which hostory to use [local/remote] # ██╔══╝░░██╔══██║░╚═══██╗░░░██║░░░██╔══██║██║╚████║██║██║╚██╔╝██║██╔══╝░░ ██║░░██╗██║░░██║██║╚████║██╔══╝░░██║██║░░╚██╗
preferred_history = {self.preferred_history} # ██║░░░░░██║░░██║██████╔╝░░░██║░░░██║░░██║██║░╚███║██║██║░╚═╝░██║███████╗ ╚█████╔╝╚█████╔╝██║░╚███║██║░░░░░██║╚██████╔╝
# ╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝ ░╚════╝░░╚════╝░╚═╝░░╚══╝╚═╝░░░░░╚═╝░╚═════╝░
#
# Preferred language for anime (options: dub, sub)
translation_type = {self.translation_type}
# Default server (options: dropbox, sharepoint, wetransfer.gogoanime, top, wixmp)
server = {self.server}
# Auto-select next episode
auto_next = {self.auto_next}
# Auto select the anime provider results with fuzzy find.
# Note this wont always be correct.But 99% of the time will be.
auto_select = {self.auto_select}
# whether to skip the opening and ending theme songs
# NOTE: requires ani-skip to be in path
skip = {self.skip}
# the maximum delta time in minutes after which the episode should be considered as completed
# used in the continue from time stamp
error = {self.error}
# whether to use python-mpv
# to enable superior control over the player
# adding more options to it
use_mpv_mod = {self.use_mpv_mod}
# force mpv window
# passed directly to mpv so values are same
force_window = immediate
# the format of downloaded anime and trailer
# based on yt-dlp format and passed directly to it
# learn more by looking it up on their site
# only works for downloaded anime if server=gogoanime
# since its the only one that offers different formats
# the others tend not to
format = {self.format}
[general] [general]
# whether to show the icons in the tui [True/False]
# more like emojis
# by the way if you have any recommendations to which should be used where please
# don't hesitate to share your opinion
# cause it's a lot of work to look for the right one for each menu option
# be sure to also give the replacement emoji
icons = {self.icons}
# can be [allanime,animepahe] # the quality of the stream [1080,720,480,360]
# this option is usually only reliable when:
# provider=animepahe
# since it provides links that actually point to streams of different qualities
# while the rest just point to another link that can provide the anime from the same server
quality = {self.quality}
# whether to normalize provider titles [True/False]
# basically takes the provider titles and finds the corresponding anilist title then changes the title to that
# useful for uniformity especially when downloading from different providers
# this also applies to episode titles
normalize_titles = {self.normalize_titles}
# can be [allanime, animepahe, aniwatch]
# allanime is the most reliable
# animepahe provides different links to streams of different quality so a quality can be selected reliably with --quality option
# aniwatch (now hianime) usually provides subs in different languages and its servers are generally faster
provider = {self.provider} provider = {self.provider}
# Display language (options: english, romaji) # Display language [english, romaji]
# this is passed to anilist directly and is used to set the language which the anime titles will be in
# when using the anilist interface
preferred_language = {self.preferred_language} preferred_language = {self.preferred_language}
# Download directory # Download directory
# where you will find your videos after downloading them with 'fastanime download' command
downloads_dir = {self.downloads_dir} downloads_dir = {self.downloads_dir}
# whether to show a preview window when using fzf or rofi # whether to show a preview window when using fzf or rofi [True/False]
# the preview requires you have a commandline image viewer as documented in the README
# this only applies when using fzf
# if you don't care about image previews it doesn't matter
# though it's awesome
# try it and you will see
preview = {self.preview} preview = {self.preview}
# the time to seek when using ffmpegthumbnailer [-1 to 100] # the time to seek when using ffmpegthumbnailer [-1 to 100]
# -1 means random and is the default # -1 means random and is the default
# ffmpegthumbnailer is used to generate previews and you can select at what time in the video to extract an image
# random makes things quite exciting because you never know at what time it will extract the image
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time} ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
# whether to use fzf as the interface for the anilist command and others. # whether to use fzf as the interface for the anilist command and others. [True/False]
use_fzf = {self.use_fzf} use_fzf = {self.use_fzf}
# whether to use rofi for the ui # whether to use rofi for the ui [True/False]
# it's more useful if you want to create a desktop entry
# which can be setup with 'fastanime config --desktop-entry'
# though if you want it to be your sole interface even when fastanime is run directly from the terminal
use_rofi = {self.use_rofi} use_rofi = {self.use_rofi}
# rofi theme to use # rofi themes to use
# the values of this option is the path to the rofi config files to use
# i choose to split it into three since it gives the best look and feel
# you can refer to the rofi demo on github to see for your self
# by the way i recommend getting the rofi themes from this project;
rofi_theme = {self.rofi_theme} rofi_theme = {self.rofi_theme}
rofi_theme_input = {self.rofi_theme_input} rofi_theme_input = {self.rofi_theme_input}
rofi_theme_confirm = {self.rofi_theme_confirm} rofi_theme_confirm = {self.rofi_theme_confirm}
# whether to show the icons
icons = {self.icons}
# the duration in minutes a notification will stay in the screen # the duration in minutes a notification will stay in the screen
# used by notifier command # used by notifier command
notification_duration = {self.notification_duration} notification_duration = {self.notification_duration}
"""
# used when the provider gives subs of different languages
# currently its the case for:
# aniwatch
# the values for this option are the short names for countries
# regex is used to determine what you selected
sub_lang = {self.sub_lang}
# what is your default media list tracking [track/disabled/prompt]
# only affects your anilist anime list
# track - means your progress will always be reflected in your anilist anime list
# disabled - means progress tracking will no longer be reflected in your anime list
# prompt - means for every anime you will be prompted whether you want your progress to be tracked or not
default_media_list_tracking = {self.default_media_list_tracking}
# whether media list tracking should only be updated when the next episode is greater than the previous
# this affects only your anilist anime list
force_forward_tracking = {self.force_forward_tracking}
[stream]
# Auto continue from watch history [True/False]
# this will make fastanime choose the episode that you last watched to completion
# and increment it by one
# and use that to auto select the episode you want to watch
continue_from_history = {self.continue_from_history}
# which history to use [local/remote]
# local history means it will just use the watch history stored locally in your device
# the file that stores it is called watch_history.json and is stored next to your config file
# remote means it ignores the last episode stored locally and instead uses the one in your anilist anime list
# this config option is useful if you want to overwrite your local history or import history covered from another device or platform
# since remote history will take precedence over what's available locally
preferred_history = {self.preferred_history}
# Preferred language for anime [dub/sub]
translation_type = {self.translation_type}
# what server to use for a particular provider
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
# animepahe: [kwik]
# aniwatch: [HD1, HD2, StreamSB, StreamTape]
# 'top' can also be used as a value for this option
# 'top' will cause fastanime to auto select the first server it sees
# this saves on resources and is faster since not all servers are being fetched
server = {self.server}
# Auto select next episode [True/False]
# this makes fastanime increment the current episode number
# and then use that value to fetch the next episode instead of prompting
# this option is useful for binging
auto_next = {self.auto_next}
# Auto select the anime provider results with fuzzy find. [True/False]
# Note this won't always be correct
# this is because the providers sometimes use non-standard names
# that are their own preference rather than the official names
# But 99% of the time it will be accurate
# if this happens just turn off auto_select in the menus or from the commandline and manually select the correct anime title
# and then please open an issue at <> highlighting the normalized title and the title given by the provider for the anime you wished to watch
# or even better edit this file <> and open a pull request
auto_select = {self.auto_select}
# whether to skip the opening and ending theme songs [True/False]
# NOTE: requires ani-skip to be in path
# for python-mpv users, I am planning to create this functionality in python without the use of an external script
# so its disabled for now
skip = {self.skip}
# at what percentage progress should the episode be considered as completed [0-100]
# this value is used to determine whether to increment the current episode number and save it to your local list
# so you can continue immediately to the next episode without selecting it the next time you decide to watch the anime
# it is also used to determine whether your anilist anime list should be updated or not
episode_complete_at = {self.episode_complete_at}
# whether to use python-mpv [True/False]
# to enable superior control over the player
# adding more options to it
# Enable this one and you will be wonder why you did not discover fastanime sooner
# Since you basically don't have to close the player window to go to the next or previous episode, switch servers, change translation type or change to a given episode x
# so try it if you haven't already
# if you have any issues setting it up
# don't be afraid to ask
# especially on windows
# honestly it can be a pain to set it up there
# personally it took me quite sometime to figure it out
# this is because of how windows handles shared libraries
# so just ask when you find yourself stuck
# or just switch to arch linux
use_python_mpv = {self.use_python_mpv}
# force mpv window
# the default 'immediate' just makes mpv to open the window even if the video has not yet loaded
# done for aesthetics
# passed directly to mpv so values are same
force_window = immediate
# the format of downloaded anime and trailer
# based on yt-dlp format and passed directly to it
# learn more by looking it up on their site
# only works for downloaded anime if:
# provider=allanime, server=gogoanime
# provider=allanime, server=wixmp
# provider=aniwatch
# this is because they provide an m3u8 file that contains multiple quality streams
format = {self.format}
# set the player to use for streaming [mpv/vlc]
# while this option exists i will still recommend that you use mpv
# since you will miss out on some features if you use the others
player = {self.player}
# NOTE:
# if you have any trouble setting up your config
# please don't be afraid to ask in our discord
# plus if there are any errors, improvements or suggestions please tell us in the discord
# or help us by contributing
# we appreciate all the help we can get
# since we may not always have the time to immediately implement the changes
#
# HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
#
"""
return current_config_state return current_config_state
def __str__(self): def __str__(self):
return self.__repr__() return self.__repr__()
# WARNING: deprecated and will probably be removed
def update_anime_list(self, anime_id: int, remove=False):
if remove:
try:
self.anime_list.remove(anime_id)
print("Succesfully removed :cry:")
except Exception:
print(anime_id, "Nothing to remove :confused:")
else:
self.anime_list.append(anime_id)
self.user_data["animelist"] = list(set(self.anime_list))
self._update_user_data()
print("Succesfully added :smile:")
input("Enter to continue...")

View File

@@ -2,7 +2,6 @@ from __future__ import annotations
import os
import random
-from datetime import datetime
from typing import TYPE_CHECKING
from click import clear
@@ -21,7 +20,11 @@ from ...Utility.data import anime_normalizer
from ...Utility.utils import anime_title_percentage_match
from ..utils.mpv import run_mpv
from ..utils.tools import exit_app
-from ..utils.utils import filter_by_quality, fuzzy_inquirer
+from ..utils.utils import (
+filter_by_quality,
+fuzzy_inquirer,
+move_preferred_subtitle_lang_to_top,
+)
from .utils import aniskip
if TYPE_CHECKING:
@@ -31,8 +34,7 @@ if TYPE_CHECKING:
from ..utils.tools import FastAnimeRuntimeState
-# TODO: make the error handling more sane
-def calculate_time_delta(start_time, end_time):
+def calculate_percentage_completion(start_time, end_time):
"""helper function used to calculate the difference between two timestamps in seconds
Args:
@@ -42,16 +44,12 @@ def calculate_time_delta(start_time, end_time):
Returns:
[TODO:return]
"""
-time_format = "%H:%M:%S"
-# Convert string times to datetime objects
-start = datetime.strptime(start_time, time_format)
-end = datetime.strptime(end_time, time_format)
-# Calculate the difference
-delta = end - start
-return delta
+start = start_time.split(":")
+end = end_time.split(":")
+start_secs = int(start[0]) * 3600 + int(start[1]) * 60 + int(start[2])
+end_secs = int(end[0]) * 3600 + int(end[1]) * 60 + int(end[2])
+return start_secs / end_secs * 100
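The old minute-based delta check is replaced by a percentage that gets compared against the new episode_complete_at option (default 80 in the defaults above). A short worked example, assuming the function above is in scope and using made-up timestamps:

# worked example with invented timestamps: 20 minutes watched out of 24
stop_time, total_time = "00:20:00", "00:24:00"
percentage = calculate_percentage_completion(stop_time, total_time)  # 1200 / 1440 * 100 ≈ 83.3
episode_complete_at = 80  # the shipped default
print(percentage >= episode_complete_at)  # True -> treat the episode as completed and advance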
def media_player_controls( def media_player_controls(
@@ -99,10 +97,12 @@ def media_player_controls(
)
if (
-config.watch_history[str(anime_id_anilist)]["episode"]
+config.watch_history[str(anime_id_anilist)]["episode_no"]
== current_episode_number
):
-start_time = config.watch_history[str(anime_id_anilist)]["start_time"]
+start_time = config.watch_history[str(anime_id_anilist)][
+"episode_stopped_at"
+]
print("[green]Continuing from:[/] ", start_time)
else:
start_time = "0"
@@ -113,62 +113,64 @@ def media_player_controls(
current_episode_number, current_episode_number,
): ):
custom_args.extend(args) custom_args.extend(args)
subtitles = selected_server["subtitles"] subtitles = move_preferred_subtitle_lang_to_top(
selected_server["subtitles"], config.sub_lang
)
episode_title = selected_server["episode_title"]
if config.normalize_titles:
import re
for episode_detail in fastanime_runtime_state.selected_anime_anilist[
"streamingEpisodes"
]:
if re.match(
f"Episode {current_episode_number} ", episode_detail["title"]
):
episode_title = episode_detail["title"]
break
if config.sync_play: if config.sync_play:
from ..utils.syncplay import SyncPlayer from ..utils.syncplay import SyncPlayer
stop_time, total_time = SyncPlayer( stop_time, total_time = SyncPlayer(
current_episode_stream_link, current_episode_stream_link,
selected_server["episode_title"], episode_title,
headers=selected_server["headers"], headers=selected_server["headers"],
subtitles=subtitles, subtitles=subtitles,
) )
elif config.use_mpv_mod: elif config.use_python_mpv:
from ..utils.player import player from ..utils.player import player
mpv = player.create_player( player.create_player(
current_episode_stream_link, current_episode_stream_link,
config.anime_provider, config.anime_provider,
fastanime_runtime_state, fastanime_runtime_state,
config, config,
selected_server["episode_title"], episode_title,
start_time,
headers=selected_server["headers"], headers=selected_server["headers"],
subtitles=subtitles,
) )
# TODO: implement custom aniskip
if custom_args and None:
chapters_file = custom_args[0].split("=", 1)
script_opts = custom_args[1].split("=", 1)
mpv._set_property("chapters-file", chapters_file[1])
mpv._set_property("script-opts", script_opts[1])
if not start_time == "0":
mpv.start = start_time
mpv.wait_until_playing()
if subtitles:
mpv.sub_add(
subtitles[0]["url"], "select", None, subtitles[0]["language"]
)
mpv.wait_for_shutdown()
mpv.terminate()
stop_time = player.last_stop_time stop_time = player.last_stop_time
total_time = player.last_total_time total_time = player.last_total_time
else: else:
stop_time, total_time = run_mpv( stop_time, total_time = run_mpv(
current_episode_stream_link, current_episode_stream_link,
selected_server["episode_title"], episode_title,
start_time=start_time, start_time=start_time,
custom_args=custom_args, custom_args=custom_args,
headers=selected_server["headers"], headers=selected_server["headers"],
subtitles=subtitles, subtitles=subtitles,
player=config.player,
) )
# either update the watch history to the next episode or current depending on progress
if stop_time == "0" or total_time == "0":
episode = str(int(current_episode_number) + 1)
else:
-error = 5 * 60
-delta = calculate_time_delta(stop_time, total_time)
-if delta.total_seconds() > error:
+percentage_completion_of_episode = calculate_percentage_completion(
+stop_time, total_time
+)
+if percentage_completion_of_episode < config.episode_complete_at:
episode = current_episode_number
else:
episode = str(int(current_episode_number) + 1)
@@ -176,28 +178,34 @@ def media_player_controls(
total_time = "0" total_time = "0"
clear() clear()
-config.update_watch_history(anime_id_anilist, episode, stop_time, total_time)
+config.media_list_track(
anime_id_anilist,
episode_no=episode,
episode_stopped_at=stop_time,
episode_total_length=total_time,
progress_tracking=fastanime_runtime_state.progress_tracking,
)
media_player_controls(config, fastanime_runtime_state) media_player_controls(config, fastanime_runtime_state)
def _next_episode(): def _next_episode():
"""watch the next episode""" """watch the next episode"""
# ensures you don't accidentally erase your progress for an incomplete episode
stop_time = config.watch_history.get(str(anime_id_anilist), {}).get(
-"start_time", "0"
+"episode_stopped_at", "0"
)
total_time = config.watch_history.get(str(anime_id_anilist), {}).get(
-"total_time", "0"
+"episode_total_length", "0"
)
# compute if the episode is actually completed
-error = config.error * 60
if stop_time == "0" or total_time == "0":
-dt = 0
+percentage_completion_of_episode = 0
else:
-delta = calculate_time_delta(stop_time, total_time)
-dt = delta.total_seconds()
-if dt > error:
+percentage_completion_of_episode = calculate_percentage_completion(
+stop_time, total_time
+)
+if percentage_completion_of_episode < config.episode_complete_at:
if config.auto_next:
if config.use_rofi:
if not Rofi.confirm(
@@ -217,7 +225,7 @@ def media_player_controls(
"Are you sure you wish to continue to the next episode, your progress for the current episodes will be erased?", "Are you sure you wish to continue to the next episode, your progress for the current episodes will be erased?",
default=True, default=True,
): ):
media_player_controls(config, fastanime_runtime_state) media_actions_menu(config, fastanime_runtime_state)
return return
# all checks have passed lets go to the next episode # all checks have passed lets go to the next episode
@@ -231,7 +239,11 @@ def media_player_controls(
] ]
# update user config # update user config
config.update_watch_history(anime_id_anilist, available_episodes[next_episode]) config.media_list_track(
anime_id_anilist,
episode_no=available_episodes[next_episode],
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# call interface # call interface
provider_anime_episode_servers_menu(config, fastanime_runtime_state) provider_anime_episode_servers_menu(config, fastanime_runtime_state)
@@ -255,7 +267,11 @@ def media_player_controls(
] ]
# update user config # update user config
config.update_watch_history(anime_id_anilist, available_episodes[prev_episode]) config.media_list_track(
anime_id_anilist,
episode_no=available_episodes[prev_episode],
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# call interface # call interface
provider_anime_episode_servers_menu(config, fastanime_runtime_state) provider_anime_episode_servers_menu(config, fastanime_runtime_state)
@@ -268,7 +284,7 @@ def media_player_controls(
# prompt for new quality # prompt for new quality
if config.use_fzf: if config.use_fzf:
quality = fzf.run( quality = fzf.run(
options, prompt="Select Quality:", header="Quality Options" options, prompt="Select Quality", header="Quality Options"
) )
elif config.use_rofi: elif config.use_rofi:
quality = Rofi.run(options, "Select Quality") quality = Rofi.run(options, "Select Quality")
@@ -286,7 +302,7 @@ def media_player_controls(
options = ["sub", "dub"] options = ["sub", "dub"]
if config.use_fzf: if config.use_fzf:
translation_type = fzf.run( translation_type = fzf.run(
options, prompt="Select Translation Type: ", header="Lang Options" options, prompt="Select Translation Type", header="Lang Options"
).lower() ).lower()
elif config.use_rofi: elif config.use_rofi:
translation_type = Rofi.run(options, "Select Translation Type") translation_type = Rofi.run(options, "Select Translation Type")
@@ -332,7 +348,7 @@ def media_player_controls(
if config.use_fzf: if config.use_fzf:
action = fzf.run( action = fzf.run(
choices, choices,
prompt="Select Action:", prompt="Select Action",
) )
elif config.use_rofi: elif config.use_rofi:
action = Rofi.run(choices, "Select Action") action = Rofi.run(choices, "Select Action")
@@ -366,15 +382,15 @@ def provider_anime_episode_servers_menu(
anime_id_anilist: int = fastanime_runtime_state.selected_anime_id_anilist anime_id_anilist: int = fastanime_runtime_state.selected_anime_id_anilist
provider_anime: "Anime" = fastanime_runtime_state.provider_anime provider_anime: "Anime" = fastanime_runtime_state.provider_anime
server_name = None server_name = ""
# get streams for episode from provider # get streams for episode from provider
with Progress() as progress: with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None) progress.add_task("Fetching Episode Streams...", total=None)
episode_streams_generator = anime_provider.get_episode_streams( episode_streams_generator = anime_provider.get_episode_streams(
provider_anime, provider_anime["id"],
provider_anime["title"],
current_episode_number, current_episode_number,
translation_type, translation_type,
fastanime_runtime_state.selected_anime_anilist,
) )
if not episode_streams_generator: if not episode_streams_generator:
if not config.use_rofi: if not config.use_rofi:
@@ -383,7 +399,7 @@ def provider_anime_episode_servers_menu(
else: else:
if not Rofi.confirm("Sth went wrong!!Enter to continue..."): if not Rofi.confirm("Sth went wrong!!Enter to continue..."):
exit(1) exit(1)
provider_anime_episode_servers_menu(config, fastanime_runtime_state) media_actions_menu(config, fastanime_runtime_state)
return return
if config.server == "top": if config.server == "top":
@@ -431,7 +447,7 @@ def provider_anime_episode_servers_menu(
if config.use_fzf: if config.use_fzf:
server_name = fzf.run( server_name = fzf.run(
choices, choices,
prompt="Select Server: ", prompt="Select Server",
header="Servers", header="Servers",
) )
elif config.use_rofi: elif config.use_rofi:
@@ -492,24 +508,17 @@ def provider_anime_episode_servers_menu(
"[bold magenta] Episode: [/]", "[bold magenta] Episode: [/]",
current_episode_number,
)
- # -- update anilist progress if user --
- if config.user and current_episode_number:
- AniList.update_anime_list(
- {
- "mediaId": anime_id_anilist,
- "progress": int(float(current_episode_number)),
- }
- )
# try to get the timestamp you left off from if available
start_time = config.watch_history.get(str(anime_id_anilist), {}).get(
- "start_time", "0"
+ "episode_stopped_at", "0"
)
episode_in_history = config.watch_history.get(str(anime_id_anilist), {}).get(
- "episode", ""
+ "episode_no", ""
)
if start_time != "0" and episode_in_history == current_episode_number:
print("[green]Continuing from:[/] ", start_time)
+ else:
+ start_time = "0"
custom_args = []
if config.skip:
if args := aniskip(
@@ -517,45 +526,45 @@ def provider_anime_episode_servers_menu(
current_episode_number,
):
custom_args.extend(args)
- subtitles = selected_server["subtitles"]
+ subtitles = move_preferred_subtitle_lang_to_top(
+ selected_server["subtitles"], config.sub_lang
+ )
+ episode_title = selected_server["episode_title"]
+ if config.normalize_titles:
+ import re
+ for episode_detail in fastanime_runtime_state.selected_anime_anilist[
+ "streamingEpisodes"
+ ]:
+ if re.match(f"Episode {current_episode_number} ", episode_detail["title"]):
+ episode_title = episode_detail["title"]
+ break
if config.sync_play:
from ..utils.syncplay import SyncPlayer
stop_time, total_time = SyncPlayer(
current_stream_link,
- selected_server["episode_title"],
+ episode_title,
headers=selected_server["headers"],
subtitles=subtitles,
)
- elif config.use_mpv_mod:
+ elif config.use_python_mpv:
from ..utils.player import player
- mpv = player.create_player(
+ if start_time == "0" and episode_in_history != current_episode_number:
+ start_time = "0"
+ player.create_player(
current_stream_link,
anime_provider,
fastanime_runtime_state,
config,
- selected_server["episode_title"],
+ episode_title,
+ start_time,
headers=selected_server["headers"],
+ subtitles=subtitles,
)
- # TODO: implement custom aniskip intergration
- if custom_args and None:
- chapters_file = custom_args[0].split("=", 1)
- script_opts = custom_args[1].split("=", 1)
- mpv._set_property("chapters-file", chapters_file[1])
- mpv._set_property("script-opts", script_opts[1])
- if not start_time == "0" and episode_in_history == current_episode_number:
- mpv.start = start_time
- mpv.wait_until_playing()
- if subtitles:
- # subs = ""
- # for subtitle in subtitles:
- # subs += f"{subtitle['url']},"
- mpv.sub_add(subtitles[0]["url"], "select", None, subtitles[0]["language"])
- # mpv.sub_files = subs
- mpv.wait_for_shutdown()
- mpv.terminate()
stop_time = player.last_stop_time
total_time = player.last_total_time
current_episode_number = fastanime_runtime_state.provider_current_episode_number
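For context, the new move_preferred_subtitle_lang_to_top(selected_server["subtitles"], config.sub_lang) call only reorders the subtitle list so the preferred language is tried first. The helper's body is not part of this hunk, so the matching rule below is an assumption; this is just a minimal sketch of what such a reordering could look like:

    # Hypothetical sketch of a subtitle-reordering helper; the real
    # implementation lives elsewhere in the repo and may differ.
    def move_preferred_subtitle_lang_to_top(sub_list, lang_str):
        # keep subtitles whose language mentions the preferred language first,
        # preserving the relative order of everything else
        preferred = [s for s in sub_list if lang_str.lower() in s.get("language", "").lower()]
        others = [s for s in sub_list if s not in preferred]
        return preferred + others

    subs = [{"url": "a.vtt", "language": "spa"}, {"url": "b.vtt", "language": "eng"}]
    print(move_preferred_subtitle_lang_to_top(subs, "eng"))  # the "eng" entry comes first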
@@ -564,11 +573,12 @@ def provider_anime_episode_servers_menu(
start_time = "0"
stop_time, total_time = run_mpv(
current_stream_link,
- selected_server["episode_title"],
+ episode_title,
start_time=start_time,
custom_args=custom_args,
headers=selected_server["headers"],
subtitles=subtitles,
+ player=config.player,
)
print("Finished at: ", stop_time)
@@ -576,7 +586,7 @@ def provider_anime_episode_servers_menu(
# this will try to update the episode to be the next episode if delta has reached a specific threshhold
# this update will only apply locally
# the remote(anilist) is only updated when its certain you are going to open the player
- available_episodes: list = sorted(
+ available_episodes: list[str] = sorted(
fastanime_runtime_state.provider_available_episodes, key=float
)
if stop_time == "0" or total_time == "0":
@@ -586,11 +596,36 @@ def provider_anime_episode_servers_menu(
next_episode = len(available_episodes) - 1
episode = available_episodes[next_episode]
else:
- error = config.error * 60
- delta = calculate_time_delta(stop_time, total_time)
- if delta.total_seconds() > error:
+ percentage_completion_of_episode = calculate_percentage_completion(
+ stop_time, total_time
+ )
+ if percentage_completion_of_episode < config.episode_complete_at:
episode = current_episode_number
else:
+ # -- update anilist progress if user --
+ remote_progress = (
+ fastanime_runtime_state.selected_anime_anilist["mediaListEntry"] or {}
+ ).get("progress")
+ disable_anilist_update = False
+ if remote_progress:
+ if (
+ float(remote_progress) > float(current_episode_number)
+ and config.force_forward_tracking
+ ):
+ disable_anilist_update = True
+ if (
+ fastanime_runtime_state.progress_tracking == "track"
+ and config.user
+ and not disable_anilist_update
+ and current_episode_number
+ ):
+ AniList.update_anime_list(
+ {
+ "mediaId": anime_id_anilist,
+ "progress": int(float(current_episode_number)),
+ }
+ )
# increment the episodes
next_episode = available_episodes.index(current_episode_number) + 1
if next_episode >= len(available_episodes):
@@ -599,11 +634,12 @@ def provider_anime_episode_servers_menu(
stop_time = "0"
total_time = "0"
- config.update_watch_history(
+ config.media_list_track(
anime_id_anilist,
- episode,
- start_time=stop_time,
- total_time=total_time,
+ episode_no=episode,
+ episode_stopped_at=stop_time,
+ episode_total_length=total_time,
+ progress_tracking=fastanime_runtime_state.progress_tracking,
)
# switch to controls
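The hunks above replace the old time-delta check with a percentage-of-episode-watched check before advancing to the next episode. The real calculate_percentage_completion() is not shown in this diff, so the sketch below is an assumption that it works on the "H:MM:SS" strings produced by format_time() elsewhere in this changeset:

    # A sketch of the completion check driving the logic above.
    def to_seconds(timestamp: str) -> float:
        secs = 0.0
        for part in timestamp.split(":"):
            secs = secs * 60 + float(part)
        return secs

    def calculate_percentage_completion(stop_time: str, total_time: str) -> float:
        total = to_seconds(total_time)
        return 0.0 if total == 0 else to_seconds(stop_time) / total * 100

    # only advance to the next episode once e.g. 80% has been watched
    episode_complete_at = 80
    print(calculate_percentage_completion("00:19:30", "00:24:00") >= episode_complete_at)  # True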
@@ -635,7 +671,9 @@ def provider_anime_episodes_menu(
)
# prompt for episode number
- total_episodes = provider_anime["availableEpisodesDetail"][translation_type]
+ total_episodes = sorted(
+ provider_anime["availableEpisodesDetail"][translation_type], key=float
+ )
current_episode_number = ""
# auto select episode if continue from history otherwise prompt episode number
@@ -643,7 +681,7 @@ def provider_anime_episodes_menu(
# the user watch history thats locally available
# will be preferred over remote
if (
- user_watch_history.get(str(anime_id_anilist), {}).get("episode")
+ user_watch_history.get(str(anime_id_anilist), {}).get("episode_no")
in total_episodes
):
if (
@@ -651,7 +689,7 @@ def provider_anime_episodes_menu(
or not selected_anime_anilist["mediaListEntry"]
):
current_episode_number = user_watch_history[str(anime_id_anilist)][
- "episode"
+ "episode_no"
]
else:
current_episode_number = str(
@@ -682,11 +720,21 @@ def provider_anime_episodes_menu(
# prompt for episode number if not set
if not current_episode_number or current_episode_number not in total_episodes:
choices = [*total_episodes, "Back"]
+ preview = None
+ if config.preview:
+ from .utils import get_fzf_episode_preview
+ e = fastanime_runtime_state.selected_anime_anilist["episodes"]
+ if e:
+ eps = range(0, e + 1)
+ else:
+ eps = total_episodes
+ preview = get_fzf_episode_preview(
+ fastanime_runtime_state.selected_anime_anilist, eps
+ )
if config.use_fzf:
current_episode_number = fzf.run(
- choices,
- prompt="Select Episode:",
- header=anime_title,
+ choices, prompt="Select Episode", header=anime_title, preview=preview
)
elif config.use_rofi:
current_episode_number = Rofi.run(choices, "Select Episode")
@@ -699,14 +747,14 @@ def provider_anime_episodes_menu(
if current_episode_number == "Back":
media_actions_menu(config, fastanime_runtime_state)
return
+ #
- # try to get the start time and if not found default to "0"
- start_time = user_watch_history.get(str(anime_id_anilist), {}).get(
- "start_time", "0"
- )
- config.update_watch_history(
- anime_id_anilist, current_episode_number, start_time=start_time
- )
+ # # try to get the start time and if not found default to "0"
+ # start_time = user_watch_history.get(str(anime_id_anilist), {}).get(
+ # "start_time", "0"
+ # )
+ # config.update_watch_history(
+ # anime_id_anilist, current_episode_number, start_time=start_time
+ # )
# update runtime data
fastanime_runtime_state.provider_available_episodes = total_episodes
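The episode list is now passed to fzf together with a preview script. The project's own fzf wrapper is not shown in this hunk, so the exact invocation below is an assumption; it is only a rough sketch of how a list plus an fzf --preview script can be driven from Python:

    # Hypothetical fzf invocation; fastanime's real fzf wrapper may differ.
    import subprocess

    def fzf_select(choices: list[str], prompt: str, preview: str | None = None) -> str:
        cmd = ["fzf", "--prompt", f"{prompt}: "]
        if preview:
            cmd += ["--preview", preview]  # preview script receives the highlighted line as {}
        result = subprocess.run(cmd, input="\n".join(choices), capture_output=True, text=True)
        return result.stdout.strip()

    episode = fzf_select(["1", "2", "3", "Back"], "Select Episode", preview="echo {}")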
@@ -716,7 +764,9 @@ def provider_anime_episodes_menu(
provider_anime_episode_servers_menu(config, fastanime_runtime_state)
- def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"):
+ def fetch_anime_episode(
+ config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
+ ):
selected_anime: "SearchResult" = (
fastanime_runtime_state.provider_anime_search_result
)
@@ -724,7 +774,7 @@ def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"
with Progress() as progress:
progress.add_task("Fetching Anime Info...", total=None)
provider_anime = anime_provider.get_anime(
- selected_anime["id"], fastanime_runtime_state.selected_anime_anilist
+ selected_anime["id"],
)
if not provider_anime:
print(
@@ -735,7 +785,7 @@ def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"
else:
if not Rofi.confirm("Sth went wrong!!Enter to continue..."):
exit(1)
- return fetch_anime_episode(config, fastanime_runtime_state)
+ return media_actions_menu(config, fastanime_runtime_state)
fastanime_runtime_state.provider_anime = provider_anime
provider_anime_episodes_menu(config, fastanime_runtime_state)
@@ -744,6 +794,39 @@ def fetch_anime_episode(config, fastanime_runtime_state: "FastAnimeRuntimeState"
#
# ---- ANIME PROVIDER SEARCH RESULTS MENU ----
#
def set_prefered_progress_tracking(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState", update=False
):
if (
fastanime_runtime_state.progress_tracking == ""
or update
or fastanime_runtime_state.progress_tracking == "prompt"
):
if config.default_media_list_tracking == "track":
fastanime_runtime_state.progress_tracking = "track"
elif config.default_media_list_tracking == "disabled":
fastanime_runtime_state.progress_tracking = "disabled"
else:
options = ["disabled", "track"]
if config.use_fzf:
fastanime_runtime_state.progress_tracking = fzf.run(
options,
"Enter your preferred progress tracking for the current anime",
)
elif config.use_rofi:
fastanime_runtime_state.progress_tracking = Rofi.run(
options,
"Enter your preferred progress tracking for the current anime",
)
else:
fastanime_runtime_state.progress_tracking = fuzzy_inquirer(
options,
"Enter your preferred progress tracking for the current anime",
)
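The new progress-tracking mode ends up persisted alongside the local watch history. The exact on-disk schema is not shown in this changeset; the shape below is inferred from the keys read and written above (episode_no, episode_stopped_at, episode_total_length, progress_tracking) and should be treated as an illustrative sketch only:

    # Illustrative shape of one local watch-history entry, keyed by the
    # AniList media id as a string; values are assumptions based on the keys
    # used in the hunks above.
    watch_history = {
        "21": {
            "episode_no": "1072",
            "episode_stopped_at": "00:14:05",
            "episode_total_length": "00:23:40",
            "progress_tracking": "track",  # or "disabled" / "prompt"
        }
    }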
def anime_provider_search_results_menu(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
@@ -770,7 +853,6 @@ def anime_provider_search_results_menu(
provider_search_results = anime_provider.search_for_anime(
selected_anime_title,
translation_type,
- selected_anime_anilist,
)
if not provider_search_results:
print(
@@ -781,7 +863,7 @@ def anime_provider_search_results_menu(
else:
if not Rofi.confirm("Sth went wrong!!Enter to continue..."):
exit(1)
- return anime_provider_search_results_menu(config, fastanime_runtime_state)
+ return media_actions_menu(config, fastanime_runtime_state)
provider_search_results = {
anime["title"]: anime for anime in provider_search_results["results"]
@@ -810,7 +892,7 @@ def anime_provider_search_results_menu(
if config.use_fzf:
provider_anime_title = fzf.run(
choices,
- prompt="Select Search Result:",
+ prompt="Select Search Result",
header="Anime Search Results",
)
@@ -832,6 +914,11 @@ def anime_provider_search_results_menu(
fastanime_runtime_state.provider_anime_search_result = provider_search_results[
provider_anime_title
]
+ fastanime_runtime_state.progress_tracking = config.watch_history.get(
+ str(fastanime_runtime_state.selected_anime_id_anilist), {}
+ ).get("progress_tracking", "prompt")
+ set_prefered_progress_tracking(config, fastanime_runtime_state)
fetch_anime_episode(config, fastanime_runtime_state)
@@ -875,11 +962,12 @@ def media_actions_menu(
run_mpv(
trailer_url,
ytdl_format=config.format,
+ player=config.player,
)
media_actions_menu(config, fastanime_runtime_state)
else:
if not config.use_rofi:
- print("no trailer available :confused:")
+ print("no trailer available :confused")
input("Enter to continue...")
else:
if not Rofi.confirm("No trailler found!!Enter to continue"):
@@ -954,7 +1042,7 @@ def media_actions_menu(
score = Rofi.ask("Enter Score", is_int=True)
score = max(100, min(0, score))
else:
- score = inquirer.number(
+ score = inquirer.number(  # pyright:ignore
message="Enter the score:",
min_allowed=0,
max_allowed=100,
@@ -1012,7 +1100,7 @@ def media_actions_menu(
options = ["Sub", "Dub"]
if config.use_fzf:
translation_type = fzf.run(
- options, prompt="Select Translation Type:", header="Language Options"
+ options, prompt="Select Translation Type", header="Language Options"
)
elif config.use_rofi:
translation_type = Rofi.run(options, "Select Translation Type")
@@ -1027,6 +1115,42 @@ def media_actions_menu(
media_actions_menu(config, fastanime_runtime_state)
def _change_player(
config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
):
"""Change the translation type to use
Args:
config: [TODO:description]
fastanime_runtime_state: [TODO:description]
"""
# prompt for new translation type
options = ["syncplay", "mpv-mod", "default"]
if config.use_fzf:
player = fzf.run(
options,
prompt="Select Player",
)
elif config.use_rofi:
player = Rofi.run(options, "Select Player")
else:
player = fuzzy_inquirer(
options,
"Select Player",
)
# update internal config
if player == "syncplay":
config.sync_play = True
config.use_python_mpv = False
else:
config.sync_play = False
if player == "mpv-mod":
config.use_python_mpv = True
else:
config.use_python_mpv = False
media_actions_menu(config, fastanime_runtime_state)
def _view_info(config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"):
"""helper function to view info of an anime from terminal
@@ -1140,10 +1264,12 @@ def media_actions_menu(
config: [TODO:description]
fastanime_runtime_state: [TODO:description]
"""
- options = ["allanime", "animepahe"]
+ from ...libs.anime_provider import anime_sources
+ options = list(anime_sources.keys())
if config.use_fzf:
provider = fzf.run(
- options, prompt="Select Translation Type:", header="Language Options"
+ options, prompt="Select Translation Type", header="Language Options"
)
elif config.use_rofi:
provider = Rofi.run(options, "Select Translation Type")
@@ -1155,7 +1281,7 @@ def media_actions_menu(
config.provider = provider
config.anime_provider.provider = provider
- config.anime_provider.lazyload_provider()
+ config.anime_provider.lazyload_provider(provider)
media_actions_menu(config, fastanime_runtime_state)
@@ -1182,17 +1308,25 @@ def media_actions_menu(
config.continue_from_history = False
anime_provider_search_results_menu(config, fastanime_runtime_state)
+ def _set_progress_tracking(
+ config: "Config", fastanime_runtime_state: "FastAnimeRuntimeState"
+ ):
+ set_prefered_progress_tracking(config, fastanime_runtime_state, update=True)
+ media_actions_menu(config, fastanime_runtime_state)
icons = config.icons
options = {
f"{'📽️ ' if icons else ''}Stream ({progress}/{episodes_total})": _stream_anime,
f"{'📽️ ' if icons else ''}Episodes": _select_episode_to_stream,
f"{'📼 ' if icons else ''}Watch Trailer": _watch_trailer,
f"{'' if icons else ''}Score Anime": _score_anime,
+ f"{'' if icons else ''}Progress Tracking": _set_progress_tracking,
f"{'📥 ' if icons else ''}Add to List": _add_to_list,
f"{'📤 ' if icons else ''}Remove from List": _remove_from_list,
f"{'📖 ' if icons else ''}View Info": _view_info,
f"{'🎧 ' if icons else ''}Change Translation Type": _change_translation_type,
f"{'💽 ' if icons else ''}Change Provider": _change_provider,
+ f"{'💽 ' if icons else ''}Change Player": _change_player,
f"{'🔘 ' if icons else ''}Toggle auto select anime": _toggle_auto_select, # WARN: problematic if you choose an anime that doesnt match id
f"{'💠 ' if icons else ''}Toggle auto next episode": _toggle_auto_next,
f"{'🔘 ' if icons else ''}Toggle continue from history": _toggle_continue_from_history,
@@ -1201,7 +1335,7 @@ def media_actions_menu(
}
choices = list(options.keys())
if config.use_fzf:
- action = fzf.run(choices, prompt="Select Action:", header="Anime Menu")
+ action = fzf.run(choices, prompt="Select Action", header="Anime Menu")
elif config.use_rofi:
action = Rofi.run(choices, "Select Action")
else:
@@ -1224,7 +1358,9 @@ def anilist_results_menu(
config: [TODO:description]
fastanime_runtime_state: [TODO:description]
"""
- search_results = fastanime_runtime_state.anilist_data["data"]["Page"]["media"]
+ search_results = fastanime_runtime_state.anilist_results_data["data"]["Page"][
+ "media"
+ ]
anime_data = {}
for anime in search_results:
@@ -1262,19 +1398,19 @@ def anilist_results_menu(
choices = [*anime_data.keys(), "Back"]
if config.use_fzf:
if config.preview:
- from .utils import get_fzf_preview
- preview = get_fzf_preview(search_results, anime_data.keys())
+ from .utils import get_fzf_anime_preview
+ preview = get_fzf_anime_preview(search_results, anime_data.keys())
selected_anime_title = fzf.run(
choices,
- prompt="Select Anime: ",
+ prompt="Select Anime",
header="Search Results",
preview=preview,
)
else:
selected_anime_title = fzf.run(
choices,
- prompt="Select Anime: ",
+ prompt="Select Anime",
header="Search Results",
)
elif config.use_rofi:
@@ -1445,6 +1581,9 @@ def fastanime_main_menu(
else:
config.load_config()
+ config.anime_provider.provider = config.provider
+ config.anime_provider.lazyload_provider(config.provider)
fastanime_main_menu(config, fastanime_runtime_state)
icons = config.icons
@@ -1486,7 +1625,7 @@ def fastanime_main_menu(
if config.use_fzf:
action = fzf.run(
choices,
- prompt="Select Action: ",
+ prompt="Select Action",
header="Anilist Menu",
)
elif config.use_rofi:
@@ -1500,7 +1639,7 @@ def fastanime_main_menu(
# anilist data is a (bool,data)
# the bool indicated success
if anilist_data[0]:
- fastanime_runtime_state.anilist_data = anilist_data[1]
+ fastanime_runtime_state.anilist_results_data = anilist_data[1]
anilist_results_menu(config, fastanime_runtime_state)
else:

View File

@@ -7,7 +7,7 @@ import textwrap
from threading import Thread
import requests
- from yt_dlp.utils import clean_html
+ from yt_dlp.utils import clean_html, sanitize_filename
from ...constants import APP_CACHE_DIR
from ...libs.anilist.types import AnilistBaseMediaDataSchema
@@ -93,7 +93,7 @@ def write_search_results(
# NOTE: Will probably make this a configuraable option
HEADER_COLOR = 215, 0, 95
SEPARATOR_COLOR = 208, 208, 208
- SEPARATOR_WIDTH = 45
+ SEPARATOR_WIDTH = 30
# use concurency to download and write as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
future_to_task = {}
@@ -104,6 +104,11 @@ def write_search_results(
image_url
)
+ mediaListName = "Not in any of your lists"
+ progress = "UNKNOWN"
+ if anime_list := anime["mediaListEntry"]:
+ mediaListName = anime_list["status"]
+ progress = anime_list["progress"]
# handle the text data
template = f"""
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
@@ -118,6 +123,9 @@ def write_search_results(
{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate'])}
{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate'])}
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
+ {get_true_fg('Media List:',*HEADER_COLOR)} {mediaListName}
+ {get_true_fg('Progress:',*HEADER_COLOR)} {progress}
+ {get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
{get_true_fg('Description:',*HEADER_COLOR)}
"""
template = textwrap.dedent(template)
@@ -168,23 +176,120 @@ def get_rofi_icons(
logger.error("%r generated an exception: %s" % (url, e))
- def get_fzf_preview(
- anilist_results: list[AnilistBaseMediaDataSchema], titles, wait=False
- ):
- """A helper function that constructs data to be used for the fzf preview
+ # get rofi icons
+ def get_fzf_manga_preview(manga_results, workers=None, wait=False):
+ """A helper function to make sure that the images are downloaded so they can be used as icons
Args:
- titles (list[str]): The sanitized titles to use, NOTE: its important that they are sanitized since thay will be used as filenames
- wait (bool): whether to block the ui as we wait for preview defaults to false
- anilist_results: the anilist results got from an anilist action
- Returns:
- THe fzf preview script to use
+ titles (list[str]): sanitized titles of the anime; NOTE: its important that they are sanitized since they are used as the filenames of the images
+ workers ([TODO:parameter]): Number of threads to use to download the images; defaults to as many as possible
+ anilist_results: the anilist results from an anilist action
"""
# ensure images and info exists
def _worker():
# use concurrency to download the images as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
# load the jobs
future_to_url = {}
for manga in manga_results:
image_url = manga["poster"]
future_to_url[
executor.submit(
save_image_from_url,
image_url,
sanitize_filename(manga["title"]),
)
] = image_url
# execute the jobs
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
future.result()
except Exception as e:
logger.error("%r generated an exception: %s" % (url, e))
background_worker = Thread(
- target=write_search_results, args=(anilist_results, titles)
+ target=_worker,
)
# ensure images and info exists
background_worker.daemon = True
background_worker.start()
# the preview script is in bash so making sure fzf doesnt use any other shell lang to process the preview script
os.environ["SHELL"] = shutil.which("bash") or "bash"
preview = """
%s
if [ -s %s/{} ]; then fzf-preview %s/{}
else echo Loading...
fi
""" % (
fzf_preview,
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
)
if wait:
background_worker.join()
return preview
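These preview helpers all follow the same pattern: download the artwork in a background thread pool keyed by filename, then hand fzf a preview script that reads from the cache directory. save_image_from_url itself is defined elsewhere in this module, so the standalone equivalent below is an assumption meant only to illustrate the download pattern:

    # Minimal, self-contained version of the concurrent download pattern.
    import concurrent.futures
    import os
    import requests

    def save_image_from_url(url: str, filename: str, cache_dir: str = "/tmp/fastanime-previews"):
        os.makedirs(cache_dir, exist_ok=True)
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        with open(os.path.join(cache_dir, filename), "wb") as f:
            f.write(response.content)

    urls = {"demo-poster": "https://example.com/poster.jpg"}  # placeholder data
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {executor.submit(save_image_from_url, u, name): u for name, u in urls.items()}
        for future in concurrent.futures.as_completed(futures):
            future.result()  # surface download errors (the helpers above log them instead)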
# get rofi icons
def get_fzf_episode_preview(
anilist_result: AnilistBaseMediaDataSchema, episodes, workers=None, wait=False
):
"""A helper function to make sure that the images are downloaded so they can be used as icons
Args:
titles (list[str]): sanitized titles of the anime; NOTE: its important that they are sanitized since they are used as the filenames of the images
workers ([TODO:parameter]): Number of threads to use to download the images; defaults to as many as possible
anilist_results: the anilist results from an anilist action
"""
HEADER_COLOR = 215, 0, 95
import re
def _worker():
# use concurrency to download the images as fast as possible
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
# load the jobs
future_to_url = {}
for episode in episodes:
episode_title = ""
image_url = ""
for episode_detail in anilist_result["streamingEpisodes"]:
if re.match(f"Episode {episode} ", episode_detail["title"]):
episode_title = episode_detail["title"]
image_url = episode_detail["thumbnail"]
if episode_title and image_url:
# actual link to download image from
if not image_url:
continue
future_to_url[
executor.submit(save_image_from_url, image_url, episode)
] = image_url
template = textwrap.dedent(
f"""
{get_true_fg('Anime Title:',*HEADER_COLOR)} {anilist_result['title']['romaji'] or anilist_result['title']['english']}
{get_true_fg('Episode Title:',*HEADER_COLOR)} {episode_title}
"""
)
future_to_url[
executor.submit(save_info_from_str, template, episode)
] = episode_title
# execute the jobs
for future in concurrent.futures.as_completed(future_to_url):
url = future_to_url[future]
try:
future.result()
except Exception as e:
logger.error("%r generated an exception: %s" % (url, e))
background_worker = Thread(
target=_worker,
)
# ensure images and info exists
background_worker.daemon = True
background_worker.start()
@@ -208,3 +313,68 @@ def get_fzf_preview(
if wait:
background_worker.join()
return preview
def get_fzf_anime_preview(
anilist_results: list[AnilistBaseMediaDataSchema], titles, wait=False
):
"""A helper function that constructs data to be used for the fzf preview
Args:
titles (list[str]): The sanitized titles to use, NOTE: its important that they are sanitized since thay will be used as filenames
wait (bool): whether to block the ui as we wait for preview defaults to false
anilist_results: the anilist results got from an anilist action
Returns:
THe fzf preview script to use
"""
# ensure images and info exists
from ...constants import S_PLATFORM
background_worker = Thread(
target=write_search_results, args=(anilist_results, titles)
)
background_worker.daemon = True
background_worker.start()
# the preview script is in bash so making sure fzf doesnt use any other shell lang to process the preview script
os.environ["SHELL"] = shutil.which("bash") or "bash"
if S_PLATFORM == "win32":
preview = """
%s
title={}
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
if [ -s "%s\\\\\\$title" ]; then
if command -v chafa >/dev/null;then
chafa -f kitty -s $dim "%s\\\\\\$title"
fi
else echo Loading...
fi
if [ -s "%s\\\\\\$title" ]; then cat "%s\\\\\\$title"
else echo Loading...
fi
""" % (
fzf_preview,
IMAGES_CACHE_DIR.replace("\\","\\\\\\"),
IMAGES_CACHE_DIR.replace("\\","\\\\\\"),
ANIME_INFO_CACHE_DIR.replace("\\","\\\\\\"),
ANIME_INFO_CACHE_DIR.replace("\\","\\\\\\"),
)
else:
preview = """
%s
if [ -s %s/{} ]; then fzf-preview %s/{}
else echo Loading...
fi
if [ -s %s/{} ]; then cat %s/{}
else echo Loading...
fi
""" % (
fzf_preview,
IMAGES_CACHE_DIR,
IMAGES_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
ANIME_INFO_CACHE_DIR,
)
if wait:
background_worker.join()
return preview

View File

@@ -0,0 +1,12 @@
import shutil
import subprocess
from sys import exit
def feh_manga_viewer(image_links: list[str], window_title: str):
FEH_EXECUTABLE = shutil.which("feh")
if not FEH_EXECUTABLE:
print("feh not found")
exit(1)
commands = [FEH_EXECUTABLE, *image_links, "--title", window_title]
subprocess.run(commands)
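The new feh-based viewer simply hands the page links to feh with a window title. A possible invocation, with placeholder URLs, might look like:

    # Example usage of the viewer above; the image links are placeholders.
    if __name__ == "__main__":
        feh_manga_viewer(
            ["https://example.com/page-01.png", "https://example.com/page-02.png"],
            "Demo Manga - Chapter 1",
        )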

View File

@@ -50,75 +50,127 @@ def stream_video(MPV, url, mpv_args, custom_args):
def run_mpv( def run_mpv(
link: str, link: str,
title: str | None = "", title: str = "",
start_time: str = "0", start_time: str = "0",
ytdl_format="", ytdl_format="",
custom_args=[], custom_args=[],
headers={}, headers={},
subtitles=[], subtitles=[],
player="",
): ):
# Determine if mpv is available
MPV = shutil.which("mpv")
# If title is None, set a default value # If title is None, set a default value
# Regex to check if the link is a YouTube URL # Regex to check if the link is a YouTube URL
youtube_regex = r"(https?://)?(www\.)?(youtube|youtu|youtube-nocookie)\.(com|be)/.+" youtube_regex = r"(https?://)?(www\.)?(youtube|youtu|youtube-nocookie)\.(com|be)/.+"
if not MPV and not S_PLATFORM == "win32": if player == "vlc":
# Determine if the link is a YouTube URL VLC = shutil.which("vlc")
if re.match(youtube_regex, link): if not VLC and not S_PLATFORM == "win32":
# Android specific commands to launch mpv with a YouTube URL # Determine if the link is a YouTube URL
args = [ if re.match(youtube_regex, link):
"nohup", # Android specific commands to launch mpv with a YouTube URL
"am", args = [
"start", "nohup",
"--user", "am",
"0", "start",
"-a", "--user",
"android.intent.action.VIEW", "0",
"-d", "-a",
link, "android.intent.action.VIEW",
"-n", "-d",
"com.google.android.youtube/.UrlActivity", link,
] "-n",
"com.google.android.youtube/.UrlActivity",
]
return "0", "0"
else:
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"org.videolan.vlc/org.videolan.vlc.gui.video.VideoPlayerActivity",
"-e",
"title",
title,
]
subprocess.run(args)
return "0", "0" return "0", "0"
else: else:
# Android specific commands to launch mpv with a regular URL args = ["vlc", link]
args = [ for subtitle in subtitles:
"nohup", args.append("--sub-file")
"am", args.append(subtitle["url"])
"start", break
"--user", if title:
"0", args.append("--video-title")
"-a", args.append(title)
"android.intent.action.VIEW", subprocess.run(args)
"-d", return "0", "0"
link,
"-n",
"is.xyz.mpv/.MPVActivity",
]
subprocess.run(args)
return "0", "0"
else: else:
# General mpv command with custom arguments # Determine if mpv is available
mpv_args = [] MPV = shutil.which("mpv")
if headers: if not MPV and not S_PLATFORM == "win32":
mpv_headers = "--http-header-fields=" # Determine if the link is a YouTube URL
for header_name, header_value in headers.items(): if re.match(youtube_regex, link):
mpv_headers += f"{header_name}:{header_value}," # Android specific commands to launch mpv with a YouTube URL
mpv_args.append(mpv_headers) args = [
for subtitle in subtitles: "nohup",
mpv_args.append(f"--sub-file={subtitle['url']}") "am",
if start_time != "0": "start",
mpv_args.append(f"--start={start_time}") "--user",
if title: "0",
mpv_args.append(f"--title={title}") "-a",
if ytdl_format: "android.intent.action.VIEW",
mpv_args.append(f"--ytdl-format={ytdl_format}") "-d",
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args) link,
return stop_time, total_time "-n",
"com.google.android.youtube/.UrlActivity",
]
return "0", "0"
else:
# Android specific commands to launch mpv with a regular URL
args = [
"nohup",
"am",
"start",
"--user",
"0",
"-a",
"android.intent.action.VIEW",
"-d",
link,
"-n",
"is.xyz.mpv/.MPVActivity",
]
subprocess.run(args)
return "0", "0"
else:
# General mpv command with custom arguments
mpv_args = []
if headers:
mpv_headers = "--http-header-fields="
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_args.append(mpv_headers)
for subtitle in subtitles:
mpv_args.append(f"--sub-file={subtitle['url']}")
if start_time != "0":
mpv_args.append(f"--start={start_time}")
if title:
mpv_args.append(f"--title={title}")
if ytdl_format:
mpv_args.append(f"--ytdl-format={ytdl_format}")
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
return stop_time, total_time
# Example usage # Example usage
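For reference, on desktop run_mpv() ultimately just builds an mpv argument list from the headers, subtitles, start time, and title. The condensed sketch below uses only documented mpv flags and placeholder values; it is not the project's exact implementation:

    # A condensed sketch of the desktop argument building done by run_mpv().
    import shutil
    import subprocess

    def play_with_mpv(link, title="", start_time="0", headers=None, subtitles=None):
        mpv = shutil.which("mpv")
        if not mpv:
            raise RuntimeError("mpv not found on PATH")
        args = [mpv, link]
        if headers:
            # mpv accepts a comma-separated list of "Name:Value" header fields
            args.append("--http-header-fields=" + ",".join(f"{k}:{v}" for k, v in headers.items()))
        for sub in subtitles or []:
            args.append(f"--sub-file={sub['url']}")
        if start_time != "0":
            args.append(f"--start={start_time}")
        if title:
            args.append(f"--title={title}")
        subprocess.run(args)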

View File

@@ -3,13 +3,14 @@ from typing import TYPE_CHECKING
import mpv
from ...anilist import AniList
- from .utils import filter_by_quality
+ from .utils import filter_by_quality, move_preferred_subtitle_lang_to_top
if TYPE_CHECKING:
from typing import Literal
from ...AnimeProvider import AnimeProvider
from ..config import Config
+ from .tools import FastAnimeRuntimeState
def format_time(duration_in_secs: float): def format_time(duration_in_secs: float):
@@ -22,6 +23,7 @@ def format_time(duration_in_secs: float):
class MpvPlayer(object):
anime_provider: "AnimeProvider"
config: "Config"
+ subs = []
mpv_player: "mpv.MPV"
last_stop_time: str = "0"
last_total_time: str = "0"
@@ -66,7 +68,11 @@ class MpvPlayer(object):
current_episode_number = (
fastanime_runtime_state.provider_current_episode_number
)
- config.update_watch_history(anime_id_anilist, str(current_episode_number))
+ config.media_list_track(
+ anime_id_anilist,
+ episode_no=str(current_episode_number),
+ progress_tracking=fastanime_runtime_state.progress_tracking,
+ )
elif type == "reload": elif type == "reload":
if current_episode_number not in total_episodes: if current_episode_number not in total_episodes:
self.mpv_player.show_text("Episode not available") self.mpv_player.show_text("Episode not available")
@@ -82,7 +88,11 @@ class MpvPlayer(object):
self.mpv_player.show_text(f"Fetching episode {ep_no}") self.mpv_player.show_text(f"Fetching episode {ep_no}")
current_episode_number = ep_no current_episode_number = ep_no
config.update_watch_history(anime_id_anilist, str(ep_no)) config.media_list_track(
anime_id_anilist,
episode_no=str(ep_no),
progress_tracking=fastanime_runtime_state.progress_tracking,
)
fastanime_runtime_state.provider_current_episode_number = str(ep_no) fastanime_runtime_state.provider_current_episode_number = str(ep_no)
else: else:
self.mpv_player.show_text("Fetching previous episode...") self.mpv_player.show_text("Fetching previous episode...")
@@ -95,7 +105,11 @@ class MpvPlayer(object):
current_episode_number = ( current_episode_number = (
fastanime_runtime_state.provider_current_episode_number fastanime_runtime_state.provider_current_episode_number
) )
config.update_watch_history(anime_id_anilist, str(current_episode_number)) config.media_list_track(
anime_id_anilist,
episode_no=str(current_episode_number),
progress_tracking=fastanime_runtime_state.progress_tracking,
)
# update episode progress # update episode progress
if config.user and current_episode_number: if config.user and current_episode_number:
AniList.update_anime_list( AniList.update_anime_list(
@@ -106,14 +120,14 @@ class MpvPlayer(object):
) )
# get them juicy streams # get them juicy streams
episode_streams = anime_provider.get_episode_streams( episode_streams = anime_provider.get_episode_streams(
provider_anime, provider_anime["id"],
provider_anime["title"],
current_episode_number, current_episode_number,
translation_type, translation_type,
fastanime_runtime_state.selected_anime_anilist,
) )
if not episode_streams: if not episode_streams:
self.mpv_player.show_text("No streams were found") self.mpv_player.show_text("No streams were found")
return None return
# always select the first # always select the first
if server == "top": if server == "top":
@@ -131,8 +145,20 @@ class MpvPlayer(object):
self.mpv_player.show_text( self.mpv_player.show_text(
f"Invalid server!!; servers available are: {episode_streams_dict.keys()}", f"Invalid server!!; servers available are: {episode_streams_dict.keys()}",
) )
return None return
self.current_media_title = selected_server["episode_title"] self.current_media_title = selected_server["episode_title"]
if config.normalize_titles:
import re
for episode_detail in fastanime_runtime_state.selected_anime_anilist[
"streamingEpisodes"
]:
if re.match(
f"Episode {current_episode_number} ", episode_detail["title"]
):
self.current_media_title = episode_detail["title"]
break
links = selected_server["links"] links = selected_server["links"]
stream_link_ = filter_by_quality(quality, links) stream_link_ = filter_by_quality(quality, links)
@@ -142,17 +168,23 @@ class MpvPlayer(object):
self.mpv_player._set_property("start", "0") self.mpv_player._set_property("start", "0")
stream_link = stream_link_["link"] stream_link = stream_link_["link"]
fastanime_runtime_state.provider_current_episode_stream_link = stream_link fastanime_runtime_state.provider_current_episode_stream_link = stream_link
self.subs = move_preferred_subtitle_lang_to_top(
selected_server["subtitles"], config.sub_lang
)
return stream_link return stream_link
def create_player( def create_player(
self, self,
stream_link, stream_link,
anime_provider: "AnimeProvider", anime_provider: "AnimeProvider",
fastanime_runtime_state, fastanime_runtime_state: "FastAnimeRuntimeState",
config: "Config", config: "Config",
title, title,
start_time,
headers={}, headers={},
subtitles=[],
): ):
self.subs = subtitles
self.anime_provider = anime_provider self.anime_provider = anime_provider
self.fastanime_runtime_state = fastanime_runtime_state self.fastanime_runtime_state = fastanime_runtime_state
self.config = config self.config = config
@@ -171,17 +203,6 @@ class MpvPlayer(object):
osc=True, osc=True,
ytdl=True, ytdl=True,
) )
mpv_player.force_window = config.force_window
# mpv_player.cache = "yes"
# mpv_player.cache_pause = "no"
mpv_player.title = title
mpv_headers = ""
if headers:
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_player.http_header_fields = mpv_headers
mpv_player.play(stream_link)
# -- events -- # -- events --
@mpv_player.event_callback("file-loaded") @mpv_player.event_callback("file-loaded")
@@ -190,6 +211,22 @@ class MpvPlayer(object):
self.player_fetching = False self.player_fetching = False
if isinstance(d, float): if isinstance(d, float):
self.last_total_time = format_time(d) self.last_total_time = format_time(d)
try:
if not mpv_player.core_shutdown:
if self.subs:
for i, subtitle in enumerate(self.subs):
if i == 0:
flag = "select"
else:
flag = "auto"
mpv_player.sub_add(
subtitle["url"], flag, None, subtitle["language"]
)
self.subs = []
except mpv.ShutdownError:
pass
except Exception:
pass
@mpv_player.property_observer("time-pos") @mpv_player.property_observer("time-pos")
def handle_time_start_update(*args): def handle_time_start_update(*args):
@@ -218,7 +255,9 @@ class MpvPlayer(object):
def _next_episode(): def _next_episode():
url = self.get_episode("next") url = self.get_episode("next")
if url: if url:
mpv_player.loadfile(url, options=f"title={self.current_media_title}") mpv_player.loadfile(
url,
)
mpv_player.title = self.current_media_title mpv_player.title = self.current_media_title
@mpv_player.on_key_press("shift+p") @mpv_player.on_key_press("shift+p")
@@ -244,7 +283,6 @@ class MpvPlayer(object):
mpv_player.show_text("Changing translation type...") mpv_player.show_text("Changing translation type...")
anime = anime_provider.get_anime( anime = anime_provider.get_anime(
fastanime_runtime_state.provider_anime_search_result["id"], fastanime_runtime_state.provider_anime_search_result["id"],
fastanime_runtime_state.selected_anime_anilist,
) )
if not anime: if not anime:
mpv_player.show_text("Failed to update translation type") mpv_player.show_text("Failed to update translation type")
@@ -327,7 +365,23 @@ class MpvPlayer(object):
mpv_player.register_message_handler("select-quality", select_quality) mpv_player.register_message_handler("select-quality", select_quality)
self.mpv_player = mpv_player self.mpv_player = mpv_player
return mpv_player mpv_player.force_window = config.force_window
# mpv_player.cache = "yes"
# mpv_player.cache_pause = "no"
mpv_player.title = title
mpv_headers = ""
if headers:
for header_name, header_value in headers.items():
mpv_headers += f"{header_name}:{header_value},"
mpv_player.http_header_fields = mpv_headers
mpv_player.play(stream_link)
if not start_time == "0":
mpv_player.start = start_time
mpv_player.wait_for_shutdown()
mpv_player.terminate()
player = MpvPlayer() player = MpvPlayer()

View File

@@ -1,78 +1,53 @@
# this script was written by the fzf devs as an example on how to preview images
# its only here for convinience
fzf_preview = r""" fzf_preview = r"""
# #
# The purpose of this script is to demonstrate how to preview a file or an # Adapted from the preview script in the fzf repo
# image in the preview window of fzf.
# #
# Dependencies: # Dependencies:
# - https://github.com/sharkdp/bat
# - https://github.com/hpjansson/chafa # - https://github.com/hpjansson/chafa
# - https://iterm2.com/utilities/imgcat # - https://iterm2.com/utilities/imgcat
fzf-preview(){ #
if [[ $# -ne 1 ]]; then fzf-preview() {
>&2 echo "usage: $0 FILENAME" file=${1/#\~\//$HOME/}
exit 1 dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
fi if [[ $dim = x ]]; then
dim=$(stty size </dev/tty | awk '{print $2 "x" $1}')
elif ! [[ $KITTY_WINDOW_ID ]] && ((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size </dev/tty | awk '{print $1}'))); then
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
# * https://github.com/junegunn/fzf/issues/2544
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
fi
file=${1/#\~\//$HOME/} # 1. Use kitty icat on kitty terminal
type=$(file --dereference --mime -- "$file") if [[ $KITTY_WINDOW_ID ]]; then
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
# you have to use 'stream'.
#
# 2. The last line of the output is the ANSI reset code without newline.
# This confuses fzf and makes it render scroll offset indicator.
# So we remove the last line and append the reset code to its previous line.
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
if [[ ! $type =~ image/ ]]; then # 2. Use chafa with Sixel output
if [[ $type =~ =binary ]]; then elif command -v chafa >/dev/null; then
file "$1" case "$(uname -a)" in
exit # termux does not support sixel graphics
fi # and produces weird output
*ndroid*) chafa -s "$dim" "$file";;
*) chafa -f sixel -s "$dim" "$file";;
esac
# Add a new line character so that fzf can display multiple images in the preview window
echo
# Sometimes bat is installed as batcat. # 3. If chafa is not found but imgcat is available, use it on iTerm2
if command -v batcat > /dev/null; then elif command -v imgcat >/dev/null; then
batname="batcat" # NOTE: We should use https://iterm2.com/utilities/it2check to check if the
elif command -v bat > /dev/null; then # user is running iTerm2. But for the sake of simplicity, we just assume
batname="bat" # that's the case here.
else imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
cat "$1"
exit
fi
${batname} --style="${BAT_STYLE:-numbers}" --color=always --pager=never -- "$file" # 4. Cannot find any suitable method to preview the image
exit else
fi echo install chafa or imgcat or install kitty terminal so you can enjoy image previews
fi
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
if [[ $dim = x ]]; then
dim=$(stty size < /dev/tty | awk '{print $2 "x" $1}')
elif ! [[ $KITTY_WINDOW_ID ]] && (( FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size < /dev/tty | awk '{print $1}') )); then
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
# * https://github.com/junegunn/fzf/issues/2544
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
fi
# 1. Use kitty icat on kitty terminal
if [[ $KITTY_WINDOW_ID ]]; then
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
# you have to use 'stream'.
#
# 2. The last line of the output is the ANSI reset code without newline.
# This confuses fzf and makes it render scroll offset indicator.
# So we remove the last line and append the reset code to its previous line.
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
# 2. Use chafa with Sixel output
elif command -v chafa > /dev/null; then
chafa -f sixel -s "$dim" "$file"
# Add a new line character so that fzf can display multiple images in the preview window
echo
# 3. If chafa is not found but imgcat is available, use it on iTerm2
elif command -v imgcat > /dev/null; then
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
# user is running iTerm2. But for the sake of simplicity, we just assume
# that's the case here.
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
# 4. Cannot find any suitable method to preview the image
else
file "$file"
fi
} }
""" """

View File

@@ -1,41 +1,49 @@
- # TODO: add typing
- class FastAnimeRuntimeState(dict):
+ from typing import TYPE_CHECKING
+ if TYPE_CHECKING:
+ from typing import Any
+ from ...libs.anilist.types import AnilistBaseMediaDataSchema
+ from ...libs.anime_provider.types import Anime, EpisodeStream, SearchResult, Server
+ class FastAnimeRuntimeState(object):
"""A class that manages fastanime runtime during anilist command runtime"""
- def __getattr__(self, attr):
- try:
- return self.__getitem__(attr)
- except KeyError:
- raise AttributeError(
- "%r object has no attribute %r" % (self.__class__.__name__, attr)
- )
- def __setattr__(self, attr, value):
- self.__setitem__(attr, value)
+ provider_current_episode_stream_link: str
+ provider_current_server: "Server"
+ provider_current_server_name: str
+ provider_available_episodes: list[str]
+ provider_current_episode_number: str
+ provider_server_episode_streams: list["EpisodeStream"]
+ provider_anime_title: str
+ provider_anime: "Anime"
+ provider_anime_search_result: "SearchResult"
+ progress_tracking: str = ""
+ selected_anime_anilist: "AnilistBaseMediaDataSchema"
+ selected_anime_id_anilist: int
+ selected_anime_title_anilist: str
+ # current_anilist_data: "AnilistDataSchema | AnilistMediaList"
+ anilist_results_data: "Any"
def exit_app(exit_code=0, *args): def exit_app(exit_code=0, *args):
import os
import shutil
import sys import sys
from rich.console import Console
from ...constants import APP_NAME, ICON_PATH, USER_NAME from ...constants import APP_NAME, ICON_PATH, USER_NAME
def is_running_in_terminal(): console = Console()
if not console.is_terminal:
try: try:
shutil.get_terminal_size() from plyer import notification
return ( except ImportError:
sys.stdin print(
and sys.stdin.isatty() "Plyer is not installed; install it for desktop notifications to be enabled"
and sys.stdout.isatty()
and os.getenv("TERM") is not None
) )
except OSError: exit(1)
return False
if not is_running_in_terminal():
from plyer import notification
notification.notify( notification.notify(
app_name=APP_NAME, app_name=APP_NAME,
app_icon=ICON_PATH, app_icon=ICON_PATH,
@@ -43,7 +51,6 @@ def exit_app(exit_code=0, *args):
title="Shutting down", title="Shutting down",
) # pyright:ignore ) # pyright:ignore
else: else:
from rich import print console.clear()
console.print("Have a good day :smile:", USER_NAME)
print("Have a good day :smile:", USER_NAME)
sys.exit(exit_code) sys.exit(exit_code)

View File

@@ -19,6 +19,27 @@ BG_GREEN = "\033[48;2;120;233;12;m"
GREEN = "\033[38;2;45;24;45;m"
def get_requested_quality_or_default_to_first(url, quality):
import yt_dlp
with yt_dlp.YoutubeDL({"quiet": True, "silent": True, "no_warnings": True}) as ydl:
m3u8_info = ydl.extract_info(url, False)
if not m3u8_info:
return
m3u8_formats = m3u8_info["formats"]
quality = int(quality)
quality_u = quality - 80
quality_l = quality + 80
for m3u8_format in m3u8_formats:
if m3u8_format["height"] == quality or (
m3u8_format["height"] < quality_u and m3u8_format["height"] > quality_l
):
return m3u8_format["url"]
else:
return m3u8_formats[0]["url"]
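A note on the new quality helper above: the bounds check ("height < quality - 80 and height > quality + 80") can never be true, so in practice it returns the first format unless an exact height match exists. A distance-based pick, sketched below under the assumption that a closest match was intended, would behave more predictably; treat it as a suggestion, not the project's implementation:

    # Sketch of a closest-match quality pick over yt-dlp's extracted formats.
    import yt_dlp

    def pick_format_url(url: str, quality: int) -> str | None:
        with yt_dlp.YoutubeDL({"quiet": True, "no_warnings": True}) as ydl:
            info = ydl.extract_info(url, download=False)
        if not info:
            return None
        formats = [f for f in info["formats"] if f.get("height")]
        if not formats:
            return None
        best = min(formats, key=lambda f: abs(f["height"] - quality))
        return best["url"]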
def move_preferred_subtitle_lang_to_top(sub_list, lang_str):
"""Moves the dictionary with the given ID to the front of the list.
@@ -125,7 +146,7 @@ def fuzzy_inquirer(choices: list, prompt: str, **kwargs):
from click import clear
clear()
- action = inquirer.fuzzy(
+ action = inquirer.fuzzy(  # pyright:ignore
prompt,
choices,
height="100%",

View File

@@ -3,7 +3,9 @@ import sys
from pathlib import Path from pathlib import Path
from platform import system from platform import system
from . import APP_NAME, AUTHOR, __version__ import click
from . import APP_NAME, __version__
PLATFORM = system() PLATFORM = system()
@@ -17,19 +19,20 @@ if PLATFORM == "Windows":
ICON_PATH = os.path.join(ASSETS_DIR, "logo.ico") ICON_PATH = os.path.join(ASSETS_DIR, "logo.ico")
else: else:
ICON_PATH = os.path.join(ASSETS_DIR, "logo.png") ICON_PATH = os.path.join(ASSETS_DIR, "logo.png")
PREVIEW_IMAGE = os.path.join(ASSETS_DIR, "preview") # PREVIEW_IMAGE = os.path.join(ASSETS_DIR, "preview")
# ----- user configs and data ----- # ----- user configs and data -----
S_PLATFORM = sys.platform S_PLATFORM = sys.platform
APP_DATA_DIR = click.get_app_dir(APP_NAME, roaming=False)
if S_PLATFORM == "win32": if S_PLATFORM == "win32":
# app data # app data
app_data_dir_base = os.getenv("LOCALAPPDATA") # app_data_dir_base = os.getenv("LOCALAPPDATA")
if not app_data_dir_base: # if not app_data_dir_base:
raise RuntimeError("Could not determine app data dir please report to devs") # raise RuntimeError("Could not determine app data dir please report to devs")
APP_DATA_DIR = os.path.join(app_data_dir_base, AUTHOR, APP_NAME) # APP_DATA_DIR = os.path.join(app_data_dir_base, AUTHOR, APP_NAME)
#
# cache dir # cache dir
APP_CACHE_DIR = os.path.join(APP_DATA_DIR, "cache") APP_CACHE_DIR = os.path.join(APP_DATA_DIR, "cache")
@@ -39,9 +42,9 @@ if S_PLATFORM == "win32":
elif S_PLATFORM == "darwin": elif S_PLATFORM == "darwin":
# app data # app data
app_data_dir_base = os.path.expanduser("~/Library/Application Support") # app_data_dir_base = os.path.expanduser("~/Library/Application Support")
APP_DATA_DIR = os.path.join(app_data_dir_base, APP_NAME, __version__) # APP_DATA_DIR = os.path.join(app_data_dir_base, APP_NAME, __version__)
#
# cache dir # cache dir
cache_dir_base = os.path.expanduser("~/Library/Caches") cache_dir_base = os.path.expanduser("~/Library/Caches")
APP_CACHE_DIR = os.path.join(cache_dir_base, APP_NAME, __version__) APP_CACHE_DIR = os.path.join(cache_dir_base, APP_NAME, __version__)
@@ -50,12 +53,12 @@ elif S_PLATFORM == "darwin":
video_dir_base = os.path.expanduser("~/Movies") video_dir_base = os.path.expanduser("~/Movies")
USER_VIDEOS_DIR = os.path.join(video_dir_base, APP_NAME) USER_VIDEOS_DIR = os.path.join(video_dir_base, APP_NAME)
else: else:
# app data # # app data
app_data_dir_base = os.environ.get("XDG_CONFIG_HOME", "") # app_data_dir_base = os.environ.get("XDG_CONFIG_HOME", "")
if not app_data_dir_base.strip(): # if not app_data_dir_base.strip():
app_data_dir_base = os.path.expanduser("~/.config") # app_data_dir_base = os.path.expanduser("~/.config")
APP_DATA_DIR = os.path.join(app_data_dir_base, APP_NAME) # APP_DATA_DIR = os.path.join(app_data_dir_base, APP_NAME)
#
# cache dir # cache dir
cache_dir_base = os.environ.get("XDG_CACHE_HOME", "") cache_dir_base = os.environ.get("XDG_CACHE_HOME", "")
if not cache_dir_base.strip(): if not cache_dir_base.strip():
@@ -75,6 +78,7 @@ Path(USER_VIDEOS_DIR).mkdir(parents=True, exist_ok=True)
# useful paths # useful paths
USER_DATA_PATH = os.path.join(APP_DATA_DIR, "user_data.json") USER_DATA_PATH = os.path.join(APP_DATA_DIR, "user_data.json")
USER_WATCH_HISTORY_PATH = os.path.join(APP_DATA_DIR, "watch_history.json")
USER_CONFIG_PATH = os.path.join(APP_DATA_DIR, "config.ini") USER_CONFIG_PATH = os.path.join(APP_DATA_DIR, "config.ini")
LOG_FILE_PATH = os.path.join(APP_DATA_DIR, "fastanime.log") LOG_FILE_PATH = os.path.join(APP_DATA_DIR, "fastanime.log")
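
The replacement above swaps the hand-rolled per-platform branches for click.get_app_dir. A quick sketch of what that call resolves to (click is a confirmed dependency here; the APP_NAME value and the paths in the comments are typical defaults, not guarantees):

import click

APP_NAME = "FastAnime"  # placeholder; the real value is imported from the package

app_dir = click.get_app_dir(APP_NAME, roaming=False)
# Typically resolves to:
#   Linux/BSD: $XDG_CONFIG_HOME/fastanime or ~/.config/fastanime (click lower-cases the name on POSIX)
#   macOS:     ~/Library/Application Support/FastAnime
#   Windows:   %LOCALAPPDATA%\FastAnime (roaming=False avoids the Roaming folder)
print(app_dir)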

View File

@@ -15,6 +15,7 @@ from .queries_graphql import (
delete_list_entry_query, delete_list_entry_query,
get_logged_in_user_query, get_logged_in_user_query,
get_medialist_item_query, get_medialist_item_query,
get_user_info,
media_list_mutation, media_list_mutation,
media_list_query, media_list_query,
most_favourite_query, most_favourite_query,
@@ -34,8 +35,9 @@ if TYPE_CHECKING:
AnilistMediaLists, AnilistMediaLists,
AnilistMediaListStatus, AnilistMediaListStatus,
AnilistNotifications, AnilistNotifications,
AnilistUser, AnilistUser_,
AnilistUserData, AnilistUserData,
AnilistViewerData,
) )
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
ANILIST_ENDPOINT = "https://graphql.anilist.co" ANILIST_ENDPOINT = "https://graphql.anilist.co"
@@ -77,7 +79,7 @@ class AniListApi:
return return
if not success or not user: if not success or not user:
return return
user_info: AnilistUser = user["data"]["Viewer"] user_info: "AnilistUser_" = user["data"]["Viewer"]
self.user_id = user_info["id"] self.user_id = user_info["id"]
return user_info return user_info
@@ -91,7 +93,7 @@ class AniListApi:
""" """
return self._make_authenticated_request(notification_query) return self._make_authenticated_request(notification_query)
def update_login_info(self, user: "AnilistUser", token: str): def update_login_info(self, user: "AnilistUser_", token: str):
"""method used to login a user enabling authenticated requests """method used to login a user enabling authenticated requests
Args: Args:
@@ -103,7 +105,18 @@ class AniListApi:
self.session.headers.update(self.headers) self.session.headers.update(self.headers)
self.user_id = user["id"] self.user_id = user["id"]
def get_logged_in_user(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]: def get_user_info(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
"""get the details of the user who is currently logged in
Returns:
an anilist user
"""
return self._make_authenticated_request(get_user_info, {"userId": self.user_id})
def get_logged_in_user(
self,
) -> tuple[bool, "AnilistViewerData"] | tuple[bool, None]:
"""get the details of the user who is currently logged in """get the details of the user who is currently logged in
Returns: Returns:
@@ -309,9 +322,14 @@ class AniListApi:
status_not_in: list[str] | None = None, status_not_in: list[str] | None = None,
endDate_greater: int | None = None, endDate_greater: int | None = None,
endDate_lesser: int | None = None, endDate_lesser: int | None = None,
start_greater: int | None = None, startDate_greater: int | None = None,
start_lesser: int | None = None, startDate_lesser: int | None = None,
startDate: str | None = None,
seasonYear: str | None = None,
page: int | None = None, page: int | None = None,
season: str | None = None,
format_in: list[str] | None = None,
on_list: bool | None = None,
type="ANIME", type="ANIME",
**kwargs, **kwargs,
): ):
@@ -320,7 +338,7 @@ class AniListApi:
""" """
variables = {} variables = {}
for key, val in list(locals().items())[1:]: for key, val in list(locals().items())[1:]:
if val is not None and key not in ["variables"]: if (val or val is False) and key not in ["variables"]:
variables[key] = val variables[key] = val
search_results = self.get_data(search_query, variables=variables) search_results = self.get_data(search_query, variables=variables)
return search_results return search_results
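
The changed filter above keeps explicit False values (needed for on_list=False) while still dropping None and other empty values. A small standalone illustration of the difference, with hypothetical values not tied to the real call:

def build_variables(**kwargs):
    # old behaviour: `if val is not None` kept False but also kept "" and []
    # new behaviour in the diff: `if val or val is False` keeps False and any
    # truthy value, drops None/""/[] -- note a legitimate 0 would also be dropped
    return {key: val for key, val in kwargs.items() if val or val is False}

print(build_variables(on_list=False, season=None, page=1, genre_in=[]))
# -> {'on_list': False, 'page': 1}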

View File

@@ -3,7 +3,6 @@ This module contains all the preset queries for the sake of neatness and convini
Mostly for internal usage Mostly for internal usage
""" """
# TODO: Format the queries
mark_as_read_mutation = """ mark_as_read_mutation = """
mutation{ mutation{
UpdateUser{ UpdateUser{
@@ -17,7 +16,6 @@ query($id:Int){
pageInfo{ pageInfo{
total total
} }
reviews(mediaId:$id){ reviews(mediaId:$id){
summary summary
user{ user{
@@ -35,50 +33,48 @@ query($id:Int){
""" """
notification_query = """ notification_query = """
query{ query {
Page(perPage:5){ Page(perPage: 5) {
pageInfo { pageInfo {
total total
}
notifications(resetNotificationCount:true,type:AIRING) {
... on AiringNotification {
id
type
episode
contexts
createdAt
media {
id
idMal
title {
romaji
english
}
coverImage{
medium
}
}
}
}
} }
notifications(resetNotificationCount: true, type: AIRING) {
... on AiringNotification {
id
type
episode
contexts
createdAt
media {
id
idMal
title {
romaji
english
}
coverImage {
medium
}
}
}
}
}
} }
""" """
get_medialist_item_query = """ get_medialist_item_query = """
query($mediaId:Int){ query ($mediaId: Int) {
MediaList(mediaId:$mediaId){ MediaList(mediaId: $mediaId) {
id id
} }
} }
""" """
delete_list_entry_query = """ delete_list_entry_query = """
mutation($id:Int){ mutation ($id: Int) {
DeleteMediaListEntry(id:$id){ DeleteMediaListEntry(id: $id) {
deleted deleted
}
}
} }
""" """
@@ -97,9 +93,85 @@ query{
} }
""" """
get_user_info = """
query ($userId: Int) {
User(id: $userId) {
name
about
avatar {
large
medium
}
bannerImage
statistics {
anime {
count
minutesWatched
episodesWatched
genres {
count
meanScore
genre
}
tags {
tag {
id
}
count
meanScore
}
}
manga {
count
meanScore
chaptersRead
volumesRead
tags {
count
meanScore
}
genres {
count
meanScore
}
}
}
favourites {
anime {
nodes {
title {
romaji
english
}
}
}
manga {
nodes {
title {
romaji
english
}
}
}
}
}
}
"""
media_list_mutation = """ media_list_mutation = """
mutation($mediaId:Int,$scoreRaw:Int,$repeat:Int,$progress:Int,$status:MediaListStatus){ mutation (
SaveMediaListEntry(mediaId:$mediaId,scoreRaw:$scoreRaw,progress:$progress,repeat:$repeat,status:$status){ $mediaId: Int
$scoreRaw: Int
$repeat: Int
$progress: Int
$status: MediaListStatus
) {
SaveMediaListEntry(
mediaId: $mediaId
scoreRaw: $scoreRaw
progress: $progress
repeat: $repeat
status: $status
) {
id id
status status
mediaId mediaId
@@ -116,21 +188,19 @@ mutation($mediaId:Int,$scoreRaw:Int,$repeat:Int,$progress:Int,$status:MediaListS
month month
day day
} }
} }
} }
""" """
media_list_query = """ media_list_query = """
query ($userId: Int, $status: MediaListStatus,$type:MediaType) { query ($userId: Int, $status: MediaListStatus, $type: MediaType) {
Page { Page {
pageInfo { pageInfo {
currentPage currentPage
total total
} }
mediaList(userId: $userId, status: $status, type: $type) { mediaList(userId: $userId, status: $status, type: $type) {
mediaId mediaId
media { media {
id id
idMal idMal
@@ -147,6 +217,10 @@ query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
id id
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
episodes episodes
@@ -172,10 +246,10 @@ query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
} }
status status
description description
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
nextAiringEpisode { nextAiringEpisode {
timeUntilAiring timeUntilAiring
@@ -199,7 +273,6 @@ query ($userId: Int, $status: MediaListStatus,$type:MediaType) {
day day
} }
createdAt createdAt
} }
} }
} }
@@ -221,72 +294,83 @@ $popularity_greater:Int,\
$popularity_lesser:Int,\ $popularity_lesser:Int,\
$averageScore_greater:Int,\ $averageScore_greater:Int,\
$averageScore_lesser:Int,\ $averageScore_lesser:Int,\
$seasonYear:Int,\
$startDate_greater:FuzzyDateInt,\ $startDate_greater:FuzzyDateInt,\
$startDate_lesser:FuzzyDateInt,\ $startDate_lesser:FuzzyDateInt,\
$startDate:FuzzyDateInt,\
$endDate_greater:FuzzyDateInt,\ $endDate_greater:FuzzyDateInt,\
$endDate_lesser:FuzzyDateInt,\ $endDate_lesser:FuzzyDateInt,\
$format_in:[MediaFormat],\
$type:MediaType\ $type:MediaType\
$season:MediaSeason\
$on_list:Boolean\
" "
# FuzzyDateInt = (yyyymmdd)
# MediaStatus = (FINISHED,RELEASING,NOT_YET_RELEASED,CANCELLED,HIATUS)
search_query = ( search_query = (
""" """
query($query:String,%s){ query($query:String,%s){
Page(perPage:50,page:$page){ Page(perPage: 50, page: $page) {
pageInfo{ pageInfo {
total total
currentPage currentPage
hasNextPage hasNextPage
} }
media( media(
search:$query, search: $query
id_in:$id_in, id_in: $id_in
genre_in:$genre_in, genre_in: $genre_in
genre_not_in:$genre_not_in, genre_not_in: $genre_not_in
tag_in:$tag_in, tag_in: $tag_in
tag_not_in:$tag_not_in, tag_not_in: $tag_not_in
status_in:$status_in, status_in: $status_in
status:$status, status: $status
status_not_in:$status_not_in, startDate: $startDate
popularity_greater:$popularity_greater, status_not_in: $status_not_in
popularity_lesser:$popularity_lesser, popularity_greater: $popularity_greater
averageScore_greater:$averageScore_greater, popularity_lesser: $popularity_lesser
averageScore_lesser:$averageScore_lesser, averageScore_greater: $averageScore_greater
startDate_greater:$startDate_greater, averageScore_lesser: $averageScore_lesser
startDate_lesser:$startDate_lesser, startDate_greater: $startDate_greater
endDate_greater:$endDate_greater, startDate_lesser: $startDate_lesser
endDate_lesser:$endDate_lesser, endDate_greater: $endDate_greater
sort:$sort, endDate_lesser: $endDate_lesser
type:$type format_in: $format_in
) sort: $sort
{ season: $season
seasonYear: $seasonYear
type: $type
onList:$on_list
) {
id id
idMal idMal
title{ title {
romaji romaji
english english
} }
coverImage{ coverImage {
medium medium
large large
} }
trailer { trailer {
site site
id id
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
episodes episodes
genres genres
studios{ studios {
nodes{ nodes {
name name
isAnimationStudio isAnimationStudio
} }
@@ -319,17 +403,16 @@ query($query:String,%s){
) )
trending_query = """ trending_query = """
query($type:MediaType){ query ($type: MediaType) {
Page(perPage:15){ Page(perPage: 15) {
media(sort: TRENDING_DESC, type: $type, genre_not_in: ["hentai"]) {
media(sort:TRENDING_DESC,type:$type,genre_not_in:["hentai"]){
id id
idMal idMal
title{ title {
romaji romaji
english english
} }
coverImage{ coverImage {
medium medium
large large
} }
@@ -338,6 +421,10 @@ query($type:MediaType){
id id
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
genres genres
@@ -350,18 +437,18 @@ query($type:MediaType){
} }
} }
tags { tags {
name name
} }
startDate { startDate {
year year
month month
day day
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
endDate { endDate {
year year
month month
@@ -380,30 +467,37 @@ query($type:MediaType){
# mosts # mosts
most_favourite_query = """ most_favourite_query = """
query($type:MediaType){ query ($type: MediaType) {
Page(perPage:15){ Page(perPage: 15) {
media(sort:FAVOURITES_DESC,type:$type,genre_not_in:["hentai"]){ media(sort: FAVOURITES_DESC, type: $type, genre_not_in: ["hentai"]) {
id id
idMal idMal
title{ title {
romaji romaji
english english
} }
coverImage{ coverImage {
medium medium
large large
} }
trailer { trailer {
site site
id id
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
episodes episodes
@@ -416,7 +510,7 @@ query($type:MediaType){
} }
} }
tags { tags {
name name
} }
startDate { startDate {
year year
@@ -440,30 +534,33 @@ query($type:MediaType){
""" """
most_scored_query = """ most_scored_query = """
query($type:MediaType){ query ($type: MediaType) {
Page(perPage:15){ Page(perPage: 15) {
media(sort:SCORE_DESC,type:$type,genre_not_in:["hentai"]){ media(sort: SCORE_DESC, type: $type, genre_not_in: ["hentai"]) {
id id
idMal idMal
title{ title {
romaji romaji
english english
} }
coverImage{ coverImage {
medium medium
large large
} }
trailer { trailer {
site site
id id
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
episodes episodes
favourites favourites
averageScore averageScore
@@ -476,7 +573,7 @@ query($type:MediaType){
} }
} }
tags { tags {
name name
} }
startDate { startDate {
year year
@@ -500,35 +597,38 @@ query($type:MediaType){
""" """
most_popular_query = """ most_popular_query = """
query($type:MediaType){ query ($type: MediaType) {
Page(perPage:15){ Page(perPage: 15) {
media(sort:POPULARITY_DESC,type:$type,genre_not_in:["hentai"]){ media(sort: POPULARITY_DESC, type: $type, genre_not_in: ["hentai"]) {
id id
idMal idMal
title{ title {
romaji romaji
english english
} }
coverImage{ coverImage {
medium medium
large large
} }
trailer { trailer {
site site
id id
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
description description
episodes episodes
genres genres
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
studios { studios {
nodes { nodes {
name name
@@ -536,8 +636,8 @@ query($type:MediaType){
} }
} }
tags { tags {
name name
} }
startDate { startDate {
year year
month month
@@ -553,36 +653,47 @@ query($type:MediaType){
timeUntilAiring timeUntilAiring
airingAt airingAt
episode episode
} }
} }
} }
} }
""" """
most_recently_updated_query = """ most_recently_updated_query = """
query($type:MediaType){ query ($type: MediaType) {
Page(perPage:15){ Page(perPage: 15) {
media(sort:UPDATED_AT_DESC,type:$type,averageScore_greater:50,genre_not_in:["hentai"],status:RELEASING){ media(
sort: UPDATED_AT_DESC
type: $type
averageScore_greater: 50
genre_not_in: ["hentai"]
status: RELEASING
) {
id id
idMal idMal
title{ title {
romaji romaji
english english
} }
coverImage{ coverImage {
medium medium
large large
} }
trailer { trailer {
site site
id id
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
description description
@@ -595,7 +706,7 @@ query($type:MediaType){
} }
} }
tags { tags {
name name
} }
startDate { startDate {
year year
@@ -619,38 +730,41 @@ query($type:MediaType){
""" """
recommended_query = """ recommended_query = """
query($type:MediaType){ query ($type: MediaType) {
Page(perPage:15) { Page(perPage: 15) {
media( type: $type,genre_not_in:["hentai"]) { media(type: $type, genre_not_in: ["hentai"]) {
recommendations(sort:RATING_DESC){ recommendations(sort: RATING_DESC) {
nodes{ nodes {
media{ media {
id id
idMal idMal
title{ title {
english english
romaji romaji
native native
} }
coverImage{ coverImage {
medium medium
large large
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
description description
episodes episodes
trailer{ trailer {
site site
id id
} }
genres genres
averageScore averageScore
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
tags { tags {
name name
@@ -680,9 +794,9 @@ query($type:MediaType){
""" """
anime_characters_query = """ anime_characters_query = """
query($id:Int,$type:MediaType){ query ($id: Int, $type: MediaType) {
Page { Page {
media(id:$id, type: $type) { media(id: $id, type: $type) {
characters { characters {
nodes { nodes {
name { name {
@@ -715,13 +829,18 @@ query($id:Int,$type:MediaType){
anime_relations_query = """ anime_relations_query = """
query ($id: Int,$type:MediaType) { query ($id: Int, $type: MediaType) {
Page(perPage: 20) { Page(perPage: 20) {
media(id: $id, sort: POPULARITY_DESC, type: $type,genre_not_in:["hentai"]) { media(
id: $id
sort: POPULARITY_DESC
type: $type
genre_not_in: ["hentai"]
) {
relations { relations {
nodes { nodes {
id id
idMal idMal
title { title {
english english
romaji romaji
@@ -731,11 +850,11 @@ query ($id: Int,$type:MediaType) {
medium medium
large large
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
description description
episodes episodes
trailer { trailer {
@@ -745,26 +864,30 @@ query ($id: Int,$type:MediaType) {
genres genres
averageScore averageScore
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
tags { tags {
name name
} }
startDate { startDate {
year year
month month
day day
} }
endDate { endDate {
year year
month month
day day
} }
status status
nextAiringEpisode { nextAiringEpisode {
timeUntilAiring timeUntilAiring
airingAt airingAt
episode episode
} }
} }
} }
} }
@@ -790,7 +913,7 @@ query ($id: Int,$type:MediaType) {
""" """
upcoming_anime_query = """ upcoming_anime_query = """
query ($page: Int,$type:MediaType) { query ($page: Int, $type: MediaType) {
Page(page: $page) { Page(page: $page) {
pageInfo { pageInfo {
total total
@@ -798,9 +921,14 @@ query ($page: Int,$type:MediaType) {
currentPage currentPage
hasNextPage hasNextPage
} }
media(type: $type, status: NOT_YET_RELEASED,sort:POPULARITY_DESC,genre_not_in:["hentai"]) { media(
type: $type
status: NOT_YET_RELEASED
sort: POPULARITY_DESC
genre_not_in: ["hentai"]
) {
id id
idMal idMal
title { title {
romaji romaji
english english
@@ -813,12 +941,16 @@ query ($page: Int,$type:MediaType) {
site site
id id
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
averageScore averageScore
genres genres
@@ -855,20 +987,20 @@ query ($page: Int,$type:MediaType) {
""" """
anime_query = """ anime_query = """
query($id:Int){ query ($id: Int) {
Page{ Page {
media(id:$id) { media(id: $id) {
id id
idMal idMal
title { title {
romaji romaji
english english
} }
mediaListEntry{ mediaListEntry {
status status
id id
progress progress
} }
nextAiringEpisode { nextAiringEpisode {
timeUntilAiring timeUntilAiring
airingAt airingAt
@@ -882,7 +1014,6 @@ query($id:Int){
node { node {
name { name {
full full
} }
gender gender
dateOfBirth { dateOfBirth {
@@ -935,6 +1066,11 @@ query($id:Int){
countryOfOrigin countryOfOrigin
averageScore averageScore
popularity popularity
streamingEpisodes {
title
thumbnail
}
favourites favourites
source source
hashtag hashtag
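
These queries are plain strings sent to the AniList GraphQL endpoint named earlier in the diff (https://graphql.anilist.co). A minimal sketch of exercising one of them on its own (assuming requests; unauthenticated, so it only reaches public data, and the fields mirror what the queries above request):

import requests

ANILIST_ENDPOINT = "https://graphql.anilist.co"

query = """
query ($id: Int) {
  Media(id: $id, type: ANIME) {
    title { romaji english }
    streamingEpisodes { title thumbnail }
  }
}
"""

response = requests.post(
    ANILIST_ENDPOINT,
    json={"query": query, "variables": {"id": 21}},  # 21 = One Piece on AniList
    timeout=10,
)
response.raise_for_status()
print(response.json()["data"]["Media"]["title"])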

View File

@@ -19,7 +19,7 @@ class AnilistImage(TypedDict):
large: str large: str
class AnilistUser(TypedDict): class AnilistUser_(TypedDict):
id: int id: int
name: str name: str
bannerImage: str | None bannerImage: str | None
@@ -28,11 +28,26 @@ class AnilistUser(TypedDict):
class AnilistViewer(TypedDict): class AnilistViewer(TypedDict):
Viewer: AnilistUser Viewer: AnilistUser_
class AnilistViewerData(TypedDict):
data: AnilistViewer
class AnilistUser(TypedDict):
name: str
about: str | None
avatar: AnilistImage
bannerImage: str | None
class AnilistUserInfo(TypedDict):
User: AnilistUser
class AnilistUserData(TypedDict): class AnilistUserData(TypedDict):
data: AnilistViewer data: AnilistUserInfo
class AnilistMediaTrailer(TypedDict): class AnilistMediaTrailer(TypedDict):
@@ -69,7 +84,7 @@ class AnilistMediaNextAiringEpisode(TypedDict):
class AnilistReview(TypedDict): class AnilistReview(TypedDict):
summary: str summary: str
user: AnilistUser user: AnilistUser_
class AnilistReviewNodes(TypedDict): class AnilistReviewNodes(TypedDict):
@@ -114,16 +129,17 @@ class AnilistCharactersEdges(TypedDict):
edges: list[AnilistCharactersEdge] edges: list[AnilistCharactersEdge]
class AnilistMediaList_(TypedDict):
id: int
progress: int
AnilistMediaListStatus = Literal[ AnilistMediaListStatus = Literal[
"CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING" "CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"
] ]
class AnilistMediaList_(TypedDict):
id: int
progress: int
status: AnilistMediaListStatus
class AnilistMediaListProperties(TypedDict): class AnilistMediaListProperties(TypedDict):
status: AnilistMediaListStatus status: AnilistMediaListStatus
score: float score: float
@@ -136,6 +152,11 @@ class AnilistMediaListProperties(TypedDict):
hiddenFromStatusLists: bool hiddenFromStatusLists: bool
class StreamingEpisode(TypedDict):
title: str
thumbnail: str
class AnilistBaseMediaDataSchema(TypedDict): class AnilistBaseMediaDataSchema(TypedDict):
""" """
This a convenience class is used to type the received Anilist data to enhance dev experience This a convenience class is used to type the received Anilist data to enhance dev experience
@@ -159,6 +180,8 @@ class AnilistBaseMediaDataSchema(TypedDict):
status: str status: str
nextAiringEpisode: AnilistMediaNextAiringEpisode nextAiringEpisode: AnilistMediaNextAiringEpisode
season: str season: str
streamingEpisodes: list[StreamingEpisode]
chapters: int
seasonYear: int seasonYear: int
duration: int duration: int
synonyms: list[str] synonyms: list[str]
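
The new StreamingEpisode and AnilistUser_ shapes are TypedDicts, so they only affect static checking. A tiny usage sketch (the dict literal is made up for illustration):

from typing import TypedDict

class StreamingEpisode(TypedDict):
    title: str
    thumbnail: str

episode: StreamingEpisode = {
    "title": "Episode 1",
    "thumbnail": "https://example.com/thumb.jpg",
}
# pyright/mypy flag missing or misspelled keys; at runtime this is still a plain dict.
print(episode["title"])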

View File

@@ -1,10 +1,10 @@
from .allanime import SERVERS_AVAILABLE as ALLANIME_SERVERS from .allanime.constants import SERVERS_AVAILABLE as ALLANIME_SERVERS
from .animepahe import SERVERS_AVAILABLE as ANIMEPAHESERVERS from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHESERVERS
from .aniwatch import SERVERS_AVAILABLE as ANIWATCHSERVERS from .hianime.constants import SERVERS_AVAILABLE as ANIWATCHSERVERS
anime_sources = { anime_sources = {
"allanime": "api.AllAnimeAPI", "allanime": "api.AllAnimeAPI",
"animepahe": "api.AnimePaheApi", "animepahe": "api.AnimePaheApi",
"aniwatch": "api.AniWatchApi", "hianime": "api.HiAnimeApi",
} }
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHESERVERS, *ANIWATCHSERVERS] SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHESERVERS, *ANIWATCHSERVERS]
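
anime_sources maps a provider name to a dotted path under that provider's package; the loader itself isn't part of this diff, so the following is only a plausible sketch of how such a string could be resolved lazily (importlib is standard library; the fastanime.libs.anime_provider package path is an assumption):

import importlib

anime_sources = {
    "allanime": "api.AllAnimeAPI",
    "animepahe": "api.AnimePaheApi",
    "hianime": "api.HiAnimeApi",
}

def load_provider(name: str, package: str = "fastanime.libs.anime_provider"):
    # "api.AllAnimeAPI" -> import <package>.allanime.api and pull AllAnimeAPI off it
    module_path, _, class_name = anime_sources[name].rpartition(".")
    module = importlib.import_module(f"{package}.{name}.{module_path}")
    return getattr(module, class_name)

# AllAnimeAPI = load_provider("allanime")  # only works if the assumed package path is right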

View File

@@ -1 +0,0 @@
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]

View File

@@ -7,23 +7,14 @@ import json
import logging import logging
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from requests.exceptions import Timeout
from ...anime_provider.base_provider import AnimeProvider from ...anime_provider.base_provider import AnimeProvider
from ..decorators import debug_provider
from ..utils import give_random_quality, one_digit_symmetric_xor from ..utils import give_random_quality, one_digit_symmetric_xor
from .constants import ( from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
ALLANIME_API_ENDPOINT,
ALLANIME_BASE,
ALLANIME_REFERER,
USER_AGENT,
)
from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL
if TYPE_CHECKING: if TYPE_CHECKING:
from typing import Iterator from .types import AllAnimeEpisode
from ....libs.anime_provider.allanime.types import AllAnimeEpisode
from ....libs.anime_provider.types import Anime, Server
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -36,6 +27,9 @@ class AllAnimeAPI(AnimeProvider):
""" """
api_endpoint = ALLANIME_API_ENDPOINT api_endpoint = ALLANIME_API_ENDPOINT
HEADERS = {
"Referer": ALLANIME_REFERER,
}
def _fetch_gql(self, query: str, variables: dict): def _fetch_gql(self, query: str, variables: dict):
"""main abstraction over all requests to the allanime api """main abstraction over all requests to the allanime api
@@ -47,30 +41,21 @@ class AllAnimeAPI(AnimeProvider):
Returns: Returns:
[TODO:return] [TODO:return]
""" """
try: response = self.session.get(
response = self.session.get( self.api_endpoint,
self.api_endpoint, params={
params={ "variables": json.dumps(variables),
"variables": json.dumps(variables), "query": query,
"query": query, },
}, timeout=10,
headers={"Referer": ALLANIME_REFERER, "User-Agent": USER_AGENT}, )
timeout=10, if response.ok:
) return response.json()["data"]
if response.status_code == 200: else:
return response.json()["data"] logger.error("[ALLANIME-ERROR]: ", response.text)
else:
logger.error("allanime(ERROR): ", response.text)
return {}
except Timeout:
logger.error(
"allanime(Error):Timeout exceeded this could mean allanime is down or you have lost internet connection"
)
return {}
except Exception as e:
logger.error(f"allanime:Error: {e}")
return {} return {}
@debug_provider("ALLANIME")
def search_for_anime( def search_for_anime(
self, self,
user_query: str, user_query: str,
@@ -103,29 +88,25 @@ class AllAnimeAPI(AnimeProvider):
"translationtype": translationtype, "translationtype": translationtype,
"countryorigin": countryorigin, "countryorigin": countryorigin,
} }
try: search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables) page_info = search_results["shows"]["pageInfo"]
page_info = search_results["shows"]["pageInfo"] results = []
results = [] for result in search_results["shows"]["edges"]:
for result in search_results["shows"]["edges"]: normalized_result = {
normalized_result = { "id": result["_id"],
"id": result["_id"], "title": result["name"],
"title": result["name"], "type": result["__typename"],
"type": result["__typename"], "availableEpisodes": result["availableEpisodes"],
"availableEpisodes": result["availableEpisodes"],
}
results.append(normalized_result)
normalized_search_results = {
"pageInfo": page_info,
"results": results,
} }
return normalized_search_results results.append(normalized_result)
except Exception as e: normalized_search_results = {
logger.error(f"FA(AllAnime): {e}") "pageInfo": page_info,
return {} "results": results,
}
return normalized_search_results
@debug_provider("ALLANIME")
def get_anime(self, allanime_show_id: str): def get_anime(self, allanime_show_id: str):
"""get an anime details given its id """get an anime details given its id
@@ -136,23 +117,20 @@ class AllAnimeAPI(AnimeProvider):
[TODO:return] [TODO:return]
""" """
variables = {"showId": allanime_show_id} variables = {"showId": allanime_show_id}
try: anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables) id: str = anime["show"]["_id"]
id: str = anime["show"]["_id"] title: str = anime["show"]["name"]
title: str = anime["show"]["name"] availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"] type = anime.get("__typename")
type = anime.get("__typename") normalized_anime = {
normalized_anime = { "id": id,
"id": id, "title": title,
"title": title, "availableEpisodesDetail": availableEpisodesDetail,
"availableEpisodesDetail": availableEpisodesDetail, "type": type,
"type": type, }
} return normalized_anime
return normalized_anime
except Exception as e:
logger.error(f"AllAnime(get_anime): {e}")
return None
@debug_provider("ALLANIME")
def _get_anime_episode( def _get_anime_episode(
self, allanime_show_id: str, episode_string: str, translation_type: str = "sub" self, allanime_show_id: str, episode_string: str, translation_type: str = "sub"
) -> "AllAnimeEpisode | dict": ) -> "AllAnimeEpisode | dict":
@@ -171,16 +149,13 @@ class AllAnimeAPI(AnimeProvider):
"translationType": translation_type, "translationType": translation_type,
"episodeString": episode_string, "episodeString": episode_string,
} }
try: episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables) return episode["episode"]
return episode["episode"]
except Exception as e:
logger.error(f"FA(AllAnime): {e}")
return {}
@debug_provider("ALLANIME")
def get_episode_streams( def get_episode_streams(
self, anime: "Anime", episode_number: str, translation_type="sub" self, anime_id, anime_title, episode_number: str, translation_type="sub"
) -> "Iterator[Server] | None": ):
"""get the streams of an episode """get the streams of an episode
Args: Args:
@@ -191,7 +166,6 @@ class AllAnimeAPI(AnimeProvider):
Yields: Yields:
[TODO:description] [TODO:description]
""" """
anime_id = anime["id"]
allanime_episode = self._get_anime_episode( allanime_episode = self._get_anime_episode(
anime_id, episode_number, translation_type anime_id, episode_number, translation_type
) )
@@ -199,214 +173,117 @@ class AllAnimeAPI(AnimeProvider):
return [] return []
embeds = allanime_episode["sourceUrls"] embeds = allanime_episode["sourceUrls"]
try:
for embed in embeds:
try:
# filter the working streams no need to get all since the others are mostly hsl
# TODO: should i just get all the servers and handle the hsl??
if embed.get("sourceName", "") not in (
# priorities based on death note
"Sak", # 7
"S-mp4", # 7.9
"Luf-mp4", # 7.7
"Default", # 8.5
"Yt-mp4", # 7.9
"Kir", # NA
# "Vid-mp4" # 4
# "Ok", # 3.5
# "Ss-Hls", # 5.5
# "Mp4", # 4
):
continue
url = embed.get("sourceUrl")
#
if not url:
continue
if url.startswith("--"):
url = url[2:]
url = one_digit_symmetric_xor(56, url)
if "tools.fast4speed.rsvp" in url: @debug_provider("ALLANIME")
yield { def _get_server(embed):
"server": "Yt", # filter the working streams no need to get all since the others are mostly hsl
"episode_title": f'{anime["title"]}; Episode {episode_number}', # TODO: should i just get all the servers and handle the hsl??
"headers": {"Referer": f"https://{ALLANIME_BASE}/"}, if embed.get("sourceName", "") not in (
# priorities based on death note
"Sak", # 7
"S-mp4", # 7.9
"Luf-mp4", # 7.7
"Default", # 8.5
"Yt-mp4", # 7.9
"Kir", # NA
# "Vid-mp4" # 4
# "Ok", # 3.5
# "Ss-Hls", # 5.5
# "Mp4", # 4
):
return
url = embed.get("sourceUrl")
#
if not url:
return
if url.startswith("--"):
url = url[2:]
url = one_digit_symmetric_xor(56, url)
if "tools.fast4speed.rsvp" in url:
return {
"server": "Yt",
"episode_title": f"{anime_title}; Episode {episode_number}",
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
"subtitles": [],
"links": [
{
"link": url,
"quality": "1080",
}
],
}
# get the stream url for an episode of the defined source names
embed_url = f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}"
resp = self.session.get(
embed_url,
timeout=10,
)
if resp.ok:
match embed["sourceName"]:
case "Luf-mp4":
logger.debug("allanime:Found streams from gogoanime")
return {
"server": "gogoanime",
"headers": {},
"subtitles": [], "subtitles": [],
"links": [ "episode_title": (
{ allanime_episode["notes"] or f"{anime_title}"
"link": url, )
"quality": "1080", + f"; Episode {episode_number}",
} "links": give_random_quality(resp.json()["links"]),
], }
} # pyright:ignore case "Kir":
continue logger.debug("allanime:Found streams from wetransfer")
return {
"server": "wetransfer",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "S-mp4":
logger.debug("allanime:Found streams from sharepoint")
return {
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Sak":
logger.debug("allanime:Found streams from dropbox")
return {
"server": "dropbox",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
case "Default":
logger.debug("allanime:Found streams from wixmp")
return {
"server": "wixmp",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f"{anime_title}"
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
}
# get the stream url for an episode of the defined source names for embed in embeds:
embed_url = ( if server := _get_server(embed):
f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}" yield server
)
resp = self.session.get(
embed_url,
headers={
"Referer": ALLANIME_REFERER,
"User-Agent": USER_AGENT,
},
timeout=10,
)
if resp.status_code == 200:
match embed["sourceName"]:
case "Luf-mp4":
logger.debug("allanime:Found streams from gogoanime")
yield {
"server": "gogoanime",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "Kir":
logger.debug("allanime:Found streams from wetransfer")
yield {
"server": "wetransfer",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "S-mp4":
logger.debug("allanime:Found streams from sharepoint")
yield {
"server": "sharepoint",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "Sak":
logger.debug("allanime:Found streams from dropbox")
yield {
"server": "dropbox",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
case "Default":
logger.debug("allanime:Found streams from wixmp")
yield {
"server": "wixmp",
"headers": {},
"subtitles": [],
"episode_title": (
allanime_episode["notes"] or f'{anime["title"]}'
)
+ f"; Episode {episode_number}",
"links": give_random_quality(resp.json()["links"]),
} # pyright:ignore
except Timeout:
logger.error(
"Timeout has been exceeded this could mean allanime is down or you have lost internet connection"
)
except Exception as e:
logger.error(f"FA(Allanime): {e}")
except Exception as e:
logger.error(f"FA(Allanime): {e}")
return []
if __name__ == "__main__":
anime_provider = AllAnimeAPI()
# lets see if it works :)
import subprocess
import sys
from InquirerPy import inquirer, validator # pyright:ignore
anime = input("Enter the anime name: ")
translation = input("Enter the translation type: ")
search_results = anime_provider.search_for_anime(
anime, translation_type=translation.strip()
)
if not search_results:
raise Exception("No results found")
search_results = search_results["results"]
options = {show["title"]: show for show in search_results}
anime = inquirer.fuzzy(
"Enter the anime title",
list(options.keys()),
validate=validator.EmptyInputValidator(),
).execute()
if anime is None:
print("No anime was selected")
sys.exit(1)
anime_result = options[anime]
anime_data = anime_provider.get_anime(anime_result["id"])
if not anime_data:
raise Exception("Anime not found")
availableEpisodesDetail = anime_data["availableEpisodesDetail"]
if not availableEpisodesDetail.get(translation.strip()):
raise Exception("No episodes found")
stream_link = True
while stream_link != "quit":
print("select episode")
episode = inquirer.fuzzy(
"Choose an episode",
availableEpisodesDetail[translation.strip()],
validate=validator.EmptyInputValidator(),
).execute()
if episode is None:
print("No episode was selected")
sys.exit(1)
if not anime_data:
print("Sth went wrong")
break
episode_streams_ = anime_provider.get_episode_streams(
anime_data, # pyright: ignore
episode,
translation.strip(),
)
if episode_streams_ is None:
raise Exception("Episode not found")
episode_streams = list(episode_streams_)
stream_links = []
for server in episode_streams:
stream_links.extend([link["link"] for link in server["links"]])
stream_links.append("back")
stream_link = inquirer.fuzzy(
"Choose a link to stream",
stream_links,
validate=validator.EmptyInputValidator(),
).execute()
if stream_link == "quit":
print("Have a nice day")
sys.exit()
if not stream_link:
raise Exception("No stream was selected")
title = episode_streams[0].get(
"episode_title", "%s: Episode %s" % (anime_data["title"], episode)
)
subprocess.run(["mpv", f"--title={title}", stream_link])
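
The per-method try/except blocks removed above are replaced by a @debug_provider decorator imported from ..decorators; its implementation is not part of this diff, so the following is only a plausible sketch of what such a decorator could do (name, signature, and behaviour are assumptions):

import functools
import logging

logger = logging.getLogger(__name__)

def debug_provider(provider_name: str):
    """Hypothetical sketch: log and swallow provider errors instead of
    wrapping every method body in try/except."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                logger.error(f"[{provider_name}-ERROR]: {e}")
                return None
        return wrapper
    return decorator

A real implementation would also need to special-case generator methods such as get_episode_streams, since a plain try/except wrapper only guards the initial call, not iteration over the yielded servers.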

View File

@@ -1,6 +1,4 @@
from yt_dlp.utils.networking import random_user_agent SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]
ALLANIME_BASE = "allanime.day" ALLANIME_BASE = "allanime.day"
ALLANIME_REFERER = "https://allanime.to/" ALLANIME_REFERER = "https://allanime.to/"
ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE) ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE)
USER_AGENT = random_user_agent()

View File

@@ -1,56 +1,56 @@
ALLANIME_SEARCH_GQL = """ ALLANIME_SEARCH_GQL = """
query( query (
$search: SearchInput $search: SearchInput
$limit: Int $limit: Int
$page: Int $page: Int
$translationType: VaildTranslationTypeEnumType $translationType: VaildTranslationTypeEnumType
$countryOrigin: VaildCountryOriginEnumType $countryOrigin: VaildCountryOriginEnumType
) { ) {
shows( shows(
search: $search search: $search
limit: $limit limit: $limit
page: $page page: $page
translationType: $translationType translationType: $translationType
countryOrigin: $countryOrigin countryOrigin: $countryOrigin
) { ) {
pageInfo { pageInfo {
total total
}
edges {
_id
name
availableEpisodes
__typename
}
} }
edges {
_id
name
availableEpisodes
__typename
}
}
} }
""" """
ALLANIME_EPISODES_GQL = """\ ALLANIME_EPISODES_GQL = """\
query ($showId: String!, $translationType: VaildTranslationTypeEnumType!, $episodeString: String!) { query (
episode( $showId: String!
showId: $showId $translationType: VaildTranslationTypeEnumType!
translationType: $translationType $episodeString: String!
episodeString: $episodeString ) {
) { episode(
showId: $showId
episodeString translationType: $translationType
sourceUrls episodeString: $episodeString
notes ) {
} episodeString
}""" sourceUrls
notes
}
}
"""
ALLANIME_SHOW_GQL = """ ALLANIME_SHOW_GQL = """
query ($showId: String!) { query ($showId: String!) {
show( show(_id: $showId) {
_id: $showId _id
) { name
availableEpisodesDetail
_id }
name
availableEpisodesDetail
}
} }
""" """

View File

@@ -1 +0,0 @@
SERVERS_AVAILABLE = ["kwik"]

View File

@@ -11,6 +11,7 @@ from yt_dlp.utils import (
) )
from ..base_provider import AnimeProvider from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from .constants import ( from .constants import (
ANIMEPAHE_BASE, ANIMEPAHE_BASE,
ANIMEPAHE_ENDPOINT, ANIMEPAHE_ENDPOINT,
@@ -20,7 +21,6 @@ from .constants import (
from .utils import process_animepahe_embed_page from .utils import process_animepahe_embed_page
if TYPE_CHECKING: if TYPE_CHECKING:
from ..types import Anime
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimeSearchResult from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimeSearchResult
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';") JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -28,205 +28,200 @@ logger = logging.getLogger(__name__)
KWIK_RE = re.compile(r"Player\|(.+?)'") KWIK_RE = re.compile(r"Player\|(.+?)'")
# TODO: hack this to completion
class AnimePaheApi(AnimeProvider): class AnimePaheApi(AnimeProvider):
search_page: "AnimePaheSearchPage" search_page: "AnimePaheSearchPage"
anime: "AnimePaheAnimePage" anime: "AnimePaheAnimePage"
HEADERS = REQUEST_HEADERS
@debug_provider("ANIMEPAHE")
def search_for_anime(self, user_query: str, *args): def search_for_anime(self, user_query: str, *args):
try: url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}" response = self.session.get(
headers = {**REQUEST_HEADERS} url,
response = self.session.get(url, headers=headers) )
if not response.status_code == 200: if not response.ok:
return return
data: "AnimePaheSearchPage" = response.json() data: "AnimePaheSearchPage" = response.json()
self.search_page = data self.search_page = data
return { return {
"pageInfo": { "pageInfo": {
"total": data["total"], "total": data["total"],
"perPage": data["per_page"], "perPage": data["per_page"],
"currentPage": data["current_page"], "currentPage": data["current_page"],
}, },
"results": [ "results": [
{ {
"availableEpisodes": list(range(result["episodes"])), "availableEpisodes": list(range(result["episodes"])),
"id": result["session"], "id": result["session"],
"title": result["title"], "title": result["title"],
"type": result["type"], "type": result["type"],
"year": result["year"], "year": result["year"],
"score": result["score"], "score": result["score"],
"status": result["status"], "status": result["status"],
"season": result["season"], "season": result["season"],
"poster": result["poster"], "poster": result["poster"],
} }
for result in data["data"] for result in data["data"]
], ],
} }
except Exception as e:
logger.error(f"AnimePahe(search): {e}")
return {}
@debug_provider("ANIMEPAHE")
def get_anime(self, session_id: str, *args): def get_anime(self, session_id: str, *args):
page = 1 page = 1
try: anime_result: "AnimeSearchResult" = [
anime_result: "AnimeSearchResult" = [ anime
anime for anime in self.search_page["data"]
for anime in self.search_page["data"] if anime["session"] == session_id
if anime["session"] == session_id ][0]
][0] data: "AnimePaheAnimePage" = {} # pyright:ignore
data: "AnimePaheAnimePage" = {} # pyright:ignore
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}" url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
def _pages_loader( def _pages_loader(
url,
page,
):
response = self.session.get(
url, url,
page,
):
response = self.session.get(url, headers=REQUEST_HEADERS)
if response.status_code == 200:
if not data:
data.update(response.json())
else:
if ep_data := response.json().get("data"):
data["data"].extend(ep_data)
if response.json()["next_page_url"]:
# TODO: Refine this
time.sleep(
random.choice(
[
0.25,
0.1,
0.5,
0.75,
1,
]
)
)
page += 1
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
_pages_loader(
url,
page,
)
_pages_loader(
url,
page,
) )
if response.ok:
if not data:
data.update(response.json())
else:
if ep_data := response.json().get("data"):
data["data"].extend(ep_data)
if response.json()["next_page_url"]:
# TODO: Refine this
time.sleep(
random.choice(
[
0.25,
0.1,
0.5,
0.75,
1,
]
)
)
page += 1
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
_pages_loader(
url,
page,
)
if not data: _pages_loader(
return {} url,
self.anime = data # pyright:ignore page,
episodes = list(map(str, [episode["episode"] for episode in data["data"]])) )
title = ""
return { if not data:
"id": session_id,
"title": anime_result["title"],
"year": anime_result["year"],
"season": anime_result["season"],
"poster": anime_result["poster"],
"score": anime_result["score"],
"availableEpisodesDetail": {
"sub": episodes,
"dub": episodes,
"raw": episodes,
},
"episodesInfo": [
{
"title": f"{episode['title'] or title};{episode['episode']}",
"episode": episode["episode"],
"id": episode["session"],
"translation_type": episode["audio"],
"duration": episode["duration"],
"poster": episode["snapshot"],
}
for episode in data["data"]
],
}
except Exception as e:
logger.error(f"AnimePahe(anime): {e}")
return {} return {}
self.anime = data # pyright:ignore
episodes = list(map(str, [episode["episode"] for episode in data["data"]]))
title = ""
return {
"id": session_id,
"title": anime_result["title"],
"year": anime_result["year"],
"season": anime_result["season"],
"poster": anime_result["poster"],
"score": anime_result["score"],
"availableEpisodesDetail": {
"sub": episodes,
"dub": episodes,
"raw": episodes,
},
"episodesInfo": [
{
"title": f"{episode['title'] or title};{episode['episode']}",
"episode": episode["episode"],
"id": episode["session"],
"translation_type": episode["audio"],
"duration": episode["duration"],
"poster": episode["snapshot"],
}
for episode in data["data"]
],
}
@debug_provider("ANIMEPAHE")
def get_episode_streams( def get_episode_streams(
self, anime: "Anime", episode_number: str, translation_type, *args self, anime_id, anime_title, episode_number: str, translation_type, *args
): ):
try: # extract episode details from memory
# extract episode details from memory episode = [
episode = [ episode
episode for episode in self.anime["data"]
for episode in self.anime["data"] if float(episode["episode"]) == float(episode_number)
if float(episode["episode"]) == float(episode_number) ]
]
if not episode: if not episode:
logger.error( logger.error(f"[ANIMEPAHE-ERROR]: episode {episode_number} doesn't exist")
f"AnimePahe(streams): episode {episode_number} doesn't exist" return []
episode = episode[0]
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url)
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
# convert the elements containing embed links to a neat dict containing:
# data-src
# data-audio
# data-resolution
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
# get the episode title
episode_title = (
f"{episode['title'] or anime_title}; Episode {episode['episode']}"
)
# get all links
streams = {
"server": "kwik",
"links": [],
"episode_title": episode_title,
"subtitles": [],
"headers": {},
}
for res_dict in res_dicts:
# get embed url
embed_url = res_dict["data-src"]
data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
# filter streams by translation_type
if data_audio != translation_type:
continue
if not embed_url:
logger.warn(
"[ANIMEPAHE-WARN]: embed url not found please report to the developers"
) )
return [] return []
episode = episode[0] # get embed page
embed_response = self.session.get(
anime_id = anime["id"] embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url, headers=REQUEST_HEADERS)
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
# convert the elements containing embed links to a neat dict containing:
# data-src
# data-audio
# data-resolution
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
# get the episode title
episode_title = (
f"{episode['title'] or anime['title']}; Episode {episode['episode']}"
) )
# get all links if not response.ok:
streams = { continue
"server": "kwik", embed_page = embed_response.text
"links": [],
"episode_title": episode_title,
"subtitles": [],
"headers": {},
}
for res_dict in res_dicts:
# get embed url
embed_url = res_dict["data-src"]
data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
# filter streams by translation_type
if data_audio != translation_type:
continue
if not embed_url: decoded_js = process_animepahe_embed_page(embed_page)
logger.warn( if not decoded_js:
"AnimePahe: embed url not found please report to the developers" logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
) return
return [] juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
# get embed page if not juicy_stream:
embed_response = self.session.get(embed_url, headers=SERVER_HEADERS) logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
embed_page = embed_response.text return
juicy_stream = juicy_stream.group(1)
decoded_js = process_animepahe_embed_page(embed_page) # add the link
if not decoded_js: streams["links"].append(
logger.error("Animepahe: failed to decode embed page") {
return "quality": res_dict["data-resolution"],
juicy_stream = JUICY_STREAM_REGEX.search(decoded_js) "translation_type": data_audio,
if not juicy_stream: "link": juicy_stream,
logger.error("Animepahe: failed to find juicy stream") }
return )
juicy_stream = juicy_stream.group(1) yield streams
# add the link
streams["links"].append(
{
"quality": res_dict["data-resolution"],
"translation_type": data_audio,
"link": juicy_stream,
}
)
yield streams
except Exception as e:
logger.error(f"Animepahe: {e}")

View File

@@ -1,18 +1,14 @@
from yt_dlp.utils.networking import random_user_agent
USER_AGENT = random_user_agent()
ANIMEPAHE = "animepahe.ru" ANIMEPAHE = "animepahe.ru"
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}" ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?" ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
SERVERS_AVAILABLE = ["kwik"]
REQUEST_HEADERS = { REQUEST_HEADERS = {
"Cookie": "__ddgid_=VvX0ebHrH2DsFZo4; __ddgmark_=3savRpSVFhvZcn5x; __ddg2_=buBJ3c4pNBYKFZNp; __ddg1_=rbVADKr9URtt55zoIGFa; SERVERID=janna; XSRF-TOKEN=eyJpdiI6IjV5bFNtd0phUHgvWGJxc25wL0VJSUE9PSIsInZhbHVlIjoicEJTZktlR2hxR2JZTWhnL0JzazlvZU5TQTR2bjBWZ2dDb0RwUXVUUWNSclhQWUhLRStYSmJmWmUxWkpiYkFRYU12RjFWejlSWHorME1wZG5qQ1U0TnFlNnBFR2laQjN1MjdyNjc5TjVPdXdJb2o5VkU1bEduRW9pRHNDTHh6Sy8iLCJtYWMiOiI0OTc0ZmNjY2UwMGJkOWY2MWNkM2NlMjk2ZGMyZGJmMWE0NTdjZTdkNGI2Y2IwNTIzZmFiZWU5ZTE2OTk0YmU4IiwidGFnIjoiIn0%3D; laravel_session=eyJpdiI6ImxvdlpqREFnTjdaeFJubUlXQWlJVWc9PSIsInZhbHVlIjoiQnE4R3VHdjZ4M1NDdEVWM1ZqMUxtNnVERnJCcmtCUHZKNzRPR2RFbzNFcStTL29xdnVTbWhsNVRBUXEybVZWNU1UYVlTazFqYlN5UjJva1k4czNGaXBTbkJJK01oTUd3VHRYVHBoc3dGUWxHYnFlS2NJVVNFbTFqMVBWdFpuVUgiLCJtYWMiOiI1NDdjZTVkYmNhNjUwZTMxZmRlZmVmMmRlMGNiYjAwYjlmYjFjY2U0MDc1YTQzZThiMTIxMjJlYTg1NTA4YjBmIiwidGFnIjoiIn0%3D; latest=5592 ", "Cookie": "__ddgid_=VvX0ebHrH2DsFZo4; __ddgmark_=3savRpSVFhvZcn5x; __ddg2_=buBJ3c4pNBYKFZNp; __ddg1_=rbVADKr9URtt55zoIGFa; SERVERID=janna; XSRF-TOKEN=eyJpdiI6IjV5bFNtd0phUHgvWGJxc25wL0VJSUE9PSIsInZhbHVlIjoicEJTZktlR2hxR2JZTWhnL0JzazlvZU5TQTR2bjBWZ2dDb0RwUXVUUWNSclhQWUhLRStYSmJmWmUxWkpiYkFRYU12RjFWejlSWHorME1wZG5qQ1U0TnFlNnBFR2laQjN1MjdyNjc5TjVPdXdJb2o5VkU1bEduRW9pRHNDTHh6Sy8iLCJtYWMiOiI0OTc0ZmNjY2UwMGJkOWY2MWNkM2NlMjk2ZGMyZGJmMWE0NTdjZTdkNGI2Y2IwNTIzZmFiZWU5ZTE2OTk0YmU4IiwidGFnIjoiIn0%3D; laravel_session=eyJpdiI6ImxvdlpqREFnTjdaeFJubUlXQWlJVWc9PSIsInZhbHVlIjoiQnE4R3VHdjZ4M1NDdEVWM1ZqMUxtNnVERnJCcmtCUHZKNzRPR2RFbzNFcStTL29xdnVTbWhsNVRBUXEybVZWNU1UYVlTazFqYlN5UjJva1k4czNGaXBTbkJJK01oTUd3VHRYVHBoc3dGUWxHYnFlS2NJVVNFbTFqMVBWdFpuVUgiLCJtYWMiOiI1NDdjZTVkYmNhNjUwZTMxZmRlZmVmMmRlMGNiYjAwYjlmYjFjY2U0MDc1YTQzZThiMTIxMjJlYTg1NTA4YjBmIiwidGFnIjoiIn0%3D; latest=5592 ",
"Host": ANIMEPAHE, "Host": ANIMEPAHE,
"User-Agent": USER_AGENT,
"Accept": "application , text/javascript, */*; q=0.01", "Accept": "application , text/javascript, */*; q=0.01",
"Accept-Encoding": "gzip, deflate, br, zstd", "Accept-Encoding": "Utf-8",
"Referer": ANIMEPAHE_BASE, "Referer": ANIMEPAHE_BASE,
"X-Requested-With": "XMLHttpRequest",
"DNT": "1", "DNT": "1",
"Connection": "keep-alive", "Connection": "keep-alive",
"Sec-Fetch-Dest": "empty", "Sec-Fetch-Dest": "empty",
@@ -21,19 +17,17 @@ REQUEST_HEADERS = {
"TE": "trailers", "TE": "trailers",
} }
SERVER_HEADERS = { SERVER_HEADERS = {
"User-Agent": USER_AGENT, "Host": "kwik.si",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8", "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5", "Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate, br, zstd", "Accept-Encoding": "Utf-8",
"DNT": "1", "DNT": "1",
"Alt-Used": "kwik.si",
"Connection": "keep-alive", "Connection": "keep-alive",
"Referer": ANIMEPAHE_BASE, "Referer": "https://animepahe.ru/",
"Cookie": "kwik_session=eyJpdiI6IlZ5UDd0c0lKTDB1NXlhTHZPeWxFc2c9PSIsInZhbHVlIjoieDJZbGhZUG1QZDNaeWtqR3lwWFNnREdhaHBxNVZRMWNDOHVucGpiMHRJOVdhVmpBc3lpTko1VExRMTFWcE1yUVJtVitoTWdOOU5ObTQ0Q0dHU0MzZU0yRUVvNmtWcUdmY3R4UWx4YklJTmpUL0ZodjhtVEpjWU96cEZoUUhUbVYiLCJtYWMiOiI2OGY2YThkOGU0MTgwOThmYzcyZThmNzFlZjlhMzQzMDgwNjlmMTc4NTIzMzc2YjE3YjNmMWQyNTk4NzczMmZiIiwidGFnIjoiIn0%3D; srv=s0; cf_clearance=QMoZtUpZrX0Mh4XJiFmFSSmoWndISPne5FcsGmKKvTQ-1723297585-1.0.1.1-6tVUnP.aef9XeNj0CnN.19D1el_r53t.lhqddX.J88gohH9UnsPWKeJ4yT0pTbcaGRbPuXTLOS.U72.wdy.gMg",
"Upgrade-Insecure-Requests": "1", "Upgrade-Insecure-Requests": "1",
"Sec-Fetch-Dest": "iframe", "Sec-Fetch-Dest": "iframe",
"Sec-Fetch-Mode": "navigate", "Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "cross-site", "Sec-Fetch-Site": "cross-site",
"Sec-Fetch-User": "?1",
"Priority": "u=4", "Priority": "u=4",
"TE": "trailers",
} }

View File

@@ -21,12 +21,12 @@ def animepahe_embed_decoder(
encoded_js_p: str, encoded_js_p: str,
base_a: int, base_a: int,
no_of_keys_c: int, no_of_keys_c: int,
key_values_k: list, values_to_replace_with_k: list,
decode_mapper_d: dict = {},
): ):
decode_mapper_d: dict = {}
for i in range(no_of_keys_c): for i in range(no_of_keys_c):
key = animepahe_key_creator(i, base_a) key = animepahe_key_creator(i, base_a)
val = key_values_k[i] or key val = values_to_replace_with_k[i] or key
decode_mapper_d[key] = val decode_mapper_d[key] = val
return re.sub( return re.sub(
r"\b\w+\b", lambda match: decode_mapper_d[match.group(0)], encoded_js_p r"\b\w+\b", lambda match: decode_mapper_d[match.group(0)], encoded_js_p
@@ -64,18 +64,12 @@ def process_animepahe_embed_page(embed_page: str):
if __name__ == "__main__": if __name__ == "__main__":
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))""" # Testing time
a = 62 filepath = input("Enter file name: ")
c = 102 if filepath:
k = "player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength||180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|cda74eaebce25a12f5e548f7c220bb5dc245700b0280bdb45ff98b2fe4803d2b|06|stream|org|nextcdn|files|eu|https".split( with open(filepath, "r") as file:
"|" data = file.read()
) else:
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))</script>"""
p = "h o='1D://1C-11.1B.1A.1z/1y/11/1x/1w/1v.1u';h d=s.r('d');h 0=B 1t(d,{'1s':{'1r':i},'1q':'16:9','D':1,'1p':5,'1o':{'1n':'1m'},1l:['7-1k','7','1j','1i-1h','1g','1f-1e','1d','D','1c','1b','1a','19','C','18'],'C':{'17':i}});8(!A.15()){d.14=o}x{j z={13:12,10:Z,Y:X,W:i,V:i};h c=B A(z);c.U(o);c.T(d);g.c=c}0.3(\"S\",6=>{g.R.Q.P(\"O\")});0.N=1;k v(b,n,m){8(b.y){b.y(n,m,M)}x 8(b.w){b.w('3'+n,m)}}j 4=k(l){g.L.K(l,'*')};v(g,'l',k(e){j a=e.a;8(a==='7')0.7();8(a==='f')0.f();8(a==='u')0.u()});0.3('t',6=>{4('t')});0.3('7',6=>{4('7')});0.3('f',6=>{4('f')});0.3('J',6=>{4(0.q);s.r('.I-H').G=F(0.q.E(2))});0.3('p',6=>{4('p')});" print(process_animepahe_embed_page(data))
result = animepahe_embed_decoder(
p,
a,
c,
k,
)
print(result) # Output: j player = B A();

View File

@@ -1,167 +0,0 @@
import logging
import re
from itertools import cycle
from yt_dlp.utils import (
extract_attributes,
get_element_html_by_class,
get_elements_html_by_class,
)
from ..base_provider import AnimeProvider
from ..common import fetch_anime_info_from_bal
from ..mini_anilist import search_for_anime_with_anilist
from ..utils import give_random_quality
from . import SERVERS_AVAILABLE
from .types import AniWatchStream
logger = logging.getLogger(__name__)
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
class AniWatchApi(AnimeProvider):
def search_for_anime(self, anime_title: str, *args):
try:
return search_for_anime_with_anilist(anime_title)
except Exception as e:
logger.error(e)
def get_anime(self, anilist_id, *args):
try:
bal_results = fetch_anime_info_from_bal(anilist_id)
if not bal_results:
return
ZORO = bal_results["Sites"]["Zoro"]
aniwatch_id = list(ZORO.keys())[0]
anime_url = f"https://hianime.to/ajax/v2/episode/list/{aniwatch_id}"
response = self.session.get(anime_url, timeout=10)
if response.status_code == 200:
response_json = response.json()
aniwatch_anime_page = response_json["html"]
episodes_info_container_html = get_element_html_by_class(
"ss-list", aniwatch_anime_page
)
episodes_info_html_list = get_elements_html_by_class(
"ep-item", episodes_info_container_html
)
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
episodes_info_dicts = [
extract_attributes(episode_dict)
for episode_dict in episodes_info_html_list
]
episodes = [episode["data-number"] for episode in episodes_info_dicts]
self.episodes_info = [
{
"id": episode["data-id"],
"title": f"{episode['title'] or ZORO['title']}; Episode {episode['data-number']}",
"episode": episode["data-number"],
}
for episode in episodes_info_dicts
]
return {
"id": aniwatch_id,
"availableEpisodesDetail": {
"dub": episodes,
"sub": episodes,
"raw": episodes,
},
"poster": ZORO[aniwatch_id]["image"],
"title": ZORO[aniwatch_id]["title"],
"episodes_info": self.episodes_info,
}
except Exception as e:
logger.error(e)
def get_episode_streams(self, anime, episode, translation_type, *args):
try:
episode_details = [
episode_details
for episode_details in self.episodes_info
if episode_details["episode"] == episode
]
if not episode_details:
return
episode_details = episode_details[0]
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
response = self.session.get(episode_url)
if response.status_code == 200:
response_json = response.json()
episode_page_html = response_json["html"]
servers_containers_html = get_elements_html_by_class(
"ps__-list", episode_page_html
)
if not servers_containers_html:
return
# sub servers
try:
servers_html_sub = get_elements_html_by_class(
"server-item", servers_containers_html[0]
)
except Exception:
logger.warn("AniWatch: sub not found")
servers_html_sub = None
# dub servers
try:
servers_html_dub = get_elements_html_by_class(
"server-item", servers_containers_html[1]
)
except Exception:
logger.warn("AniWatch: dub not found")
servers_html_dub = None
if translation_type == "dub":
servers_html = servers_html_dub
else:
servers_html = servers_html_sub
if not servers_html:
return
for server_name, server_html in zip(
cycle(SERVERS_AVAILABLE), servers_html
):
try:
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
servers_info = extract_attributes(server_html)
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
embed_response = self.session.get(embed_url)
if embed_response.status_code == 200:
embed_json = embed_response.json()
raw_link_to_streams = embed_json["link"]
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if not match:
continue
provider_domain = match.group(1)
embed_type = match.group(2)
episode_number = match.group(3)
source_id = match.group(4)
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
link_to_streams_response = self.session.get(link_to_streams)
if link_to_streams_response.status_code == 200:
juicy_streams_json: "AniWatchStream" = (
link_to_streams_response.json()
)
yield {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
}
for track in juicy_streams_json["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["file"], "type": link["type"]}
for link in juicy_streams_json["sources"]
]
),
}
except Exception as e:
logger.error(e)
except Exception as e:
logger.error(e)

View File

@@ -1,8 +1,13 @@
import requests import requests
from yt_dlp.utils.networking import random_user_agent
class AnimeProvider: class AnimeProvider:
session: requests.Session session: requests.Session
USER_AGENT = random_user_agent()
HEADERS = {}
def __init__(self) -> None: def __init__(self) -> None:
self.session = requests.session() self.session = requests.session()
self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
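A minimal sketch of how a concrete provider can build on this base class; the ExampleProvider name and its Referer value are illustrative assumptions, not part of this diff, and the base class is repeated only so the snippet runs on its own.

import requests
from yt_dlp.utils.networking import random_user_agent


class AnimeProvider:
    # mirrors the base class above: one shared session with a random User-Agent
    session: requests.Session
    USER_AGENT = random_user_agent()
    HEADERS = {}

    def __init__(self) -> None:
        self.session = requests.session()
        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})


class ExampleProvider(AnimeProvider):
    # hypothetical provider: its extra headers are merged into the session on init
    HEADERS = {"Referer": "https://example.com/"}


provider = ExampleProvider()
print(provider.session.headers["User-Agent"])
print(provider.session.headers["Referer"])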

View File

@@ -0,0 +1,39 @@
import functools
import logging
import os
logger = logging.getLogger(__name__)
def debug_provider(provider_name: str):
def _provider_function_decorator(provider_function):
@functools.wraps(provider_function)
def _provider_function_wrapper(*args, **kwargs):
if not os.environ.get("FASTANIME_DEBUG"):
try:
return provider_function(*args, **kwargs)
except Exception as e:
logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
else:
return provider_function(*args, **kwargs)
return _provider_function_wrapper
return _provider_function_decorator
def ensure_internet_connection(provider_function):
@functools.wraps(provider_function)
def _wrapper(*args, **kwargs):
import requests
try:
requests.get("https://google.com", timeout=5)
except requests.ConnectionError:
from sys import exit
print("You are not connected to the internet;Aborting...")
exit(1)
return provider_function(*args, **kwargs)
return _wrapper
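A short usage sketch of the debug decorator above; the provider name and the deliberately failing function are made up for illustration. With FASTANIME_DEBUG unset the error is logged and swallowed, with it set the same call raises.

import functools
import logging
import os

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


def debug_provider(provider_name: str):
    # same shape as the decorator above: log and swallow errors unless
    # the FASTANIME_DEBUG environment variable is set
    def _provider_function_decorator(provider_function):
        @functools.wraps(provider_function)
        def _provider_function_wrapper(*args, **kwargs):
            if not os.environ.get("FASTANIME_DEBUG"):
                try:
                    return provider_function(*args, **kwargs)
                except Exception as e:
                    logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
            else:
                return provider_function(*args, **kwargs)
        return _provider_function_wrapper
    return _provider_function_decorator


@debug_provider("EXAMPLE")  # hypothetical provider name
def search_for_anime(anime_title: str):
    raise RuntimeError("simulated provider failure")


print(search_for_anime("one piece"))  # error is logged, call returns None
# Run with FASTANIME_DEBUG=1 to let the same call raise instead.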

View File

@@ -0,0 +1,233 @@
import logging
import re
from html.parser import HTMLParser
from itertools import cycle
from urllib.parse import quote_plus
from yt_dlp.utils import (
clean_html,
extract_attributes,
get_element_by_class,
get_element_html_by_class,
get_elements_by_class,
get_elements_html_by_class,
)
from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from ..utils import give_random_quality
from .constants import SERVERS_AVAILABLE
from .types import AniWatchStream
logger = logging.getLogger(__name__)
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
class ParseAnchorAndImgTag(HTMLParser):
def __init__(self):
super().__init__()
self.img_tag = None
self.a_tag = None
def handle_starttag(self, tag, attrs):
if tag == "img":
self.img_tag = {attr[0]: attr[1] for attr in attrs}
if tag == "a":
self.a_tag = {attr[0]: attr[1] for attr in attrs}
class HiAnimeApi(AnimeProvider):
# HEADERS = {"Referer": "https://hianime.to/home"}
@debug_provider("ANIWATCH")
def search_for_anime(self, anime_title: str, *args):
query = quote_plus(anime_title)
url = f"https://hianime.to/search?keyword={query}"
response = self.session.get(url)
if not response.ok:
return
search_page = response.text
search_results_html_items = get_elements_by_class("flw-item", search_page)
results = []
for search_results_html_item in search_results_html_items:
film_poster_html = get_element_by_class(
"film-poster", search_results_html_item
)
if not film_poster_html:
continue
# get availableEpisodes
episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
episodes = clean_html(episodes_html) or 12
# get anime id and poster image url
parser = ParseAnchorAndImgTag()
parser.feed(film_poster_html)
image_data = parser.img_tag
anime_link_data = parser.a_tag
if not image_data or not anime_link_data:
continue
episodes = int(episodes)
# finally!!
image_link = image_data["data-src"]
anime_id = anime_link_data["data-id"]
title = anime_link_data["title"]
results.append(
{
"availableEpisodes": list(range(1, episodes)),
"id": anime_id,
"title": title,
"poster": image_link,
}
)
self.search_results = results
return {"pageInfo": {}, "results": results}
@debug_provider("ANIWATCH")
def get_anime(self, aniwatch_id, *args):
anime_result = {}
for anime in self.search_results:
if anime["id"] == aniwatch_id:
anime_result = anime
break
anime_url = f"https://hianime.to/ajax/v2/episode/list/{aniwatch_id}"
response = self.session.get(anime_url, timeout=10)
if response.ok:
response_json = response.json()
aniwatch_anime_page = response_json["html"]
episodes_info_container_html = get_element_html_by_class(
"ss-list", aniwatch_anime_page
)
episodes_info_html_list = get_elements_html_by_class(
"ep-item", episodes_info_container_html
)
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
episodes_info_dicts = [
extract_attributes(episode_dict)
for episode_dict in episodes_info_html_list
]
episodes = [episode["data-number"] for episode in episodes_info_dicts]
self.episodes_info = [
{
"id": episode["data-id"],
"title": (
(episode["title"] or "").replace(
f"Episode {episode['data-number']}", ""
)
or anime_result["title"]
)
+ f"; Episode {episode['data-number']}",
"episode": episode["data-number"],
}
for episode in episodes_info_dicts
]
return {
"id": aniwatch_id,
"availableEpisodesDetail": {
"dub": episodes,
"sub": episodes,
"raw": episodes,
},
"poster": anime_result["poster"],
"title": anime_result["title"],
"episodes_info": self.episodes_info,
}
@debug_provider("ANIWATCH")
def get_episode_streams(
self, anime_id, anime_title, episode, translation_type, *args
):
episode_details = [
episode_details
for episode_details in self.episodes_info
if episode_details["episode"] == episode
]
if not episode_details:
return
episode_details = episode_details[0]
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
response = self.session.get(episode_url)
if response.ok:
response_json = response.json()
episode_page_html = response_json["html"]
servers_containers_html = get_elements_html_by_class(
"ps__-list", episode_page_html
)
if not servers_containers_html:
return
# sub servers
try:
servers_html_sub = get_elements_html_by_class(
"server-item", servers_containers_html[0]
)
except Exception:
logger.warning("AniWatch: sub not found")
servers_html_sub = None
# dub servers
try:
servers_html_dub = get_elements_html_by_class(
"server-item", servers_containers_html[1]
)
except Exception:
logger.warning("AniWatch: dub not found")
servers_html_dub = None
if translation_type == "dub":
servers_html = servers_html_dub
else:
servers_html = servers_html_sub
if not servers_html:
return
@debug_provider("ANIWATCH")
def _get_server(server_name, server_html):
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
servers_info = extract_attributes(server_html)
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
embed_response = self.session.get(embed_url)
if embed_response.ok:
embed_json = embed_response.json()
raw_link_to_streams = embed_json["link"]
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if not match:
return
provider_domain = match.group(1)
embed_type = match.group(2)
episode_number = match.group(3)
source_id = match.group(4)
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
link_to_streams_response = self.session.get(link_to_streams)
if link_to_streams_response.ok:
juicy_streams_json: "AniWatchStream" = (
link_to_streams_response.json()
)
return {
"headers": {},
"subtitles": [
{
"url": track["file"],
"language": track["label"],
}
for track in juicy_streams_json["tracks"]
if track["kind"] == "captions"
],
"server": server_name,
"episode_title": episode_details["title"],
"links": give_random_quality(
[
{"link": link["file"], "type": link["type"]}
for link in juicy_streams_json["sources"]
]
),
}
for server_name, server_html in zip(cycle(SERVERS_AVAILABLE), servers_html):
if server := _get_server(server_name, server_html):
yield server
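For reference, a standalone sketch of what LINK_TO_STREAMS_REGEX does inside _get_server above: it rewrites an embed link into the getSources endpoint. The sample URL and domain below are invented for illustration only.

import re

LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")

# hypothetical embed link, shaped like the value read from embed_json["link"]
raw_link_to_streams = "https://example-embed.test/embed-2/e-1/abcdef123456?k=1&autoPlay=1"

match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
if match:
    provider_domain, embed_type, episode_number, source_id = match.groups()
    link_to_streams = (
        f"https://{provider_domain}/embed-{embed_type}"
        f"/ajax/e-{episode_number}/getSources?id={source_id}"
    )
    print(link_to_streams)
    # -> https://example-embed.test/embed-2/ajax/e-1/getSources?id=abcdef123456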

View File

@@ -1,153 +0,0 @@
import logging
from typing import TYPE_CHECKING
from requests import post
from thefuzz import fuzz
if TYPE_CHECKING:
from ..anilist.types import AnilistDataSchema
logger = logging.getLogger(__name__)
ANILIST_ENDPOINT = "https://graphql.anilist.co"
"""
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
def search_for_anime_with_anilist(anime_title: str):
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": {"query": anime_title}},
timeout=10,
)
if response.status_code == 200:
anilist_data: "AnilistDataSchema" = response.json()
return {
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
"results": [
{
"id": anime_result["id"],
"title": anime_result["title"]["romaji"]
or anime_result["title"]["english"],
"type": "anime",
"availableEpisodes": list(
range(
1,
(
anime_result["episodes"]
if not anime_result["status"] == "RELEASING"
and anime_result["episodes"]
else (
anime_result["nextAiringEpisode"]["episode"] - 1
if anime_result["nextAiringEpisode"]
else 0
)
),
)
),
}
for anime_result in anilist_data["data"]["Page"]["media"]
],
}
def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
"""the abstraction over all none authenticated requests and that returns data of a similar type
Args:
query: the anilist query
variables: the anilist api variables
Returns:
a boolean indicating success and none or an anilist object depending on success
"""
query = """
query($query:String){
Page(perPage:50){
pageInfo{
total
currentPage
hasNextPage
}
media(search:$query,type:ANIME){
id
idMal
title{
romaji
english
}
}
}
}
"""
try:
variables = {"query": anime_title}
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": variables},
timeout=10,
)
anilist_data: "AnilistDataSchema" = response.json()
if response.status_code == 200:
anime = max(
anilist_data["data"]["Page"]["media"],
key=lambda anime: max(
(
fuzz.ratio(anime, str(anime["title"]["romaji"])),
fuzz.ratio(anime_title, str(anime["title"]["english"])),
)
),
)
return {"id_anilist": anime["id"], "id_mal": anime["idMal"]}
except Exception as e:
logger.error(f"Something unexpected occured {e}")

View File

@@ -0,0 +1,15 @@
import logging
from requests import get
logger = logging.getLogger(__name__)
def fetch_anime_info_from_bal(anilist_id):
try:
url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{anilist_id}.json"
response = get(url, timeout=11)
if response.status_code == 200:
return response.json()
except Exception as e:
logger.error(e)
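A standalone sketch of consuming this helper the way the provider code in this diff does; the AniList id is an arbitrary example, the "Sites"/"Zoro" keys are the ones the providers above read, and the call needs network access.

import logging
from requests import get

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


def fetch_anime_info_from_bal(anilist_id):
    # same helper as above: pulls the bal-mackup/mal-backup mapping for an AniList id
    try:
        url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{anilist_id}.json"
        response = get(url, timeout=11)
        if response.status_code == 200:
            return response.json()
    except Exception as e:
        logger.error(e)


bal_results = fetch_anime_info_from_bal(21)  # 21 is an arbitrary AniList id
if bal_results:
    zoro = bal_results.get("Sites", {}).get("Zoro", {})
    for site_id, entry in zoro.items():
        print(site_id, entry.get("title"))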

View File

@@ -0,0 +1,286 @@
import logging
from typing import TYPE_CHECKING
from requests import post
from thefuzz import fuzz
if TYPE_CHECKING:
from ..anilist.types import AnilistDataSchema
logger = logging.getLogger(__name__)
ANILIST_ENDPOINT = "https://graphql.anilist.co"
"""
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
currentPage
hasNextPage
}
media(search: $query, type: ANIME) {
id
idMal
title {
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
def search_for_manga_with_anilist(manga_title: str):
query = """
query ($query: String) {
Page(perPage: 50) {
pageInfo {
currentPage
}
media(search: $query, type: MANGA,genre_not_in: ["hentai"]) {
id
idMal
title {
romaji
english
}
chapters
status
coverImage {
medium
large
}
}
}
}
"""
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": {"query": manga_title}},
timeout=10,
)
if response.status_code == 200:
anilist_data: "AnilistDataSchema" = response.json()
return {
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
"results": [
{
"id": anime_result["id"],
"poster": anime_result["coverImage"]["large"],
"title": (
anime_result["title"]["romaji"]
or anime_result["title"]["english"]
)
+ f" [Chapters: {anime_result['chapters']}]",
"type": "manga",
"availableChapters": list(
range(
1,
(
anime_result["chapters"]
if anime_result["chapters"]
else 0
),
)
),
}
for anime_result in anilist_data["data"]["Page"]["media"]
],
}
def search_for_anime_with_anilist(anime_title: str):
query = """
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
currentPage
hasNextPage
}
media(search: $query, type: ANIME,genre_not_in: ["hentai"]) {
id
idMal
title {
romaji
english
}
episodes
status
nextAiringEpisode {
timeUntilAiring
airingAt
episode
}
}
}
}
"""
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": {"query": anime_title}},
timeout=10,
)
if response.status_code == 200:
anilist_data: "AnilistDataSchema" = response.json()
return {
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
"results": [
{
"id": anime_result["id"],
"title": anime_result["title"]["romaji"]
or anime_result["title"]["english"],
"type": "anime",
"availableEpisodes": list(
range(
1,
(
anime_result["episodes"]
if not anime_result["status"] == "RELEASING"
and anime_result["episodes"]
else (
anime_result["nextAiringEpisode"]["episode"] - 1
if anime_result["nextAiringEpisode"]
else 0
)
),
)
),
}
for anime_result in anilist_data["data"]["Page"]["media"]
],
}
def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
"""the abstraction over all none authenticated requests and that returns data of a similar type
Args:
query: the anilist query
variables: the anilist api variables
Returns:
a boolean indicating success and none or an anilist object depending on success
"""
query = """
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
currentPage
hasNextPage
}
media(search: $query, type: ANIME) {
id
idMal
title {
romaji
english
}
}
}
}
"""
try:
variables = {"query": anime_title}
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": variables},
timeout=10,
)
anilist_data: "AnilistDataSchema" = response.json()
if response.status_code == 200:
anime = max(
anilist_data["data"]["Page"]["media"],
key=lambda anime: max(
(
fuzz.ratio(anime_title, str(anime["title"]["romaji"])),
fuzz.ratio(anime_title, str(anime["title"]["english"])),
)
),
)
return {"id_anilist": anime["id"], "id_mal": anime["idMal"]}
except Exception as e:
logger.error(f"Something unexpected occured {e}")
def get_basic_anime_info_by_title(anime_title: str):
"""the abstraction over all none authenticated requests and that returns data of a similar type
Args:
query: the anilist query
variables: the anilist api variables
Returns:
a boolean indicating success and none or an anilist object depending on success
"""
query = """
query ($query: String) {
Page(perPage: 50) {
pageInfo {
total
}
media(search: $query, type: ANIME,genre_not_in: ["hentai"]) {
id
idMal
title {
romaji
english
}
streamingEpisodes {
title
}
}
}
}
"""
from ...Utility.data import anime_normalizer
# normalize the title
anime_title = anime_normalizer.get(anime_title, anime_title)
try:
variables = {"query": anime_title}
response = post(
ANILIST_ENDPOINT,
json={"query": query, "variables": variables},
timeout=10,
)
anilist_data: "AnilistDataSchema" = response.json()
if response.status_code == 200:
anime = max(
anilist_data["data"]["Page"]["media"],
key=lambda anime: max(
(
fuzz.ratio(
anime_title.lower(), str(anime["title"]["romaji"]).lower()
),
fuzz.ratio(
anime_title.lower(), str(anime["title"]["english"]).lower()
),
)
),
)
return {
"idAnilist": anime["id"],
"idMal": anime["idMal"],
"title": {
"english": anime["title"]["english"],
"romaji": anime["title"]["romaji"],
},
"episodes": [
{"title": episode["title"]}
for episode in anime["streamingEpisodes"]
if episode
],
}
except Exception as e:
logger.error(f"Something unexpected occured {e}")

View File

@@ -5,7 +5,6 @@ import subprocess
import sys import sys
from typing import Callable, List from typing import Callable, List
# TODO: will probably scrap art not to useful
from click import clear from click import clear
from rich import print from rich import print
@@ -123,7 +122,9 @@ class FZF:
[self.FZF_EXECUTABLE, *commands], [self.FZF_EXECUTABLE, *commands],
input=fzf_input, input=fzf_input,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
universal_newlines=True,
text=True, text=True,
encoding="utf-8",
) )
if not result or result.returncode != 0 or not result.stdout: if not result or result.returncode != 0 or not result.stdout:
print("sth went wrong:confused:") print("sth went wrong:confused:")
@@ -162,7 +163,7 @@ class FZF:
HEADER, HEADER,
"--header-first", "--header-first",
"--prompt", "--prompt",
prompt.title(), f"{prompt.title()}: ",
] # pyright:ignore ] # pyright:ignore
if preview: if preview:
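A minimal standalone sketch of the subprocess call this hunk changes, assuming fzf is installed and on PATH; the prompt text and the input list are made up. The point of the change is passing an explicit encoding instead of relying on the platform default, plus the ": " suffix on the prompt.

import shutil
import subprocess

fzf_executable = shutil.which("fzf")  # assumes fzf is installed and on PATH
if fzf_executable:
    result = subprocess.run(
        [fzf_executable, "--header-first", "--prompt", "Select Anime: "],
        input="one piece\nnaruto\nbleach",
        stdout=subprocess.PIPE,
        text=True,
        encoding="utf-8",  # explicit encoding instead of the platform default
    )
    if result.returncode == 0 and result.stdout:
        print("you chose:", result.stdout.strip())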

View File

@@ -0,0 +1 @@
manga_sources = {"mangadex": "api.MangaDexApi"}

View File

@@ -0,0 +1,13 @@
import requests
from yt_dlp.utils.networking import random_user_agent
class MangaProvider:
session: requests.Session
USER_AGENT = random_user_agent()
HEADERS = {}
def __init__(self) -> None:
self.session = requests.session()
self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})

View File

@@ -0,0 +1,15 @@
import logging
from requests import get
logger = logging.getLogger(__name__)
def fetch_manga_info_from_bal(anilist_id):
try:
url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/manga/{anilist_id}.json"
response = get(url, timeout=11)
if response.ok:
return response.json()
except Exception as e:
logger.error(e)

View File

@@ -0,0 +1,51 @@
import logging
from ...common.mini_anilist import search_for_manga_with_anilist
from ..base_provider import MangaProvider
from ..common import fetch_manga_info_from_bal
logger = logging.getLogger(__name__)
class MangaDexApi(MangaProvider):
def search_for_manga(self, title: str, *args):
try:
search_results = search_for_manga_with_anilist(title)
return search_results
except Exception as e:
logger.error(f"[MANGADEX-ERROR]: {e}")
def get_manga(self, anilist_manga_id: str):
bal_data = fetch_manga_info_from_bal(anilist_manga_id)
if not bal_data:
return
manga_id, MangaDexManga = next(iter(bal_data["Sites"]["Mangadex"].items()))
return {
"id": manga_id,
"title": MangaDexManga["title"],
"poster": MangaDexManga["image"],
"availableChapters": [],
}
def get_chapter_thumbnails(self, manga_id, chapter):
chapter_info_url = f"https://api.mangadex.org/chapter?manga={manga_id}&translatedLanguage[]=en&chapter={chapter}&includeEmptyPages=0"
chapter_info_response = self.session.get(chapter_info_url)
if not chapter_info_response.ok:
return
chapter_info = next(iter(chapter_info_response.json()["data"]))
chapters_thumbnails_url = (
f"https://api.mangadex.org/at-home/server/{chapter_info['id']}"
)
chapter_thumbnails_response = self.session.get(chapters_thumbnails_url)
if not chapter_thumbnails_response.ok:
return
chapter_thumbnails_info = chapter_thumbnails_response.json()
base_url = chapter_thumbnails_info["baseUrl"]
hash = chapter_thumbnails_info["chapter"]["hash"]
return {
"thumbnails": [
f"{base_url}/data/{hash}/{chapter_thumbnail}"
for chapter_thumbnail in chapter_thumbnails_info["chapter"]["data"]
],
"title": chapter_info["attributes"]["title"],
}
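A standalone sketch of the two MangaDex API calls that get_chapter_thumbnails chains together, using the same endpoints as the method above; the manga id below is a placeholder, not a real value, so nothing is printed unless it is replaced.

import requests

session = requests.session()

manga_id = "<mangadex-manga-uuid>"  # placeholder, not a real id
chapter = "1"

# 1) resolve the chapter entry for the requested chapter number
chapter_info_url = (
    f"https://api.mangadex.org/chapter?manga={manga_id}"
    f"&translatedLanguage[]=en&chapter={chapter}&includeEmptyPages=0"
)
chapter_info_response = session.get(chapter_info_url)
if chapter_info_response.ok:
    chapters = chapter_info_response.json().get("data", [])
    if chapters:
        chapter_info = chapters[0]
        # 2) ask the at-home endpoint for the image server, hash and page file names
        at_home_url = f"https://api.mangadex.org/at-home/server/{chapter_info['id']}"
        at_home_response = session.get(at_home_url)
        if at_home_response.ok:
            at_home = at_home_response.json()
            base_url = at_home["baseUrl"]
            chapter_hash = at_home["chapter"]["hash"]
            thumbnails = [
                f"{base_url}/data/{chapter_hash}/{page}"
                for page in at_home["chapter"]["data"]
            ]
            print(len(thumbnails), "pages")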

View File

@@ -2,8 +2,6 @@ import subprocess
from shutil import which from shutil import which
from sys import exit from sys import exit
from plyer import notification
from fastanime import APP_NAME from fastanime import APP_NAME
from ...constants import ICON_PATH from ...constants import ICON_PATH
@@ -25,7 +23,7 @@ class RofiApi:
args = [self.ROFI_EXECUTABLE] args = [self.ROFI_EXECUTABLE]
if self.rofi_theme: if self.rofi_theme:
args.extend(["-no-config", "-theme", self.rofi_theme]) args.extend(["-no-config", "-theme", self.rofi_theme])
args.extend(["-p", prompt_text, "-i", "-show-icons", "-dmenu"]) args.extend(["-p", f"{prompt_text.title()}", "-i", "-show-icons", "-dmenu"])
result = subprocess.run( result = subprocess.run(
args, args,
input=rofi_input, input=rofi_input,
@@ -35,6 +33,13 @@ class RofiApi:
choice = result.stdout.strip() choice = result.stdout.strip()
if not choice: if not choice:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify( notification.notify(
app_name=APP_NAME, app_name=APP_NAME,
app_icon=ICON_PATH, app_icon=ICON_PATH,
@@ -64,6 +69,13 @@ class RofiApi:
choice = result.stdout.strip() choice = result.stdout.strip()
if not choice or choice not in options: if not choice or choice not in options:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify( notification.notify(
app_name=APP_NAME, app_name=APP_NAME,
app_icon=ICON_PATH, app_icon=ICON_PATH,
@@ -91,6 +103,13 @@ class RofiApi:
choice = result.stdout.strip() choice = result.stdout.strip()
if not choice: if not choice:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify( notification.notify(
app_name=APP_NAME, app_name=APP_NAME,
app_icon=ICON_PATH, app_icon=ICON_PATH,
@@ -120,6 +139,13 @@ class RofiApi:
user_input = result.stdout.strip() user_input = result.stdout.strip()
if not user_input: if not user_input:
try:
from plyer import notification
except ImportError:
print(
"Plyer is not installed; install it for desktop notifications to be enabled"
)
exit(1)
notification.notify( notification.notify(
app_name=APP_NAME, app_name=APP_NAME,
app_icon=ICON_PATH, app_icon=ICON_PATH,

poetry.lock (generated): 647 changed lines
View File

@@ -194,100 +194,100 @@ cffi = ">=1.0.0"
[[package]] [[package]]
name = "cachetools" name = "cachetools"
version = "5.4.0" version = "5.5.0"
description = "Extensible memoizing collections and decorators" description = "Extensible memoizing collections and decorators"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"},
{file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"},
] ]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2024.7.4" version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle." description = "Python package for providing Mozilla's CA Bundle."
optional = false optional = false
python-versions = ">=3.6" python-versions = ">=3.6"
files = [ files = [
{file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
{file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
] ]
[[package]] [[package]]
name = "cffi" name = "cffi"
version = "1.17.0" version = "1.17.1"
description = "Foreign Function Interface for Python calling C code." description = "Foreign Function Interface for Python calling C code."
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "cffi-1.17.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9338cc05451f1942d0d8203ec2c346c830f8e86469903d5126c1f0a13a2bcbb"}, {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
{file = "cffi-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0ce71725cacc9ebf839630772b07eeec220cbb5f03be1399e0457a1464f8e1a"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
{file = "cffi-1.17.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c815270206f983309915a6844fe994b2fa47e5d05c4c4cef267c3b30e34dbe42"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
{file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6bdcd415ba87846fd317bee0774e412e8792832e7805938987e4ede1d13046d"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
{file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a98748ed1a1df4ee1d6f927e151ed6c1a09d5ec21684de879c7ea6aa96f58f2"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
{file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a048d4f6630113e54bb4b77e315e1ba32a5a31512c31a273807d0027a7e69ab"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
{file = "cffi-1.17.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24aa705a5f5bd3a8bcfa4d123f03413de5d86e497435693b638cbffb7d5d8a1b"}, {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
{file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:856bf0924d24e7f93b8aee12a3a1095c34085600aa805693fb7f5d1962393206"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
{file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4304d4416ff032ed50ad6bb87416d802e67139e31c0bde4628f36a47a3164bfa"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
{file = "cffi-1.17.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:331ad15c39c9fe9186ceaf87203a9ecf5ae0ba2538c9e898e3a6967e8ad3db6f"}, {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
{file = "cffi-1.17.0-cp310-cp310-win32.whl", hash = "sha256:669b29a9eca6146465cc574659058ed949748f0809a2582d1f1a324eb91054dc"}, {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
{file = "cffi-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:48b389b1fd5144603d61d752afd7167dfd205973a43151ae5045b35793232aa2"}, {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
{file = "cffi-1.17.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c5d97162c196ce54af6700949ddf9409e9833ef1003b4741c2b39ef46f1d9720"}, {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
{file = "cffi-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ba5c243f4004c750836f81606a9fcb7841f8874ad8f3bf204ff5e56332b72b9"}, {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
{file = "cffi-1.17.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb9333f58fc3a2296fb1d54576138d4cf5d496a2cc118422bd77835e6ae0b9cb"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
{file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:435a22d00ec7d7ea533db494da8581b05977f9c37338c80bc86314bec2619424"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
{file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1df34588123fcc88c872f5acb6f74ae59e9d182a2707097f9e28275ec26a12d"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
{file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df8bb0010fdd0a743b7542589223a2816bdde4d94bb5ad67884348fa2c1c67e8"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
{file = "cffi-1.17.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b5b9712783415695663bd463990e2f00c6750562e6ad1d28e072a611c5f2a6"}, {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
{file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ffef8fd58a36fb5f1196919638f73dd3ae0db1a878982b27a9a5a176ede4ba91"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
{file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e67d26532bfd8b7f7c05d5a766d6f437b362c1bf203a3a5ce3593a645e870b8"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
{file = "cffi-1.17.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45f7cd36186db767d803b1473b3c659d57a23b5fa491ad83c6d40f2af58e4dbb"}, {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
{file = "cffi-1.17.0-cp311-cp311-win32.whl", hash = "sha256:a9015f5b8af1bb6837a3fcb0cdf3b874fe3385ff6274e8b7925d81ccaec3c5c9"}, {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
{file = "cffi-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:b50aaac7d05c2c26dfd50c3321199f019ba76bb650e346a6ef3616306eed67b0"}, {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
{file = "cffi-1.17.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aec510255ce690d240f7cb23d7114f6b351c733a74c279a84def763660a2c3bc"}, {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
{file = "cffi-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2770bb0d5e3cc0e31e7318db06efcbcdb7b31bcb1a70086d3177692a02256f59"}, {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
{file = "cffi-1.17.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db9a30ec064129d605d0f1aedc93e00894b9334ec74ba9c6bdd08147434b33eb"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
{file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47eef975d2b8b721775a0fa286f50eab535b9d56c70a6e62842134cf7841195"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
{file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3e0992f23bbb0be00a921eae5363329253c3b86287db27092461c887b791e5e"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
{file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6107e445faf057c118d5050560695e46d272e5301feffda3c41849641222a828"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
{file = "cffi-1.17.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb862356ee9391dc5a0b3cbc00f416b48c1b9a52d252d898e5b7696a5f9fe150"}, {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
{file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c1c13185b90bbd3f8b5963cd8ce7ad4ff441924c31e23c975cb150e27c2bf67a"}, {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
{file = "cffi-1.17.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:17c6d6d3260c7f2d94f657e6872591fe8733872a86ed1345bda872cfc8c74885"}, {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
{file = "cffi-1.17.0-cp312-cp312-win32.whl", hash = "sha256:c3b8bd3133cd50f6b637bb4322822c94c5ce4bf0d724ed5ae70afce62187c492"}, {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
{file = "cffi-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:dca802c8db0720ce1c49cce1149ff7b06e91ba15fa84b1d59144fef1a1bc7ac2"}, {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
{file = "cffi-1.17.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ce01337d23884b21c03869d2f68c5523d43174d4fc405490eb0091057943118"}, {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
{file = "cffi-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cab2eba3830bf4f6d91e2d6718e0e1c14a2f5ad1af68a89d24ace0c6b17cced7"}, {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
{file = "cffi-1.17.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14b9cbc8f7ac98a739558eb86fabc283d4d564dafed50216e7f7ee62d0d25377"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
{file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b00e7bcd71caa0282cbe3c90966f738e2db91e64092a877c3ff7f19a1628fdcb"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
{file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:41f4915e09218744d8bae14759f983e466ab69b178de38066f7579892ff2a555"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
{file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4760a68cab57bfaa628938e9c2971137e05ce48e762a9cb53b76c9b569f1204"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
{file = "cffi-1.17.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:011aff3524d578a9412c8b3cfaa50f2c0bd78e03eb7af7aa5e0df59b158efb2f"}, {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
{file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:a003ac9edc22d99ae1286b0875c460351f4e101f8c9d9d2576e78d7e048f64e0"}, {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
{file = "cffi-1.17.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ef9528915df81b8f4c7612b19b8628214c65c9b7f74db2e34a646a0a2a0da2d4"}, {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
{file = "cffi-1.17.0-cp313-cp313-win32.whl", hash = "sha256:70d2aa9fb00cf52034feac4b913181a6e10356019b18ef89bc7c12a283bf5f5a"}, {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
{file = "cffi-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:b7b6ea9e36d32582cda3465f54c4b454f62f23cb083ebc7a94e2ca6ef011c3a7"}, {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
{file = "cffi-1.17.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:964823b2fc77b55355999ade496c54dde161c621cb1f6eac61dc30ed1b63cd4c"}, {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
{file = "cffi-1.17.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:516a405f174fd3b88829eabfe4bb296ac602d6a0f68e0d64d5ac9456194a5b7e"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
{file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec6b307ce928e8e112a6bb9921a1cb00a0e14979bf28b98e084a4b8a742bd9b"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
{file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4094c7b464cf0a858e75cd14b03509e84789abf7b79f8537e6a72152109c76e"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
{file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2404f3de742f47cb62d023f0ba7c5a916c9c653d5b368cc966382ae4e57da401"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
{file = "cffi-1.17.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa9d43b02a0c681f0bfbc12d476d47b2b2b6a3f9287f11ee42989a268a1833c"}, {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
{file = "cffi-1.17.0-cp38-cp38-win32.whl", hash = "sha256:0bb15e7acf8ab35ca8b24b90af52c8b391690ef5c4aec3d31f38f0d37d2cc499"}, {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
{file = "cffi-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:93a7350f6706b31f457c1457d3a3259ff9071a66f312ae64dc024f049055f72c"}, {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
{file = "cffi-1.17.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1a2ddbac59dc3716bc79f27906c010406155031a1c801410f1bafff17ea304d2"}, {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
{file = "cffi-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6327b572f5770293fc062a7ec04160e89741e8552bf1c358d1a23eba68166759"}, {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
{file = "cffi-1.17.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbc183e7bef690c9abe5ea67b7b60fdbca81aa8da43468287dae7b5c046107d4"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
{file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bdc0f1f610d067c70aa3737ed06e2726fd9d6f7bfee4a351f4c40b6831f4e82"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
{file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d872186c1617d143969defeadac5a904e6e374183e07977eedef9c07c8953bf"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
{file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d46ee4764b88b91f16661a8befc6bfb24806d885e27436fdc292ed7e6f6d058"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
{file = "cffi-1.17.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f76a90c345796c01d85e6332e81cab6d70de83b829cf1d9762d0a3da59c7932"}, {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
{file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e60821d312f99d3e1569202518dddf10ae547e799d75aef3bca3a2d9e8ee693"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
{file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eb09b82377233b902d4c3fbeeb7ad731cdab579c6c6fda1f763cd779139e47c3"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
{file = "cffi-1.17.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:24658baf6224d8f280e827f0a50c46ad819ec8ba380a42448e24459daf809cf4"}, {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
{file = "cffi-1.17.0-cp39-cp39-win32.whl", hash = "sha256:0fdacad9e0d9fc23e519efd5ea24a70348305e8d7d85ecbb1a5fa66dc834e7fb"}, {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
{file = "cffi-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:7cbc78dc018596315d4e7841c8c3a7ae31cc4d638c9b627f87d52e8abaaf2d29"}, {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
{file = "cffi-1.17.0.tar.gz", hash = "sha256:f3157624b7558b914cb039fd1af735e5e8049a87c817cc215109ad1c8779df76"}, {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
] ]
[package.dependencies] [package.dependencies]
@@ -466,29 +466,29 @@ test = ["pytest (>=6)"]
[[package]] [[package]]
name = "filelock" name = "filelock"
version = "3.15.4" version = "3.16.1"
description = "A platform independent file lock." description = "A platform independent file lock."
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
{file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
] ]
[package.extras] [package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
typing = ["typing-extensions (>=4.8)"] typing = ["typing-extensions (>=4.12.2)"]
[[package]] [[package]]
name = "identify" name = "identify"
version = "2.6.0" version = "2.6.1"
description = "File identification library for Python" description = "File identification library for Python"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"}, {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"},
{file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"}, {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"},
] ]
[package.extras] [package.extras]
@@ -496,15 +496,18 @@ license = ["ukkonen"]
[[package]] [[package]]
name = "idna" name = "idna"
version = "3.7" version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)" description = "Internationalized Domain Names in Applications (IDNA)"
optional = false optional = false
python-versions = ">=3.5" python-versions = ">=3.6"
files = [ files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
] ]
[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
[[package]] [[package]]
name = "iniconfig" name = "iniconfig"
version = "2.0.0" version = "2.0.0"
@@ -520,7 +523,7 @@ files = [
name = "inquirerpy" name = "inquirerpy"
version = "0.3.4" version = "0.3.4"
description = "Python port of Inquirer.js (A collection of common interactive command-line user interfaces)" description = "Python port of Inquirer.js (A collection of common interactive command-line user interfaces)"
optional = false optional = true
python-versions = ">=3.7,<4.0" python-versions = ">=3.7,<4.0"
files = [ files = [
{file = "InquirerPy-0.3.4-py3-none-any.whl", hash = "sha256:c65fdfbac1fa00e3ee4fb10679f4d3ed7a012abf4833910e63c295827fe2a7d4"}, {file = "InquirerPy-0.3.4-py3-none-any.whl", hash = "sha256:c65fdfbac1fa00e3ee4fb10679f4d3ed7a012abf4833910e63c295827fe2a7d4"},
@@ -552,7 +555,7 @@ colors = ["colorama (>=0.4.6)"]
name = "markdown-it-py" name = "markdown-it-py"
version = "3.0.0" version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!" description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false optional = true
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
@@ -576,7 +579,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
name = "mdurl" name = "mdurl"
version = "0.1.2" version = "0.1.2"
description = "Markdown URL utilities" description = "Markdown URL utilities"
optional = false optional = true
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
@@ -587,7 +590,7 @@ files = [
name = "mpv" name = "mpv"
version = "1.0.7" version = "1.0.7"
description = "A python interface to the mpv media player" description = "A python interface to the mpv media player"
optional = false optional = true
python-versions = ">=3.9" python-versions = ">=3.9"
files = [ files = [
{file = "mpv-1.0.7-py3-none-any.whl", hash = "sha256:520fb134c18185b69c7fce4aa3514f14371028022d92eb193818e9fefb1e9fe8"}, {file = "mpv-1.0.7-py3-none-any.whl", hash = "sha256:520fb134c18185b69c7fce4aa3514f14371028022d92eb193818e9fefb1e9fe8"},
@@ -657,7 +660,7 @@ files = [
name = "pfzy" name = "pfzy"
version = "0.3.4" version = "0.3.4"
description = "Python port of the fzy fuzzy string matching algorithm" description = "Python port of the fzy fuzzy string matching algorithm"
optional = false optional = true
python-versions = ">=3.7,<4.0" python-versions = ">=3.7,<4.0"
files = [ files = [
{file = "pfzy-0.3.4-py3-none-any.whl", hash = "sha256:5f50d5b2b3207fa72e7ec0ef08372ef652685470974a107d0d4999fc5a903a96"}, {file = "pfzy-0.3.4-py3-none-any.whl", hash = "sha256:5f50d5b2b3207fa72e7ec0ef08372ef652685470974a107d0d4999fc5a903a96"},
@@ -669,19 +672,19 @@ docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst
[[package]]
name = "platformdirs"
-version = "4.2.2"
+version = "4.3.6"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
files = [
-{file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
+{file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
-{file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
+{file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]

[package.extras]
-docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
-type = ["mypy (>=1.8)"]
+type = ["mypy (>=1.11.2)"]

[[package]]
name = "pluggy"
@@ -702,7 +705,7 @@ testing = ["pytest", "pytest-benchmark"]
name = "plyer" name = "plyer"
version = "2.1.0" version = "2.1.0"
description = "Platform-independent wrapper for platform-dependent APIs" description = "Platform-independent wrapper for platform-dependent APIs"
optional = false optional = true
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "plyer-2.1.0-py2.py3-none-any.whl", hash = "sha256:1b1772060df8b3045ed4f08231690ec8f7de30f5a004aa1724665a9074eed113"}, {file = "plyer-2.1.0-py2.py3-none-any.whl", hash = "sha256:1b1772060df8b3045ed4f08231690ec8f7de30f5a004aa1724665a9074eed113"},
@@ -737,7 +740,7 @@ virtualenv = ">=20.10.0"
name = "prompt-toolkit" name = "prompt-toolkit"
version = "3.0.47" version = "3.0.47"
description = "Library for building powerful interactive command lines in Python" description = "Library for building powerful interactive command lines in Python"
optional = false optional = true
python-versions = ">=3.7.0" python-versions = ">=3.7.0"
files = [ files = [
{file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
@@ -814,7 +817,7 @@ files = [
name = "pygments" name = "pygments"
version = "2.18.0" version = "2.18.0"
description = "Pygments is a syntax highlighting package written in Python." description = "Pygments is a syntax highlighting package written in Python."
optional = false optional = true
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
@@ -826,13 +829,13 @@ windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyproject-api"
-version = "1.7.1"
+version = "1.8.0"
description = "API to interact with the python pyproject.toml based projects"
optional = false
python-versions = ">=3.8"
files = [
-{file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"},
+{file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"},
-{file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"},
+{file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"},
]

[package.dependencies]
@@ -840,18 +843,18 @@ packaging = ">=24.1"
tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}

[package.extras]
-docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"]
+docs = ["furo (>=2024.8.6)", "sphinx-autodoc-typehints (>=2.4.1)"]
-testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"]
+testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=75.1)"]

[[package]]
name = "pyright"
-version = "1.1.376"
+version = "1.1.381"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
-{file = "pyright-1.1.376-py3-none-any.whl", hash = "sha256:0f2473b12c15c46b3207f0eec224c3cea2bdc07cd45dd4a037687cbbca0fbeff"},
+{file = "pyright-1.1.381-py3-none-any.whl", hash = "sha256:5dc0aa80a265675d36abab59c674ae01dbe476714f91845b61b841d34aa99081"},
-{file = "pyright-1.1.376.tar.gz", hash = "sha256:bffd63b197cd0810395bb3245c06b01f95a85ddf6bfa0e5644ed69c841e954dd"},
+{file = "pyright-1.1.381.tar.gz", hash = "sha256:314cf0c1351c189524fb10c7ac20688ecd470e8cc505c394d642c9c80bf7c3a5"},
]

[package.dependencies]
@@ -863,13 +866,13 @@ dev = ["twine (>=3.4.1)"]
[[package]]
name = "pytest"
-version = "8.3.2"
+version = "8.3.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
files = [
-{file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"},
+{file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
-{file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"},
+{file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
]

[package.dependencies]
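The version bumps in this part of the diff (platformdirs, pyproject-api, pyright and pytest above, rapidfuzz, rich, tox, urllib3 and virtualenv below) are routine lock refreshes: each package moves to a newer release that still satisfies a typical compatible-release constraint, while websockets further down crosses a major version (12.0 to 13.1), which normally means its constraint was widened as well. Purely as an illustration (the real constraints live in pyproject.toml, which this diff does not show, so the ranges below are assumptions), a caret range is what lets `poetry update` re-resolve to these versions while the lock keeps pinning the exact release and file hashes:

```toml
# Illustrative only: these ranges are assumptions, not read from this diff.
# `poetry update` re-resolves within the ranges and rewrites poetry.lock.
[tool.poetry.group.dev.dependencies]
pytest = "^8.3"    # admits 8.3.2 -> 8.3.3; the lock pins the exact build
pyright = "^1.1"   # admits 1.1.376 -> 1.1.381
tox = "^4.18"      # admits 4.18.0 -> 4.20.0, still below 5.0
```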
@@ -947,119 +950,119 @@ files = [
[[package]]
name = "rapidfuzz"
-version = "3.9.6"
+version = "3.9.7"
description = "rapid fuzzy string matching"
optional = false
python-versions = ">=3.8"
files = [
{file = "rapidfuzz-3.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7ed0d0b9c85720f0ae33ac5efc8dc3f60c1489dad5c29d735fbdf2f66f0431f"}, {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ccf68e30b80e903f2309f90a438dbd640dd98e878eeb5ad361a288051ee5b75c"},
{file = "rapidfuzz-3.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f3deff6ab7017ed21b9aec5874a07ad13e6b2a688af055837f88b743c7bfd947"}, {file = "rapidfuzz-3.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:696a79018ef989bf1c9abd9005841cee18005ccad4748bad8a4c274c47b6241a"},
{file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3f9fc060160507b2704f7d1491bd58453d69689b580cbc85289335b14fe8ca"}, {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4eebf6c93af0ae866c22b403a84747580bb5c10f0d7b51c82a87f25405d4dcb"},
{file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e86c2b3827fa6169ad6e7d4b790ce02a20acefb8b78d92fa4249589bbc7a2c"}, {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e9125377fa3d21a8abd4fbdbcf1c27be73e8b1850f0b61b5b711364bf3b59db"},
{file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f982e1aafb4bd8207a5e073b1efef9e68a984e91330e1bbf364f9ed157ed83f0"}, {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c12d180b17a22d107c8747de9c68d0b9c1d15dcda5445ff9bf9f4ccfb67c3e16"},
{file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9196a51d0ec5eaaaf5bca54a85b7b1e666fc944c332f68e6427503af9fb8c49e"}, {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1318d42610c26dcd68bd3279a1bf9e3605377260867c9a8ed22eafc1bd93a7c"},
{file = "rapidfuzz-3.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5a514064e02585b1cc09da2fe406a6dc1a7e5f3e92dd4f27c53e5f1465ec81"}, {file = "rapidfuzz-3.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5fa6e3c6e0333051c1f3a49f0807b3366f4131c8d6ac8c3e05fd0d0ce3755c"},
{file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3a4244f65dbc3580b1275480118c3763f9dc29fc3dd96610560cb5e140a4d4a"}, {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fcf79b686962d7bec458a0babc904cb4fa319808805e036b9d5a531ee6b9b835"},
{file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f6ebb910a702e41641e1e1dada3843bc11ba9107a33c98daef6945a885a40a07"}, {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8b01153c7466d0bad48fba77a303d5a768e66f24b763853469f47220b3de4661"},
{file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:624fbe96115fb39addafa288d583b5493bc76dab1d34d0ebba9987d6871afdf9"}, {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:94baaeea0b4f8632a6da69348b1e741043eba18d4e3088d674d3f76586b6223d"},
{file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1c59f1c1507b7a557cf3c410c76e91f097460da7d97e51c985343798e9df7a3c"}, {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6c5b32875646cb7f60c193ade99b2e4b124f19583492115293cd00f6fb198b17"},
{file = "rapidfuzz-3.9.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f6f0256cb27b6a0fb2e1918477d1b56473cd04acfa245376a342e7c15806a396"}, {file = "rapidfuzz-3.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:110b6294396bc0a447648627479c9320f095c2034c0537f687592e0f58622638"},
{file = "rapidfuzz-3.9.6-cp310-cp310-win32.whl", hash = "sha256:24d473d00d23a30a85802b502b417a7f5126019c3beec91a6739fe7b95388b24"}, {file = "rapidfuzz-3.9.7-cp310-cp310-win32.whl", hash = "sha256:3445a35c4c8d288f2b2011eb61bce1227c633ce85a3154e727170f37c0266bb2"},
{file = "rapidfuzz-3.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:248f6d2612e661e2b5f9a22bbd5862a1600e720da7bb6ad8a55bb1548cdfa423"}, {file = "rapidfuzz-3.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:0d1415a732ee75e74a90af12020b77a0b396b36c60afae1bde3208a78cd2c9fc"},
{file = "rapidfuzz-3.9.6-cp310-cp310-win_arm64.whl", hash = "sha256:e03fdf0e74f346ed7e798135df5f2a0fb8d6b96582b00ebef202dcf2171e1d1d"}, {file = "rapidfuzz-3.9.7-cp310-cp310-win_arm64.whl", hash = "sha256:836f4d88b8bd0fff2ebe815dcaab8aa6c8d07d1d566a7e21dd137cf6fe11ed5b"},
{file = "rapidfuzz-3.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52e4675f642fbc85632f691b67115a243cd4d2a47bdcc4a3d9a79e784518ff97"}, {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d098ce6162eb5e48fceb0745455bc950af059df6113eec83e916c129fca11408"},
{file = "rapidfuzz-3.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1f93a2f13038700bd245b927c46a2017db3dcd4d4ff94687d74b5123689b873b"}, {file = "rapidfuzz-3.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:048d55d36c02c6685a2b2741688503c3d15149694506655b6169dcfd3b6c2585"},
{file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b70500bca460264b8141d8040caee22e9cf0418c5388104ff0c73fb69ee28f"}, {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c33211cfff9aec425bb1bfedaf94afcf337063aa273754f22779d6dadebef4c2"},
{file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1e037fb89f714a220f68f902fc6300ab7a33349f3ce8ffae668c3b3a40b0b06"}, {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6d9db2fa4e9be171e9bb31cf2d2575574774966b43f5b951062bb2e67885852"},
{file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6792f66d59b86ccfad5e247f2912e255c85c575789acdbad8e7f561412ffed8a"}, {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4e049d5ad61448c9a020d1061eba20944c4887d720c4069724beb6ea1692507"},
{file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:68d9cffe710b67f1969cf996983608cee4490521d96ea91d16bd7ea5dc80ea98"}, {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cfa74aac64c85898b93d9c80bb935a96bf64985e28d4ee0f1a3d1f3bf11a5106"},
{file = "rapidfuzz-3.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63daaeeea76da17fa0bbe7fb05cba8ed8064bb1a0edf8360636557f8b6511961"}, {file = "rapidfuzz-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965693c2e9efd425b0f059f5be50ef830129f82892fa1858e220e424d9d0160f"},
{file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d214e063bffa13e3b771520b74f674b22d309b5720d4df9918ff3e0c0f037720"}, {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8501000a5eb8037c4b56857724797fe5a8b01853c363de91c8d0d0ad56bef319"},
{file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ed443a2062460f44c0346cb9d269b586496b808c2419bbd6057f54061c9b9c75"}, {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d92c552c6b7577402afdd547dcf5d31ea6c8ae31ad03f78226e055cfa37f3c6"},
{file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5b0c9b227ee0076fb2d58301c505bb837a290ae99ee628beacdb719f0626d749"}, {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1ee2086f490cb501d86b7e386c1eb4e3a0ccbb0c99067089efaa8c79012c8952"},
{file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:82c9722b7dfaa71e8b61f8c89fed0482567fb69178e139fe4151fc71ed7df782"}, {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1de91e7fd7f525e10ea79a6e62c559d1b0278ec097ad83d9da378b6fab65a265"},
{file = "rapidfuzz-3.9.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c18897c95c0a288347e29537b63608a8f63a5c3cb6da258ac46fcf89155e723e"}, {file = "rapidfuzz-3.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4da514d13f4433e16960a17f05b67e0af30ac771719c9a9fb877e5004f74477"},
{file = "rapidfuzz-3.9.6-cp311-cp311-win32.whl", hash = "sha256:3e910cf08944da381159587709daaad9e59d8ff7bca1f788d15928f3c3d49c2a"}, {file = "rapidfuzz-3.9.7-cp311-cp311-win32.whl", hash = "sha256:a40184c67db8252593ec518e17fb8a6e86d7259dc9f2d6c0bf4ff4db8cf1ad4b"},
{file = "rapidfuzz-3.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:59c4a61fab676d37329fc3a671618a461bfeef53a4d0b8b12e3bc24a14e166f8"}, {file = "rapidfuzz-3.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:c4f28f1930b09a2c300357d8465b388cecb7e8b2f454a5d5425561710b7fd07f"},
{file = "rapidfuzz-3.9.6-cp311-cp311-win_arm64.whl", hash = "sha256:8b4afea244102332973377fddbe54ce844d0916e1c67a5123432291717f32ffa"}, {file = "rapidfuzz-3.9.7-cp311-cp311-win_arm64.whl", hash = "sha256:675b75412a943bb83f1f53e2e54fd18c80ef15ed642dc6eb0382d1949419d904"},
{file = "rapidfuzz-3.9.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:70591b28b218fff351b88cdd7f2359a01a71f9f7f5a2e465ce3715ed4b3c422b"}, {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ef6a1a8f0b12f8722f595f15c62950c9a02d5abc64742561299ffd49f6c6944"},
{file = "rapidfuzz-3.9.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee2d8355c7343c631a03e57540ea06e8717c19ecf5ff64ea07e0498f7f161457"}, {file = "rapidfuzz-3.9.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:32532af1d70c6ec02ea5ac7ee2766dfff7c8ae8c761abfe8da9e527314e634e8"},
{file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:708fb675de0f47b9635d1cc6fbbf80d52cb710d0a1abbfae5c84c46e3abbddc3"}, {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1a38bade755aa9dd95a81cda949e1bf9cd92b79341ccc5e2189c9e7bdfc5ec"},
{file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d66c247c2d3bb7a9b60567c395a15a929d0ebcc5f4ceedb55bfa202c38c6e0c"}, {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73ee2df41224c87336448d279b5b6a3a75f36e41dd3dcf538c0c9cce36360d8"},
{file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15146301b32e6e3d2b7e8146db1a26747919d8b13690c7f83a4cb5dc111b3a08"}, {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be3a1fc3e2ab3bdf93dc0c83c00acca8afd2a80602297d96cf4a0ba028333cdf"},
{file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7a03da59b6c7c97e657dd5cd4bcaab5fe4a2affd8193958d6f4d938bee36679"}, {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603f48f621272a448ff58bb556feb4371252a02156593303391f5c3281dfaeac"},
{file = "rapidfuzz-3.9.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2c2fe19e392dbc22695b6c3b2510527e2b774647e79936bbde49db7742d6f1"}, {file = "rapidfuzz-3.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:268f8e1ca50fc61c0736f3fe9d47891424adf62d96ed30196f30f4bd8216b41f"},
{file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:91aaee4c94cb45930684f583ffc4e7c01a52b46610971cede33586cf8a04a12e"}, {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f8bf3f0d02935751d8660abda6044821a861f6229f7d359f98bcdcc7e66c39b"},
{file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3f5702828c10768f9281180a7ff8597da1e5002803e1304e9519dd0f06d79a85"}, {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b997ff3b39d4cee9fb025d6c46b0a24bd67595ce5a5b652a97fb3a9d60beb651"},
{file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ccd1763b608fb4629a0b08f00b3c099d6395e67c14e619f6341b2c8429c2f310"}, {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca66676c8ef6557f9b81c5b2b519097817a7c776a6599b8d6fcc3e16edd216fe"},
{file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc7a0d4b2cb166bc46d02c8c9f7551cde8e2f3c9789df3827309433ee9771163"}, {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:35d3044cb635ca6b1b2b7b67b3597bd19f34f1753b129eb6d2ae04cf98cd3945"},
{file = "rapidfuzz-3.9.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7496f53d40560a58964207b52586783633f371683834a8f719d6d965d223a2eb"}, {file = "rapidfuzz-3.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a93c9e60904cb76e7aefef67afffb8b37c4894f81415ed513db090f29d01101"},
{file = "rapidfuzz-3.9.6-cp312-cp312-win32.whl", hash = "sha256:5eb1a9272ca71bc72be5415c2fa8448a6302ea4578e181bb7da9db855b367df0"}, {file = "rapidfuzz-3.9.7-cp312-cp312-win32.whl", hash = "sha256:579d107102c0725f7c79b4e79f16d3cf4d7c9208f29c66b064fa1fd4641d5155"},
{file = "rapidfuzz-3.9.6-cp312-cp312-win_amd64.whl", hash = "sha256:0d21fc3c0ca507a1180152a6dbd129ebaef48facde3f943db5c1055b6e6be56a"}, {file = "rapidfuzz-3.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:953b3780765c8846866faf891ee4290f6a41a6dacf4fbcd3926f78c9de412ca6"},
{file = "rapidfuzz-3.9.6-cp312-cp312-win_arm64.whl", hash = "sha256:43bb27a57c29dc5fa754496ba6a1a508480d21ae99ac0d19597646c16407e9f3"}, {file = "rapidfuzz-3.9.7-cp312-cp312-win_arm64.whl", hash = "sha256:7c20c1474b068c4bd45bf2fd0ad548df284f74e9a14a68b06746c56e3aa8eb70"},
{file = "rapidfuzz-3.9.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:83a5ac6547a9d6eedaa212975cb8f2ce2aa07e6e30833b40e54a52b9f9999aa4"}, {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fde81b1da9a947f931711febe2e2bee694e891f6d3e6aa6bc02c1884702aea19"},
{file = "rapidfuzz-3.9.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:10f06139142ecde67078ebc9a745965446132b998f9feebffd71acdf218acfcc"}, {file = "rapidfuzz-3.9.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47e92c155a14f44511ea8ebcc6bc1535a1fe8d0a7d67ad3cc47ba61606df7bcf"},
{file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74720c3f24597f76c7c3e2c4abdff55f1664f4766ff5b28aeaa689f8ffba5fab"}, {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8772b745668260c5c4d069c678bbaa68812e6c69830f3771eaad521af7bc17f8"},
{file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2bce52b5c150878e558a0418c2b637fb3dbb6eb38e4eb27d24aa839920483e"}, {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578302828dd97ee2ba507d2f71d62164e28d2fc7bc73aad0d2d1d2afc021a5d5"},
{file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1611199f178793ca9a060c99b284e11f6d7d124998191f1cace9a0245334d219"}, {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc3e6081069eea61593f1d6839029da53d00c8c9b205c5534853eaa3f031085c"},
{file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0308b2ad161daf502908a6e21a57c78ded0258eba9a8f5e2545e2dafca312507"}, {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b1c2d504eddf97bc0f2eba422c8915576dbf025062ceaca2d68aecd66324ad9"},
{file = "rapidfuzz-3.9.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3eda91832201b86e3b70835f91522587725bec329ec68f2f7faf5124091e5ca7"}, {file = "rapidfuzz-3.9.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb76e5a21034f0307c51c5a2fc08856f698c53a4c593b17d291f7d6e9d09ca3"},
{file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ece873c093aedd87fc07c2a7e333d52e458dc177016afa1edaf157e82b6914d8"}, {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d4ba2318ef670ce505f42881a5d2af70f948124646947341a3c6ccb33cd70369"},
{file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d97d3c9d209d5c30172baea5966f2129e8a198fec4a1aeb2f92abb6e82a2edb1"}, {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:057bb03f39e285047d7e9412e01ecf31bb2d42b9466a5409d715d587460dd59b"},
{file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6c4550d0db4931f5ebe9f0678916d1b06f06f5a99ba0b8a48b9457fd8959a7d4"}, {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a8feac9006d5c9758438906f093befffc4290de75663dbb2098461df7c7d28dd"},
{file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b6b8dd4af6324fc325d9483bec75ecf9be33e590928c9202d408e4eafff6a0a6"}, {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95b8292383e717e10455f2c917df45032b611141e43d1adf70f71b1566136b11"},
{file = "rapidfuzz-3.9.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16122ae448bc89e2bea9d81ce6cb0f751e4e07da39bd1e70b95cae2493857853"}, {file = "rapidfuzz-3.9.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e9fbf659537d246086d0297628b3795dc3e4a384101ecc01e5791c827b8d7345"},
{file = "rapidfuzz-3.9.6-cp313-cp313-win32.whl", hash = "sha256:71cc168c305a4445109cd0d4925406f6e66bcb48fde99a1835387c58af4ecfe9"}, {file = "rapidfuzz-3.9.7-cp313-cp313-win32.whl", hash = "sha256:1dc516ac6d32027be2b0196bedf6d977ac26debd09ca182376322ad620460feb"},
{file = "rapidfuzz-3.9.6-cp313-cp313-win_amd64.whl", hash = "sha256:59ee78f2ecd53fef8454909cda7400fe2cfcd820f62b8a5d4dfe930102268054"}, {file = "rapidfuzz-3.9.7-cp313-cp313-win_amd64.whl", hash = "sha256:b4f86e09d3064dca0b014cd48688964036a904a2d28048f00c8f4640796d06a8"},
{file = "rapidfuzz-3.9.6-cp313-cp313-win_arm64.whl", hash = "sha256:58b4ce83f223605c358ae37e7a2d19a41b96aa65b1fede99cc664c9053af89ac"}, {file = "rapidfuzz-3.9.7-cp313-cp313-win_arm64.whl", hash = "sha256:19c64d8ddb2940b42a4567b23f1681af77f50a5ff6c9b8e85daba079c210716e"},
{file = "rapidfuzz-3.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f469dbc9c4aeaac7dd005992af74b7dff94aa56a3ea063ce64e4b3e6736dd2f"}, {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fbda3dd68d8b28ccb20ffb6f756fefd9b5ba570a772bedd7643ed441f5793308"},
{file = "rapidfuzz-3.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a9ed7ad9adb68d0fe63a156fe752bbf5f1403ed66961551e749641af2874da92"}, {file = "rapidfuzz-3.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2379e0b2578ad3ac7004f223251550f08bca873ff76c169b09410ec562ad78d8"},
{file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ffe48ffbeedf78d120ddfb9d583f2ca906712159a4e9c3c743c9f33e7b1775"}, {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d1eff95362f993b0276fd3839aee48625b09aac8938bb0c23b40d219cba5dc5"},
{file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8502ccdea9084d54b6f737d96a3b60a84e3afed9d016686dc979b49cdac71613"}, {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd9360e30041690912525a210e48a897b49b230768cc8af1c702e5395690464f"},
{file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a4bec4956e06b170ca896ba055d08d4c457dac745548172443982956a80e118"}, {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a93cd834b3c315ab437f0565ee3a2f42dd33768dc885ccbabf9710b131cf70d2"},
{file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c0488b1c273be39e109ff885ccac0448b2fa74dea4c4dc676bcf756c15f16d6"}, {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff196996240db7075f62c7bc4506f40a3c80cd4ae3ab0e79ac6892283a90859"},
{file = "rapidfuzz-3.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0542c036cb6acf24edd2c9e0411a67d7ba71e29e4d3001a082466b86fc34ff30"}, {file = "rapidfuzz-3.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948dcee7aaa1cd14358b2a7ef08bf0be42bf89049c3a906669874a715fc2c937"},
{file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0a96b52c9f26857bf009e270dcd829381e7a634f7ddd585fa29b87d4c82146d9"}, {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d95751f505a301af1aaf086c19f34536056d6c8efa91b2240de532a3db57b543"},
{file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6edd3cd7c4aa8c68c716d349f531bd5011f2ca49ddade216bb4429460151559f"}, {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:90db86fa196eecf96cb6db09f1083912ea945c50c57188039392d810d0b784e1"},
{file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:50b2fb55d7ed58c66d49c9f954acd8fc4a3f0e9fd0ff708299bd8abb68238d0e"}, {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3171653212218a162540a3c8eb8ae7d3dcc8548540b69eaecaf3b47c14d89c90"},
{file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:32848dfe54391636b84cda1823fd23e5a6b1dbb8be0e9a1d80e4ee9903820994"}, {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:36dd6e820379c37a1ffefc8a52b648758e867cd9d78ee5b5dc0c9a6a10145378"},
{file = "rapidfuzz-3.9.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:29146cb7a1bf69c87e928b31bffa54f066cb65639d073b36e1425f98cccdebc6"}, {file = "rapidfuzz-3.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7b702de95666a1f7d5c6b47eacadfe2d2794af3742d63d2134767d13e5d1c713"},
{file = "rapidfuzz-3.9.6-cp38-cp38-win32.whl", hash = "sha256:aed13e5edacb0ecadcc304cc66e93e7e77ff24f059c9792ee602c0381808e10c"}, {file = "rapidfuzz-3.9.7-cp38-cp38-win32.whl", hash = "sha256:9030e7238c0df51aed5c9c5ed8eee2bdd47a2ae788e562c1454af2851c3d1906"},
{file = "rapidfuzz-3.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:af440e36b828922256d0b4d79443bf2cbe5515fc4b0e9e96017ec789b36bb9fc"}, {file = "rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:f847fb0fbfb72482b1c05c59cbb275c58a55b73708a7f77a83f8035ee3c86497"},
{file = "rapidfuzz-3.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:efa674b407424553024522159296690d99d6e6b1192cafe99ca84592faff16b4"}, {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:97f2ce529d2a70a60c290f6ab269a2bbf1d3b47b9724dccc84339b85f7afb044"},
{file = "rapidfuzz-3.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0b40ff76ee19b03ebf10a0a87938f86814996a822786c41c3312d251b7927849"}, {file = "rapidfuzz-3.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e2957fdad10bb83b1982b02deb3604a3f6911a5e545f518b59c741086f92d152"},
{file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16a6c7997cb5927ced6f617122eb116ba514ec6b6f60f4803e7925ef55158891"}, {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d5262383634626eb45c536017204b8163a03bc43bda880cf1bdd7885db9a163"},
{file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3f42504bdc8d770987fc3d99964766d42b2a03e4d5b0f891decdd256236bae0"}, {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:364587827d7cbd41afa0782adc2d2d19e3f07d355b0750a02a8e33ad27a9c368"},
{file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9462aa2be9f60b540c19a083471fdf28e7cf6434f068b631525b5e6251b35e"}, {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecc24af7f905f3d6efb371a01680116ffea8d64e266618fb9ad1602a9b4f7934"},
{file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1629698e68f47609a73bf9e73a6da3a4cac20bc710529215cbdf111ab603665b"}, {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dc86aa6b29d174713c5f4caac35ffb7f232e3e649113e8d13812b35ab078228"},
{file = "rapidfuzz-3.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68bc7621843d8e9a7fd1b1a32729465bf94b47b6fb307d906da168413331f8d6"}, {file = "rapidfuzz-3.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3dcfbe7266e74a707173a12a7b355a531f2dcfbdb32f09468e664330da14874"},
{file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c6254c50f15bc2fcc33cb93a95a81b702d9e6590f432a7f7822b8c7aba9ae288"}, {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b23806fbdd6b510ba9ac93bb72d503066263b0fba44b71b835be9f063a84025f"},
{file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7e535a114fa575bc143e175e4ca386a467ec8c42909eff500f5f0f13dc84e3e0"}, {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5551d68264c1bb6943f542da83a4dc8940ede52c5847ef158698799cc28d14f5"},
{file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d50acc0e9d67e4ba7a004a14c42d1b1e8b6ca1c515692746f4f8e7948c673167"}, {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:13d8675a1fa7e2b19650ca7ef9a6ec01391d4bb12ab9e0793e8eb024538b4a34"},
{file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fa742ec60bec53c5a211632cf1d31b9eb5a3c80f1371a46a23ac25a1fa2ab209"}, {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9b6a5de507b9be6de688dae40143b656f7a93b10995fb8bd90deb555e7875c60"},
{file = "rapidfuzz-3.9.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c256fa95d29cbe5aa717db790b231a9a5b49e5983d50dc9df29d364a1db5e35b"}, {file = "rapidfuzz-3.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:111a20a3c090cf244d9406e60500b6c34b2375ba3a5009e2b38fd806fe38e337"},
{file = "rapidfuzz-3.9.6-cp39-cp39-win32.whl", hash = "sha256:89acbf728b764421036c173a10ada436ecca22999851cdc01d0aa904c70d362d"}, {file = "rapidfuzz-3.9.7-cp39-cp39-win32.whl", hash = "sha256:22589c0b8ccc6c391ce7f776c93a8c92c96ab8d34e1a19f1bd2b12a235332632"},
{file = "rapidfuzz-3.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:c608fcba8b14d86c04cb56b203fed31a96e8a1ebb4ce99e7b70313c5bf8cf497"}, {file = "rapidfuzz-3.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:6f83221db5755b8f34222e40607d87f1176a8d5d4dbda4a55a0f0b67d588a69c"},
{file = "rapidfuzz-3.9.6-cp39-cp39-win_arm64.whl", hash = "sha256:d41c00ded0e22e9dba88ff23ebe0dc9d2a5f21ba2f88e185ea7374461e61daa9"}, {file = "rapidfuzz-3.9.7-cp39-cp39-win_arm64.whl", hash = "sha256:3665b92e788578c3bb334bd5b5fa7ee1a84bafd68be438e3110861d1578c63a0"},
{file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a65c2f63218ea2dedd56fc56361035e189ca123bd9c9ce63a9bef6f99540d681"}, {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7df9c2194c7ec930b33c991c55dbd0c10951bd25800c0b7a7b571994ebbced5"},
{file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:680dc78a5f889d3b89f74824b89fe357f49f88ad10d2c121e9c3ad37bac1e4eb"}, {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:68bd888eafd07b09585dcc8bc2716c5ecdb7eed62827470664d25588982b2873"},
{file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8ca862927a0b05bd825e46ddf82d0724ea44b07d898ef639386530bf9b40f15"}, {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1230e0f9026851a6a432beaa0ce575dda7b39fe689b576f99a0704fbb81fc9c"},
{file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2116fa1fbff21fa52cd46f3cfcb1e193ba1d65d81f8b6e123193451cd3d6c15e"}, {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b36e1c61b796ae1777f3e9e11fd39898b09d351c9384baf6e3b7e6191d8ced"},
{file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dcb7d9afd740370a897c15da61d3d57a8d54738d7c764a99cedb5f746d6a003"}, {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9dba13d86806fcf3fe9c9919f58575e0090eadfb89c058bde02bcc7ab24e4548"},
{file = "rapidfuzz-3.9.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1a5bd6401bb489e14cbb5981c378d53ede850b7cc84b2464cad606149cc4e17d"}, {file = "rapidfuzz-3.9.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1f1a33e84056b7892c721d84475d3bde49a145126bc4c6efe0d6d0d59cb31c29"},
{file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:29fda70b9d03e29df6fc45cc27cbcc235534b1b0b2900e0a3ae0b43022aaeef5"}, {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3492c7a42b7fa9f0051d7fcce9893e95ed91c97c9ec7fb64346f3e070dd318ed"},
{file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:88144f5f52ae977df9352029488326afadd7a7f42c6779d486d1f82d43b2b1f2"}, {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ece45eb2af8b00f90d10f7419322e8804bd42fb1129026f9bfe712c37508b514"},
{file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:715aeaabafba2709b9dd91acb2a44bad59d60b4616ef90c08f4d4402a3bbca60"}, {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcd14cf4876f04b488f6e54a7abd3e9b31db5f5a6aba0ce90659917aaa8c088"},
{file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af26ebd3714224fbf9bebbc27bdbac14f334c15f5d7043699cd694635050d6ca"}, {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:521c58c72ed8a612b25cda378ff10dee17e6deb4ee99a070b723519a345527b9"},
{file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101bd2df438861a005ed47c032631b7857dfcdb17b82beeeb410307983aac61d"}, {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18669bb6cdf7d40738526d37e550df09ba065b5a7560f3d802287988b6cb63cf"},
{file = "rapidfuzz-3.9.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2185e8e29809b97ad22a7f99281d1669a89bdf5fa1ef4ef1feca36924e675367"}, {file = "rapidfuzz-3.9.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7abe2dbae81120a64bb4f8d3fcafe9122f328c9f86d7f327f174187a5af4ed86"},
{file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9e53c72d08f0e9c6e4a369e52df5971f311305b4487690c62e8dd0846770260c"}, {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a3c0783910911f4f24655826d007c9f4360f08107410952c01ee3df98c713eb2"},
{file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a0cb157162f0cdd62e538c7bd298ff669847fc43a96422811d5ab933f4c16c3a"}, {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:03126f9a040ff21d2a110610bfd6b93b79377ce8b4121edcb791d61b7df6eec5"},
{file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bb5ff2bd48132ed5e7fbb8f619885facb2e023759f2519a448b2c18afe07e5d"}, {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:591908240f4085e2ade5b685c6e8346e2ed44932cffeaac2fb32ddac95b55c7f"},
{file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6dc37f601865e8407e3a8037ffbc3afe0b0f837b2146f7632bd29d087385babe"}, {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9012d86c6397edbc9da4ac0132de7f8ee9d6ce857f4194d5684c4ddbcdd1c5c"},
{file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a657eee4b94668faf1fa2703bdd803654303f7e468eb9ba10a664d867ed9e779"}, {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df596ddd3db38aa513d4c0995611267b3946e7cbe5a8761b50e9306dfec720ee"},
{file = "rapidfuzz-3.9.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:51be6ab5b1d5bb32abd39718f2a5e3835502e026a8272d139ead295c224a6f5e"}, {file = "rapidfuzz-3.9.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3ed5adb752f4308fcc8f4fb6f8eb7aa4082f9d12676fda0a74fa5564242a8107"},
{file = "rapidfuzz-3.9.6.tar.gz", hash = "sha256:5cf2a7d621e4515fee84722e93563bf77ff2cbe832a77a48b81f88f9e23b9e8d"}, {file = "rapidfuzz-3.9.7.tar.gz", hash = "sha256:f1c7296534c1afb6f495aa95871f14ccdc197c6db42965854e483100df313030"},
]

[package.extras]
@@ -1088,13 +1091,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rich"
-version = "13.7.1"
+version = "13.8.1"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-optional = false
+optional = true
python-versions = ">=3.7.0"
files = [
-{file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
+{file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"},
-{file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"},
+{file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"},
]

[package.dependencies]
@@ -1157,17 +1160,17 @@ files = [
[[package]]
name = "tox"
-version = "4.18.0"
+version = "4.20.0"
description = "tox is a generic virtualenv management and test command line tool"
optional = false
python-versions = ">=3.8"
files = [
-{file = "tox-4.18.0-py3-none-any.whl", hash = "sha256:0a457400cf70615dc0627eb70d293e80cd95d8ce174bb40ac011011f0c03a249"},
+{file = "tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34"},
-{file = "tox-4.18.0.tar.gz", hash = "sha256:5dfa1cab9f146becd6e351333a82f9e0ade374451630ba65ee54584624c27b58"},
+{file = "tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5"},
]

[package.dependencies]
-cachetools = ">=5.4"
+cachetools = ">=5.5"
chardet = ">=5.2"
colorama = ">=0.4.6"
filelock = ">=3.15.4"
@@ -1179,8 +1182,8 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
virtualenv = ">=20.26.3" virtualenv = ">=20.26.3"
[package.extras] [package.extras]
docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"]
testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.3)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"]
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
@@ -1195,13 +1198,13 @@ files = [
[[package]]
name = "urllib3"
-version = "2.2.2"
+version = "2.2.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
-{file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
+{file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
-{file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+{file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
]

[package.extras]
@@ -1212,13 +1215,13 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "virtualenv"
-version = "20.26.3"
+version = "20.26.5"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
-{file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"},
+{file = "virtualenv-20.26.5-py3-none-any.whl", hash = "sha256:4f3ac17b81fba3ce3bd6f4ead2749a72da5929c01774948e243db9ba41df4ff6"},
-{file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"},
+{file = "virtualenv-20.26.5.tar.gz", hash = "sha256:ce489cac131aa58f4b25e321d6d186171f78e6cb13fafbf32a840cee67733ff4"},
]

[package.dependencies]
@@ -1234,7 +1237,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
name = "wcwidth" name = "wcwidth"
version = "0.2.13" version = "0.2.13"
description = "Measures the displayed width of unicode strings in a terminal" description = "Measures the displayed width of unicode strings in a terminal"
optional = false optional = true
python-versions = "*" python-versions = "*"
files = [ files = [
{file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"},
@@ -1243,83 +1246,97 @@ files = [
[[package]]
name = "websockets"
-version = "12.0"
+version = "13.1"
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
optional = false
python-versions = ">=3.8"
files = [
{file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"},
{file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"},
{file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"},
{file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"},
{file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"},
{file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"},
{file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"},
{file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"},
{file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"},
{file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"},
{file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"},
{file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"},
{file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"},
{file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"},
{file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"},
{file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"},
{file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"},
{file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"},
{file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"},
{file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"},
{file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"},
{file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"},
{file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"},
{file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"},
{file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"},
{file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"},
{file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"},
{file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"},
{file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"},
{file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"},
{file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"},
{file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"},
{file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"},
{file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"},
{file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"},
{file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"},
{file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"},
{file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"},
{file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"},
{file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"},
{file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"},
{file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"},
{file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"},
{file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"},
{file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"},
{file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"},
{file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"},
{file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"},
{file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"},
{file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"},
{file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"},
{file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"},
{file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"},
{file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"},
{file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"},
{file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"},
{file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"},
{file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"},
{file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"},
{file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"},
{file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"},
{file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"},
{file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"},
{file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"},
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"},
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"},
{file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"},
{file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"},
{file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"},
{file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"},
{file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"},
{file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"},
{file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"},
{file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"},
{file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"},
{file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"},
{file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"},
{file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"},
{file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"},
{file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"},
{file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"},
{file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"},
{file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"},
{file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"},
] ]
 [[package]]
@@ -1353,7 +1370,13 @@ secretstorage = ["cffi", "secretstorage"]
static-analysis = ["autopep8 (>=2.0,<3.0)", "ruff (>=0.5.0,<0.6.0)"] static-analysis = ["autopep8 (>=2.0,<3.0)", "ruff (>=0.5.0,<0.6.0)"]
test = ["pytest (>=8.1,<9.0)"] test = ["pytest (>=8.1,<9.0)"]
[extras]
cli = ["click", "inquirerpy", "rich"]
full = ["click", "inquirerpy", "mpv", "plyer", "rich"]
mpv = ["mpv"]
notifications = ["plyer"]
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.10" python-versions = "^3.10"
content-hash = "7d20e2d0c0c3c8f3a48d9160a2b4a11a5f353d23bb5d7a06ec527fe08e425b91" content-hash = "aa6445db170dfcb5ed647d78bf5969696025eb935107c1a5ff2812666216f67c"

View File

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "fastanime"
-version = "2.3.1"
+version = "2.5.4.dev1"
 description = "A browser anime site experience from the terminal"
 authors = ["Benextempest <benextempest@gmail.com>"]
 license = "UNLICENSE"
@@ -9,14 +9,21 @@ readme = "README.md"
 [tool.poetry.dependencies]
 python = "^3.10"
 yt-dlp = "^2024.5.27"
-rich = "^13.7.1"
-click = "^8.1.7"
-inquirerpy = "^0.3.4"
 thefuzz = "^0.22.1"
 requests = "^2.32.3"
-plyer = "^2.1.0"
-mpv = "^1.0.7"
+rich = { version = "^13.7.1", optional = true }
+click = { version = "^8.1.7", optional = true }
+inquirerpy = { version = "^0.3.4", optional = true }
+mpv = { version = "^1.0.7", optional = true }
+plyer = { version = "^2.1.0", optional = true }
+[tool.poetry.extras]
+full = ["plyer", "mpv", "rich", "click", "inquirerpy"]
+cli = ["rich", "click", "inquirerpy"]
+mpv = ["mpv"]
+notifications = ["plyer"]
 [tool.poetry.group.dev.dependencies]
 black = "^24.4.2"
 isort = "^5.13.2"

View File

@@ -1,4 +1,5 @@
 {
-  "typeCheckingMode": "standard",
-  "reportPrivateImportUsage": false
+  "venvPath": ".",
+  "venv": ".venv",
+  "pythonVersion": "3.10"
 }
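
The new pyright settings (venvPath "." plus venv ".venv") make the type checker resolve imports against an in-project virtual environment rather than whichever interpreter happens to be active. A hedged sketch of a matching local setup (the poetry config step is an assumption about how that ./.venv would be created; the other two commands appear in the diffs here):

    poetry config virtualenvs.in-project true   # have poetry create ./.venv
    poetry install --all-extras
    poetry run pyright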

View File

@@ -1,4 +1,3 @@
-# TODO: Write tests to make sure all click commands work
 import pytest
 from click.testing import CliRunner

View File

@@ -7,7 +7,7 @@ env_list = lint, pyright, py{310,311}
 description = run unit tests
 deps =poetry
 commands =
-    poetry install
+    poetry install --all-extras
     poetry run pytest

 [testenv:lint]
@@ -15,7 +15,7 @@ description = run linters
 skip_install = true
 deps =poetry
 commands =
-    poetry install
+    poetry install --all-extras
     poetry run black .

 [testenv:pyright]
@@ -23,5 +23,5 @@ description = run type checking
 skip_install = true
 deps =poetry
 commands =
-    poetry install --no-root
+    poetry install --no-root --all-extras
     poetry run pyright