Mirror of https://github.com/Benexl/FastAnime.git (synced 2025-12-06 04:41:06 -08:00)

Compare commits (318 commits)
.github/workflows/build.yml (vendored, 2 lines changed)

@@ -24,7 +24,7 @@ jobs:
   path: ./.venv
   key: venv-${{ hashFiles('poetry.lock') }}
 - name: Install the project dependencies
-  run: poetry install
+  run: poetry install --all-extras
 - name: build app
   run: poetry build
 - name: Archive production artifacts
.github/workflows/test.yml (vendored, 2 lines changed)

@@ -30,7 +30,7 @@ jobs:
   path: ./.venv
   key: venv-${{ hashFiles('poetry.lock') }}
 - name: Install the project dependencies
-  run: poetry install
+  run: poetry install --all-extras
 - name: run linter, formatters and sort imports
   run: |
     poetry run black .
README.md (547 changed lines)
@@ -1,19 +1,27 @@
|
||||
# FastAnime
|
||||
# **FastAnime**
|
||||
|
||||
 
|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
Welcome to **FastAnime**, your anime site experience from the terminal.
|
||||
|
||||

|
||||
|
||||
**fzf mode**
|
||||
<details>
|
||||
<summary><b>fzf mode</b></summary>
|
||||
|
||||
[fa_fzf_demo.webm](https://github.com/user-attachments/assets/b1fecf25-e358-4e8b-a144-bcb7947210cf)
|
||||
|
||||
|
||||
**other modes:**
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>rofi mode</b></summary>
|
||||
|
||||
[fa_rofi_mode.webm](https://github.com/user-attachments/assets/2ce669bf-b62f-4c44-bd79-cf0dcaddf37a)
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
@@ -23,14 +31,11 @@ Welcome to **FastAnime**, anime site experience from the terminal.
|
||||
|
||||
</details>
|
||||
|
||||
|
||||
Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magic-tape](https://gitlab.com/christosangel/magic-tape/-/tree/main?ref_type=heads) and [ani-cli](https://github.com/pystardust/ani-cli).
|
||||
|
||||
|
||||
Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [jerry](https://github.com/justchokingaround/jerry/tree/main),[magic-tape](https://gitlab.com/christosangel/magic-tape/-/tree/main?ref_type=heads) and [ani-cli](https://github.com/pystardust/ani-cli).
|
||||
|
||||
<!--toc:start-->
|
||||
|
||||
- [FastAnime](#fastanime)
|
||||
- [**FastAnime**](#fastanime)
|
||||
- [Installation](#installation)
|
||||
- [Installation using your favourite package manager](#installation-using-your-favourite-package-manager)
|
||||
- [Using pipx](#using-pipx)
|
||||
@@ -40,13 +45,21 @@ Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magi
|
||||
- [External Dependencies](#external-dependencies)
|
||||
- [Usage](#usage)
|
||||
- [The Commandline interface :fire:](#the-commandline-interface-fire)
|
||||
- [The anilist command](#the-anilist-command)
|
||||
- [The anilist command :fire: :fire: :fire:](#the-anilist-command-fire-fire-fire)
|
||||
- [Running without any subcommand](#running-without-any-subcommand)
|
||||
- [Subcommands](#subcommands)
|
||||
- [download subcommand](#download-subcommand)
|
||||
- [search subcommand](#search-subcommand)
|
||||
- [grab subcommand](#grab-subcommand)
|
||||
- [downloads subcommand](#downloads-subcommand)
|
||||
- [config subcommand](#config-subcommand)
|
||||
- [cache subcommand](#cache-subcommand)
|
||||
- [update subcommand](#update-subcommand)
|
||||
- [completions subcommand](#completions-subcommand)
|
||||
- [MPV specific commands](#mpv-specific-commands)
|
||||
- [Key Bindings](#key-bindings)
|
||||
- [Script Messages](#script-messages)
|
||||
- [styling the default interface](#styling-the-default-interface)
|
||||
- [Configuration](#configuration)
|
||||
- [Contributing](#contributing)
|
||||
- [Receiving Support](#receiving-support)
|
||||
@@ -55,14 +68,19 @@ Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magi
|
||||
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
> This project currently scrapes allanime and is in no way related to them. The site is in the public domain and can be access by any one with a browser.
|
||||
|
||||
> This project currently scrapes allanime, hianime, animepahe and nyaa. These sites are publicly available and can be accessed by anyone with a browser.
|
||||
|
||||
## Installation
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
The app can run wherever Python can run, so all you need is Python installed on your device.
|
||||
On android you can use [termux](https://github.com/termux/termux-app).
|
||||
If you have any difficulty consult for help on the [discord channel](https://discord.gg/HRjySFjQ)
|
||||
If you have any difficulty, ask for help on the [discord channel](https://discord.gg/HBEmAwvbHV)
|
||||
|
||||
### Installation using your favourite package manager
|
||||
|
||||
@@ -75,12 +93,21 @@ Preferred method of installation since [Pipx](https://github.com/pypa/pipx) crea
|
||||
```bash
|
||||
|
||||
pipx install fastanime
|
||||
# -- or for the development version --
|
||||
pipx install 'fastanime==<latest-pre-release-tag>.dev1'
|
||||
# example
|
||||
# pipx install 'fastanime==0.60.1.dev1'
|
||||
|
||||
```
|
||||
|
||||
#### Using pip
|
||||
|
||||
```bash
|
||||
pip install fastanime
|
||||
# -- or for the development version --
|
||||
pip install 'fastanime==<latest-pre-release-tag>.dev1'
|
||||
# example
|
||||
# pip install 'fastanime==0.60.1.dev1'
|
||||
```
|
||||
|
||||
### Installing the bleeding edge version
|
||||
@@ -110,7 +137,7 @@ Requirements:
|
||||
|
||||
To build from the source, follow these steps:
|
||||
|
||||
1. Clone the repository: `git clone https://github.com/Benex254/FastAnime.git`
|
||||
1. Clone the repository: `git clone https://github.com/Benex254/FastAnime.git --depth 1`
|
||||
2. Navigate into the folder: `cd FastAnime`
|
||||
3. Then build and install the app:
|
||||
|
||||
@@ -143,7 +170,7 @@ fastanime --version
|
||||
|
||||
### External Dependencies
|
||||
|
||||
The only required external dependency, unless you won't be streaming, is [MPV](https://mpv.io/installation/), which i recommend installing with [uosc](https://github.com/tomasklaen/uosc) and [thumbfast](https://github.com/po5/thumbfast) for the best experience since they add a better interface to it.
|
||||
The only required external dependency, unless you won't be streaming, is [MPV](https://mpv.io/installation/), which I recommend installing with [uosc](https://github.com/tomasklaen/uosc) :fire: and [thumbfast](https://github.com/po5/thumbfast) for the best experience, since they add a better interface to it.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
@@ -151,59 +178,109 @@ The only required external dependency, unless you won't be streaming, is [MPV](h
|
||||
> player because we believe nothing beats **MPV**, and it provides
> everything you could ever need with a small footprint.
> But if you have a reason, feel free to encourage us to do so.
> However, on Android this is not the case, so VLC is also supported
|
||||
|
||||
**Other dependencies that will just make your experience better:**
|
||||
**Other external dependencies that will just make your experience better:**
|
||||
|
||||
- [fzf](https://github.com/junegunn/fzf) :fire: which is used as a better alternative to the ui.
|
||||
- [webtorrent-cli](https://github.com/webtorrent/webtorrent-cli) used when the provider is nyaa
|
||||
- [ffmpeg](https://www.ffmpeg.org/) is required to be in your path environment variables to properly download [hls](https://www.cloudflare.com/en-gb/learning/video/what-is-http-live-streaming/) streams.
|
||||
- [fzf](https://github.com/junegunn/fzf) 🔥 which is used as a better alternative to the ui.
|
||||
- [rofi](https://github.com/davatorium/rofi) 🔥 which is used as another alternative ui + the desktop entry ui
|
||||
- [chafa](https://github.com/hpjansson/chafa) currently the best cross platform and cross terminal image viewer for the terminal.
|
||||
- [icat](https://sw.kovidgoyal.net/kitty/kittens/icat/) an image viewer that only works in [kitty terminal](https://sw.kovidgoyal.net/kitty/), which is currently the best terminal in my opinion, and by far the best image renderer for the terminal thanks to kitty's terminal graphics protocol. Its terminal graphics is so op that you can [run a browser on it](https://github.com/chase/awrit?tab=readme-ov-file)!!
|
||||
- [bash](https://www.gnu.org/software/bash/) is used as the preview script language.
|
||||
- [ani-skip](https://github.com/synacktraa/ani-skip) :fire: used for skipping the opening and ending theme songs
|
||||
- [ani-skip](https://github.com/synacktraa/ani-skip) used for skipping the opening and ending theme songs
|
||||
- [ffmpegthumbnailer](https://github.com/dirkvdb/ffmpegthumbnailer) used for local previews of downloaded anime
|
||||
- [syncplay](https://syncplay.pl/) to enable watch together.
|
||||
- [feh](https://github.com/derf/feh) used in manga mode
|
||||
|
||||
## Usage
|
||||
|
||||
The app offers both a graphical interface (under development) and a robust command-line interface.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
> The GUI is mostly in hiatus; use the CLI for now.
|
||||
> However, you can try it out before i decided to change my objective by checking out this [release](https://github.com/Benex254/FastAnime/tree/v0.20.0).
|
||||
> But be reassured for those who aren't terminal chads, i will still complete the GUI for the fun of it
|
||||
The project offers a featureful command-line interface and an MPV interface through the use of python-mpv.
The project also offers subs in different languages thanks to the hianime provider.
|
||||
|
||||
### The Commandline interface :fire:
|
||||
|
||||
Designed for power users who prefer efficiency over browser-based streaming and still want the experience in their terminal.
|
||||
Designed for efficiency and automation. Plus, it has a beautiful pseudo-TUI in some of the commands.
If you are stuck anywhere, just add `--help` before the command you would like help on.
|
||||
|
||||
Overview of main commands:
|
||||
**Overview of main commands:**
|
||||
|
||||
- `fastanime anilist`: Powerful command for browsing and exploring anime due to AniList integration.
|
||||
- `fastanime download`: Download anime.
|
||||
- `fastanime search`: Powerful command meant for binging since it doesn't require the interfaces
|
||||
- `fastanime downloads`: View downloaded anime and watch with MPV.
|
||||
- `fastanime config`: Quickly edit configuration settings.
|
||||
- `fastanime cache`: Quickly manage the cache fastanime uses
|
||||
- `fastanime update`: Quickly update fastanime
|
||||
- `fastanime grab`: print streams to stdout for use in non-Python applications.
|
||||
|
||||
Configuration is directly passed into this command at run time to override your config.
|
||||
**Overview of options**
|
||||
|
||||
Available options include:
|
||||
Most options are passed directly to fastanime and are shared by multiple subcommands.
|
||||
|
||||
- `--server;-s <server>` set the default server to auto select
|
||||
- `--continue;-c/--no-continue;-no-c` whether to continue from the last episode you were watching
|
||||
- `--quality;-q <0|1|2|3>` the link to choose from server
|
||||
- `--translation-type;- <dub|sub` what language for anime
|
||||
- `--auto-select;-a/--no-auto-select;-no-a` auto select title from provider results
|
||||
- `--auto-next;-A;/--no-auto-next;-no-A` auto select next episode
|
||||
- `-downloads-dir;-d <path>` set the folder to download anime into
|
||||
Most of the options override your config file.
|
||||
|
||||
This is a convention to make dev time faster, since it reduces redundancy and also makes switching between subcommands with the same options easier for the end user.
|
||||
|
||||
In general `fastanime --<option-name>`
|
||||
|
||||
Available options for fastanime include:
|
||||
|
||||
- `--server <server>` or `-s <server>` set the default server to auto select
|
||||
- `--continue/--no-continue` or `-c/-no-c` whether to continue from the last episode you were watching
|
||||
- `--local-history/--remote-history` whether to use remote or local history defaults to local
|
||||
- `--quality <1080/720/480/360>` or `-q <1080/720/480/360>` the link to choose from server
|
||||
- `--translation-type <dub/sub>` or `-t <dub/sub>` what language for anime
|
||||
- `--dub` dubbed anime
|
||||
- `--sub` subbed anime
|
||||
- `--auto-select/--no-auto-select` or `-a/-no-a` auto select title from provider results
|
||||
- `--auto-next/--no-auto-next` or `-A/-no-A` auto select next episode
|
||||
- `-downloads-dir <path>` or `-d <path>` set the folder to download anime into
|
||||
- `--fzf` use fzf for the ui
|
||||
- `--default` use the default ui
|
||||
- `--preview` show a preview when using fzf
|
||||
- `--no-preview` dont show a preview when using fzf
|
||||
- `--format <yt-dlp format string>` set the format of anime downloaded and streamed based on yt-dlp format. Works when `--server gogoanime`
|
||||
- `--format <yt-dlp format string>` or `-f <yt-dlp format string>` set the format of anime downloaded and streamed based on [yt-dlp format](https://github.com/yt-dlp/yt-dlp#format-selection). Works when `--server gogoanime` or on providers that provide multi-quality streams, e.g. hianime
|
||||
- `--icons/--no-icons` toggle the visibility of the icons
|
||||
- `--skip/--no-skip` whether to skip the opening and ending theme songs.
|
||||
- `--rofi` use rofi for the ui
|
||||
- `--rofi-theme <path>` theme to use with rofi
|
||||
- `--rofi-theme-input <path>` theme to use with rofi input
|
||||
- `--rofi-theme-confirm <path>` theme to use with rofi confirm
|
||||
- `--log` allow logging to stdout
|
||||
- `--log-file` allow logging to a file
|
||||
- `--rich-traceback` allow rich traceback
|
||||
- `--use-mpv-mod/--use-default-player` whether to use python-mpv
|
||||
- `--provider <allanime/animepahe/hianime/nyaa>` anime site of choice to scrape from
|
||||
- `--sync-play` or `-sp` use syncplay for streaming anime so you can watch with your friends
|
||||
- `--sub-lang <en/or any other common shortform for country>` regex is used to determine the appropriate subtitle language. Only works when the provider is hianime.
|
||||
- `--normalize-titles/--no-normalize-titles` whether to normalize provider titles
|
||||
- `--manga` toggle experimental manga mode
|
||||
|
||||
Example usage of the above options
|
||||
|
||||
```bash
|
||||
# example of syncplay integration
|
||||
fastanime --sync-play --server sharepoint search -t <anime-title>
|
||||
|
||||
# --- or ---
|
||||
|
||||
# to watch with anilist integration
|
||||
fastanime --sync-play --server sharepoint anilist
|
||||
|
||||
# downloading dubbed anime
|
||||
fastanime --dub download -t <anime>
|
||||
|
||||
# use icons and fzf for a more elegant ui with preview
|
||||
fastanime --icons --preview --fzf anilist
|
||||
|
||||
# use icons with default ui
|
||||
fastanime --icons --default anilist
|
||||
|
||||
# viewing manga
|
||||
fastanime --manga search -t <manga-title>
|
||||
```
|
||||
|
||||
#### The anilist command :fire: :fire: :fire:
|
||||
|
||||
@@ -216,6 +293,7 @@ Run `fastanime anilist` to access the main interface.
|
||||
##### Subcommands
|
||||
|
||||
The subcommands are mainly there as a convenience, since all the features already exist in the main interface.
Most of the subcommands share the common option `--dump-json` or `-d`, which will print only the JSON data and suppress the UI (see the example after the list below).
|
||||
|
||||
- `fastanime anilist trending`: Top 15 trending anime.
|
||||
- `fastanime anilist recent`: Top 15 recently updated anime.
|
||||
@@ -225,6 +303,49 @@ The subcommands are mainly their as convenience. Since all the features already
|
||||
- `fastanime anilist favourites`: Top 15 favorite anime.
|
||||
- `fastanime anilist random`: get random anime
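A quick sketch of consuming the `--dump-json` output mentioned before this list from another program. It assumes only that the flag prints JSON to stdout; the exact JSON shape depends on the subcommand:

```python
import json
import subprocess

# run a subcommand with --dump-json and parse whatever JSON it prints
out = subprocess.run(
    ["fastanime", "anilist", "trending", "--dump-json"],
    capture_output=True, text=True, check=True,
).stdout
data = json.loads(out)
print(type(data))
```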
|
||||
|
||||
**FastAnime Anilist Search subcommand** 🔥 🔥 🔥
|
||||
|
||||
It is by far one of the most powerful commands.
|
||||
It offers the following options:
|
||||
|
||||
- `--sort <MediaSort>` or `-s <MediaSort>`
|
||||
- `--title <anime-title>` or `-t <anime-title>`
|
||||
- `--tags <tag>` or `-T <tag>` can be specified multiple times for different tags to filter by.
|
||||
- `--year <year>` or `-y <year>`
|
||||
- `--status <MediaStatus>` or `-S <MediaStatus>` can be specified multiple times
|
||||
- `--media-format <MediaFormat>` or `-f <MediaFormat>`
|
||||
- `--season <MediaSeason>`
|
||||
- `--genres <genre>` or `-g <genre>` can be specified multiple times.
|
||||
- `--on-list/--not-on-list`
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
# get anime with the tag of isekai
|
||||
fastanime anilist search -T isekai
|
||||
|
||||
# get anime of 2024 and sort by popularity
|
||||
# that has already finished airing or is releasing
|
||||
# and is not in your anime lists
|
||||
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
|
||||
|
||||
# get anime of 2024 season WINTER
|
||||
fastanime anilist search -y 2024 --season WINTER
|
||||
|
||||
# get anime genre action and tag isekai,magic
|
||||
fastanime anilist search -g Action -T Isekai -T Magic
|
||||
|
||||
# get anime of 2024 thats finished airing
|
||||
fastanime anilist search -y 2024 -S FINISHED
|
||||
|
||||
# get the most favourite anime movies
|
||||
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
|
||||
```
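These filters correspond to AniList's public GraphQL API (MediaSort, MediaStatus, MediaFormat and MediaSeason are AniList enums). Below is a hand-written illustration of roughly what a search like `fastanime anilist search -y 2024 -s POPULARITY_DESC -g Action` asks AniList for; it is not FastAnime's actual query:

```python
import requests

# a minimal AniList GraphQL query against the public endpoint
query = """
query ($seasonYear: Int, $sort: [MediaSort], $genres: [String]) {
  Page(perPage: 15) {
    media(type: ANIME, seasonYear: $seasonYear, sort: $sort, genre_in: $genres) {
      id
      title { romaji english }
    }
  }
}
"""
variables = {"seasonYear": 2024, "sort": ["POPULARITY_DESC"], "genres": ["Action"]}
response = requests.post(
    "https://graphql.anilist.co", json={"query": query, "variables": variables}
)
for media in response.json()["data"]["Page"]["media"][:3]:
    print(media["id"], media["title"]["romaji"])
```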
|
||||
|
||||
For more details visit the anilist docs or just get the completions which will improve the experience.
|
||||
|
||||
Like seriously **[get the completions](https://github.com/Benex254/FastAnime#completions-subcommand)** and the experience will be a 💯 💯 better.
|
||||
|
||||
The following are commands you can only run if you are signed in to your AniList account:
|
||||
|
||||
- `fastanime anilist watching`
|
||||
@@ -234,7 +355,7 @@ The following are commands you can only run if you are signed in to your AniList
|
||||
- `fastanime anilist paused`
|
||||
- `fastanime anilist completed`
|
||||
|
||||
Plus: `fastanime anilist notifier` :fire:
|
||||
Plus: `fastanime anilist notifier` 🔥
|
||||
|
||||
```bash
|
||||
# basic form
|
||||
@@ -247,7 +368,7 @@ fastanime --log anilist notifier
|
||||
fastanime --log-file anilist notifier
|
||||
```
|
||||
|
||||
The above commands will start a loop that checks every 2 minutes if any of the anime in your watch list that are aireing has just released a new episode.
|
||||
The above commands will start a loop that checks every 2 minutes if any of the anime in your watch list that are airing has just released a new episode.
|
||||
|
||||
The notification will include a cover image of the anime on non-Windows systems.
|
||||
|
||||
@@ -265,12 +386,15 @@ end
|
||||
> [!NOTE]
|
||||
> To sign in just run `fastanime anilist login` and follow the instructions.
|
||||
> To view your login status `fastanime anilist login --status`
|
||||
> To erase login data `fastanime anilist login --erase`
|
||||
|
||||
#### download subcommand
|
||||
|
||||
Download anime to watch later, dub or sub, with this one command.
|
||||
Its optimized for scripting due to fuzzy matching.
|
||||
It's optimized for scripting due to fuzzy matching; basically, you don't have to manually select search results.
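The fuzzy matching referred to here compares your query against the provider's search results (the codebase pulls in thefuzz, as the utility changes later in this diff show). A rough illustration of the idea, not FastAnime's exact selection logic:

```python
from thefuzz import fuzz

def best_match(query: str, provider_titles: list[str]) -> str:
    # pick the provider title most similar to the query so no manual selection is needed
    return max(provider_titles, key=lambda title: fuzz.ratio(query.lower(), title.lower()))

print(best_match("jujutsu kaisen", ["Jujutsu Kaisen", "Jujutsu Kaisen 2nd Season", "Juju"]))
```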
|
||||
|
||||
So every step of the way has been and can be automated.
|
||||
Uses a list slicing syntax similar to that of python as the value for the `-r` option.
|
||||
|
||||
> [!NOTE]
|
||||
>
|
||||
@@ -281,29 +405,132 @@ So every step of the way has been and can be automated.
|
||||
|
||||
```bash
|
||||
# Download all available episodes
|
||||
fastanime download <anime-title>
|
||||
# multiple titles can be specified with -t option
|
||||
fastanime download -t <anime-title> -t <anime-title>
|
||||
# -- or --
|
||||
fastanime download -t <anime-title> -t <anime-title> -r ':'
|
||||
|
||||
# download latest episode for the two anime titles
|
||||
# the number can be any number of latest episodes, but a minus sign
# must be present
|
||||
fastanime download -t <anime-title> -t <anime-title> -r '-1'
|
||||
|
||||
# latest 5
|
||||
fastanime download -t <anime-title> -t <anime-title> -r '-5'
|
||||
|
||||
# Download specific episode range
|
||||
# be sure to observe the range Syntax
|
||||
fastanime download <anime-title> -r <episodes-start>-<episodes-end>
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>:<step>'
|
||||
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>'
|
||||
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:'
|
||||
|
||||
fastanime download -t <anime-title> -r ':<episodes-end>'
|
||||
|
||||
# download specific episode
|
||||
# remember python indexing starts at 0
|
||||
fastanime download -t <anime-title> -r '<episode-1>:<episode>'
|
||||
|
||||
# merge subtitles into mkv format with ffmpeg; hianime tends to give subs as separate files
# and don't prompt for anything
# (e.g. if a file already exists in the destination, remove it instead)
# and clean,
# i.e. remove the original files (sub file and vid file)
# and only keep the merged files
fastanime download -t <anime-title> --merge --clean --no-prompt
|
||||
|
||||
# EOF is used since -t always expects a title
|
||||
# you can supply anime titles from file or -t at the same time
|
||||
#
|
||||
# from stdin
|
||||
echo -e "<anime-title>\n<anime-title>\n<anime-title>" | fastanime download -t "EOF" -r <range> -f -
|
||||
|
||||
# from file
|
||||
fastanime download -t "EOF" -r <range> -f <file-path>
|
||||
|
||||
|
||||
```
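The `-r` values above follow Python's slice semantics. A small sketch of how such a range string could be mapped onto a list of episodes; this illustrates the syntax, not the actual parser FastAnime uses:

```python
def select_episodes(episodes: list[str], range_str: str) -> list[str]:
    # ':' selects everything, '-N' the latest N episodes, 'start:stop:step' a Python slice
    if not range_str or range_str == ":":
        return episodes
    if range_str.startswith("-") and range_str[1:].isdigit():
        return episodes[int(range_str):]
    parts = [int(part) if part else None for part in range_str.split(":")]
    return episodes[slice(*parts)]

episodes = [str(n) for n in range(1, 13)]
print(select_episodes(episodes, "-1"))   # ['12']  (latest episode)
print(select_episodes(episodes, "2:5"))  # ['3', '4', '5']  (python indexing starts at 0)
```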
|
||||
|
||||
#### search subcommand
|
||||
|
||||
Powerful command mainly aimed at binging anime, since it doesn't require interaction with the interfaces.
|
||||
|
||||
Uses a list slicing syntax similar to that of python as the value of the `-r` option.
|
||||
|
||||
**Syntax:**
|
||||
|
||||
```bash
|
||||
# basic form where you will still be promted for the episode number
|
||||
fastanime search <anime-title>
|
||||
# basic form where you will still be prompted for the episode number
|
||||
# multiple titles can be specified with the -t option
|
||||
fastanime search -t <anime-title> -t <anime-title>
|
||||
|
||||
# binge all episodes with this command
|
||||
fastanime search <anime-title> -
|
||||
fastanime search -t <anime-title> -r ':'
|
||||
|
||||
# watch latest episode
|
||||
fastanime search -t <anime-title> -r '-1'
|
||||
|
||||
# binge a specific episode range with this command
|
||||
# be sure to observe the range Syntax
|
||||
fastanime search <anime-title> <episodes-start>-<episodes-end>
|
||||
fastanime search -t <anime-title> -r '<start>:<stop>'
|
||||
|
||||
fastanime search -t <anime-title> -r '<start>:<stop>:<step>'
|
||||
|
||||
fastanime search -t <anime-title> -r '<start>:'
|
||||
|
||||
fastanime search -t <anime-title> -r ':<end>'
|
||||
```
|
||||
|
||||
#### grab subcommand
|
||||
|
||||
Helper command to print streams to stdout so they can be used by non-Python applications.

The printed data is JSON and can be either an array or an object, depending on how many anime titles have been specified on the command line or through a subprocess.
|
||||
|
||||
> [!TIP]
|
||||
> For python applications just use its python api, for even greater and easier control.
|
||||
> So just add fastanime as one of your dependencies.
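A minimal sketch of that library usage, based on the `AnimeProvider` abstraction that appears later in this diff. The provider name, the placeholder episode number, and the `FASTANIME_PROVIDER` environment variable are taken from the changed files, but treat the details as assumptions since the API may shift between releases:

```python
import os

# the data module resolves its title normalizer from this variable at import time
os.environ.setdefault("FASTANIME_PROVIDER", "allanime")

from fastanime.AnimeProvider import AnimeProvider

provider = AnimeProvider("allanime")  # any name from AnimeProvider.PROVIDERS

results = provider.search_for_anime("one piece", translation_type="sub")
print(results)

anime_id = "..."  # pick an id out of the search results above
anime = provider.get_anime(anime_id)
for server in provider.get_episode_streams(anime, anime_id, "1", "sub") or []:
    print(server)
```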
|
||||
|
||||
Uses a list slicing syntax similar to that of python as the value of the `-r` option.
|
||||
|
||||
**Syntax:**
|
||||
|
||||
```bash
|
||||
# --- print anime info + episode streams ---
|
||||
|
||||
# multiple titles can be specified with the -t option
|
||||
fastanime grab -t <anime-title> -t <anime-title>
|
||||
|
||||
# -- or --
|
||||
|
||||
# print all available episodes
|
||||
fastanime grab -t <anime-title> -r ':'
|
||||
|
||||
# print the latest episode
|
||||
fastanime grab -t <anime-title> -r '-1'
|
||||
|
||||
# print a specific episode range
|
||||
# be sure to observe the range Syntax
|
||||
fastanime grab -t <anime-title> -r '<start>:<stop>'
|
||||
|
||||
fastanime grab -t <anime-title> -r '<start>:<stop>:<step>'
|
||||
|
||||
fastanime grab -t <anime-title> -r '<start>:'
|
||||
|
||||
fastanime grab -t <anime-title> -r ':<end>'
|
||||
|
||||
# --- grab options ---
|
||||
|
||||
# print search results only
|
||||
fastanime grab -t <anime-title> -r <range> --search-results-only
|
||||
|
||||
# print anime info only
|
||||
fastanime grab -t <anime-title> -r <range> --anime-info-only
|
||||
|
||||
# print episode streams only
|
||||
fastanime grab -t <anime-title> -r <range> --episode-streams-only
|
||||
|
||||
```
|
||||
|
||||
#### downloads subcommand
|
||||
@@ -315,9 +542,25 @@ View and stream the anime you downloaded using MPV.
|
||||
```bash
|
||||
fastanime downloads
|
||||
|
||||
# view individual episodes
|
||||
fastanime downloads --view-episodes
|
||||
# --- or ---
|
||||
fastanime downloads -v
|
||||
|
||||
# to set seek time when using ffmpegthumbnailer for local previews
|
||||
# -1 means random and is the default
|
||||
fastanime downloads --time-to-seek <intRange(-1,100)>
|
||||
# --- or ---
|
||||
fastanime downloads -t <intRange(-1,100)>
|
||||
|
||||
# to watch a specific title
|
||||
# be sure to get the completions for the best experience
|
||||
fastanime downloads --title <title>
|
||||
|
||||
# to get the path to the downloads folder set
|
||||
fastanime downloads --path
|
||||
# useful when you want to use the value for other programs
|
||||
|
||||
```
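The local previews mentioned above are generated with ffmpegthumbnailer. A rough sketch of producing one thumbnail yourself; the `-i/-o/-s/-t` flags are ffmpegthumbnailer's commonly documented options, not something FastAnime exposes:

```python
import shutil
import subprocess

thumbnailer = shutil.which("ffmpegthumbnailer")
if thumbnailer:
    subprocess.run(
        [
            thumbnailer,
            "-i", "episode-01.mp4",  # input video
            "-o", "episode-01.png",  # output thumbnail
            "-s", "0",               # 0 keeps the original width
            "-t", "10",              # seek to 10% of the video
        ],
        check=True,
    )
else:
    print("ffmpegthumbnailer is not installed; previews will be skipped")
```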
|
||||
|
||||
#### config subcommand
|
||||
@@ -331,81 +574,205 @@ fastanime config
|
||||
|
||||
# to get config path which is useful if you want to use it for another program.
|
||||
fastanime config --path
|
||||
|
||||
# add a desktop entry
|
||||
fastanime config --desktop-entry
|
||||
|
||||
# view current contents of your configuration or can be used to get an example config
|
||||
fastanime config --view
|
||||
```
|
||||
|
||||
> [!Note]
|
||||
>
|
||||
> If it opens [vim](https://www.vim.org/download.php) you can exit by typing `:q` in case you don't know.
|
||||
> If it opens [vim](https://www.vim.org/download.php) you can exit by typing `:q` .
|
||||
|
||||
#### cache subcommand
|
||||
|
||||
Easily manage the data fastanime has cached for the previews.
|
||||
|
||||
**Syntax:**
|
||||
|
||||
```bash
|
||||
# delete everything in the cache dir
|
||||
fastanime cache --clean
|
||||
|
||||
# print the path to the cache dir and exit
|
||||
fastanime cache --path
|
||||
|
||||
# print the current size of the cache dir and exit
|
||||
fastanime cache --size
|
||||
|
||||
# open the cache dir and exit
|
||||
fastanime cache
|
||||
```
|
||||
|
||||
#### update subcommand
|
||||
|
||||
Easily update fastanime to the latest version
|
||||
|
||||
**Syntax:**
|
||||
|
||||
```bash
|
||||
# update fastanime to latest
|
||||
fastanime update
|
||||
|
||||
# check for latest release
|
||||
fastanime update --check
|
||||
```
|
||||
|
||||
#### completions subcommand
|
||||
|
||||
Helper command to set up shell completions
|
||||
|
||||
**Syntax:**
|
||||
|
||||
```bash
|
||||
# try to detect your shell and print completions
|
||||
fastanime completions
|
||||
# print fish completions
|
||||
fastanime completions --fish
|
||||
# print bash completions
|
||||
fastanime completions --bash
|
||||
# print zsh completions
|
||||
fastanime completions --zsh
|
||||
```
|
||||
|
||||
### MPV specific commands
|
||||
|
||||
The project now allows on-the-fly media controls directly from MPV. This means you can go to the next or previous episode without the window ever closing, offering a seamless experience.
This is all powered by [python-mpv](), which enables writing MPV scripts in Python just like it would be done in Lua.
|
||||
|
||||
#### Key Bindings
|
||||
|
||||
`<shift>+n` fetch the next episode
|
||||
|
||||
`<shift>+p` fetch the previous episode
|
||||
|
||||
`<shift>+t` toggle the translation type from dub to sub
|
||||
|
||||
`<shift>+a` toggle auto next episode
|
||||
|
||||
`<shift>+r` reload episode
|
||||
|
||||
#### Script Messages
|
||||
|
||||
Commands issued in the MPV console.
|
||||
|
||||
Examples:
|
||||
|
||||
```bash
|
||||
# to select episode from mpv without window closing
|
||||
script-message select-episode <episode-number>
|
||||
|
||||
# to select server from mpv without window closing
|
||||
script-message select-server <server-name>
|
||||
|
||||
# to select quality
|
||||
script-message select-quality <1080/720/480/360>
|
||||
```
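These key bindings and script messages are implemented with python-mpv, mentioned above. A small sketch of how such handlers can be registered with that library; the handler name mirrors one of the script messages listed here, but the wiring is illustrative rather than FastAnime's own code:

```python
import mpv  # python-mpv

player = mpv.MPV(input_default_bindings=True, input_vo_keyboard=True, osc=True)

@player.message_handler("select-episode")
def select_episode(*args):
    # in FastAnime this would fetch fresh streams and swap episodes without closing the window
    print("episode requested:", args)

@player.on_key_press("N")  # capital N is mpv's notation for Shift+n
def next_episode():
    print("fetch the next episode")

player.play("https://example.com/stream.m3u8")  # placeholder URL
player.wait_for_playback()
```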
|
||||
|
||||
## styling the default interface

The default interface uses InquirerPy, which is customizable. Read here to find out more: <https://inquirerpy.readthedocs.io/en/latest/pages/env.html>
|
||||
|
||||
## Configuration
|
||||
|
||||
The app includes sensible defaults but can be customized extensively. Configuration is stored in `.ini` format at `~/.config/FastAnime/config.ini` on Linux and mac or somewhere on windows; you can check by running `fastanime config --path`.
|
||||
The app includes sensible defaults but can be customized extensively. Configuration is stored in `.ini` format at `~/.config/FastAnime/config.ini` on Arch Linux; for other operating systems you can check the location by running `fastanime config --path`.
|
||||
|
||||
> [!TIP]
|
||||
> You can now use the option `--update` to update your config file from the command-line
|
||||
> For Example:
|
||||
> `fastanime --icons --fzf --preview config --update`
|
||||
> the above will set icons to true, use_fzf to true and preview to true in your config file
|
||||
|
||||
By default, if a config file does not exist it will be auto-created with comments explaining each and every option.
|
||||
The default config:
|
||||
|
||||
```ini
|
||||
[stream]
|
||||
continue_from_history = True # Auto continue from watch history
|
||||
translation_type = sub # Preferred language for anime (options: dub, sub)
|
||||
server = top # Default server (options: dropbox, sharepoint, wetransfer.gogoanime, top, wixmp)
|
||||
auto_next = False # Auto-select next episode
|
||||
# Auto select the anime provider results with fuzzy find.
|
||||
# Note this wont always be correct.But 99% of the time will be.
|
||||
auto_select=True
|
||||
# whether to skip the opening and ending theme songs
|
||||
# note requires ani-skip to be in path
|
||||
skip=false
|
||||
# the maximum delta time in minutes after which the episode should be considered as completed
|
||||
# used in the continue from time stamp
|
||||
error=3
|
||||
|
||||
# the format of downloaded anime and trailer
|
||||
# based on yt-dlp format and passed directly to it
|
||||
# learn more by looking it up on their site
|
||||
# only works for downloaded anime if server=gogoanime
|
||||
# since its the only one that offers different formats
|
||||
# the others tend not to
|
||||
format=best[height<=1080]/bestvideo[height<=1080]+bestaudio/best # default
|
||||
|
||||
[general]
|
||||
preferred_language = romaji # Display language (options: english, romaji)
|
||||
downloads_dir = <Default-videos-dir>/FastAnime # Download directory
|
||||
preview=false # whether to show a preview window when using fzf or rofi
|
||||
icons = False
|
||||
|
||||
use_fzf=False # whether to use fzf as the interface for the anilist command and others.
|
||||
quality = 1080
|
||||
|
||||
use_rofi=false # whether to use rofi for the ui
|
||||
rofi_theme=<path-to-rofi-theme-file>
|
||||
rofi_theme_input=<path-to-rofi-theme-file>
|
||||
rofi_theme_confirm=<path-to-rofi-theme-file>
|
||||
normalize_titles = True
|
||||
|
||||
provider = allanime
|
||||
|
||||
preferred_language = english
|
||||
|
||||
downloads_dir = ~/Videos/FastAnime
|
||||
|
||||
preview = False
|
||||
|
||||
ffmpegthumbnailer_seek_time = -1
|
||||
|
||||
use_fzf = False
|
||||
|
||||
use_rofi = False
|
||||
|
||||
rofi_theme =
|
||||
|
||||
rofi_theme_input =
|
||||
|
||||
rofi_theme_confirm =
|
||||
|
||||
notification_duration = 2
|
||||
|
||||
sub_lang = eng
|
||||
|
||||
default_media_list_tracking = None
|
||||
|
||||
force_forward_tracking = True
|
||||
|
||||
cache_requests = True
|
||||
|
||||
use_persistent_provider_store = False
|
||||
|
||||
|
||||
# whether to show the icons
|
||||
icons=false
|
||||
[stream]
|
||||
continue_from_history = True
|
||||
|
||||
# the duration in minutes a notification will stay in the screen
|
||||
# used by notifier command
|
||||
notification_duration=2
|
||||
preferred_history = local
|
||||
|
||||
[anilist]
|
||||
# Not implemented yet
|
||||
translation_type = sub
|
||||
|
||||
server = top
|
||||
|
||||
auto_next = False
|
||||
|
||||
auto_select = True
|
||||
|
||||
skip = False
|
||||
|
||||
episode_complete_at = 80
|
||||
|
||||
use_python_mpv = False
|
||||
|
||||
force_window = immediate
|
||||
|
||||
format = best[height<=1080]/bestvideo[height<=1080]+bestaudio/best
|
||||
|
||||
player = mpv
|
||||
```
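Since the configuration is plain INI, it can also be read programmatically. A small sketch using Python's standard configparser; the path is the Linux one given above, so on other systems substitute the output of `fastanime config --path`:

```python
import configparser
from pathlib import Path

config = configparser.ConfigParser()
config.read(Path.home() / ".config" / "FastAnime" / "config.ini")

# read a couple of the options shown in the default config above
provider = config.get("general", "provider", fallback="allanime")
continue_from_history = config.getboolean("stream", "continue_from_history", fallback=True)
print(provider, continue_from_history)
```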
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome your issues and feature requests. However, due to time constraints, we currently do not plan to add another provider.
|
||||
|
||||
If you wish to contribute directly, please first open an issue describing your proposed changes so it can be discussed.
|
||||
If you wish to contribute directly, please first open an issue describing your proposed changes so they can be discussed, or, if you are in a rush to get the feature merged, just open a PR.
|
||||
|
||||
If you find an anime title that does not correspond with a provider title or is just weird, just [edit the data file](https://github.com/Benex254/FastAnime/blob/master/fastanime/Utility/data.py) and open a PR, or open an issue if you don't want to do that.
|
||||
|
||||
## Receiving Support
|
||||
|
||||
For inquiries, join our [Discord Server](https://discord.gg/4NUTj5Pt).
|
||||
For inquiries, join our [Discord Server](https://discord.gg/HBEmAwvbHV).
|
||||
|
||||
<p align="center">
|
||||
<a href="https://discord.gg/C4rhMA4mmK">
|
||||
<a href="https://discord.gg/HBEmAwvbHV">
|
||||
<img src="https://invidget.switchblade.xyz/C4rhMA4mmK">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
|
||||
## Supporting the Project
|
||||
|
||||
Show your support by starring our GitHub repository or [buying us a coffee](https://ko-fi.com/benex254).
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api
|
||||
|
||||
[TODO:description]
|
||||
"""
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .libs.anime_provider import anime_sources
|
||||
@@ -12,7 +10,6 @@ from .libs.anime_provider import anime_sources
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterator
|
||||
|
||||
from .libs.anilist.anilist_data_schema import AnilistBaseMediaDataSchema
|
||||
from .libs.anime_provider.types import Anime, SearchResults, Server
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -33,25 +30,41 @@ class AnimeProvider:
|
||||
PROVIDERS = list(anime_sources.keys())
|
||||
provider = PROVIDERS[0]
|
||||
|
||||
def __init__(self, provider, dynamic=False, retries=0) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
provider,
|
||||
cache_requests=os.environ.get("FASTANIME_CACHE_REQUESTS", "false"),
|
||||
use_persistent_provider_store=os.environ.get(
|
||||
"FASTANIME_USE_PERSISTENT_PROVIDER_STORE", "false"
|
||||
),
|
||||
dynamic=False,
|
||||
retries=0,
|
||||
) -> None:
|
||||
self.provider = provider
|
||||
self.dynamic = dynamic
|
||||
self.retries = retries
|
||||
self.lazyload_provider()
|
||||
self.cache_requests = cache_requests
|
||||
self.use_persistent_provider_store = use_persistent_provider_store
|
||||
self.lazyload_provider(self.provider)
|
||||
|
||||
def lazyload_provider(self):
|
||||
def lazyload_provider(self, provider):
|
||||
"""updates the current provider being used"""
|
||||
_, anime_provider_cls_name = anime_sources[self.provider].split(".", 1)
|
||||
package = f"fastanime.libs.anime_provider.{self.provider}"
|
||||
try:
|
||||
self.anime_provider.session.kill_connection_to_db()
|
||||
except Exception:
|
||||
pass
|
||||
_, anime_provider_cls_name = anime_sources[provider].split(".", 1)
|
||||
package = f"fastanime.libs.anime_provider.{provider}"
|
||||
provider_api = importlib.import_module(".api", package)
|
||||
anime_provider = getattr(provider_api, anime_provider_cls_name)
|
||||
self.anime_provider = anime_provider()
|
||||
self.anime_provider = anime_provider(
|
||||
self.cache_requests, self.use_persistent_provider_store
|
||||
)
|
||||
|
||||
def search_for_anime(
|
||||
self,
|
||||
user_query,
|
||||
translation_type,
|
||||
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
|
||||
nsfw=True,
|
||||
unknown=True,
|
||||
) -> "SearchResults | None":
|
||||
@@ -68,19 +81,15 @@ class AnimeProvider:
|
||||
[TODO:return]
|
||||
"""
|
||||
anime_provider = self.anime_provider
|
||||
try:
|
||||
results = anime_provider.search_for_anime(
|
||||
user_query, translation_type, nsfw, unknown
|
||||
)
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
results = None
|
||||
results = anime_provider.search_for_anime(
|
||||
user_query, translation_type, nsfw, unknown
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
def get_anime(
|
||||
self,
|
||||
anime_id: str,
|
||||
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
|
||||
) -> "Anime | None":
|
||||
"""core abstraction over getting info of an anime from all providers
|
||||
|
||||
@@ -92,19 +101,15 @@ class AnimeProvider:
|
||||
[TODO:return]
|
||||
"""
|
||||
anime_provider = self.anime_provider
|
||||
try:
|
||||
results = anime_provider.get_anime(anime_id)
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
results = None
|
||||
results = anime_provider.get_anime(anime_id)
|
||||
|
||||
return results
|
||||
|
||||
def get_episode_streams(
|
||||
self,
|
||||
anime,
|
||||
anime_id,
|
||||
episode: str,
|
||||
translation_type: str,
|
||||
anilist_obj: "AnilistBaseMediaDataSchema|None" = None,
|
||||
) -> "Iterator[Server] | None":
|
||||
"""core abstractions for getting juicy streams from all providers
|
||||
|
||||
@@ -118,11 +123,7 @@ class AnimeProvider:
|
||||
[TODO:return]
|
||||
"""
|
||||
anime_provider = self.anime_provider
|
||||
try:
|
||||
results = anime_provider.get_episode_streams(
|
||||
anime, episode, translation_type
|
||||
)
|
||||
except Exception as e:
|
||||
logging.error(e)
|
||||
results = None
|
||||
return results # pyright:ignore
|
||||
results = anime_provider.get_episode_streams(
|
||||
anime_id, episode, translation_type
|
||||
)
|
||||
return results
|
||||
|
||||
fastanime/MangaProvider.py (new file, 105 lines)
@@ -0,0 +1,105 @@
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api
|
||||
|
||||
[TODO:description]
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .libs.manga_provider import manga_sources
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MangaProvider:
|
||||
"""Class that manages all anime sources adding some extra functionality to them.
|
||||
Attributes:
|
||||
PROVIDERS: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
dynamic: [TODO:attribute]
|
||||
retries: [TODO:attribute]
|
||||
manga_provider: [TODO:attribute]
|
||||
"""
|
||||
|
||||
PROVIDERS = list(manga_sources.keys())
|
||||
provider = PROVIDERS[0]
|
||||
|
||||
def __init__(self, provider="mangadex", dynamic=False, retries=0) -> None:
|
||||
self.provider = provider
|
||||
self.dynamic = dynamic
|
||||
self.retries = retries
|
||||
self.lazyload_provider(self.provider)
|
||||
|
||||
def lazyload_provider(self, provider):
|
||||
"""updates the current provider being used"""
|
||||
_, anime_provider_cls_name = manga_sources[provider].split(".", 1)
|
||||
package = f"fastanime.libs.manga_provider.{provider}"
|
||||
provider_api = importlib.import_module(".api", package)
|
||||
manga_provider = getattr(provider_api, anime_provider_cls_name)
|
||||
self.manga_provider = manga_provider()
|
||||
|
||||
def search_for_manga(
|
||||
self,
|
||||
user_query,
|
||||
nsfw=True,
|
||||
unknown=True,
|
||||
):
|
||||
"""core abstraction over all providers search functionality
|
||||
|
||||
Args:
|
||||
user_query ([TODO:parameter]): [TODO:description]
|
||||
translation_type ([TODO:parameter]): [TODO:description]
|
||||
nsfw ([TODO:parameter]): [TODO:description]
|
||||
manga_provider ([TODO:parameter]): [TODO:description]
|
||||
anilist_obj: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.search_for_manga(user_query, nsfw, unknown)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results
|
||||
|
||||
def get_manga(
|
||||
self,
|
||||
anime_id: str,
|
||||
):
|
||||
"""core abstraction over getting info of an anime from all providers
|
||||
|
||||
Args:
|
||||
anime_id: [TODO:description]
|
||||
anilist_obj: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.get_manga(anime_id)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results
|
||||
|
||||
def get_chapter_thumbnails(
|
||||
self,
|
||||
manga_id: str,
|
||||
chapter: str,
|
||||
):
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.get_chapter_thumbnails(manga_id, chapter)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results # pyright:ignore
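For the experimental manga mode, the new `MangaProvider` above mirrors `AnimeProvider`. A tentative usage sketch based only on the methods shown in this file; the id and chapter values are placeholders:

```python
from fastanime.MangaProvider import MangaProvider

manga_provider = MangaProvider()  # defaults to the mangadex provider

results = manga_provider.search_for_manga("one piece")
print(results)

manga_id = "..."  # pick an id out of the search results above
info = manga_provider.get_manga(manga_id)
pages = manga_provider.get_chapter_thumbnails(manga_id, "1")
print(info, pages)
```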
|
||||
@@ -0,0 +1,4 @@
|
||||
"""This package exist as away to expose functions and classes that my be useful to a developer using the fastanime library
|
||||
|
||||
[TODO:description]
|
||||
"""
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..libs.anilist.anilist_data_schema import (
|
||||
AnilistDateObject,
|
||||
AnilistMediaNextAiringEpisode,
|
||||
)
|
||||
if TYPE_CHECKING:
|
||||
from ..libs.anilist.types import AnilistDateObject, AnilistMediaNextAiringEpisode
|
||||
|
||||
|
||||
# TODO: Add formating options for the final date
|
||||
def format_anilist_date_object(anilist_date_object: AnilistDateObject):
|
||||
def format_anilist_date_object(anilist_date_object: "AnilistDateObject"):
|
||||
if anilist_date_object:
|
||||
return f"{anilist_date_object['day']}/{anilist_date_object['month']}/{anilist_date_object['year']}"
|
||||
else:
|
||||
@@ -28,7 +27,7 @@ def format_list_data_with_comma(data: list | None):
|
||||
return "None"
|
||||
|
||||
|
||||
def extract_next_airing_episode(airing_episode: AnilistMediaNextAiringEpisode):
|
||||
def extract_next_airing_episode(airing_episode: "AnilistMediaNextAiringEpisode"):
|
||||
if airing_episode:
|
||||
return f"{airing_episode['episode']} on {format_anilist_timestamp(airing_episode['airingAt'])}"
|
||||
else:
|
||||
|
||||
@@ -3,12 +3,25 @@ Just contains some useful data used across the codebase
|
||||
"""
|
||||
|
||||
# useful incases where the anilist title is too different from the provider title
|
||||
anime_normalizer = {
|
||||
"1P": "one piece",
|
||||
"Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
|
||||
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
|
||||
'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
|
||||
anime_normalizer_raw = {
|
||||
"allanime": {
|
||||
"1P": "one piece",
|
||||
"Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
|
||||
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
|
||||
'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
|
||||
},
|
||||
"hianime": {"My Star": "Oshi no Ko"},
|
||||
"animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
|
||||
"nyaa": {},
|
||||
}
|
||||
|
||||
|
||||
anilist_sort_normalizer = {"search match": "SEARCH_MATCH"}
|
||||
def get_anime_normalizer():
|
||||
"""Used because there are different providers"""
|
||||
import os
|
||||
|
||||
current_provider = os.environ["FASTANIME_PROVIDER"]
|
||||
return anime_normalizer_raw[current_provider]
|
||||
|
||||
|
||||
anime_normalizer = get_anime_normalizer()
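A small illustration of how this per-provider mapping would be applied when reconciling provider titles with AniList titles. The lookup-with-fallback pattern is an assumption about usage; only the module path and the `FASTANIME_PROVIDER` variable come from this diff:

```python
import os

# must be set before the import, since the mapping is resolved at import time
os.environ.setdefault("FASTANIME_PROVIDER", "hianime")

from fastanime.Utility.data import anime_normalizer

def normalize_title(provider_title: str) -> str:
    # fall back to the provider title when no manual override exists
    return anime_normalizer.get(provider_title, provider_title)

print(normalize_title("My Star"))  # -> "Oshi no Ko" for the hianime mapping above
```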
|
||||
|
||||
fastanime/Utility/downloader/_yt_dlp.py (new file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
from yt_dlp import YoutubeDL
|
||||
|
||||
|
||||
# TODO: create a class that makes yt-dlp's YoutubeDL fit in more with fastanime
|
||||
class YtDlp(YoutubeDL):
|
||||
pass
|
||||
@@ -1,10 +1,15 @@
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
from queue import Queue
|
||||
from threading import Thread
|
||||
|
||||
import yt_dlp
|
||||
|
||||
from ..utils import sanitize_filename
|
||||
from rich import print
|
||||
from rich.prompt import Confirm
|
||||
from yt_dlp.utils import sanitize_filename
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -26,23 +31,158 @@ class YtDLPDownloader:
|
||||
self._thread.daemon = True
|
||||
self._thread.start()
|
||||
|
||||
# Function to download the file
|
||||
# TODO: untpack the title to its actual values episode_title and anime_title
|
||||
def _download_file(self, url: str, download_dir, title, silent, vid_format="best"):
|
||||
anime_title = sanitize_filename(title[0])
|
||||
episode_title = sanitize_filename(title[1])
|
||||
def _download_file(
|
||||
self,
|
||||
url: str,
|
||||
anime_title: str,
|
||||
episode_title: str,
|
||||
download_dir: str,
|
||||
silent: bool,
|
||||
vid_format: str = "best",
|
||||
force_unknown_ext=False,
|
||||
verbose=False,
|
||||
headers={},
|
||||
sub="",
|
||||
merge=False,
|
||||
clean=False,
|
||||
prompt=True,
|
||||
):
|
||||
"""Helper function that downloads anime given url and path details
|
||||
|
||||
Args:
|
||||
url: [TODO:description]
|
||||
anime_title: [TODO:description]
|
||||
episode_title: [TODO:description]
|
||||
download_dir: [TODO:description]
|
||||
silent: [TODO:description]
|
||||
vid_format: [TODO:description]
|
||||
"""
|
||||
anime_title = sanitize_filename(anime_title)
|
||||
episode_title = sanitize_filename(episode_title)
|
||||
if url.endswith(".torrent"):
|
||||
WEBTORRENT_CLI = shutil.which("webtorrent")
|
||||
if not WEBTORRENT_CLI:
|
||||
import time
|
||||
|
||||
print(
|
||||
"webtorrent cli is not installed which is required for downloading and streaming from nyaa\nplease install it or use another provider"
|
||||
)
|
||||
time.sleep(120)
|
||||
return
|
||||
cmd = [
|
||||
WEBTORRENT_CLI,
|
||||
"download",
|
||||
url,
|
||||
"--out",
|
||||
os.path.join(download_dir, anime_title, episode_title),
|
||||
]
|
||||
subprocess.run(cmd)
|
||||
return
|
||||
ydl_opts = {
|
||||
# Specify the output path and template
|
||||
"http_headers": headers,
|
||||
"outtmpl": f"{download_dir}/{anime_title}/{episode_title}.%(ext)s",
|
||||
"silent": silent,
|
||||
"verbose": False,
|
||||
"verbose": verbose,
|
||||
"format": vid_format,
|
||||
"compat_opts": ("allow-unsafe-ext",) if force_unknown_ext else tuple(),
|
||||
}
|
||||
urls = [url]
|
||||
if sub:
|
||||
urls.append(sub)
|
||||
vid_path = ""
|
||||
sub_path = ""
|
||||
for i, url in enumerate(urls):
|
||||
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
||||
info = ydl.extract_info(url, download=True)
|
||||
if not info:
|
||||
continue
|
||||
if i == 0:
|
||||
vid_path: str = info["requested_downloads"][0]["filepath"]
|
||||
if vid_path.endswith(".unknown_video"):
|
||||
print("Normalizing path...")
|
||||
_vid_path = vid_path.replace(".unknown_video", ".mp4")
|
||||
shutil.move(vid_path, _vid_path)
|
||||
vid_path = _vid_path
|
||||
print("successfully normalized path")
|
||||
|
||||
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
||||
ydl.download([url])
|
||||
else:
|
||||
sub_path = info["requested_downloads"][0]["filepath"]
|
||||
if sub_path and vid_path and merge:
|
||||
self.merge_subtitles(vid_path, sub_path, clean, prompt)
|
||||
|
||||
def merge_subtitles(self, video_path, sub_path, clean, prompt):
|
||||
# Extract the directory and filename
|
||||
video_dir = os.path.dirname(video_path)
|
||||
video_name = os.path.basename(video_path)
|
||||
video_name, _ = os.path.splitext(video_name)
|
||||
video_name += ".mkv"
|
||||
|
||||
FFMPEG_EXECUTABLE = shutil.which("ffmpeg")
|
||||
if not FFMPEG_EXECUTABLE:
|
||||
print("[yellow bold]WARNING: [/]FFmpeg not found")
|
||||
return
|
||||
# Create a temporary directory
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
# Temporary output path in the temporary directory
|
||||
temp_output_path = os.path.join(temp_dir, video_name)
|
||||
# FFmpeg command to merge subtitles
|
||||
command = [
|
||||
FFMPEG_EXECUTABLE,
|
||||
"-hide_banner",
|
||||
"-i",
|
||||
video_path,
|
||||
"-i",
|
||||
sub_path,
|
||||
"-c",
|
||||
"copy",
|
||||
"-map",
|
||||
"0",
|
||||
"-map",
|
||||
"1",
|
||||
temp_output_path,
|
||||
]
|
||||
|
||||
# Run the command
|
||||
try:
|
||||
subprocess.run(command, check=True)
|
||||
|
||||
# Move the file back to the original directory with the original name
|
||||
final_output_path = os.path.join(video_dir, video_name)
|
||||
|
||||
if os.path.exists(final_output_path):
|
||||
if not prompt or Confirm.ask(
|
||||
f"File exists({final_output_path}) would you like to overwrite it",
|
||||
default=True,
|
||||
):
|
||||
# move file to dest
|
||||
os.remove(final_output_path)
|
||||
shutil.move(temp_output_path, final_output_path)
|
||||
else:
|
||||
shutil.move(temp_output_path, final_output_path)
|
||||
# clean up
|
||||
if clean:
|
||||
print("[cyan]Cleaning original files...[/]")
|
||||
os.remove(video_path)
|
||||
os.remove(sub_path)
|
||||
|
||||
print(
|
||||
f"[green bold]Subtitles merged successfully.[/] Output file: {final_output_path}"
|
||||
)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print(f"[red bold]Error[/] during merging subtitles: {e}")
|
||||
except Exception as e:
|
||||
print(f"[red bold]An error[/] occurred: {e}")
|
||||
|
||||
# WARN: May remove this legacy functionality
|
||||
def download_file(self, url: str, title, silent=True):
|
||||
"""A helper that just does things in the background
|
||||
|
||||
Args:
|
||||
title ([TODO:parameter]): [TODO:description]
|
||||
silent ([TODO:parameter]): [TODO:description]
|
||||
url: [TODO:description]
|
||||
"""
|
||||
self.downloads_queue.put((self._download_file, (url, title, silent)))
|
||||
|
||||
|
||||
|
||||
@@ -1,40 +0,0 @@
import json
import logging
import os

from ..constants import USER_DATA_PATH

logger = logging.getLogger(__name__)


# TODO: merge this functionality with the config object
class UserData:
    user_data = {"watch_history": {}, "animelist": [], "user": {}}

    def __init__(self):
        try:
            if os.path.isfile(USER_DATA_PATH):
                with open(USER_DATA_PATH, "r") as f:
                    user_data = json.load(f)
                self.user_data.update(user_data)
        except Exception as e:
            logger.error(e)

    def update_watch_history(self, watch_history: dict):
        self.user_data["watch_history"] = watch_history
        self._update_user_data()

    def update_user_info(self, user: dict):
        self.user_data["user"] = user
        self._update_user_data()

    def update_animelist(self, anime_list: list):
        self.user_data["animelist"] = list(set(anime_list))
        self._update_user_data()

    def _update_user_data(self):
        with open(USER_DATA_PATH, "w") as f:
            json.dump(self.user_data, f)


user_data_helper = UserData()
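The removed helper rewrites USER_DATA_PATH in place on every update; purely as a sketch of an alternative (not something this diff does), an atomic write avoids a corrupt file if the process dies mid-dump:

```python
import json
import os
import tempfile


def save_json_atomically(path: str, data: dict) -> None:
    """Write JSON to a temp file, then atomically swap it into place (sketch)."""
    directory = os.path.dirname(path) or "."
    fd, tmp_path = tempfile.mkstemp(dir=directory, suffix=".tmp")
    try:
        with os.fdopen(fd, "w") as f:
            json.dump(data, f)
        os.replace(tmp_path, path)  # atomic rename on the same filesystem
    except Exception:
        os.remove(tmp_path)
        raise
```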
|
||||
@@ -1,79 +1,25 @@
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from thefuzz import fuzz
|
||||
|
||||
from fastanime.libs.anilist.anilist_data_schema import AnilistBaseMediaDataSchema
|
||||
|
||||
from .data import anime_normalizer
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def remove_html_tags(text: str):
|
||||
clean = re.compile("<.*?>")
|
||||
return re.sub(clean, "", text)
|
||||
def sort_by_episode_number(filename: str):
|
||||
import re
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def sanitize_filename(filename: str):
|
||||
"""
|
||||
Sanitize a string to be safe for use as a file name.
|
||||
|
||||
:param filename: The original filename string.
|
||||
:return: A sanitized filename string.
|
||||
"""
|
||||
# List of characters not allowed in filenames on various operating systems
|
||||
invalid_chars = r'[<>:"/\\|?*\0]'
|
||||
reserved_names = {
|
||||
"CON",
|
||||
"PRN",
|
||||
"AUX",
|
||||
"NUL",
|
||||
"COM1",
|
||||
"COM2",
|
||||
"COM3",
|
||||
"COM4",
|
||||
"COM5",
|
||||
"COM6",
|
||||
"COM7",
|
||||
"COM8",
|
||||
"COM9",
|
||||
"LPT1",
|
||||
"LPT2",
|
||||
"LPT3",
|
||||
"LPT4",
|
||||
"LPT5",
|
||||
"LPT6",
|
||||
"LPT7",
|
||||
"LPT8",
|
||||
"LPT9",
|
||||
}
|
||||
|
||||
# Replace invalid characters with an underscore
|
||||
sanitized = re.sub(invalid_chars, " ", filename)
|
||||
|
||||
# Remove leading and trailing whitespace
|
||||
sanitized = sanitized.strip()
|
||||
|
||||
# Check for reserved filenames
|
||||
name, ext = os.path.splitext(sanitized)
|
||||
if name.upper() in reserved_names:
|
||||
name += "_file"
|
||||
sanitized = name + ext
|
||||
|
||||
# Ensure the filename is not empty
|
||||
if not sanitized:
|
||||
sanitized = "default_filename"
|
||||
|
||||
return sanitized
|
||||
match = re.search(r"\d+", filename)
|
||||
return int(match.group()) if match else 0
|
||||
|
||||
|
||||
def anime_title_percentage_match(
|
||||
possible_user_requested_anime_title: str, anime: AnilistBaseMediaDataSchema
|
||||
possible_user_requested_anime_title: str, anime: "AnilistBaseMediaDataSchema"
|
||||
) -> float:
|
||||
"""Returns the percentage match between the possible title and user title
|
||||
|
||||
@@ -84,10 +30,9 @@ def anime_title_percentage_match(
|
||||
Returns:
|
||||
int: the percentage match
|
||||
"""
|
||||
if normalized_anime_title := anime_normalizer.get(
|
||||
possible_user_requested_anime_title
|
||||
):
|
||||
possible_user_requested_anime_title = normalized_anime_title
|
||||
possible_user_requested_anime_title = anime_normalizer.get(
|
||||
possible_user_requested_anime_title, possible_user_requested_anime_title
|
||||
)
|
||||
# compares both the romaji and english names and gets highest Score
|
||||
title_a = str(anime["title"]["romaji"])
|
||||
title_b = str(anime["title"]["english"])
|
||||
@@ -97,10 +42,3 @@ def anime_title_percentage_match(
|
||||
)
|
||||
logger.info(f"{locals()}")
|
||||
return percentage_ratio
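The matcher above leans on thefuzz; a minimal sketch of the same idea (best_title_score is a hypothetical name, and fuzz.ratio stands in for whichever scorer the project actually uses) compares a query against both title variants:

```python
from thefuzz import fuzz


def best_title_score(query: str, romaji: str, english: str) -> int:
    """Return the higher fuzzy-match score of the two title variants (sketch)."""
    return max(
        fuzz.ratio(query.lower(), romaji.lower()),
        fuzz.ratio(query.lower(), english.lower()),
    )
```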
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Example usage
|
||||
unsafe_filename = "CON:example?file*name.txt"
|
||||
safe_filename = sanitize_filename(unsafe_filename)
|
||||
print(safe_filename) # Output: 'CON_example_file_name.txt'
|
||||
|
||||
@@ -6,7 +6,7 @@ if sys.version_info < (3, 10):
|
||||
) # noqa: F541
|
||||
|
||||
|
||||
__version__ = "v0.60.1"
|
||||
__version__ = "v2.6.0"
|
||||
|
||||
APP_NAME = "FastAnime"
|
||||
AUTHOR = "Benex254"
|
||||
|
||||
@@ -3,9 +3,7 @@ import signal
|
||||
import click
|
||||
|
||||
from .. import __version__
|
||||
from ..libs.anime_provider import anime_sources
|
||||
from ..libs.anime_provider.allanime.constants import SERVERS_AVAILABLE
|
||||
from ..Utility.data import anilist_sort_normalizer
|
||||
from ..libs.anime_provider import SERVERS_AVAILABLE, anime_sources
|
||||
from .commands import LazyGroup
|
||||
|
||||
commands = {
|
||||
@@ -15,6 +13,9 @@ commands = {
|
||||
"config": "config.config",
|
||||
"downloads": "downloads.downloads",
|
||||
"cache": "cache.cache",
|
||||
"completions": "completions.completions",
|
||||
"update": "update.update",
|
||||
"grab": "grab.grab",
|
||||
}
|
||||
|
||||
|
||||
@@ -37,12 +38,35 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
cls=LazyGroup,
|
||||
help="A command line application for streaming anime that provides a complete and featureful interface",
|
||||
short_help="Stream Anime",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# example of syncplay integration
|
||||
fastanime --sync-play --server sharepoint search -t <anime-title>
|
||||
\b
|
||||
# --- or ---
|
||||
\b
|
||||
# to watch with anilist integration
|
||||
fastanime --sync-play --server sharepoint anilist
|
||||
\b
|
||||
# downloading dubbed anime
|
||||
fastanime --dub download -t <anime>
|
||||
\b
|
||||
# use icons and fzf for a more elegant ui with preview
|
||||
fastanime --icons --preview --fzf anilist
|
||||
\b
|
||||
# use icons with default ui
|
||||
fastanime --icons --default anilist
|
||||
\b
|
||||
# viewing manga
|
||||
fastanime --manga search -t <manga-title>
|
||||
""",
|
||||
)
|
||||
@click.version_option(__version__, "--version")
|
||||
@click.option("--manga", "-m", help="Enable manga mode", is_flag=True)
|
||||
@click.option("--log", help="Allow logging to stdout", is_flag=True)
|
||||
@click.option("--log-file", help="Allow logging to a file", is_flag=True)
|
||||
@click.option("--rich-traceback", help="Use rich to output tracebacks", is_flag=True)
|
||||
@click.option("--update", help="Update fastanime to the latest version", is_flag=True)
|
||||
@click.option(
|
||||
"-p",
|
||||
"--provider",
|
||||
@@ -52,7 +76,7 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
@click.option(
|
||||
"-s",
|
||||
"--server",
|
||||
type=click.Choice([*SERVERS_AVAILABLE, "top"], case_sensitive=False),
|
||||
type=click.Choice([*SERVERS_AVAILABLE, "top"]),
|
||||
help="Server of choice",
|
||||
)
|
||||
@click.option(
|
||||
@@ -68,6 +92,11 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
type=bool,
|
||||
help="Continue from last episode?",
|
||||
)
|
||||
@click.option(
|
||||
"--local-history/--remote-history",
|
||||
type=bool,
|
||||
help="Whether to continue from local history or remote history",
|
||||
)
|
||||
@click.option(
|
||||
"--skip/--no-skip",
|
||||
type=bool,
|
||||
@@ -76,7 +105,14 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
@click.option(
|
||||
"-q",
|
||||
"--quality",
|
||||
type=click.IntRange(0, 3),
|
||||
type=click.Choice(
|
||||
[
|
||||
"360",
|
||||
"480",
|
||||
"720",
|
||||
"1080",
|
||||
]
|
||||
),
|
||||
help="set the quality of the stream",
|
||||
)
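Since the quality flag now uses click.Choice, the callback receives the selection as a string (or None); a small illustrative sketch, not part of the diff:

```python
import click


@click.command()
@click.option("-q", "--quality", type=click.Choice(["360", "480", "720", "1080"]))
def demo(quality):
    # click.Choice hands back the chosen string; cast when a number is needed
    height = int(quality) if quality else None
    click.echo(f"max height: {height}")
```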
|
||||
@click.option(
|
||||
@@ -85,6 +121,11 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
type=click.Choice(["dub", "sub"]),
|
||||
help="Anime language[dub/sub]",
|
||||
)
|
||||
@click.option(
|
||||
"-sl",
|
||||
"--sub-lang",
|
||||
help="Set the preferred language for subs",
|
||||
)
|
||||
@click.option(
|
||||
"-A/-no-A",
|
||||
"--auto-next/--no-auto-next",
|
||||
@@ -98,9 +139,9 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
help="Auto select anime title?",
|
||||
)
|
||||
@click.option(
|
||||
"-S",
|
||||
"--sort-by",
|
||||
type=click.Choice(anilist_sort_normalizer.keys()), # pyright: ignore
|
||||
"--normalize-titles/--no-normalize-titles",
|
||||
type=bool,
|
||||
help="whether to normalize anime and episode titls given by providers",
|
||||
)
|
||||
@click.option("-d", "--downloads-dir", type=click.Path(), help="Downloads location")
|
||||
@click.option("--fzf", is_flag=True, help="Use fzf for the ui")
|
||||
@@ -126,23 +167,35 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
help="Rofi theme to use for the user input prompt",
|
||||
type=click.Path(),
|
||||
)
|
||||
@click.option(
|
||||
"--use-python-mpv/--use-default-player", help="Whether to use python-mpv", type=bool
|
||||
)
|
||||
@click.option("--sync-play", "-sp", help="Use sync play", is_flag=True)
|
||||
@click.option(
|
||||
"--player",
|
||||
"-P",
|
||||
help="the player to use when streaming",
|
||||
type=click.Choice(["mpv", "vlc"]),
|
||||
)
|
||||
@click.pass_context
|
||||
def run_cli(
|
||||
ctx: click.Context,
|
||||
manga,
|
||||
log,
|
||||
log_file,
|
||||
rich_traceback,
|
||||
update,
|
||||
provider,
|
||||
server,
|
||||
format,
|
||||
continue_,
|
||||
local_history,
|
||||
skip,
|
||||
translation_type,
|
||||
sub_lang,
|
||||
quality,
|
||||
auto_next,
|
||||
auto_select,
|
||||
sort_by,
|
||||
normalize_titles,
|
||||
downloads_dir,
|
||||
fzf,
|
||||
default,
|
||||
@@ -155,10 +208,14 @@ def run_cli(
|
||||
rofi_theme,
|
||||
rofi_theme_confirm,
|
||||
rofi_theme_input,
|
||||
use_python_mpv,
|
||||
sync_play,
|
||||
player,
|
||||
):
|
||||
from .config import Config
|
||||
|
||||
ctx.obj = Config()
|
||||
ctx.obj.manga = manga
|
||||
if log:
|
||||
import logging
|
||||
|
||||
@@ -167,44 +224,53 @@ def run_cli(
|
||||
FORMAT = "%(message)s"
|
||||
|
||||
logging.basicConfig(
|
||||
level="NOTSET", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]
|
||||
level=logging.DEBUG, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info("logging has been initialized")
|
||||
elif log_file:
|
||||
import logging
|
||||
|
||||
from ..constants import NOTIFIER_LOG_FILE_PATH
|
||||
from ..constants import LOG_FILE_PATH
|
||||
|
||||
format = "%(asctime)s%(levelname)s: %(message)s"
|
||||
logging.basicConfig(
|
||||
level=logging.DEBUG,
|
||||
filename=NOTIFIER_LOG_FILE_PATH,
|
||||
filename=LOG_FILE_PATH,
|
||||
format=format,
|
||||
datefmt="[%d/%m/%Y@%H:%M:%S]",
|
||||
filemode="w",
|
||||
)
|
||||
else:
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.CRITICAL)
|
||||
if rich_traceback:
|
||||
from rich.traceback import install
|
||||
|
||||
install()
|
||||
if update and None:
|
||||
from .app_updater import update_app
|
||||
|
||||
update_app()
|
||||
return
|
||||
|
||||
if sync_play:
|
||||
ctx.obj.sync_play = sync_play
|
||||
if provider:
|
||||
ctx.obj.provider = provider
|
||||
ctx.obj.load_config()
|
||||
if server:
|
||||
ctx.obj.server = server
|
||||
if format:
|
||||
ctx.obj.format = format
|
||||
if sub_lang:
|
||||
ctx.obj.sub_lang = sub_lang
|
||||
if ctx.get_parameter_source("continue_") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.continue_from_history = continue_
|
||||
if ctx.get_parameter_source("player") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.player = player
|
||||
if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.skip = skip
|
||||
if (
|
||||
ctx.get_parameter_source("normalize_titles")
|
||||
== click.core.ParameterSource.COMMANDLINE
|
||||
):
|
||||
ctx.obj.normalize_titles = normalize_titles
|
||||
|
||||
if quality:
|
||||
ctx.obj.quality = quality
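The get_parameter_source checks above distinguish "flag left at its default" from "flag given explicitly"; a self-contained sketch of that pattern (demo is a hypothetical command):

```python
import click


@click.command()
@click.option("--skip/--no-skip", default=False)
@click.pass_context
def demo(ctx: click.Context, skip: bool):
    # Only override the stored config value when the user actually typed the flag
    if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE:
        click.echo(f"user explicitly set skip={skip}")
    else:
        click.echo("skip left at its default; keep the config-file value")
```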
|
||||
@@ -212,21 +278,30 @@ def run_cli(
|
||||
ctx.obj.auto_next = auto_next
|
||||
if ctx.get_parameter_source("icons") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.icons = icons
|
||||
if (
|
||||
ctx.get_parameter_source("local_history")
|
||||
== click.core.ParameterSource.COMMANDLINE
|
||||
):
|
||||
ctx.obj.preferred_history = "local" if local_history else "remote"
|
||||
if (
|
||||
ctx.get_parameter_source("auto_select")
|
||||
== click.core.ParameterSource.COMMANDLINE
|
||||
):
|
||||
ctx.obj.auto_select = auto_select
|
||||
if sort_by:
|
||||
ctx.obj.sort_by = sort_by
|
||||
if (
|
||||
ctx.get_parameter_source("use_python_mpv")
|
||||
== click.core.ParameterSource.COMMANDLINE
|
||||
):
|
||||
ctx.obj.use_python_mpv = use_python_mpv
|
||||
if downloads_dir:
|
||||
ctx.obj.downloads_dir = downloads_dir
|
||||
if translation_type:
|
||||
ctx.obj.translation_type = translation_type
|
||||
if fzf:
|
||||
ctx.obj.use_fzf = True
|
||||
if default:
|
||||
ctx.obj.use_fzf = False
|
||||
ctx.obj.use_rofi = False
|
||||
if fzf:
|
||||
ctx.obj.use_fzf = True
|
||||
if preview:
|
||||
ctx.obj.preview = True
|
||||
if no_preview:
|
||||
@@ -252,3 +327,4 @@ def run_cli(
|
||||
if rofi_theme_confirm:
|
||||
ctx.obj.rofi_theme_confirm = rofi_theme_confirm
|
||||
Rofi.rofi_theme_confirm = rofi_theme_confirm
|
||||
ctx.obj.set_fastanime_config_environs()
|
||||
|
||||
@@ -2,8 +2,8 @@ import pathlib
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from subprocess import PIPE, Popen
|
||||
|
||||
import requests
|
||||
from rich import print
|
||||
@@ -26,7 +26,24 @@ def check_for_updates():
|
||||
|
||||
if request.status_code == 200:
|
||||
release_json = request.json()
|
||||
return (release_json["tag_name"] == __version__, release_json)
|
||||
remote_tag = list(
|
||||
map(int, release_json["tag_name"].replace("v", "").split("."))
|
||||
)
|
||||
local_tag = list(map(int, __version__.replace("v", "").split(".")))
|
||||
if (
|
||||
(remote_tag[0] > local_tag[0])
|
||||
or (remote_tag[1] > local_tag[1] and remote_tag[0] == local_tag[0])
|
||||
or (
|
||||
remote_tag[2] > local_tag[2]
|
||||
and remote_tag[0] == local_tag[0]
|
||||
and remote_tag[1] == local_tag[1]
|
||||
)
|
||||
):
|
||||
is_latest = False
|
||||
else:
|
||||
is_latest = True
|
||||
|
||||
return (is_latest, release_json)
|
||||
else:
|
||||
print(request.text)
|
||||
return (False, {})
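The nested major/minor/patch comparison above can be expressed more compactly with tuple ordering; a sketch of an equivalent check (names are illustrative, not from the diff):

```python
def _parse(tag: str) -> tuple:
    """Turn 'vX.Y.Z' into a comparable (X, Y, Z) tuple."""
    return tuple(int(part) for part in tag.lstrip("v").split("."))


def is_up_to_date(local: str, remote: str) -> bool:
    """True when the local tag is at least the remote tag (sketch)."""
    return _parse(local) >= _parse(remote)
```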
|
||||
@@ -34,71 +51,73 @@ def check_for_updates():
|
||||
|
||||
def is_git_repo(author, repository):
|
||||
# Check if the current directory contains a .git folder
|
||||
if not pathlib.Path("./.git").exists():
|
||||
git_dir = pathlib.Path(".git")
|
||||
if not git_dir.exists() or not git_dir.is_dir():
|
||||
return False
|
||||
|
||||
repository_qualname = f"{author}/{repository}"
|
||||
|
||||
# Read the .git/config file to find the remote repository URL
|
||||
config_path = pathlib.Path("./.git/config")
|
||||
# Check if the config file exists
|
||||
config_path = git_dir / "config"
|
||||
if not config_path.exists():
|
||||
return False
|
||||
print("here")
|
||||
|
||||
with open(config_path, "r") as git_config:
|
||||
git_config_content = git_config.read()
|
||||
|
||||
# Use regex to find the repository URL in the config file
|
||||
repo_name_pattern = r"\[remote \"origin\"\]\s+url = .*\/([^/]+\/[^/]+)\.git"
|
||||
match = re.search(repo_name_pattern, git_config_content)
|
||||
print(match)
|
||||
|
||||
if match is None:
|
||||
try:
|
||||
# Read the .git/config file to find the remote repository URL
|
||||
with config_path.open("r") as git_config:
|
||||
git_config_content = git_config.read()
|
||||
except (FileNotFoundError, PermissionError):
|
||||
return False
|
||||
|
||||
# Extract the repository name and compare with the expected repository_qualname
|
||||
config_repo_name = match.group(1)
|
||||
return config_repo_name == repository_qualname
|
||||
# Use regex to find the repository URL in the config file
|
||||
repo_name_pattern = r"url\s*=\s*.+/([^/]+/[^/]+)\.git"
|
||||
match = re.search(repo_name_pattern, git_config_content)
|
||||
|
||||
# Return True if match found and repository name matches
|
||||
return bool(match) and match.group(1) == f"{author}/{repository}"
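To see what the relaxed pattern matches, a quick illustrative check against a typical .git/config remote section (the sample text is made up for the example):

```python
import re

sample_config = """
[remote "origin"]
    url = https://github.com/Benexl/FastAnime.git
"""
pattern = r"url\s*=\s*.+/([^/]+/[^/]+)\.git"
match = re.search(pattern, sample_config)
print(match.group(1) if match else None)  # -> 'Benexl/FastAnime'
```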
|
||||
|
||||
|
||||
def update_app():
|
||||
is_latest, release_json = check_for_updates()
|
||||
if is_latest:
|
||||
print("[green]App is up to date[/]")
|
||||
return
|
||||
return False, release_json
|
||||
tag_name = release_json["tag_name"]
|
||||
|
||||
print("[cyan]Updating app to version %s[/]" % tag_name)
|
||||
if is_git_repo(AUTHOR, APP_NAME):
|
||||
executable = shutil.which("git")
|
||||
GIT_EXECUTABLE = shutil.which("git")
|
||||
args = [
|
||||
executable,
|
||||
GIT_EXECUTABLE,
|
||||
"pull",
|
||||
]
|
||||
|
||||
print(f"Pulling latest changes from the repository via git: {shlex.join(args)}")
|
||||
|
||||
if not executable:
|
||||
return print("[red]Cannot find git.[/]")
|
||||
if not GIT_EXECUTABLE:
|
||||
print("[red]Cannot find git please install it.[/]")
|
||||
return False, release_json
|
||||
|
||||
process = Popen(
|
||||
process = subprocess.run(
|
||||
args,
|
||||
stdout=PIPE,
|
||||
stderr=PIPE,
|
||||
)
|
||||
|
||||
process.communicate()
|
||||
else:
|
||||
executable = sys.executable
|
||||
if PIPX_EXECUTABLE := shutil.which("pipx"):
|
||||
process = subprocess.run([PIPX_EXECUTABLE, "upgrade", APP_NAME])
|
||||
else:
|
||||
PYTHON_EXECUTABLE = sys.executable
|
||||
|
||||
args = [
|
||||
executable,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
APP_NAME,
|
||||
"--user",
|
||||
"--no-warn-script-location",
|
||||
]
|
||||
process = Popen(args)
|
||||
process.communicate()
|
||||
args = [
|
||||
PYTHON_EXECUTABLE,
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
APP_NAME,
|
||||
"-U",
|
||||
"--user",
|
||||
"--no-warn-script-location",
|
||||
]
|
||||
process = subprocess.run(args)
|
||||
if process.returncode == 0:
|
||||
return True, release_json
|
||||
else:
|
||||
return False, release_json
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import click
|
||||
|
||||
from ...utils.tools import QueryDict
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
from .__lazyloader__ import LazyGroup
|
||||
|
||||
commands = {
|
||||
@@ -20,6 +20,7 @@ commands = {
|
||||
"completed": "completed.completed",
|
||||
"planning": "planning.planning",
|
||||
"notifier": "notifier.notifier",
|
||||
"stats": "stats.stats",
|
||||
}
|
||||
|
||||
|
||||
@@ -29,6 +30,53 @@ commands = {
|
||||
invoke_without_command=True,
|
||||
help="A beautiful interface that gives you access to a commplete streaming experience",
|
||||
short_help="Access all streaming options",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# ---- search ----
|
||||
\b
|
||||
# get anime with the tag of isekai
|
||||
fastanime anilist search -T isekai
|
||||
\b
|
||||
# get anime of 2024 and sort by popularity
|
||||
# that has already finished airing or is releasing
|
||||
# and is not in your anime lists
|
||||
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
|
||||
\b
|
||||
# get anime of 2024 season WINTER
|
||||
fastanime anilist search -y 2024 --season WINTER
|
||||
\b
|
||||
# get anime genre action and tag isekai,magic
|
||||
fastanime anilist search -g Action -T Isekai -T Magic
|
||||
\b
|
||||
# get anime of 2024 thats finished airing
|
||||
fastanime anilist search -y 2024 -S FINISHED
|
||||
\b
|
||||
# get the most favourite anime movies
|
||||
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
|
||||
\b
|
||||
# ---- login ----
|
||||
\b
|
||||
# To sign in just run
|
||||
fastanime anilist login
|
||||
\b
|
||||
# To view your login status
|
||||
fastanime anilist login --status
|
||||
\b
|
||||
# To erase login data
|
||||
fastanime anilist login --erase
|
||||
\b
|
||||
# ---- notifier ----
|
||||
\b
|
||||
# basic form
|
||||
fastanime anilist notifier
|
||||
\b
|
||||
# with logging to stdout
|
||||
fastanime --log anilist notifier
|
||||
\b
|
||||
# with logging to a file. stored in the same place as your config
|
||||
fastanime --log-file anilist notifier
|
||||
""",
|
||||
)
|
||||
@click.pass_context
|
||||
def anilist(ctx: click.Context):
|
||||
@@ -36,7 +84,9 @@ def anilist(ctx: click.Context):
|
||||
|
||||
from ....anilist import AniList
|
||||
from ....AnimeProvider import AnimeProvider
|
||||
from ...interfaces.anilist_interfaces import anilist as anilist_interface
|
||||
from ...interfaces.anilist_interfaces import (
|
||||
fastanime_main_menu as anilist_interface,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...config import Config
|
||||
@@ -45,5 +95,5 @@ def anilist(ctx: click.Context):
|
||||
if user := ctx.obj.user:
|
||||
AniList.update_login_info(user, user["token"])
|
||||
if ctx.invoked_subcommand is None:
|
||||
anilist_config = QueryDict()
|
||||
anilist_interface(ctx.obj, anilist_config)
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
anilist_interface(ctx.obj, fastanime_runtime_state)
|
||||
|
||||
@@ -7,16 +7,23 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(help="View anime you completed")
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def completed(config: "Config"):
|
||||
def completed(config: "Config", dump_json):
|
||||
from sys import exit
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import QueryDict, exit_app
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
if not config.user:
|
||||
print("Not authenticated")
|
||||
print("Please run: fastanime anilist loggin")
|
||||
exit_app()
|
||||
exit(1)
|
||||
anime_list = AniList.get_anime_list("COMPLETED")
|
||||
if not anime_list or not anime_list[1]:
|
||||
return
|
||||
@@ -27,6 +34,13 @@ def completed(config: "Config"):
|
||||
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
|
||||
] # pyright:ignore
|
||||
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_list))
|
||||
else:
|
||||
from ...interfaces import anilist_interfaces
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -7,26 +7,40 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(help="View anime you dropped")
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def dropped(config: "Config"):
|
||||
def dropped(config: "Config", dump_json):
|
||||
from sys import exit
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import QueryDict, exit_app
|
||||
|
||||
if not config.user:
|
||||
print("Not authenticated")
|
||||
print("Please run: fastanime anilist loggin")
|
||||
exit_app()
|
||||
exit(1)
|
||||
anime_list = AniList.get_anime_list("DROPPED")
|
||||
if not anime_list:
|
||||
return
|
||||
exit(1)
|
||||
if not anime_list[0] or not anime_list[1]:
|
||||
return
|
||||
exit(1)
|
||||
media = [
|
||||
mediaListItem["media"]
|
||||
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
|
||||
] # pyright:ignore
|
||||
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_list[1]))
|
||||
else:
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
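The --dump-json flag makes these list commands scriptable; a hypothetical consumer, assuming the dumped payload is the AniList Page response shaped in the hunks above:

```python
import json
import subprocess

# Hypothetical post-processing of the dumped list
raw = subprocess.run(
    ["fastanime", "anilist", "dropped", "--dump-json"],
    capture_output=True, text=True, check=True,
).stdout
payload = json.loads(raw)
titles = [media["title"]["romaji"] for media in payload["data"]["Page"]["media"]]
print("\n".join(titles))
```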
|
||||
|
||||
@@ -5,14 +5,30 @@ import click
|
||||
help="Fetch the top 15 most favourited anime from anilist",
|
||||
short_help="View most favourited anime",
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def favourites(config):
|
||||
def favourites(config, dump_json):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
anime_data = AniList.get_most_favourite()
|
||||
if anime_data[0]:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_data[1]
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_data[1]))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
@@ -8,41 +8,54 @@ if TYPE_CHECKING:
|
||||
|
||||
@click.command(help="Login to your anilist account")
|
||||
@click.option("--status", "-s", help="Whether you are logged in or not", is_flag=True)
|
||||
@click.option("--erase", "-e", help="Erase your login details", is_flag=True)
|
||||
@click.pass_obj
|
||||
def login(config: "Config", status):
|
||||
from click import launch
|
||||
def login(config: "Config", status, erase):
|
||||
from sys import exit
|
||||
|
||||
from rich import print
|
||||
from rich.prompt import Confirm, Prompt
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...utils.tools import exit_app
|
||||
|
||||
if status:
|
||||
is_logged_in = True if config.user else False
|
||||
message = (
|
||||
"You are logged in :happy:" if is_logged_in else "You arent logged in :cry"
|
||||
"You are logged in :smile:" if is_logged_in else "You arent logged in :cry:"
|
||||
)
|
||||
print(message)
|
||||
print(config.user)
|
||||
exit_app()
|
||||
if config.user:
|
||||
print("Already logged in :confused:")
|
||||
if not Confirm.ask("or would you like to reloggin", default=True):
|
||||
exit_app()
|
||||
# ---- new loggin -----
|
||||
print(
|
||||
f"A browser session will be opened ( [link]{config.fastanime_anilist_app_login_url}[/link] )",
|
||||
)
|
||||
launch(config.fastanime_anilist_app_login_url, wait=True)
|
||||
print("Please paste the token provided here")
|
||||
token = Prompt.ask("Enter token")
|
||||
user = AniList.login_user(token)
|
||||
if not user:
|
||||
print("Sth went wrong", user)
|
||||
exit_app()
|
||||
return
|
||||
user["token"] = token
|
||||
config.update_user(user)
|
||||
print("Successfully saved credentials")
|
||||
print(user)
|
||||
exit_app()
|
||||
exit(0)
|
||||
elif erase:
|
||||
if Confirm.ask(
|
||||
"Are you sure you want to erase your login status", default=False
|
||||
):
|
||||
config.update_user({})
|
||||
print("Success")
|
||||
exit(0)
|
||||
else:
|
||||
exit(1)
|
||||
else:
|
||||
from click import launch
|
||||
|
||||
from ....anilist import AniList
|
||||
|
||||
if config.user:
|
||||
print("Already logged in :confused:")
|
||||
if not Confirm.ask("or would you like to reloggin", default=True):
|
||||
exit(0)
|
||||
# ---- new loggin -----
|
||||
print(
|
||||
f"A browser session will be opened ( [link]{config.fastanime_anilist_app_login_url}[/link] )",
|
||||
)
|
||||
launch(config.fastanime_anilist_app_login_url, wait=True)
|
||||
print("Please paste the token provided here")
|
||||
token = Prompt.ask("Enter token")
|
||||
user = AniList.login_user(token)
|
||||
if not user:
|
||||
print("Sth went wrong", user)
|
||||
exit(1)
|
||||
return
|
||||
user["token"] = token
|
||||
config.update_user(user)
|
||||
print("Successfully saved credentials")
|
||||
print(user)
|
||||
exit(0)
|
||||
|
||||
@@ -13,9 +13,15 @@ def notifier(config: "Config"):
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from sys import exit
|
||||
|
||||
import requests
|
||||
from plyer import notification
|
||||
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print("Please install plyer to use this command")
|
||||
exit(1)
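plyer is imported lazily and guarded above; for reference, a minimal desktop notification with it looks roughly like this (all values are placeholders):

```python
from plyer import notification

# Cross-platform desktop notification; title, message and timeout are placeholders
notification.notify(
    title="FastAnime",
    message="A new episode is airing",
    app_name="FastAnime",
    timeout=10,
)
```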
|
||||
|
||||
from ....anilist import AniList
|
||||
from ....constants import APP_CACHE_DIR, APP_DATA_DIR, APP_NAME, ICON_PATH, PLATFORM
|
||||
@@ -30,7 +36,7 @@ def notifier(config: "Config"):
|
||||
if not config.user:
|
||||
print("Not Authenticated")
|
||||
print("Run the following to get started: fastanime anilist loggin")
|
||||
return
|
||||
exit(1)
|
||||
run = True
|
||||
# WARNING: Mess around with this value at your own risk
|
||||
timeout = 2 # time is in minutes
|
||||
|
||||
@@ -7,26 +7,40 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(help="View anime you paused on watching")
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def paused(config: "Config"):
|
||||
def paused(config: "Config", dump_json):
|
||||
from sys import exit
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import QueryDict, exit_app
|
||||
|
||||
if not config.user:
|
||||
print("Not authenticated")
|
||||
print("Please run: fastanime anilist loggin")
|
||||
exit_app()
|
||||
exit(1)
|
||||
anime_list = AniList.get_anime_list("PAUSED")
|
||||
if not anime_list:
|
||||
return
|
||||
exit(1)
|
||||
if not anime_list[0] or not anime_list[1]:
|
||||
return
|
||||
exit(1)
|
||||
media = [
|
||||
mediaListItem["media"]
|
||||
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
|
||||
] # pyright:ignore
|
||||
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_list[1]))
|
||||
else:
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
anilist_config = FastAnimeRuntimeState()
|
||||
anilist_config.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, anilist_config)
|
||||
|
||||
@@ -7,26 +7,40 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(help="View anime you are planning on watching")
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def planning(config: "Config"):
|
||||
def planning(config: "Config", dump_json):
|
||||
from sys import exit
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import QueryDict, exit_app
|
||||
|
||||
if not config.user:
|
||||
print("Not authenticated")
|
||||
print("Please run: fastanime anilist loggin")
|
||||
exit_app()
|
||||
exit(1)
|
||||
anime_list = AniList.get_anime_list("PLANNING")
|
||||
if not anime_list:
|
||||
return
|
||||
exit(1)
|
||||
if not anime_list[0] or not anime_list[1]:
|
||||
return
|
||||
exit(1)
|
||||
media = [
|
||||
mediaListItem["media"]
|
||||
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
|
||||
] # pyright:ignore
|
||||
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_list[1]))
|
||||
else:
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -4,14 +4,30 @@ import click
|
||||
@click.command(
|
||||
help="Fetch the top 15 most popular anime", short_help="View most popular anime"
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def popular(config):
|
||||
def popular(config, dump_json):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
anime_data = AniList.get_most_popular()
|
||||
if anime_data[0]:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_data[1]
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_data[1]))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
@@ -5,23 +5,35 @@ import click
|
||||
help="Get random anime from anilist based on a range of anilist anime ids that are seected at random",
|
||||
short_help="View random anime",
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def random_anime(config):
|
||||
def random_anime(config, dump_json):
|
||||
import random
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
random_anime = range(1, 15000)
|
||||
random_anime = range(1, 100000)
|
||||
|
||||
random_anime = random.sample(random_anime, k=50)
|
||||
|
||||
anime_data = AniList.search(id_in=list(random_anime))
|
||||
|
||||
if anime_data[0]:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_data[1]
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_data[1]))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
print(anime_data[1])
|
||||
exit(1)
|
||||
|
||||
@@ -5,14 +5,30 @@ import click
|
||||
help="Fetch the 15 most recently updated anime from anilist that are currently releasing",
|
||||
short_help="View recently updated anime",
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def recent(config):
|
||||
def recent(config, dump_json):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
anime_data = AniList.get_most_recently_updated()
|
||||
if anime_data[0]:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_data[1]
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_data[1]))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
@@ -7,26 +7,40 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(help="View anime you are rewatching")
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def rewatching(config: "Config"):
|
||||
def rewatching(config: "Config", dump_json):
|
||||
from sys import exit
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import QueryDict, exit_app
|
||||
|
||||
if not config.user:
|
||||
print("Not authenticated")
|
||||
print("Please run: fastanime anilist loggin")
|
||||
exit_app()
|
||||
exit(1)
|
||||
anime_list = AniList.get_anime_list("REPEATING")
|
||||
if not anime_list:
|
||||
return
|
||||
exit(1)
|
||||
if not anime_list[0] or not anime_list[1]:
|
||||
return
|
||||
exit(1)
|
||||
media = [
|
||||
mediaListItem["media"]
|
||||
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
|
||||
] # pyright:ignore
|
||||
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_list[1]))
|
||||
else:
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -4,14 +4,30 @@ import click
|
||||
@click.command(
|
||||
help="Fetch the 15 most scored anime", short_help="View most scored anime"
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def scores(config):
|
||||
def scores(config, dump_json):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
anime_data = AniList.get_most_scored()
|
||||
if anime_data[0]:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_data[1]
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_data[1]))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
@@ -1,21 +1,574 @@
|
||||
import click
|
||||
|
||||
from ...completion_functions import anime_titles_shell_complete
|
||||
|
||||
tags_available = {
|
||||
"Cast": ["Polyamorous"],
|
||||
"Cast Main Cast": [
|
||||
"Anti-Hero",
|
||||
"Elderly Protagonist",
|
||||
"Ensemble Cast",
|
||||
"Estranged Family",
|
||||
"Female Protagonist",
|
||||
"Male Protagonist",
|
||||
"Primarily Adult Cast",
|
||||
"Primarily Animal Cast",
|
||||
"Primarily Child Cast",
|
||||
"Primarily Female Cast",
|
||||
"Primarily Male Cast",
|
||||
"Primarily Teen Cast",
|
||||
],
|
||||
"Cast Traits": [
|
||||
"Age Regression",
|
||||
"Agender",
|
||||
"Aliens",
|
||||
"Amnesia",
|
||||
"Angels",
|
||||
"Anthropomorphism",
|
||||
"Aromantic",
|
||||
"Arranged Marriage",
|
||||
"Artificial Intelligence",
|
||||
"Asexual",
|
||||
"Butler",
|
||||
"Centaur",
|
||||
"Chimera",
|
||||
"Chuunibyou",
|
||||
"Clone",
|
||||
"Cosplay",
|
||||
"Cowboys",
|
||||
"Crossdressing",
|
||||
"Cyborg",
|
||||
"Delinquents",
|
||||
"Demons",
|
||||
"Detective",
|
||||
"Dinosaurs",
|
||||
"Disability",
|
||||
"Dissociative Identities",
|
||||
"Dragons",
|
||||
"Dullahan",
|
||||
"Elf",
|
||||
"Fairy",
|
||||
"Femboy",
|
||||
"Ghost",
|
||||
"Goblin",
|
||||
"Gods",
|
||||
"Gyaru",
|
||||
"Hikikomori",
|
||||
"Homeless",
|
||||
"Idol",
|
||||
"Kemonomimi",
|
||||
"Kuudere",
|
||||
"Maids",
|
||||
"Mermaid",
|
||||
"Monster Boy",
|
||||
"Monster Girl",
|
||||
"Nekomimi",
|
||||
"Ninja",
|
||||
"Nudity",
|
||||
"Nun",
|
||||
"Office Lady",
|
||||
"Oiran",
|
||||
"Ojou-sama",
|
||||
"Orphan",
|
||||
"Pirates",
|
||||
"Robots",
|
||||
"Samurai",
|
||||
"Shrine Maiden",
|
||||
"Skeleton",
|
||||
"Succubus",
|
||||
"Tanned Skin",
|
||||
"Teacher",
|
||||
"Tomboy",
|
||||
"Transgender",
|
||||
"Tsundere",
|
||||
"Twins",
|
||||
"Vampire",
|
||||
"Veterinarian",
|
||||
"Vikings",
|
||||
"Villainess",
|
||||
"VTuber",
|
||||
"Werewolf",
|
||||
"Witch",
|
||||
"Yandere",
|
||||
"Zombie",
|
||||
],
|
||||
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
|
||||
"Setting": ["Matriarchy"],
|
||||
"Setting Scene": [
|
||||
"Bar",
|
||||
"Boarding School",
|
||||
"Circus",
|
||||
"Coastal",
|
||||
"College",
|
||||
"Desert",
|
||||
"Dungeon",
|
||||
"Foreign",
|
||||
"Inn",
|
||||
"Konbini",
|
||||
"Natural Disaster",
|
||||
"Office",
|
||||
"Outdoor",
|
||||
"Prison",
|
||||
"Restaurant",
|
||||
"Rural",
|
||||
"School",
|
||||
"School Club",
|
||||
"Snowscape",
|
||||
"Urban",
|
||||
"Work",
|
||||
],
|
||||
"Setting Time": [
|
||||
"Achronological Order",
|
||||
"Anachronism",
|
||||
"Ancient China",
|
||||
"Dystopian",
|
||||
"Historical",
|
||||
"Time Skip",
|
||||
],
|
||||
"Setting Universe": [
|
||||
"Afterlife",
|
||||
"Alternate Universe",
|
||||
"Augmented Reality",
|
||||
"Omegaverse",
|
||||
"Post-Apocalyptic",
|
||||
"Space",
|
||||
"Urban Fantasy",
|
||||
"Virtual World",
|
||||
],
|
||||
"Technical": [
|
||||
"4-koma",
|
||||
"Achromatic",
|
||||
"Advertisement",
|
||||
"Anthology",
|
||||
"CGI",
|
||||
"Episodic",
|
||||
"Flash",
|
||||
"Full CGI",
|
||||
"Full Color",
|
||||
"No Dialogue",
|
||||
"Non-fiction",
|
||||
"POV",
|
||||
"Puppetry",
|
||||
"Rotoscoping",
|
||||
"Stop Motion",
|
||||
],
|
||||
"Theme Action": [
|
||||
"Archery",
|
||||
"Battle Royale",
|
||||
"Espionage",
|
||||
"Fugitive",
|
||||
"Guns",
|
||||
"Martial Arts",
|
||||
"Spearplay",
|
||||
"Swordplay",
|
||||
],
|
||||
"Theme Arts": [
|
||||
"Acting",
|
||||
"Calligraphy",
|
||||
"Classic Literature",
|
||||
"Drawing",
|
||||
"Fashion",
|
||||
"Food",
|
||||
"Makeup",
|
||||
"Photography",
|
||||
"Rakugo",
|
||||
"Writing",
|
||||
],
|
||||
"Theme Arts-Music": [
|
||||
"Band",
|
||||
"Classical Music",
|
||||
"Dancing",
|
||||
"Hip-hop Music",
|
||||
"Jazz Music",
|
||||
"Metal Music",
|
||||
"Musical Theater",
|
||||
"Rock Music",
|
||||
],
|
||||
"Theme Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
|
||||
"Theme Drama": [
|
||||
"Bullying",
|
||||
"Class Struggle",
|
||||
"Coming of Age",
|
||||
"Conspiracy",
|
||||
"Eco-Horror",
|
||||
"Fake Relationship",
|
||||
"Kingdom Management",
|
||||
"Rehabilitation",
|
||||
"Revenge",
|
||||
"Suicide",
|
||||
"Tragedy",
|
||||
],
|
||||
"Theme Fantasy": [
|
||||
"Alchemy",
|
||||
"Body Swapping",
|
||||
"Cultivation",
|
||||
"Fairy Tale",
|
||||
"Henshin",
|
||||
"Isekai",
|
||||
"Kaiju",
|
||||
"Magic",
|
||||
"Mythology",
|
||||
"Necromancy",
|
||||
"Shapeshifting",
|
||||
"Steampunk",
|
||||
"Super Power",
|
||||
"Superhero",
|
||||
"Wuxia",
|
||||
"Youkai",
|
||||
],
|
||||
"Theme Game": ["Board Game", "E-Sports", "Video Games"],
|
||||
"Theme Game-Card & Board Game": [
|
||||
"Card Battle",
|
||||
"Go",
|
||||
"Karuta",
|
||||
"Mahjong",
|
||||
"Poker",
|
||||
"Shogi",
|
||||
],
|
||||
"Theme Game-Sport": [
|
||||
"Acrobatics",
|
||||
"Airsoft",
|
||||
"American Football",
|
||||
"Athletics",
|
||||
"Badminton",
|
||||
"Baseball",
|
||||
"Basketball",
|
||||
"Bowling",
|
||||
"Boxing",
|
||||
"Cheerleading",
|
||||
"Cycling",
|
||||
"Fencing",
|
||||
"Fishing",
|
||||
"Fitness",
|
||||
"Football",
|
||||
"Golf",
|
||||
"Handball",
|
||||
"Ice Skating",
|
||||
"Judo",
|
||||
"Lacrosse",
|
||||
"Parkour",
|
||||
"Rugby",
|
||||
"Scuba Diving",
|
||||
"Skateboarding",
|
||||
"Sumo",
|
||||
"Surfing",
|
||||
"Swimming",
|
||||
"Table Tennis",
|
||||
"Tennis",
|
||||
"Volleyball",
|
||||
"Wrestling",
|
||||
],
|
||||
"Theme Other": [
|
||||
"Adoption",
|
||||
"Animals",
|
||||
"Astronomy",
|
||||
"Autobiographical",
|
||||
"Biographical",
|
||||
"Body Horror",
|
||||
"Cannibalism",
|
||||
"Chibi",
|
||||
"Cosmic Horror",
|
||||
"Crime",
|
||||
"Crossover",
|
||||
"Death Game",
|
||||
"Denpa",
|
||||
"Drugs",
|
||||
"Economics",
|
||||
"Educational",
|
||||
"Environmental",
|
||||
"Ero Guro",
|
||||
"Filmmaking",
|
||||
"Found Family",
|
||||
"Gambling",
|
||||
"Gender Bending",
|
||||
"Gore",
|
||||
"Language Barrier",
|
||||
"LGBTQ+ Themes",
|
||||
"Lost Civilization",
|
||||
"Marriage",
|
||||
"Medicine",
|
||||
"Memory Manipulation",
|
||||
"Meta",
|
||||
"Mountaineering",
|
||||
"Noir",
|
||||
"Otaku Culture",
|
||||
"Pandemic",
|
||||
"Philosophy",
|
||||
"Politics",
|
||||
"Proxy Battle",
|
||||
"Psychosexual",
|
||||
"Reincarnation",
|
||||
"Religion",
|
||||
"Royal Affairs",
|
||||
"Slavery",
|
||||
"Software Development",
|
||||
"Survival",
|
||||
"Terrorism",
|
||||
"Torture",
|
||||
"Travel",
|
||||
"War",
|
||||
],
|
||||
"Theme Other-Organisations": [
|
||||
"Assassins",
|
||||
"Criminal Organization",
|
||||
"Cult",
|
||||
"Firefighters",
|
||||
"Gangs",
|
||||
"Mafia",
|
||||
"Military",
|
||||
"Police",
|
||||
"Triads",
|
||||
"Yakuza",
|
||||
],
|
||||
"Theme Other-Vehicle": [
|
||||
"Aviation",
|
||||
"Cars",
|
||||
"Mopeds",
|
||||
"Motorcycles",
|
||||
"Ships",
|
||||
"Tanks",
|
||||
"Trains",
|
||||
],
|
||||
"Theme Romance": [
|
||||
"Age Gap",
|
||||
"Bisexual",
|
||||
"Boys' Love",
|
||||
"Female Harem",
|
||||
"Heterosexual",
|
||||
"Love Triangle",
|
||||
"Male Harem",
|
||||
"Matchmaking",
|
||||
"Mixed Gender Harem",
|
||||
"Teens' Love",
|
||||
"Unrequited Love",
|
||||
"Yuri",
|
||||
],
|
||||
"Theme Sci Fi": [
|
||||
"Cyberpunk",
|
||||
"Space Opera",
|
||||
"Time Loop",
|
||||
"Time Manipulation",
|
||||
"Tokusatsu",
|
||||
],
|
||||
"Theme Sci Fi-Mecha": ["Real Robot", "Super Robot"],
|
||||
"Theme Slice of Life": [
|
||||
"Agriculture",
|
||||
"Cute Boys Doing Cute Things",
|
||||
"Cute Girls Doing Cute Things",
|
||||
"Family Life",
|
||||
"Horticulture",
|
||||
"Iyashikei",
|
||||
"Parenthood",
|
||||
],
|
||||
}
|
||||
tags_available_list = []
|
||||
for tag_category, tags_in_category in tags_available.items():
|
||||
tags_available_list.extend(tags_in_category)
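The flattening loop above is equivalent to a single comprehension (shown only as an illustration):

```python
# Equivalent one-liner for building the flat tag list
tags_available_list = [tag for tags in tags_available.values() for tag in tags]
```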
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Search for anime using anilists api and get top ~50 results",
|
||||
short_help="Search for anime",
|
||||
)
|
||||
@click.argument(
|
||||
"title",
|
||||
@click.option("--title", "-t", shell_complete=anime_titles_shell_complete)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.option(
|
||||
"--season",
|
||||
help="The season the media was released",
|
||||
type=click.Choice(["WINTER", "SPRING", "SUMMER", "FALL"]),
|
||||
)
|
||||
@click.option(
|
||||
"--status",
|
||||
"-S",
|
||||
help="The media status of the anime",
|
||||
multiple=True,
|
||||
type=click.Choice(
|
||||
["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--sort",
|
||||
"-s",
|
||||
help="What to sort the search results on",
|
||||
type=click.Choice(
|
||||
[
|
||||
"ID",
|
||||
"ID_DESC",
|
||||
"TITLE_ROMAJI",
|
||||
"TITLE_ROMAJI_DESC",
|
||||
"TITLE_ENGLISH",
|
||||
"TITLE_ENGLISH_DESC",
|
||||
"TITLE_NATIVE",
|
||||
"TITLE_NATIVE_DESC",
|
||||
"TYPE",
|
||||
"TYPE_DESC",
|
||||
"FORMAT",
|
||||
"FORMAT_DESC",
|
||||
"START_DATE",
|
||||
"START_DATE_DESC",
|
||||
"END_DATE",
|
||||
"END_DATE_DESC",
|
||||
"SCORE",
|
||||
"SCORE_DESC",
|
||||
"POPULARITY",
|
||||
"POPULARITY_DESC",
|
||||
"TRENDING",
|
||||
"TRENDING_DESC",
|
||||
"EPISODES",
|
||||
"EPISODES_DESC",
|
||||
"DURATION",
|
||||
"DURATION_DESC",
|
||||
"STATUS",
|
||||
"STATUS_DESC",
|
||||
"CHAPTERS",
|
||||
"CHAPTERS_DESC",
|
||||
"VOLUMES",
|
||||
"VOLUMES_DESC",
|
||||
"UPDATED_AT",
|
||||
"UPDATED_AT_DESC",
|
||||
"SEARCH_MATCH",
|
||||
"FAVOURITES",
|
||||
"FAVOURITES_DESC",
|
||||
]
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--genres",
|
||||
"-g",
|
||||
multiple=True,
|
||||
help="the genres to filter by",
|
||||
type=click.Choice(
|
||||
[
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Comedy",
|
||||
"Drama",
|
||||
"Ecchi",
|
||||
"Fantasy",
|
||||
"Horror",
|
||||
"Mahou Shoujo",
|
||||
"Mecha",
|
||||
"Music",
|
||||
"Mystery",
|
||||
"Psychological",
|
||||
"Romance",
|
||||
"Sci-Fi",
|
||||
"Slice of Life",
|
||||
"Sports",
|
||||
"Supernatural",
|
||||
"Thriller",
|
||||
"Hentai",
|
||||
]
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--tags",
|
||||
"-T",
|
||||
multiple=True,
|
||||
help="the tags to filter by",
|
||||
type=click.Choice(tags_available_list),
|
||||
)
|
||||
@click.option(
|
||||
"--media-format",
|
||||
"-f",
|
||||
multiple=True,
|
||||
help="Media format",
|
||||
type=click.Choice(
|
||||
["TV", "TV_SHORT", "MOVIE", "SPECIAL", "OVA", "MUSIC", "NOVEL", "ONE_SHOT"]
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--year",
|
||||
"-y",
|
||||
type=click.Choice(
|
||||
[
|
||||
"1900",
|
||||
"1910",
|
||||
"1920",
|
||||
"1930",
|
||||
"1940",
|
||||
"1950",
|
||||
"1960",
|
||||
"1970",
|
||||
"1980",
|
||||
"1990",
|
||||
"2000",
|
||||
"2004",
|
||||
"2005",
|
||||
"2006",
|
||||
"2007",
|
||||
"2008",
|
||||
"2009",
|
||||
"2010",
|
||||
"2011",
|
||||
"2012",
|
||||
"2013",
|
||||
"2014",
|
||||
"2015",
|
||||
"2016",
|
||||
"2017",
|
||||
"2018",
|
||||
"2019",
|
||||
"2020",
|
||||
"2021",
|
||||
"2022",
|
||||
"2023",
|
||||
"2024",
|
||||
]
|
||||
),
|
||||
help="the year the media was released",
|
||||
)
|
||||
@click.option(
|
||||
"--on-list/--not-on-list",
|
||||
"-L/-no-L",
|
||||
help="Whether the anime should be in your list or not",
|
||||
type=bool,
|
||||
)
|
||||
@click.pass_obj
|
||||
def search(config, title):
|
||||
def search(
|
||||
config,
|
||||
title,
|
||||
dump_json,
|
||||
season,
|
||||
status,
|
||||
sort,
|
||||
genres,
|
||||
tags,
|
||||
media_format,
|
||||
year,
|
||||
on_list,
|
||||
):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
success, search_results = AniList.search(title)
|
||||
success, search_results = AniList.search(
|
||||
query=title,
|
||||
sort=sort,
|
||||
status_in=list(status),
|
||||
genre_in=list(genres),
|
||||
season=season,
|
||||
tag_in=list(tags),
|
||||
seasonYear=year,
|
||||
format_in=list(media_format),
|
||||
on_list=on_list,
|
||||
)
|
||||
if success:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = search_results
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(search_results))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = search_results
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
fastanime/cli/commands/anilist/stats.py (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...config import Config
|
||||
|
||||
|
||||
@click.command(help="Print out your anilist stats")
|
||||
@click.pass_obj
|
||||
def stats(
|
||||
config: "Config",
|
||||
):
|
||||
import shutil
|
||||
import subprocess
|
||||
from sys import exit
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
console = Console()
|
||||
from rich.markdown import Markdown
|
||||
from rich.panel import Panel
|
||||
|
||||
from ....anilist import AniList
|
||||
|
||||
user_data = AniList.get_user_info()
|
||||
if not user_data[0] or not user_data[1]:
|
||||
print("Failed to get user info")
|
||||
print(user_data[1])
|
||||
exit(1)
|
||||
|
||||
KITTEN_EXECUTABLE = shutil.which("kitten")
|
||||
if not KITTEN_EXECUTABLE:
|
||||
print("Kitten not found")
|
||||
exit(1)
|
||||
|
||||
image_url = user_data[1]["data"]["User"]["avatar"]["medium"]
|
||||
user_name = user_data[1]["data"]["User"]["name"]
|
||||
about = user_data[1]["data"]["User"]["about"] or ""
|
||||
console.clear()
|
||||
image_x = int(console.size.width * 0.1)
|
||||
image_y = int(console.size.height * 0.1)
|
||||
img_w = console.size.width // 3
|
||||
img_h = console.size.height // 3
|
||||
image_process = subprocess.run(
|
||||
[
|
||||
KITTEN_EXECUTABLE,
|
||||
"icat",
|
||||
"--clear",
|
||||
"--place",
|
||||
f"{img_w}x{img_h}@{image_x}x{image_y}",
|
||||
image_url,
|
||||
],
|
||||
)
|
||||
if not image_process.returncode == 0:
|
||||
print("failed to get image from icat")
|
||||
exit(1)
|
||||
console.print(
|
||||
Panel(
|
||||
Markdown(about),
|
||||
title=user_name,
|
||||
)
|
||||
)
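The stats view hard-fails when kitten is missing; a sketch of a capability check one could run first (the TERM value is an assumption about kitty, not something the diff checks):

```python
import os
import shutil


def can_render_inline_images() -> bool:
    """Rough guess at whether kitty's icat will work in this terminal (sketch)."""
    return shutil.which("kitten") is not None and os.environ.get("TERM") == "xterm-kitty"
```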
|
||||
@@ -5,14 +5,30 @@ import click
|
||||
help="Fetch the top 15 anime that are currently trending",
|
||||
short_help="Trending anime 🔥🔥🔥",
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def trending(config):
|
||||
def trending(config, dump_json):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
success, data = AniList.get_trending()
|
||||
if success:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = data
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(data))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = data
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
@@ -4,14 +4,30 @@ import click
|
||||
@click.command(
|
||||
help="Fetch the 15 most anticipited anime", short_help="View upcoming anime"
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def upcoming(config):
|
||||
def upcoming(config, dump_json):
|
||||
from ....anilist import AniList
|
||||
from ...interfaces.anilist_interfaces import select_anime
|
||||
from ...utils.tools import QueryDict
|
||||
|
||||
success, data = AniList.get_upcoming_anime()
|
||||
if success:
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = data
|
||||
select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(data))
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import anilist_results_menu
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = data
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
exit(1)
|
||||
|
||||
@@ -7,26 +7,40 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(help="View anime you are watching")
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
"-d",
|
||||
is_flag=True,
|
||||
help="Only print out the results dont open anilist menu",
|
||||
)
|
||||
@click.pass_obj
|
||||
def watching(config: "Config"):
|
||||
def watching(config: "Config", dump_json):
|
||||
from sys import exit
|
||||
|
||||
from ....anilist import AniList
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import QueryDict, exit_app
|
||||
|
||||
if not config.user:
|
||||
print("Not authenticated")
|
||||
print("Please run: fastanime anilist loggin")
|
||||
exit_app()
|
||||
exit(1)
|
||||
anime_list = AniList.get_anime_list("CURRENT")
|
||||
if not anime_list:
|
||||
return
|
||||
exit(1)
|
||||
if not anime_list[0] or not anime_list[1]:
|
||||
return
|
||||
exit(1)
|
||||
media = [
|
||||
mediaListItem["media"]
|
||||
for mediaListItem in anime_list[1]["data"]["Page"]["mediaList"]
|
||||
] # pyright:ignore
|
||||
anime_list[1]["data"]["Page"]["media"] = media # pyright:ignore
|
||||
anilist_config = QueryDict()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.select_anime(config, anilist_config)
|
||||
if dump_json:
|
||||
import json
|
||||
|
||||
print(json.dumps(anime_list[1]))
|
||||
else:
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -1,7 +1,24 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(help="Helper command to manage cache")
|
||||
@click.command(
|
||||
help="Helper command to manage cache",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# delete everything in the cache dir
|
||||
fastanime cache --clean
|
||||
\b
|
||||
# print the path to the cache dir and exit
|
||||
fastanime cache --path
|
||||
\b
|
||||
# print the current size of the cache dir and exit
|
||||
fastanime cache --size
|
||||
\b
|
||||
# open the cache dir and exit
|
||||
fastanime cache
|
||||
""",
|
||||
)
|
||||
@click.option("--clean", help="Clean the cache dir", is_flag=True)
|
||||
@click.option("--path", help="The path to the cache dir", is_flag=True)
|
||||
@click.option("--size", help="The size of the cache dir", is_flag=True)
|
||||
@@ -25,11 +42,15 @@ def cache(clean, path, size):
|
||||
elif size:
|
||||
import os
|
||||
|
||||
from ..utils.utils import sizeof_fmt
|
||||
from ..utils.utils import format_bytes_to_human
|
||||
|
||||
total_size = 0
|
||||
for dirpath, dirnames, filenames in os.walk(APP_CACHE_DIR):
|
||||
for f in filenames:
|
||||
fp = os.path.join(dirpath, f)
|
||||
total_size += os.path.getsize(fp)
|
||||
print("Total Size: ", sizeof_fmt(total_size))
|
||||
print("Total Size: ", format_bytes_to_human(total_size))
|
||||
else:
|
||||
import click
|
||||
|
||||
click.launch(APP_CACHE_DIR)
|
||||
|
||||
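The cache hunk above renames `sizeof_fmt` to `format_bytes_to_human` but does not show the helper's body. A minimal sketch of what such a byte-formatting helper conventionally looks like; this is an assumption, not the project's actual implementation:

```python
def format_bytes_to_human(num: float, suffix: str = "B") -> str:
    # walk up the binary prefixes until the value fits in three digits
    for unit in ("", "Ki", "Mi", "Gi", "Ti"):
        if abs(num) < 1024.0:
            return f"{num:3.1f} {unit}{suffix}"
        num /= 1024.0
    return f"{num:.1f} Pi{suffix}"


print(format_bytes_to_human(5_432_100))  # e.g. '5.2 MiB'
```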
141
fastanime/cli/commands/completions.py
Normal file
@@ -0,0 +1,141 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Helper command to get shell completions",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# try to detect your shell and print completions
|
||||
fastanime completions
|
||||
\b
|
||||
# print fish completions
|
||||
fastanime completions --fish
|
||||
\b
|
||||
# print bash completions
|
||||
fastanime completions --bash
|
||||
\b
|
||||
# print zsh completions
|
||||
fastanime completions --zsh
|
||||
""",
|
||||
)
|
||||
@click.option("--fish", is_flag=True, help="print fish completions")
|
||||
@click.option("--zsh", is_flag=True, help="print zsh completions")
|
||||
@click.option("--bash", is_flag=True, help="print bash completions")
|
||||
def completions(fish, zsh, bash):
|
||||
if not (fish or zsh or bash):
|
||||
import os
|
||||
|
||||
shell_env = os.environ.get("SHELL", "")
|
||||
if "fish" in shell_env:
|
||||
current_shell = "fish"
|
||||
elif "zsh" in shell_env:
|
||||
current_shell = "zsh"
|
||||
elif "bash" in shell_env:
|
||||
current_shell = "bash"
|
||||
else:
|
||||
current_shell = None
|
||||
else:
|
||||
current_shell = None
|
||||
if fish or current_shell == "fish" and not zsh and not bash:
|
||||
print(
|
||||
"""
|
||||
function _fastanime_completion;
|
||||
set -l response (env _FASTANIME_COMPLETE=fish_complete COMP_WORDS=(commandline -cp) COMP_CWORD=(commandline -t) fastanime);
|
||||
|
||||
for completion in $response;
|
||||
set -l metadata (string split "," $completion);
|
||||
|
||||
if test $metadata[1] = "dir";
|
||||
__fish_complete_directories $metadata[2];
|
||||
else if test $metadata[1] = "file";
|
||||
__fish_complete_path $metadata[2];
|
||||
else if test $metadata[1] = "plain";
|
||||
echo $metadata[2];
|
||||
end;
|
||||
end;
|
||||
end;
|
||||
|
||||
complete --no-files --command fastanime --arguments "(_fastanime_completion)";
|
||||
"""
|
||||
)
|
||||
elif zsh or current_shell == "zsh" and not bash:
|
||||
print(
|
||||
"""
|
||||
#compdef fastanime
|
||||
|
||||
_fastanime_completion() {
|
||||
local -a completions
|
||||
local -a completions_with_descriptions
|
||||
local -a response
|
||||
(( ! $+commands[fastanime] )) && return 1
|
||||
|
||||
response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) _FASTANIME_COMPLETE=zsh_complete fastanime)}")
|
||||
|
||||
for type key descr in ${response}; do
|
||||
if [[ "$type" == "plain" ]]; then
|
||||
if [[ "$descr" == "_" ]]; then
|
||||
completions+=("$key")
|
||||
else
|
||||
completions_with_descriptions+=("$key":"$descr")
|
||||
fi
|
||||
elif [[ "$type" == "dir" ]]; then
|
||||
_path_files -/
|
||||
elif [[ "$type" == "file" ]]; then
|
||||
_path_files -f
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$completions_with_descriptions" ]; then
|
||||
_describe -V unsorted completions_with_descriptions -U
|
||||
fi
|
||||
|
||||
if [ -n "$completions" ]; then
|
||||
compadd -U -V unsorted -a completions
|
||||
fi
|
||||
}
|
||||
|
||||
if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
|
||||
# autoload from fpath, call function directly
|
||||
_fastanime_completion "$@"
|
||||
else
|
||||
# eval/source/. command, register function for later
|
||||
compdef _fastanime_completion fastanime
|
||||
fi
|
||||
"""
|
||||
)
|
||||
elif bash or current_shell == "bash":
|
||||
print(
|
||||
"""
|
||||
_fastanime_completion() {
|
||||
local IFS=$'\n'
|
||||
local response
|
||||
|
||||
response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD _FASTANIME_COMPLETE=bash_complete $1)
|
||||
|
||||
for completion in $response; do
|
||||
IFS=',' read type value <<< "$completion"
|
||||
|
||||
if [[ $type == 'dir' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o dirnames
|
||||
elif [[ $type == 'file' ]]; then
|
||||
COMPREPLY=()
|
||||
compopt -o default
|
||||
elif [[ $type == 'plain' ]]; then
|
||||
COMPREPLY+=($value)
|
||||
fi
|
||||
done
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
_fastanime_completion_setup() {
|
||||
complete -o nosort -F _fastanime_completion fastanime
|
||||
}
|
||||
|
||||
_fastanime_completion_setup;
|
||||
"""
|
||||
)
|
||||
else:
|
||||
print("Could not detect shell")
|
||||
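The completions command falls back to `$SHELL` when no flag is passed. A sketch of that detection step factored into a helper, with the flag check written as an explicit "no flag was passed" test; the helper names are illustrative, the command itself inlines this logic:

```python
import os


def detect_shell() -> str | None:
    # best-effort detection from the SHELL environment variable
    shell_env = os.environ.get("SHELL", "")
    for shell in ("fish", "zsh", "bash"):
        if shell in shell_env:
            return shell
    return None


def pick_shell(fish: bool, zsh: bool, bash: bool) -> str | None:
    if not (fish or zsh or bash):
        return detect_shell()
    return "fish" if fish else "zsh" if zsh else "bash"
```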
@@ -1,48 +1,120 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Opens up your fastanime config in your preferred editor",
|
||||
help="Manage your config with ease",
|
||||
short_help="Edit your config",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# Edit your config in your default editor
|
||||
# NB: If it opens vim or vi exit with `:q`
|
||||
fastanime config
|
||||
\b
|
||||
# get the path of the config file
|
||||
fastanime config --path
|
||||
\b
|
||||
# print desktop entry info
|
||||
fastanime config --desktop-entry
|
||||
\b
|
||||
# update your config without opening an editor
|
||||
fastanime --icons --fzf --preview config --update
|
||||
\b
|
||||
# view the current contents of your config
|
||||
fastanime config --view
|
||||
""",
|
||||
)
|
||||
@click.option("--path", "-p", help="Print the config location and exit", is_flag=True)
|
||||
@click.option(
|
||||
"--view", "-v", help="View the current contents of your config", is_flag=True
|
||||
)
|
||||
@click.option(
|
||||
"--desktop-entry",
|
||||
"-d",
|
||||
help="Configure the desktop entry of fastanime",
|
||||
is_flag=True,
|
||||
)
|
||||
# @click.pass_obj
|
||||
def config(path, desktop_entry):
|
||||
pass
|
||||
@click.option(
|
||||
"--update",
|
||||
"-u",
|
||||
help="Persist all the config options passed to fastanime to your config file",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.pass_obj
|
||||
def config(user_config: "Config", path, view, desktop_entry, update):
|
||||
import sys
|
||||
|
||||
from pyshortcuts import make_shortcut
|
||||
from rich import print
|
||||
|
||||
from ...constants import APP_NAME, ICON_PATH, USER_CONFIG_PATH
|
||||
from ... import __version__
|
||||
from ...constants import APP_NAME, ICON_PATH, S_PLATFORM, USER_CONFIG_PATH
|
||||
|
||||
if path:
|
||||
print(USER_CONFIG_PATH)
|
||||
elif view:
|
||||
print(user_config)
|
||||
elif desktop_entry:
|
||||
import os
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
|
||||
from rich import print
|
||||
from rich.prompt import Confirm
|
||||
|
||||
from ..utils.tools import exit_app
|
||||
|
||||
FASTANIME_EXECUTABLE = shutil.which("fastanime")
|
||||
if FASTANIME_EXECUTABLE:
|
||||
cmds = f"{FASTANIME_EXECUTABLE} --rofi anilist"
|
||||
else:
|
||||
cmds = "_ -m fastanime --rofi anilist"
|
||||
shortcut = make_shortcut(
|
||||
name=APP_NAME,
|
||||
description="Watch Anime from the terminal",
|
||||
icon=ICON_PATH,
|
||||
script=cmds,
|
||||
terminal=False,
|
||||
)
|
||||
if shortcut:
|
||||
print("Success", shortcut)
|
||||
else:
|
||||
print("Failed")
|
||||
else:
|
||||
import click
|
||||
cmds = f"{sys.executable} -m fastanime --rofi anilist"
|
||||
|
||||
# TODO: Get funs of the other platforms to complete this lol
|
||||
if S_PLATFORM == "win32":
|
||||
print(
|
||||
"Not implemented; the author thinks its not straight forward so welcomes lovers of windows to try and implement it themselves or to switch to a proper os like arch linux or pray the author gets bored 😜"
|
||||
)
|
||||
elif S_PLATFORM == "darwin":
|
||||
print(
|
||||
"Not implemented; the author thinks its not straight forward so welcomes lovers of mac to try and implement it themselves or to switch to a proper os like arch linux or pray the author gets bored 😜"
|
||||
)
|
||||
else:
|
||||
desktop_entry = dedent(
|
||||
f"""
|
||||
[Desktop Entry]
|
||||
Name={APP_NAME}
|
||||
Type=Application
|
||||
version={__version__}
|
||||
Path={Path().home()}
|
||||
Comment=Watch anime from your terminal
|
||||
Terminal=false
|
||||
Icon={ICON_PATH}
|
||||
Exec={cmds}
|
||||
Categories=Entertainment
|
||||
"""
|
||||
)
|
||||
base = os.path.expanduser("~/.local/share/applications")
|
||||
desktop_entry_path = os.path.join(base, f"{APP_NAME}.desktop")
|
||||
if os.path.exists(desktop_entry_path):
|
||||
if not Confirm.ask(
|
||||
f"The file already exists {desktop_entry_path}; or would you like to rewrite it",
|
||||
default=False,
|
||||
):
|
||||
exit_app(1)
|
||||
with open(desktop_entry_path, "w") as f:
|
||||
f.write(desktop_entry)
|
||||
with open(desktop_entry_path) as f:
|
||||
print(f"Successfully wrote \n{f.read()}")
|
||||
exit_app(0)
|
||||
elif update:
|
||||
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as file:
|
||||
file.write(user_config.__str__())
|
||||
print("update successfull")
|
||||
else:
|
||||
click.edit(filename=USER_CONFIG_PATH)
|
||||
|
||||
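The `--desktop-entry` branch above writes the .desktop file straight to `~/.local/share/applications`. A sketch of an XDG-aware variant that honours `XDG_DATA_HOME` when it is set; this is an alternative for illustration, not what the diff does:

```python
import os
from pathlib import Path


def applications_dir() -> Path:
    # XDG_DATA_HOME overrides the default ~/.local/share location
    data_home = os.environ.get("XDG_DATA_HOME", "")
    base = Path(data_home) if data_home else Path.home() / ".local" / "share"
    return base / "applications"


desktop_entry_path = applications_dir() / "FastAnime.desktop"
print(desktop_entry_path)
```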
@@ -2,6 +2,8 @@ from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ..completion_functions import anime_titles_shell_complete
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..config import Config
|
||||
|
||||
@@ -9,25 +11,125 @@ if TYPE_CHECKING:
|
||||
@click.command(
|
||||
help="Download anime using the anime provider for a specified range",
|
||||
short_help="Download anime",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# Download all available episodes
|
||||
# multiple titles can be specified with -t option
|
||||
fastanime download -t <anime-title> -t <anime-title>
|
||||
# -- or --
|
||||
fastanime download -t <anime-title> -t <anime-title> -r ':'
|
||||
\b
|
||||
# download latest episode for the two anime titles
|
||||
# the number can be any no of latest episodes but a minus sign
|
||||
# must be present
|
||||
fastanime download -t <anime-title> -t <anime-title> -r '-1'
|
||||
\b
|
||||
# latest 5
|
||||
fastanime download -t <anime-title> -t <anime-title> -r '-5'
|
||||
\b
|
||||
# Download specific episode range
|
||||
# be sure to observe the range Syntax
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>:<step>'
|
||||
\b
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>'
|
||||
\b
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:'
|
||||
\b
|
||||
fastanime download -t <anime-title> -r ':<episodes-end>'
|
||||
\b
|
||||
# download specific episode
|
||||
# remember python indexing starts at 0
|
||||
fastanime download -t <anime-title> -r '<episode-1>:<episode>'
|
||||
\b
|
||||
# merge the subtitles with the video into mkv format using ffmpeg; hianime tends to give subs as separate files
# and don't prompt for anything
# (e.g. if a file already exists in the destination, remove it instead)
# and clean up,
# i.e. remove the original files (the subtitle file and the video file),
# keeping only the merged files
|
||||
fastanime download -t <anime-title> --merge --clean --no-prompt
|
||||
\b
|
||||
# EOF is used since -t always expects a title
|
||||
# you can supply anime titles from file or -t at the same time
|
||||
# from stdin
|
||||
echo -e "<anime-title>\\n<anime-title>\\n<anime-title>" | fastanime download -t "EOF" -r <range> -f -
|
||||
\b
|
||||
# from file
|
||||
fastanime download -t "EOF" -r <range> -f <file-path>
|
||||
""",
|
||||
)
|
||||
@click.argument(
|
||||
"anime-title",
|
||||
@click.option(
|
||||
"--anime-titles",
|
||||
"--anime_title",
|
||||
"-t",
|
||||
required=True,
|
||||
shell_complete=anime_titles_shell_complete,
|
||||
multiple=True,
|
||||
help="Specify which anime to download",
|
||||
)
|
||||
@click.option(
|
||||
"--episode-range",
|
||||
"-r",
|
||||
help="A range of episodes to download",
|
||||
help="A range of episodes to download (start-end)",
|
||||
)
|
||||
@click.option(
|
||||
"--highest_priority",
|
||||
"-h",
|
||||
help="Choose stream indicated as highest priority",
|
||||
"--file",
|
||||
"-f",
|
||||
type=click.File(),
|
||||
help="A file to read from all anime to download",
|
||||
)
|
||||
@click.option(
|
||||
"--force-unknown-ext",
|
||||
"-F",
|
||||
help="This option forces yt-dlp to download extensions its not aware of",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--silent/--no-silent",
|
||||
"-q/-V",
|
||||
type=bool,
|
||||
help="Download silently (during download)",
|
||||
default=True,
|
||||
)
|
||||
@click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)")
|
||||
@click.option(
|
||||
"--merge", "-m", is_flag=True, help="Merge the subfile with video using ffmpeg"
|
||||
)
|
||||
@click.option(
|
||||
"--clean",
|
||||
"-c",
|
||||
is_flag=True,
|
||||
help="After merging delete the original files",
|
||||
)
|
||||
@click.option(
|
||||
"--wait-time",
|
||||
"-w",
|
||||
type=int,
|
||||
help="The amount of time to wait after downloading is complete before the screen is completely cleared",
|
||||
default=60,
|
||||
)
|
||||
@click.option(
|
||||
"--prompt/--no-prompt",
|
||||
help="Whether to prompt for anything instead just do the best thing",
|
||||
default=True,
|
||||
)
|
||||
@click.pass_obj
|
||||
def download(config: "Config", anime_title, episode_range, highest_priority):
|
||||
from click import clear
|
||||
def download(
|
||||
config: "Config",
|
||||
anime_titles: tuple,
|
||||
episode_range,
|
||||
file,
|
||||
force_unknown_ext,
|
||||
silent,
|
||||
verbose,
|
||||
merge,
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
):
|
||||
import time
|
||||
|
||||
from rich import print
|
||||
from rich.progress import Progress
|
||||
from thefuzz import fuzz
|
||||
@@ -35,117 +137,243 @@ def download(config: "Config", anime_title, episode_range, highest_priority):
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ...libs.anime_provider.types import Anime
|
||||
from ...libs.fzf import fzf
|
||||
from ...Utility.data import anime_normalizer
|
||||
from ...Utility.downloader.downloader import downloader
|
||||
from ..utils.tools import exit_app
|
||||
from ..utils.utils import fuzzy_inquirer
|
||||
from ..utils.utils import (
|
||||
filter_by_quality,
|
||||
fuzzy_inquirer,
|
||||
move_preferred_subtitle_lang_to_top,
|
||||
)
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
anilist_anime_info = None
|
||||
|
||||
translation_type = config.translation_type
|
||||
download_dir = config.downloads_dir
|
||||
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, translation_type=translation_type
|
||||
if file:
|
||||
contents = file.read()
|
||||
anime_titles_from_file = tuple(
|
||||
[title for title in contents.split("\n") if title]
|
||||
)
|
||||
if not search_results:
|
||||
print("Search results failed")
|
||||
input("Enter to retry")
|
||||
download(config, anime_title, episode_range, highest_priority)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result for search_result in search_results
|
||||
}
|
||||
file.close()
|
||||
|
||||
if config.auto_select:
|
||||
search_result = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
)
|
||||
print("[cyan]Auto selecting:[/] ", search_result)
|
||||
else:
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
search_result = fzf.run(choices, "Please Select title: ", "FastAnime")
|
||||
else:
|
||||
search_result = fuzzy_inquirer("Please Select title", choices)
|
||||
|
||||
# ---- fetch anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[search_result]["id"]
|
||||
)
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
input("Enter to continue...")
|
||||
download(config, anime_title, episode_range, highest_priority)
|
||||
return
|
||||
|
||||
episodes = anime["availableEpisodesDetail"][config.translation_type]
|
||||
if episode_range:
|
||||
episodes_start, episodes_end = episode_range.split("-")
|
||||
|
||||
else:
|
||||
episodes_start, episodes_end = 0, len(episodes)
|
||||
for episode in range(round(float(episodes_start)), round(float(episodes_end))):
|
||||
try:
|
||||
episode = str(episode)
|
||||
if episode not in episodes:
|
||||
print(f"[cyan]Warning[/]: Episode {episode} not found, skipping")
|
||||
continue
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime, episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
print("No streams skipping")
|
||||
continue
|
||||
|
||||
with Progress() as progress:
|
||||
if highest_priority:
|
||||
progress.add_task("Fetching highest priority stream", total=None)
|
||||
streams = list(streams)
|
||||
links = [
|
||||
(link.get("priority", 0), link["link"])
|
||||
for server in streams
|
||||
for link in server["links"]
|
||||
]
|
||||
link = max(links, key=lambda x: x[0])[1]
|
||||
episode_title = streams[0]["episode_title"]
|
||||
elif config.server == "top":
|
||||
progress.add_task("Fetching Top Server", total=None)
|
||||
server = next(streams)
|
||||
link = server["links"][config.quality]["link"]
|
||||
episode_title = server["episode_title"]
|
||||
else:
|
||||
# TODO: Make this better, but no rush; what's the point of manual selection?
|
||||
progress.add_task("Fetching links", total=None)
|
||||
streams = list(streams)
|
||||
links = [
|
||||
link["link"] for server in streams for link in server["links"]
|
||||
]
|
||||
episode_title = streams[0]["episode_title"]
|
||||
if config.use_fzf:
|
||||
link = fzf.run(links, "Select link", "Links")
|
||||
else:
|
||||
link = fuzzy_inquirer("Select link", links)
|
||||
|
||||
print(f"[purple]Now Downloading:[/] {search_result} Episode {episode}")
|
||||
|
||||
downloader._download_file(
|
||||
link,
|
||||
download_dir,
|
||||
(anime["title"], episode_title),
|
||||
True,
|
||||
config.format,
|
||||
anime_titles = (*anime_titles_from_file, *anime_titles)
|
||||
print(f"[green bold]Queued:[/] {anime_titles}")
|
||||
for anime_title in anime_titles:
|
||||
if anime_title == "EOF":
|
||||
break
|
||||
print(f"[green bold]Now Downloading: [/] {anime_title}")
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, translation_type=translation_type
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
print("Continuing")
|
||||
clear()
|
||||
print("Done")
|
||||
if not search_results:
|
||||
print("Search results failed")
|
||||
input("Enter to retry")
|
||||
download(
|
||||
config,
|
||||
anime_title,
|
||||
episode_range,
|
||||
file,
|
||||
force_unknown_ext,
|
||||
silent,
|
||||
verbose,
|
||||
merge,
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
print("Nothing muches your search term")
|
||||
continue
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result for search_result in search_results
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
selected_anime_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(
|
||||
anime_normalizer.get(title, title), anime_title
|
||||
),
|
||||
)
|
||||
print("[cyan]Auto selecting:[/] ", selected_anime_title)
|
||||
else:
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
selected_anime_title = fzf.run(
|
||||
choices, "Please Select title", "FastAnime"
|
||||
)
|
||||
else:
|
||||
selected_anime_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Please Select title",
|
||||
)
|
||||
|
||||
# ---- fetch anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[selected_anime_title]["id"]
|
||||
)
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
input("Enter to continue...")
|
||||
download(
|
||||
config,
|
||||
anime_title,
|
||||
episode_range,
|
||||
file,
|
||||
force_unknown_ext,
|
||||
silent,
|
||||
verbose,
|
||||
merge,
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
)
|
||||
return
|
||||
|
||||
episodes = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
# where the magic happens
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
|
||||
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
print(f"[green bold]Downloading: [/] {episodes_range}")
|
||||
|
||||
else:
|
||||
episodes_range = sorted(episodes, key=float)
|
||||
|
||||
if config.normalize_titles:
|
||||
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
|
||||
|
||||
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
|
||||
|
||||
# lets download em
|
||||
for episode in episodes_range:
|
||||
try:
|
||||
episode = str(episode)
|
||||
if episode not in episodes:
|
||||
print(f"[cyan]Warning[/]: Episode {episode} not found, skipping")
|
||||
continue
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
print("No streams skipping")
|
||||
continue
|
||||
# ---- fetch servers ----
|
||||
if config.server == "top":
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching top server...", total=None)
|
||||
server_name = next(streams, None)
|
||||
if not server_name:
|
||||
print("Sth went wrong when fetching the server")
|
||||
continue
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, server_name["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("[yellow bold]WARNING:[/] No streams found")
|
||||
time.sleep(1)
|
||||
print("Continuing...")
|
||||
continue
|
||||
link = stream_link["link"]
|
||||
provider_headers = server_name["headers"]
|
||||
episode_title = server_name["episode_title"]
|
||||
subtitles = server_name["subtitles"]
|
||||
else:
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching servers", total=None)
|
||||
# prompt for server selection
|
||||
servers = {server["server"]: server for server in streams}
|
||||
servers_names = list(servers.keys())
|
||||
if config.server in servers_names:
|
||||
server_name = config.server
|
||||
else:
|
||||
if config.use_fzf:
|
||||
server_name = fzf.run(servers_names, "Select a link")
|
||||
else:
|
||||
server_name = fuzzy_inquirer(
|
||||
servers_names,
|
||||
"Select link",
|
||||
)
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, servers[server_name]["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("[yellow bold]WARNING:[/] No streams found")
|
||||
time.sleep(1)
|
||||
print("Continuing...")
|
||||
continue
|
||||
link = stream_link["link"]
|
||||
provider_headers = servers[server_name]["headers"]
|
||||
|
||||
subtitles = servers[server_name]["subtitles"]
|
||||
episode_title = servers[server_name]["episode_title"]
|
||||
|
||||
if anilist_anime_info:
|
||||
selected_anime_title = (
|
||||
anilist_anime_info["title"][config.preferred_language]
|
||||
or anilist_anime_info["title"]["romaji"]
|
||||
or anilist_anime_info["title"]["english"]
|
||||
)
|
||||
import re
|
||||
|
||||
for episode_detail in anilist_anime_info["episodes"]:
|
||||
if re.match(f"Episode {episode} ", episode_detail["title"]):
|
||||
episode_title = episode_detail["title"]
|
||||
break
|
||||
print(f"[purple]Now Downloading:[/] {episode_title}")
|
||||
subtitles = move_preferred_subtitle_lang_to_top(
|
||||
subtitles, config.sub_lang
|
||||
)
|
||||
downloader._download_file(
|
||||
link,
|
||||
selected_anime_title,
|
||||
episode_title,
|
||||
download_dir,
|
||||
silent,
|
||||
config.format,
|
||||
force_unknown_ext,
|
||||
verbose,
|
||||
headers=provider_headers,
|
||||
sub=subtitles[0]["url"] if subtitles else "",
|
||||
merge=merge,
|
||||
clean=clean,
|
||||
prompt=prompt,
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
time.sleep(1)
|
||||
print("Continuing...")
|
||||
print("Done Downloading")
|
||||
time.sleep(wait_time)
|
||||
exit_app()
|
||||
|
||||
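The download command above (and the grab and search commands below) all parse the same `:`-separated episode-range syntax by slicing the float-sorted episode list. A consolidated sketch of that parsing; the helper name is illustrative, the slicing rules mirror the diff:

```python
def slice_episode_range(episodes: list[str], spec: str | None) -> list[str]:
    episodes = sorted(episodes, key=float)
    if not spec:
        return episodes
    if ":" not in spec:
        # a bare number such as '-1' means "from this index to the end"
        return episodes[int(spec):]
    parts = spec.split(":")
    if len(parts) == 2 and all(parts):
        return episodes[int(parts[0]):int(parts[1])]
    if len(parts) == 3 and all(parts):
        return episodes[int(parts[0]):int(parts[1]):int(parts[2])]
    start, end = parts[0], parts[1]
    if start.strip():
        return episodes[int(start):]
    if end.strip():
        return episodes[:int(end)]
    return episodes


print(slice_episode_range(["1", "2", "3", "10"], "-1"))   # ['10']
print(slice_episode_range(["1", "2", "3", "10"], "1:3"))  # ['2', '3']
```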
@@ -1,25 +1,73 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ..completion_functions import downloaded_anime_titles
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
if TYPE_CHECKING:
|
||||
from ..config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="View and watch your downloads using mpv", short_help="Watch downloads"
|
||||
help="View and watch your downloads using mpv",
|
||||
short_help="Watch downloads",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
fastanime downloads
|
||||
\b
|
||||
# view individual episodes
|
||||
fastanime downloads --view-episodes
|
||||
# --- or ---
|
||||
fastanime downloads -v
|
||||
\b
|
||||
# to set seek time when using ffmpegthumbnailer for local previews
|
||||
# -1 means random and is the default
|
||||
fastanime downloads --time-to-seek <intRange(-1,100)>
|
||||
# --- or ---
|
||||
fastanime downloads -t <intRange(-1,100)>
|
||||
\b
|
||||
# to watch a specific title
|
||||
# be sure to get the completions for the best experience
|
||||
fastanime downloads --title <title>
|
||||
\b
|
||||
# to get the path to the downloads folder set
|
||||
fastanime downloads --path
|
||||
# useful when you want to use the value for other programs
|
||||
""",
|
||||
)
|
||||
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
|
||||
@click.option(
|
||||
"--title",
|
||||
"-T",
|
||||
shell_complete=downloaded_anime_titles,
|
||||
help="watch a specific title",
|
||||
)
|
||||
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
|
||||
@click.option(
|
||||
"--ffmpegthumbnailer-seek-time",
|
||||
"--time-to-seek",
|
||||
"-t",
|
||||
type=click.IntRange(-1, 100),
|
||||
help="ffmpegthumbnailer seek time",
|
||||
)
|
||||
@click.pass_obj
|
||||
def downloads(config: "Config", path: bool):
|
||||
def downloads(
|
||||
config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
|
||||
):
|
||||
import os
|
||||
|
||||
from ...cli.utils.mpv import run_mpv
|
||||
from ...libs.fzf import fzf
|
||||
from ...libs.rofi import Rofi
|
||||
from ...Utility.utils import sort_by_episode_number
|
||||
from ..utils.tools import exit_app
|
||||
from ..utils.utils import fuzzy_inquirer
|
||||
|
||||
if not ffmpegthumbnailer_seek_time:
|
||||
ffmpegthumbnailer_seek_time = config.ffmpegthumbnailer_seek_time
|
||||
USER_VIDEOS_DIR = config.downloads_dir
|
||||
if path:
|
||||
print(USER_VIDEOS_DIR)
|
||||
@@ -27,21 +75,282 @@ def downloads(config: "Config", path: bool):
|
||||
if not os.path.exists(USER_VIDEOS_DIR):
|
||||
print("Downloads directory specified does not exist")
|
||||
return
|
||||
playlists = os.listdir(USER_VIDEOS_DIR)
|
||||
playlists.append("Exit")
|
||||
anime_downloads = sorted(
|
||||
os.listdir(USER_VIDEOS_DIR),
|
||||
)
|
||||
anime_downloads.append("Exit")
|
||||
|
||||
def stream():
|
||||
if config.use_fzf:
|
||||
playlist_name = fzf.run(playlists, "Enter Playlist Name", "Downloads")
|
||||
elif config.use_rofi:
|
||||
playlist_name = Rofi.run(playlists, "Enter Playlist Name")
|
||||
def create_thumbnails(video_path, anime_title, downloads_thumbnail_cache_dir):
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
FFMPEG_THUMBNAILER = shutil.which("ffmpegthumbnailer")
|
||||
if not FFMPEG_THUMBNAILER:
|
||||
return
|
||||
|
||||
out = os.path.join(downloads_thumbnail_cache_dir, anime_title)
|
||||
if ffmpegthumbnailer_seek_time == -1:
|
||||
import random
|
||||
|
||||
seektime = str(random.randrange(0, 100))
|
||||
else:
|
||||
playlist_name = fuzzy_inquirer("Enter Playlist Name: ", playlists)
|
||||
seektime = str(ffmpegthumbnailer_seek_time)
|
||||
_ = subprocess.run(
|
||||
[
|
||||
FFMPEG_THUMBNAILER,
|
||||
"-i",
|
||||
video_path,
|
||||
"-o",
|
||||
out,
|
||||
"-s",
|
||||
"0",
|
||||
"-t",
|
||||
seektime,
|
||||
],
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
|
||||
def get_previews_anime(workers=None, bg=True):
|
||||
import concurrent.futures
|
||||
import random
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
if not shutil.which("ffmpegthumbnailer"):
|
||||
print("ffmpegthumbnailer not found")
|
||||
logger.error("ffmpegthumbnailer not found")
|
||||
return
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ..utils.scripts import fzf_preview
|
||||
|
||||
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
|
||||
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _worker():
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for anime_title in anime_downloads:
|
||||
anime_path = os.path.join(USER_VIDEOS_DIR, anime_title)
|
||||
if not os.path.isdir(anime_path):
|
||||
continue
|
||||
playlist = [
|
||||
anime
|
||||
for anime in sorted(
|
||||
os.listdir(anime_path),
|
||||
)
|
||||
if "mp4" in anime
|
||||
]
|
||||
if playlist:
|
||||
# actual link to download image from
|
||||
video_path = os.path.join(anime_path, random.choice(playlist))
|
||||
future_to_url[
|
||||
executor.submit(
|
||||
create_thumbnails,
|
||||
video_path,
|
||||
anime_title,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
] = anime_title
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
if bg:
|
||||
from threading import Thread
|
||||
|
||||
worker = Thread(target=_worker)
|
||||
worker.start()
|
||||
else:
|
||||
_worker()
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then
|
||||
if ! fzf-preview %s/{} 2>/dev/null; then
|
||||
echo Loading...
|
||||
fi
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
downloads_thumbnail_cache_dir,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
return preview
|
||||
|
||||
def get_previews_episodes(anime_playlist_path, workers=None, bg=True):
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ..utils.scripts import fzf_preview
|
||||
|
||||
if not shutil.which("ffmpegthumbnailer"):
|
||||
print("ffmpegthumbnailer not found")
|
||||
logger.error("ffmpegthumbnailer not found")
|
||||
return
|
||||
|
||||
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
|
||||
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _worker():
|
||||
import concurrent.futures
|
||||
|
||||
# use concurrency to download the images as fast as possible
|
||||
# anime_playlist_path = os.path.join(USER_VIDEOS_DIR, anime_playlist_path)
|
||||
if not os.path.isdir(anime_playlist_path):
|
||||
return
|
||||
anime_episodes = sorted(
|
||||
os.listdir(anime_playlist_path), key=sort_by_episode_number
|
||||
)
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for episode_title in anime_episodes:
|
||||
episode_path = os.path.join(anime_playlist_path, episode_title)
|
||||
|
||||
# actual link to download image from
|
||||
future_to_url[
|
||||
executor.submit(
|
||||
create_thumbnails,
|
||||
episode_path,
|
||||
episode_title,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
] = episode_title
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
if bg:
|
||||
from threading import Thread
|
||||
|
||||
worker = Thread(target=_worker)
|
||||
worker.start()
|
||||
else:
|
||||
_worker()
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then
|
||||
if ! fzf-preview %s/{} 2>/dev/null; then
|
||||
echo Loading...
|
||||
fi
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
downloads_thumbnail_cache_dir,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
return preview
|
||||
|
||||
def stream_episode(
|
||||
anime_playlist_path,
|
||||
):
|
||||
if view_episodes:
|
||||
if not os.path.isdir(anime_playlist_path):
|
||||
print(anime_playlist_path, "is not dir")
|
||||
exit_app(1)
|
||||
return
|
||||
episodes = sorted(
|
||||
os.listdir(anime_playlist_path), key=sort_by_episode_number
|
||||
)
|
||||
downloaded_episodes = [*episodes, "Back"]
|
||||
|
||||
if config.use_fzf:
|
||||
if not config.preview:
|
||||
episode_title = fzf.run(
|
||||
downloaded_episodes,
|
||||
"Enter Episode ",
|
||||
)
|
||||
else:
|
||||
preview = get_previews_episodes(anime_playlist_path)
|
||||
episode_title = fzf.run(
|
||||
downloaded_episodes,
|
||||
"Enter Episode ",
|
||||
preview=preview,
|
||||
)
|
||||
elif config.use_rofi:
|
||||
episode_title = Rofi.run(downloaded_episodes, "Enter Episode")
|
||||
else:
|
||||
episode_title = fuzzy_inquirer(
|
||||
downloaded_episodes,
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
if episode_title == "Back":
|
||||
stream_anime()
|
||||
return
|
||||
episode_path = os.path.join(anime_playlist_path, episode_title)
|
||||
if config.sync_play:
|
||||
from ..utils.syncplay import SyncPlayer
|
||||
|
||||
SyncPlayer(episode_path)
|
||||
else:
|
||||
run_mpv(
|
||||
episode_path,
|
||||
player=config.player,
|
||||
)
|
||||
stream_episode(anime_playlist_path)
|
||||
|
||||
def stream_anime(title=None):
|
||||
if title:
|
||||
from thefuzz import fuzz
|
||||
|
||||
playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
|
||||
elif config.use_fzf:
|
||||
if not config.preview:
|
||||
playlist_name = fzf.run(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
else:
|
||||
preview = get_previews_anime()
|
||||
playlist_name = fzf.run(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name",
|
||||
preview=preview,
|
||||
)
|
||||
elif config.use_rofi:
|
||||
playlist_name = Rofi.run(anime_downloads, "Enter Playlist Name")
|
||||
else:
|
||||
playlist_name = fuzzy_inquirer(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
if playlist_name == "Exit":
|
||||
exit_app()
|
||||
return
|
||||
playlist = os.path.join(USER_VIDEOS_DIR, playlist_name)
|
||||
run_mpv(playlist)
|
||||
stream()
|
||||
if view_episodes:
|
||||
stream_episode(
|
||||
playlist,
|
||||
)
|
||||
else:
|
||||
if config.sync_play:
|
||||
from ..utils.syncplay import SyncPlayer
|
||||
|
||||
stream()
|
||||
SyncPlayer(playlist)
|
||||
else:
|
||||
run_mpv(
|
||||
playlist,
|
||||
player=config.player,
|
||||
)
|
||||
stream_anime()
|
||||
|
||||
stream_anime(title)
|
||||
|
||||
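The downloads command's `create_thumbnails` helper shells out to ffmpegthumbnailer once per video to build fzf previews. A condensed sketch of that single-thumbnail step, using the same flags as the diff; the paths and helper name are illustrative:

```python
import shutil
import subprocess


def make_thumbnail(video_path: str, out_path: str, seek_percent: int = 10) -> bool:
    # -s 0 keeps the source size, -t is the seek position as a percentage
    thumbnailer = shutil.which("ffmpegthumbnailer")
    if not thumbnailer:
        return False
    result = subprocess.run(
        [thumbnailer, "-i", video_path, "-o", out_path, "-s", "0", "-t", str(seek_percent)],
        capture_output=True,
    )
    return result.returncode == 0
```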
243
fastanime/cli/commands/grab.py
Normal file
@@ -0,0 +1,243 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ..completion_functions import anime_titles_shell_complete
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Helper command to get streams for anime to use externally in a non-python application",
|
||||
short_help="Print anime streams to standard out",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# --- print anime info + episode streams ---
|
||||
\b
|
||||
# multiple titles can be specified with the -t option
|
||||
fastanime grab -t <anime-title> -t <anime-title>
|
||||
# -- or --
|
||||
# print all available episodes
|
||||
fastanime grab -t <anime-title> -r ':'
|
||||
\b
|
||||
# print the latest episode
|
||||
fastanime grab -t <anime-title> -r '-1'
|
||||
\b
|
||||
# print a specific episode range
|
||||
# be sure to observe the range Syntax
|
||||
fastanime grab -t <anime-title> -r '<start>:<stop>'
|
||||
\b
|
||||
fastanime grab -t <anime-title> -r '<start>:<stop>:<step>'
|
||||
\b
|
||||
fastanime grab -t <anime-title> -r '<start>:'
|
||||
\b
|
||||
fastanime grab -t <anime-title> -r ':<end>'
|
||||
\b
|
||||
# --- grab options ---
|
||||
\b
|
||||
# print search results only
|
||||
fastanime grab -t <anime-title> -r <range> --search-results-only
|
||||
\b
|
||||
# print anime info only
|
||||
fastanime grab -t <anime-title> -r <range> --anime-info-only
|
||||
\b
|
||||
# print episode streams only
|
||||
fastanime grab -t <anime-title> -r <range> --episode-streams-only
|
||||
""",
|
||||
)
|
||||
@click.option(
|
||||
"--anime-titles",
|
||||
"--anime_title",
|
||||
"-t",
|
||||
required=True,
|
||||
shell_complete=anime_titles_shell_complete,
|
||||
multiple=True,
|
||||
help="Specify which anime to download",
|
||||
)
|
||||
@click.option(
|
||||
"--episode-range",
|
||||
"-r",
|
||||
help="A range of episodes to download (start-end)",
|
||||
)
|
||||
@click.option(
|
||||
"--search-results-only",
|
||||
"-s",
|
||||
help="print only the search results to stdout",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--anime-info-only", "-i", help="print only selected anime title info", is_flag=True
|
||||
)
|
||||
@click.option(
|
||||
"--episode-streams-only",
|
||||
"-e",
|
||||
help="print only selected anime episodes streams of given range",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.pass_obj
|
||||
def grab(
|
||||
config: "Config",
|
||||
anime_titles: tuple,
|
||||
episode_range,
|
||||
search_results_only,
|
||||
anime_info_only,
|
||||
episode_streams_only,
|
||||
):
|
||||
import json
|
||||
from logging import getLogger
|
||||
from sys import exit
|
||||
|
||||
from thefuzz import fuzz
|
||||
|
||||
logger = getLogger(__name__)
|
||||
if config.manga:
|
||||
manga_title = anime_titles[0]
|
||||
from ...MangaProvider import MangaProvider
|
||||
|
||||
manga_provider = MangaProvider()
|
||||
search_data = manga_provider.search_for_manga(manga_title)
|
||||
if not search_data:
|
||||
exit(1)
|
||||
if search_results_only:
|
||||
print(json.dumps(search_data))
|
||||
exit(0)
|
||||
search_results = search_data["results"]
|
||||
if not search_results:
|
||||
logger.error("no results for your search")
|
||||
exit(1)
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result for search_result in search_results
|
||||
}
|
||||
|
||||
search_result_anime_title = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_titles[0])
|
||||
)
|
||||
manga_info = manga_provider.get_manga(
|
||||
search_results_[search_result_anime_title]["id"]
|
||||
)
|
||||
if not manga_info:
|
||||
return
|
||||
if anime_info_only:
|
||||
print(json.dumps(manga_info))
|
||||
exit(0)
|
||||
|
||||
chapter_info = manga_provider.get_chapter_thumbnails(
|
||||
manga_info["id"], str(episode_range)
|
||||
)
|
||||
if not chapter_info:
|
||||
exit(1)
|
||||
print(json.dumps(chapter_info))
|
||||
|
||||
else:
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
|
||||
grabbed_animes = []
|
||||
for anime_title in anime_titles:
|
||||
# ---- search for anime ----
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, translation_type=config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
exit(1)
|
||||
if search_results_only:
|
||||
# grab only search results skipping all lines after this
|
||||
grabbed_animes.append(search_results)
|
||||
continue
|
||||
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
logger.error("no results for your search")
|
||||
exit(1)
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
search_result_anime_title = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
)
|
||||
|
||||
# ---- fetch anime ----
|
||||
anime = anime_provider.get_anime(
|
||||
search_results_[search_result_anime_title]["id"]
|
||||
)
|
||||
if not anime:
|
||||
exit(1)
|
||||
if anime_info_only:
|
||||
# grab only the anime data skipping all lines after this
|
||||
grabbed_animes.append(anime)
|
||||
continue
|
||||
episodes = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
|
||||
# where the magic happens
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end)
|
||||
]
|
||||
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
|
||||
else:
|
||||
episodes_range = sorted(episodes, key=float)
|
||||
|
||||
if not episode_streams_only:
|
||||
grabbed_anime = dict(anime)
|
||||
grabbed_anime["requested_episodes"] = episodes_range
|
||||
grabbed_anime["translation_type"] = config.translation_type
|
||||
grabbed_anime["episodes_streams"] = {}
|
||||
else:
|
||||
grabbed_anime = {}
|
||||
|
||||
# lets download em
|
||||
for episode in episodes_range:
|
||||
try:
|
||||
if episode not in episodes:
|
||||
continue
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
continue
|
||||
episode_streams = {server["server"]: server for server in streams}
|
||||
|
||||
if episode_streams_only:
|
||||
grabbed_anime[episode] = episode_streams
|
||||
else:
|
||||
grabbed_anime["episodes_streams"][ # pyright:ignore
|
||||
episode
|
||||
] = episode_streams
|
||||
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
# grab the full data for a single title and append it to the final result, or just the episode streams
|
||||
grabbed_animes.append(grabbed_anime)
|
||||
|
||||
# print out the final result: either {} or [] depending on whether more than one title was requested
|
||||
if len(grabbed_animes) == 1:
|
||||
print(json.dumps(grabbed_animes[0]))
|
||||
else:
|
||||
print(json.dumps(grabbed_animes))
|
||||
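Because grab prints plain JSON to stdout, its output can be consumed by any external program. A sketch of reading it back from Python; the title and the field access are illustrative and depend on the provider's response shape:

```python
import json
import subprocess

# single title requested, so grab prints one JSON object
result = subprocess.run(
    ["fastanime", "grab", "-t", "one piece", "-r", "-1"],
    capture_output=True,
    text=True,
    check=True,
)
data = json.loads(result.stdout)
print(data.get("title"), list(data.get("episodes_streams", {})))
```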
@@ -1,145 +1,385 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ...cli.config import Config
|
||||
from ..completion_functions import anime_titles_shell_complete
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...cli.config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="This subcommand directly interacts with the provider to enable basic streaming. Useful for binging anime.",
|
||||
short_help="Binge anime",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# basic form where you will still be prompted for the episode number
|
||||
# multiple titles can be specified with the -t option
|
||||
fastanime search -t <anime-title> -t <anime-title>
|
||||
\b
|
||||
# binge all episodes with this command
|
||||
fastanime search -t <anime-title> -r ':'
|
||||
\b
|
||||
# watch latest episode
|
||||
fastanime search -t <anime-title> -r '-1'
|
||||
\b
|
||||
# binge a specific episode range with this command
|
||||
# be sure to observe the range Syntax
|
||||
fastanime search -t <anime-title> -r '<start>:<stop>'
|
||||
\b
|
||||
fastanime search -t <anime-title> -r '<start>:<stop>:<step>'
|
||||
\b
|
||||
fastanime search -t <anime-title> -r '<start>:'
|
||||
\b
|
||||
fastanime search -t <anime-title> -r ':<end>'
|
||||
""",
|
||||
)
|
||||
@click.option(
|
||||
"--anime-titles",
|
||||
"--anime_title",
|
||||
"-t",
|
||||
required=True,
|
||||
shell_complete=anime_titles_shell_complete,
|
||||
multiple=True,
|
||||
help="Specify which anime to download",
|
||||
)
|
||||
@click.option(
|
||||
"--episode-range",
|
||||
"-r",
|
||||
help="A range of episodes to binge",
|
||||
help="A range of episodes to binge (start-end)",
|
||||
)
|
||||
@click.argument("anime_title", required=True, type=str)
|
||||
@click.pass_obj
|
||||
def search(config: Config, anime_title: str, episode_range: str):
|
||||
def search(config: "Config", anime_titles: str, episode_range: str):
|
||||
from click import clear
|
||||
from rich import print
|
||||
from rich.progress import Progress
|
||||
from thefuzz import fuzz
|
||||
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ...libs.anime_provider.types import Anime
|
||||
from ...libs.fzf import fzf
|
||||
from ...libs.rofi import Rofi
|
||||
from ..utils.mpv import run_mpv
|
||||
from ..utils.tools import exit_app
|
||||
from ..utils.utils import fuzzy_inquirer
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
if config.manga:
|
||||
from InquirerPy.prompts.number import NumberPrompt
|
||||
from yt_dlp.utils import sanitize_filename
|
||||
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
print("Search results not found")
|
||||
input("Enter to retry")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
print("Anime not found :cry:")
|
||||
exit_app()
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result for search_result in search_results
|
||||
}
|
||||
from ...MangaProvider import MangaProvider
|
||||
from ..utils.feh import feh_manga_viewer
|
||||
|
||||
if config.auto_select:
|
||||
search_result = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
)
|
||||
print("[cyan]Auto Selecting:[/] ", search_result)
|
||||
manga_title = anime_titles[0]
|
||||
|
||||
else:
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
search_result = fzf.run(choices, "Please Select title: ", "FastAnime")
|
||||
elif config.use_rofi:
|
||||
search_result = Rofi.run(choices, "Please Select Title")
|
||||
else:
|
||||
search_result = fuzzy_inquirer(
|
||||
"Please Select Title",
|
||||
choices,
|
||||
manga_provider = MangaProvider()
|
||||
search_data = manga_provider.search_for_manga(manga_title)
|
||||
if not search_data:
|
||||
print("No search results")
|
||||
exit(1)
|
||||
|
||||
search_results = search_data["results"]
|
||||
|
||||
search_results_ = {
|
||||
sanitize_filename(search_result["title"]): search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
search_result_manga_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(title, manga_title),
|
||||
)
|
||||
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
|
||||
|
||||
# ---- fetch selected anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[search_result]["id"]
|
||||
)
|
||||
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
input("Enter to continue...")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
episode_range_ = None
|
||||
episodes = anime["availableEpisodesDetail"][config.translation_type]
|
||||
if episode_range:
|
||||
episodes_start, episodes_end = episode_range.split("-")
|
||||
if episodes_start and episodes_end:
|
||||
episode_range_ = iter(
|
||||
range(round(float(episodes_start)), round(float(episodes_end)) + 1)
|
||||
)
|
||||
else:
|
||||
episode_range_ = iter(sorted(episodes, key=float))
|
||||
choices = list(search_results_.keys())
|
||||
preview = None
|
||||
if config.preview:
|
||||
from ..interfaces.utils import get_fzf_manga_preview
|
||||
|
||||
def stream_anime():
|
||||
clear()
|
||||
episode = None
|
||||
|
||||
if episode_range_:
|
||||
try:
|
||||
episode = str(next(episode_range_))
|
||||
print(
|
||||
f"[cyan]Auto selecting:[/] {search_result} [cyan]Episode:[/] {episode}"
|
||||
)
|
||||
except StopIteration:
|
||||
print("[green]Completed binge sequence[/]:smile:")
|
||||
|
||||
if not episode or episode not in episodes:
|
||||
preview = get_fzf_manga_preview(search_results)
|
||||
if config.use_fzf:
|
||||
episode = fzf.run(episodes, "Select an episode: ", header=search_result)
|
||||
search_result_manga_title = fzf.run(
|
||||
choices, "Please Select title", preview=preview
|
||||
)
|
||||
elif config.use_rofi:
|
||||
episode = Rofi.run(episodes, "Select an episode")
|
||||
search_result_manga_title = Rofi.run(choices, "Please Select Title")
|
||||
else:
|
||||
episode = fuzzy_inquirer("Select episode", episodes)
|
||||
search_result_manga_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Please Select Title",
|
||||
)
|
||||
|
||||
# ---- fetch streams ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime, episode, config.translation_type
|
||||
anilist_id = search_results_[search_result_manga_title]["id"]
|
||||
manga_info = manga_provider.get_manga(anilist_id)
|
||||
if not manga_info:
|
||||
print("No manga info")
|
||||
exit(1)
|
||||
|
||||
anilist_helper = None
|
||||
if config.user:
|
||||
from ...anilist import AniList
|
||||
|
||||
AniList.login_user(config.user["token"])
|
||||
anilist_helper = AniList
|
||||
|
||||
def _manga_viewer():
|
||||
chapter_number = NumberPrompt("Select a chapter number").execute()
|
||||
chapter_info = manga_provider.get_chapter_thumbnails(
|
||||
manga_info["id"], str(chapter_number)
|
||||
)
|
||||
if not streams:
|
||||
print("Failed to get streams")
|
||||
|
||||
if not chapter_info:
|
||||
print("No chapter info")
|
||||
input("Enter to retry...")
|
||||
_manga_viewer()
|
||||
return
|
||||
print(
|
||||
f"[purple bold]Now Reading: [/] {search_result_manga_title} [cyan bold]Chapter:[/] {chapter_info['title']}"
|
||||
)
|
||||
feh_manga_viewer(chapter_info["thumbnails"], str(chapter_info["title"]))
|
||||
if anilist_helper:
|
||||
anilist_helper.update_anime_list(
|
||||
{"mediaId": anilist_id, "progress": chapter_number}
|
||||
)
|
||||
_manga_viewer()
|
||||
|
||||
_manga_viewer()
|
||||
else:
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ...libs.anime_provider.types import Anime
|
||||
from ...Utility.data import anime_normalizer
|
||||
from ..utils.mpv import run_mpv
|
||||
from ..utils.utils import filter_by_quality, move_preferred_subtitle_lang_to_top
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
anilist_anime_info = None
|
||||
|
||||
print(f"[green bold]Streaming:[/] {anime_titles}")
|
||||
for anime_title in anime_titles:
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
print("Search results not found")
|
||||
input("Enter to retry")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
print("Anime not found :cry:")
|
||||
exit_app()
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
search_result_manga_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(
|
||||
anime_normalizer.get(title, title), anime_title
|
||||
),
|
||||
)
|
||||
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
|
||||
|
||||
# ---- fetch servers ----
|
||||
with Progress() as progress:
|
||||
if config.server == "top":
|
||||
progress.add_task("Fetching top server...", total=None)
|
||||
server = next(streams)
|
||||
link = server["links"][config.quality]["link"]
|
||||
else:
|
||||
progress.add_task("Fetching servers", total=None)
|
||||
links = [link["link"] for server in streams for link in server["links"]]
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
link = fzf.run(links, "Select a link: ", header=search_result)
|
||||
search_result_manga_title = fzf.run(
|
||||
choices, "Please Select title", "FastAnime"
|
||||
)
|
||||
elif config.use_rofi:
|
||||
link = Rofi.run(links, "Select a link")
|
||||
search_result_manga_title = Rofi.run(choices, "Please Select Title")
|
||||
else:
|
||||
link = fuzzy_inquirer("Select link", links)
|
||||
search_result_manga_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Please Select Title",
|
||||
)
|
||||
|
||||
print(f"[purple]Now Playing:[/] {search_result} Episode {episode}")
|
||||
# ---- fetch selected anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[search_result_manga_title]["id"]
|
||||
)
|
||||
|
||||
run_mpv(link, search_result)
|
||||
stream_anime()
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
input("Enter to continue...")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
episodes_range = []
|
||||
episodes: list[str] = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
|
||||
stream_anime()
|
||||
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end)
|
||||
]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
|
||||
episodes_range = iter(episodes_range)
|
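As a side note, the episode range handling above can be read as one small standalone helper; the sketch below is illustrative only, and parse_episode_range is a made-up name rather than a function in the codebase.

def parse_episode_range(episode_range: str, episodes: list[str]) -> list[str]:
    # Illustrative sketch of the slicing rules above; not part of fastanime.
    if ":" not in episode_range:
        return episodes[int(episode_range):]            # "3"     -> from index 3 to the end
    parts = episode_range.split(":")
    if len(parts) == 3 and all(parts):
        start, end, step = parts
        return episodes[int(start):int(end):int(step)]  # "0:9:2" -> every other episode
    if len(parts) == 2 and all(parts):
        start, end = parts
        return episodes[int(start):int(end)]            # "2:5"
    start, end = parts[0], parts[1]
    if start.strip():
        return episodes[int(start):]                    # "4:"
    if end.strip():
        return episodes[:int(end)]                      # ":4"
    return episodes                                     # ":" -> everything


episodes = sorted(["1", "2", "3", "4", "5"], key=float)
print(parse_episode_range("1:4", episodes))  # ['2', '3', '4']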
||||
|
||||
if config.normalize_titles:
|
||||
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
|
||||
|
||||
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
|
||||
|
||||
def stream_anime(anime: "Anime"):
|
||||
clear()
|
||||
episode = None
|
||||
|
||||
if episodes_range:
|
||||
try:
|
||||
episode = next(episodes_range) # pyright:ignore
|
||||
print(
|
||||
f"[cyan]Auto selecting:[/] {search_result_manga_title} [cyan]Episode:[/] {episode}"
|
||||
)
|
||||
except StopIteration:
|
||||
print("[green]Completed binge sequence[/]:smile:")
|
||||
return
|
||||
|
||||
if not episode or episode not in episodes:
|
||||
choices = [*episodes, "end"]
|
||||
if config.use_fzf:
|
||||
episode = fzf.run(
|
||||
choices,
|
||||
"Select an episode",
|
||||
header=search_result_manga_title,
|
||||
)
|
||||
elif config.use_rofi:
|
||||
episode = Rofi.run(choices, "Select an episode")
|
||||
else:
|
||||
episode = fuzzy_inquirer(
|
||||
choices,
|
||||
"Select episode",
|
||||
)
|
||||
if episode == "end":
|
||||
return
|
||||
|
||||
# ---- fetch streams ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
print("Failed to get streams")
|
||||
return
|
||||
|
||||
try:
|
||||
# ---- fetch servers ----
|
||||
if config.server == "top":
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching top server...", total=None)
|
||||
server = next(streams, None)
|
||||
if not server:
|
||||
print("Sth went wrong when fetching the episode")
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
return
|
||||
stream_link = filter_by_quality(config.quality, server["links"])
|
||||
if not stream_link:
|
||||
print("Quality not found")
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
return
|
||||
link = stream_link["link"]
|
||||
subtitles = server["subtitles"]
|
||||
stream_headers = server["headers"]
|
||||
episode_title = server["episode_title"]
|
||||
else:
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching servers", total=None)
|
||||
# prompt for server selection
|
||||
servers = {server["server"]: server for server in streams}
|
||||
servers_names = list(servers.keys())
|
||||
if config.server in servers_names:
|
||||
server = config.server
|
||||
else:
|
||||
if config.use_fzf:
|
||||
server = fzf.run(servers_names, "Select a server")
|
||||
elif config.use_rofi:
|
||||
server = Rofi.run(servers_names, "Select a server")
|
||||
else:
|
||||
server = fuzzy_inquirer(
|
||||
servers_names,
|
||||
"Select link",
|
||||
)
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, servers[server]["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("Quality not found")
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
return
|
||||
link = stream_link["link"]
|
||||
stream_headers = servers[server]["headers"]
|
||||
subtitles = servers[server]["subtitles"]
|
||||
episode_title = servers[server]["episode_title"]
|
||||
|
||||
selected_anime_title = search_result_manga_title
|
||||
if anilist_anime_info:
|
||||
selected_anime_title = (
|
||||
anilist_anime_info["title"][config.preferred_language]
|
||||
or anilist_anime_info["title"]["romaji"]
|
||||
or anilist_anime_info["title"]["english"]
|
||||
)
|
||||
import re
|
||||
|
||||
for episode_detail in anilist_anime_info["episodes"]:
|
||||
if re.match(f"Episode {episode} ", episode_detail["title"]):
|
||||
episode_title = episode_detail["title"]
|
||||
break
|
||||
print(
|
||||
f"[purple]Now Playing:[/] {selected_anime_title} Episode {episode}"
|
||||
)
|
||||
subtitles = move_preferred_subtitle_lang_to_top(
|
||||
subtitles, config.sub_lang
|
||||
)
|
||||
if config.sync_play:
|
||||
from ..utils.syncplay import SyncPlayer
|
||||
|
||||
SyncPlayer(
|
||||
link,
|
||||
episode_title,
|
||||
headers=stream_headers,
|
||||
subtitles=subtitles,
|
||||
)
|
||||
else:
|
||||
run_mpv(
|
||||
link,
|
||||
episode_title,
|
||||
headers=stream_headers,
|
||||
subtitles=subtitles,
|
||||
player=config.player,
|
||||
)
|
||||
except IndexError as e:
|
||||
print(e)
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
|
||||
stream_anime(anime)
|
||||
|
||||
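The quality and subtitle helpers imported from ..utils.utils are not shown in this diff; the sketch below is only a guess at their behaviour based on how they are called above, so both bodies (and the "quality" and "language" keys they assume) should be treated as assumptions rather than the real implementations.

# Hedged sketch: rough approximations of filter_by_quality and
# move_preferred_subtitle_lang_to_top, inferred purely from their call sites.
def filter_by_quality(quality: str, links: list[dict]) -> dict | None:
    # assumes each link dict carries a "quality" field alongside "link"
    for link in links:
        if str(link.get("quality")) == str(quality):
            return link
    return None  # callers print "Quality not found" and retry


def move_preferred_subtitle_lang_to_top(subtitles: list[dict], sub_lang: str) -> list[dict]:
    # assumes each subtitle dict carries a "language" field
    preferred = [s for s in subtitles if sub_lang.lower() in str(s.get("language", "")).lower()]
    rest = [s for s in subtitles if s not in preferred]
    return [*preferred, *rest]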
53
fastanime/cli/commands/update.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Helper command to update fastanime to latest",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# update fastanime to latest
|
||||
fastanime update
|
||||
\b
|
||||
# check for latest release
|
||||
fastanime update --check
|
||||
""",
|
||||
)
|
||||
@click.option("--check", "-c", help="Check for the latest release", is_flag=True)
|
||||
def update(
|
||||
check,
|
||||
):
|
||||
from rich.console import Console
|
||||
from rich.markdown import Markdown
|
||||
|
||||
from ... import __version__
|
||||
from ..app_updater import check_for_updates, update_app
|
||||
|
||||
def _print_release(release_data):
|
||||
console = Console()
|
||||
body = Markdown(release_data["body"])
|
||||
tag = release_data["tag_name"]
|
||||
tag_title = release_data["name"]
|
||||
github_page_url = release_data["html_url"]
|
||||
console.print(f"Release Page: {github_page_url}")
|
||||
console.print(f"Tag: {tag}")
|
||||
console.print(f"Title: {tag_title}")
|
||||
console.print(body)
|
||||
|
||||
if check:
|
||||
is_latest, github_release_data = check_for_updates()
|
||||
if not is_latest:
|
||||
print(
|
||||
f"You are running an older version ({__version__}) of fastanime please update to get the latest features"
|
||||
)
|
||||
_print_release(github_release_data)
|
||||
else:
|
||||
print(f"You are running the latest version ({__version__}) of fastanime")
|
||||
_print_release(github_release_data)
|
||||
else:
|
||||
success, github_release_data = update_app()
|
||||
_print_release(github_release_data)
|
||||
if success:
|
||||
print("Successfully updated")
|
||||
else:
|
||||
print("failed to update")
|
||||
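check_for_updates and update_app live in ..app_updater and are not part of this hunk; the sketch below only illustrates the kind of GitHub releases lookup such a helper could perform, so the endpoint handling and version comparison here are assumptions, not the actual implementation.

# Hedged sketch of a check_for_updates-style helper; the real one may differ.
import requests

LATEST_RELEASE_URL = "https://api.github.com/repos/Benexl/FastAnime/releases/latest"


def check_for_updates_sketch(current_version: str) -> tuple[bool, dict]:
    # Fetch the latest release metadata and compare its tag to the local version.
    response = requests.get(LATEST_RELEASE_URL, timeout=10)
    response.raise_for_status()
    release_data = response.json()  # contains tag_name, name, html_url, body, ...
    is_latest = release_data["tag_name"].lstrip("v") == current_version.lstrip("v")
    return is_latest, release_data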
95
fastanime/cli/completion_functions.py
Normal file
@@ -0,0 +1,95 @@
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
|
||||
|
||||
anime_title_query = """
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
media(search: $query, type: ANIME) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def get_anime_titles(query: str, variables: dict = {}):
|
||||
"""the abstraction over all none authenticated requests and that returns data of a similar type
|
||||
|
||||
Args:
|
||||
query: the anilist query
|
||||
variables: the anilist api variables
|
||||
|
||||
Returns:
|
||||
a list of english and romaji titles, or an empty list if the request fails
|
||||
"""
|
||||
from requests import post
|
||||
|
||||
try:
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": variables},
|
||||
timeout=10,
|
||||
)
|
||||
anilist_data = response.json()
|
||||
|
||||
if response.status_code == 200:
|
||||
eng_titles = [
|
||||
anime["title"]["english"]
|
||||
for anime in anilist_data["data"]["Page"]["media"]
|
||||
if anime["title"]["english"]
|
||||
]
|
||||
romaji_titles = [
|
||||
anime["title"]["romaji"]
|
||||
for anime in anilist_data["data"]["Page"]["media"]
|
||||
if anime["title"]["romaji"]
|
||||
]
|
||||
return [*eng_titles, *romaji_titles]
|
||||
else:
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
return []
|
||||
|
||||
|
||||
def downloaded_anime_titles(ctx, param, incomplete):
|
||||
import os
|
||||
|
||||
from ..constants import USER_VIDEOS_DIR
|
||||
|
||||
try:
|
||||
titles = [
|
||||
title
|
||||
for title in os.listdir(USER_VIDEOS_DIR)
|
||||
if title.lower().startswith(incomplete.lower()) or not incomplete
|
||||
]
|
||||
return titles
|
||||
except Exception:
|
||||
return []
|
||||
|
||||
|
||||
def anime_titles_shell_complete(ctx, param, incomplete):
|
||||
incomplete = incomplete.strip()
|
||||
if not incomplete:
|
||||
incomplete = None
|
||||
variables = {}
|
||||
else:
|
||||
variables = {"query": incomplete}
|
||||
return get_anime_titles(anime_title_query, variables)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
t = input("Enter title")
|
||||
results = get_anime_titles(anime_title_query, {"query": t})
|
||||
print(results)
|
||||
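These callbacks follow click's shell_complete protocol (a callable receiving ctx, param and the partial word and returning completion candidates); a minimal, self-contained illustration of wiring one up, using a throwaway command rather than an actual fastanime command:

# Minimal illustration of attaching a completion callback to a click argument.
# The command and the static titles below are made up for the example.
import click


def demo_titles_shell_complete(ctx, param, incomplete):
    titles = ["One Piece", "One Punch Man", "Naruto"]
    return [t for t in titles if t.lower().startswith(incomplete.lower())]


@click.command()
@click.argument("title", shell_complete=demo_titles_shell_complete)
def watch(title):
    click.echo(f"Searching for {title}...")


if __name__ == "__main__":
    watch()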
@@ -1,129 +1,215 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from configparser import ConfigParser
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from rich import print
|
||||
|
||||
from ..constants import USER_CONFIG_PATH, USER_VIDEOS_DIR
|
||||
from ..constants import (
|
||||
USER_CONFIG_PATH,
|
||||
USER_DATA_PATH,
|
||||
USER_VIDEOS_DIR,
|
||||
USER_WATCH_HISTORY_PATH,
|
||||
)
|
||||
from ..libs.rofi import Rofi
|
||||
from ..Utility.user_data_helper import user_data_helper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
if TYPE_CHECKING:
|
||||
from ..AnimeProvider import AnimeProvider
|
||||
|
||||
|
||||
class Config(object):
|
||||
manga = False
|
||||
sync_play = False
|
||||
anime_list: list
|
||||
watch_history: dict
|
||||
watch_history: dict = {}
|
||||
fastanime_anilist_app_login_url = (
|
||||
"https://anilist.co/api/v2/oauth/authorize?client_id=20148&response_type=token"
|
||||
)
|
||||
anime_provider: "AnimeProvider"
|
||||
user_data = {"watch_history": {}, "animelist": [], "user": {}}
|
||||
default_config = {
|
||||
"auto_next": "False",
|
||||
"auto_select": "True",
|
||||
"cache_requests": "true",
|
||||
"continue_from_history": "True",
|
||||
"default_media_list_tracking": "None",
|
||||
"downloads_dir": USER_VIDEOS_DIR,
|
||||
"episode_complete_at": "80",
|
||||
"ffmpegthumbnailer_seek_time": "-1",
|
||||
"force_forward_tracking": "true",
|
||||
"force_window": "immediate",
|
||||
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
|
||||
"icons": "false",
|
||||
"normalize_titles": "true",
|
||||
"notification_duration": "2",
|
||||
"player": "mpv",
|
||||
"preferred_history": "local",
|
||||
"preferred_language": "english",
|
||||
"preview": "False",
|
||||
"provider": "allanime",
|
||||
"quality": "1080",
|
||||
"rofi_theme": "",
|
||||
"rofi_theme_confirm": "",
|
||||
"rofi_theme_input": "",
|
||||
"server": "top",
|
||||
"skip": "false",
|
||||
"sort_by": "search match",
|
||||
"sub_lang": "eng",
|
||||
"translation_type": "sub",
|
||||
"use_fzf": "False",
|
||||
"use_persistent_provider_store": "false",
|
||||
"use_python_mpv": "false",
|
||||
"use_rofi": "false",
|
||||
}
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.initialize_user_data_and_watch_history()
|
||||
self.load_config()
|
||||
|
||||
def load_config(self):
|
||||
self.configparser = ConfigParser(
|
||||
{
|
||||
"server": "top",
|
||||
"continue_from_history": "True",
|
||||
"quality": "0",
|
||||
"auto_next": "False",
|
||||
"auto_select": "True",
|
||||
"sort_by": "search match",
|
||||
"downloads_dir": USER_VIDEOS_DIR,
|
||||
"translation_type": "sub",
|
||||
"preferred_language": "english",
|
||||
"use_fzf": "False",
|
||||
"preview": "False",
|
||||
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
|
||||
"provider": "allanime",
|
||||
"error": "3",
|
||||
"icons": "false",
|
||||
"notification_duration": "2",
|
||||
"skip": "false",
|
||||
"use_rofi": "false",
|
||||
"rofi_theme": "",
|
||||
"rofi_theme_input": "",
|
||||
"rofi_theme_confirm": "",
|
||||
"use_mpv_mod": "true",
|
||||
}
|
||||
)
|
||||
self.configparser = ConfigParser(self.default_config)
|
||||
self.configparser.add_section("stream")
|
||||
self.configparser.add_section("general")
|
||||
self.configparser.add_section("anilist")
|
||||
if not os.path.exists(USER_CONFIG_PATH):
|
||||
with open(USER_CONFIG_PATH, "w") as config:
|
||||
self.configparser.write(config)
|
||||
|
||||
self.configparser.read(USER_CONFIG_PATH)
|
||||
# --- set config values from file or using defaults ---
|
||||
if os.path.exists(USER_CONFIG_PATH):
|
||||
self.configparser.read(USER_CONFIG_PATH, encoding="utf-8")
|
||||
|
||||
# --- set defaults ---
|
||||
self.downloads_dir = self.get_downloads_dir()
|
||||
self.provider = self.get_provider()
|
||||
self.use_fzf = self.get_use_fzf()
|
||||
self.use_rofi = self.get_use_rofi()
|
||||
self.skip = self.get_skip()
|
||||
self.icons = self.get_icons()
|
||||
self.preview = self.get_preview()
|
||||
self.translation_type = self.get_translation_type()
|
||||
self.sort_by = self.get_sort_by()
|
||||
self.continue_from_history = self.get_continue_from_history()
|
||||
self.auto_next = self.get_auto_next()
|
||||
self.auto_select = self.get_auto_select()
|
||||
self.use_mpv_mod = self.get_use_mpv_mod()
|
||||
self.quality = self.get_quality()
|
||||
self.notification_duration = self.get_notification_duration()
|
||||
self.error = self.get_error()
|
||||
self.server = self.get_server()
|
||||
self.cache_requests = self.get_cache_requests()
|
||||
self.continue_from_history = self.get_continue_from_history()
|
||||
self.default_media_list_tracking = self.get_default_media_list_tracking()
|
||||
self.downloads_dir = self.get_downloads_dir()
|
||||
self.episode_complete_at = self.get_episode_complete_at()
|
||||
self.ffmpegthumbnailer_seek_time = self.get_ffmpegthumnailer_seek_time()
|
||||
self.force_forward_tracking = self.get_force_forward_tracking()
|
||||
self.force_window = self.get_force_window()
|
||||
self.format = self.get_format()
|
||||
self.icons = self.get_icons()
|
||||
self.normalize_titles = self.get_normalize_titles()
|
||||
self.notification_duration = self.get_notification_duration()
|
||||
self.player = self.get_player()
|
||||
self.preferred_history = self.get_preferred_history()
|
||||
self.preferred_language = self.get_preferred_language()
|
||||
self.rofi_theme = self.get_rofi_theme()
|
||||
Rofi.rofi_theme = self.rofi_theme
|
||||
self.rofi_theme_input = self.get_rofi_theme_input()
|
||||
Rofi.rofi_theme_input = self.rofi_theme_input
|
||||
self.preview = self.get_preview()
|
||||
self.provider = self.get_provider()
|
||||
self.quality = self.get_quality()
|
||||
|
||||
self.rofi_theme_confirm = self.get_rofi_theme_confirm()
|
||||
self.rofi_theme_input = self.get_rofi_theme_input()
|
||||
self.rofi_theme = self.get_rofi_theme()
|
||||
|
||||
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
|
||||
Rofi.rofi_theme_input = self.rofi_theme_input
|
||||
Rofi.rofi_theme = self.rofi_theme
|
||||
|
||||
self.server = self.get_server()
|
||||
self.skip = self.get_skip()
|
||||
self.sort_by = self.get_sort_by()
|
||||
self.sub_lang = self.get_sub_lang()
|
||||
self.translation_type = self.get_translation_type()
|
||||
self.use_fzf = self.get_use_fzf()
|
||||
self.use_python_mpv = self.get_use_mpv_mod()
|
||||
self.use_rofi = self.get_use_rofi()
|
||||
self.use_persistent_provider_store = self.get_use_persistent_provider_store()
|
||||
|
||||
# ---- setup user data ------
|
||||
self.watch_history: dict = user_data_helper.user_data.get("watch_history", {})
|
||||
self.anime_list: list = user_data_helper.user_data.get("animelist", [])
|
||||
self.user: dict = user_data_helper.user_data.get("user", {})
|
||||
self.anime_list: list = self.user_data.get("animelist", [])
|
||||
self.user: dict = self.user_data.get("user", {})
|
||||
|
||||
if not os.path.exists(USER_CONFIG_PATH):
|
||||
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as config:
|
||||
config.write(self.__repr__())
|
||||
|
||||
def set_fastanime_config_environs(self):
|
||||
current_config = []
|
||||
for key in self.default_config:
|
||||
current_config.append((f"FASTANIME_{key.upper()}", str(getattr(self, key))))
|
||||
os.environ.update(current_config)
|
||||
|
||||
def update_user(self, user):
|
||||
self.user = user
|
||||
user_data_helper.update_user_info(user)
|
||||
self.user_data["user"] = user
|
||||
self._update_user_data()
|
||||
|
||||
def update_watch_history(
|
||||
self, anime_id: int, episode: str | None, start_time="0", total_time="0"
|
||||
def media_list_track(
|
||||
self,
|
||||
anime_id: int,
|
||||
episode_no: str,
|
||||
episode_stopped_at="0",
|
||||
episode_total_length="0",
|
||||
progress_tracking="prompt",
|
||||
):
|
||||
self.watch_history.update(
|
||||
{
|
||||
str(anime_id): {
|
||||
"episode": episode,
|
||||
"start_time": start_time,
|
||||
"total_time": total_time,
|
||||
"episode_no": episode_no,
|
||||
"episode_stopped_at": episode_stopped_at,
|
||||
"episode_total_length": episode_total_length,
|
||||
"progress_tracking": progress_tracking,
|
||||
}
|
||||
}
|
||||
)
|
||||
user_data_helper.update_watch_history(self.watch_history)
|
||||
with open(USER_WATCH_HISTORY_PATH, "w") as f:
|
||||
json.dump(self.watch_history, f)
|
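For reference, a single entry written into watch_history.json by media_list_track above has roughly this shape (the anilist id and values here are placeholders; the time fields are stored as strings by the code above):

# Placeholder illustration of one watch_history.json entry.
import json

watch_history = {
    "12345": {
        "episode_no": "5",
        "episode_stopped_at": "0",
        "episode_total_length": "0",
        "progress_tracking": "prompt",
    }
}
print(json.dumps(watch_history, indent=2))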
||||
|
||||
def update_anime_list(self, anime_id: int, remove=False):
|
||||
if remove:
|
||||
try:
|
||||
self.anime_list.remove(anime_id)
|
||||
print("Succesfully removed :cry:")
|
||||
except Exception:
|
||||
print(anime_id, "Nothing to remove :confused:")
|
||||
else:
|
||||
self.anime_list.append(anime_id)
|
||||
user_data_helper.update_animelist(self.anime_list)
|
||||
print("Succesfully added :smile:")
|
||||
input("Enter to continue...")
|
||||
def initialize_user_data_and_watch_history(self):
|
||||
try:
|
||||
if os.path.isfile(USER_DATA_PATH):
|
||||
with open(USER_DATA_PATH, "r") as f:
|
||||
user_data = json.load(f)
|
||||
self.user_data.update(user_data)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
try:
|
||||
if os.path.isfile(USER_WATCH_HISTORY_PATH):
|
||||
with open(USER_WATCH_HISTORY_PATH, "r") as f:
|
||||
watch_history = json.load(f)
|
||||
self.watch_history.update(watch_history)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
def _update_user_data(self):
|
||||
"""method that updates the actual user data file"""
|
||||
with open(USER_DATA_PATH, "w") as f:
|
||||
json.dump(self.user_data, f)
|
||||
|
||||
# getters for user configuration
|
||||
|
||||
# --- general section ---
|
||||
def get_provider(self):
|
||||
return self.configparser.get("general", "provider")
|
||||
|
||||
def get_ffmpegthumnailer_seek_time(self):
|
||||
return self.configparser.getint("general", "ffmpegthumbnailer_seek_time")
|
||||
|
||||
def get_preferred_language(self):
|
||||
return self.configparser.get("general", "preferred_language")
|
||||
|
||||
def get_sub_lang(self):
|
||||
return self.configparser.get("general", "sub_lang")
|
||||
|
||||
def get_downloads_dir(self):
|
||||
return self.configparser.get("general", "downloads_dir")
|
||||
|
||||
def get_icons(self):
|
||||
return self.configparser.getboolean("general", "icons")
|
||||
|
||||
def get_preview(self):
|
||||
return self.configparser.getboolean("general", "preview")
|
||||
|
||||
def get_use_fzf(self):
|
||||
return self.configparser.getboolean("general", "use_fzf")
|
||||
|
||||
def get_use_persistent_provider_store(self):
|
||||
return self.configparser.getboolean("general", "use_persistent_provider_store")
|
||||
|
||||
# rofi configuration
|
||||
def get_use_rofi(self):
|
||||
return self.configparser.getboolean("general", "use_rofi")
|
||||
|
||||
def get_rofi_theme(self):
|
||||
return self.configparser.get("general", "rofi_theme")
|
||||
|
||||
@@ -133,53 +219,51 @@ class Config(object):
|
||||
def get_rofi_theme_confirm(self):
|
||||
return self.configparser.get("general", "rofi_theme_confirm")
|
||||
|
||||
def get_downloads_dir(self):
|
||||
return self.configparser.get("general", "downloads_dir")
|
||||
def get_force_forward_tracking(self):
|
||||
return self.configparser.getboolean("general", "force_forward_tracking")
|
||||
|
||||
def get_use_fzf(self):
|
||||
return self.configparser.getboolean("general", "use_fzf")
|
||||
def get_cache_requests(self):
|
||||
return self.configparser.getboolean("general", "cache_requests")
|
||||
|
||||
def get_use_rofi(self):
|
||||
return self.configparser.getboolean("general", "use_rofi")
|
||||
def get_default_media_list_tracking(self):
|
||||
return self.configparser.get("general", "default_media_list_tracking")
|
||||
|
||||
def get_normalize_titles(self):
|
||||
return self.configparser.getboolean("general", "normalize_titles")
|
||||
|
||||
# --- stream section ---
|
||||
def get_skip(self):
|
||||
return self.configparser.getboolean("stream", "skip")
|
||||
|
||||
def get_icons(self):
|
||||
return self.configparser.getboolean("general", "icons")
|
||||
|
||||
def get_preview(self):
|
||||
return self.configparser.getboolean("general", "preview")
|
||||
|
||||
def get_preferred_language(self):
|
||||
return self.configparser.get("general", "preferred_language")
|
||||
|
||||
def get_sort_by(self):
|
||||
return self.configparser.get("anilist", "sort_by")
|
||||
|
||||
def get_continue_from_history(self):
|
||||
return self.configparser.getboolean("stream", "continue_from_history")
|
||||
|
||||
def get_translation_type(self):
|
||||
return self.configparser.get("stream", "translation_type")
|
||||
|
||||
def get_auto_next(self):
|
||||
return self.configparser.getboolean("stream", "auto_next")
|
||||
|
||||
def get_auto_select(self):
|
||||
return self.configparser.getboolean("stream", "auto_select")
|
||||
|
||||
def get_quality(self):
|
||||
return self.configparser.getint("stream", "quality")
|
||||
def get_continue_from_history(self):
|
||||
return self.configparser.getboolean("stream", "continue_from_history")
|
||||
|
||||
def get_use_mpv_mod(self):
|
||||
return self.configparser.getboolean("stream", "use_mpv_mod")
|
||||
return self.configparser.getboolean("stream", "use_python_mpv")
|
||||
|
||||
def get_notification_duration(self):
|
||||
return self.configparser.getint("general", "notification_duration")
|
||||
|
||||
def get_error(self):
|
||||
return self.configparser.getint("stream", "error")
|
||||
def get_episode_complete_at(self):
|
||||
return self.configparser.getint("stream", "episode_complete_at")
|
||||
|
||||
def get_force_window(self):
|
||||
return self.configparser.get("stream", "force_window")
|
||||
|
||||
def get_translation_type(self):
|
||||
return self.configparser.get("stream", "translation_type")
|
||||
|
||||
def get_preferred_history(self):
|
||||
return self.configparser.get("stream", "preferred_history")
|
||||
|
||||
def get_quality(self):
|
||||
return self.configparser.get("stream", "quality")
|
||||
|
||||
def get_server(self):
|
||||
return self.configparser.get("stream", "server")
|
||||
@@ -187,13 +271,242 @@ class Config(object):
|
||||
def get_format(self):
|
||||
return self.configparser.get("stream", "format")
|
||||
|
||||
def get_player(self):
|
||||
return self.configparser.get("stream", "player")
|
||||
|
||||
def get_sort_by(self):
|
||||
return self.configparser.get("anilist", "sort_by")
|
||||
|
||||
def update_config(self, section: str, key: str, value: str):
|
||||
self.configparser.set(section, key, value)
|
||||
with open(USER_CONFIG_PATH, "w") as config:
|
||||
self.configparser.write(config)
|
||||
|
||||
def __repr__(self):
|
||||
return f"Config(server:{self.get_server()},quality:{self.get_quality()},auto_next:{self.get_auto_next()},continue_from_history:{self.get_continue_from_history()},sort_by:{self.get_sort_by()},downloads_dir:{self.get_downloads_dir()})"
|
||||
current_config_state = f"""\
|
||||
#
|
||||
# ███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░
|
||||
# ██╔════╝██╔══██╗██╔════╝╚══██╔══╝██╔══██╗████╗░██║██║████╗░████║██╔════╝ ██╔══██╗██╔══██╗████╗░██║██╔════╝██║██╔════╝░
|
||||
# █████╗░░███████║╚█████╗░░░░██║░░░███████║██╔██╗██║██║██╔████╔██║█████╗░░ ██║░░╚═╝██║░░██║██╔██╗██║█████╗░░██║██║░░██╗░
|
||||
# ██╔══╝░░██╔══██║░╚═══██╗░░░██║░░░██╔══██║██║╚████║██║██║╚██╔╝██║██╔══╝░░ ██║░░██╗██║░░██║██║╚████║██╔══╝░░██║██║░░╚██╗
|
||||
# ██║░░░░░██║░░██║██████╔╝░░░██║░░░██║░░██║██║░╚███║██║██║░╚═╝░██║███████╗ ╚█████╔╝╚█████╔╝██║░╚███║██║░░░░░██║╚██████╔╝
|
||||
# ╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝ ░╚════╝░░╚════╝░╚═╝░░╚══╝╚═╝░░░░░╚═╝░╚═════╝░
|
||||
#
|
||||
[general]
|
||||
# whether to show the icons in the tui [True/False]
|
||||
# more like emojis
|
||||
# by the way if you have any recommendations to which should be used where please
|
||||
# don't hesitate to share your opinion
|
||||
# cause it's a lot of work to look for the right one for each menu option
|
||||
# be sure to also give the replacement emoji
|
||||
icons = {self.icons}
|
||||
|
||||
# the quality of the stream [1080,720,480,360]
|
||||
# this option is usually only reliable when:
|
||||
# provider=animepahe
|
||||
# since it provides links that actually point to streams of different qualities
|
||||
# while the rest just point to another link that can provide the anime from the same server
|
||||
quality = {self.quality}
|
||||
|
||||
# whether to normalize provider titles [True/False]
|
||||
# basically takes the provider titles and finds the corresponding anilist title then changes the title to that
|
||||
# useful for uniformity especially when downloading from different providers
|
||||
# this also applies to episode titles
|
||||
normalize_titles = {self.normalize_titles}
|
||||
|
||||
# can be [allanime, animepahe, hianime]
|
||||
# allanime is the most reliable
|
||||
# animepahe provides different links to streams of different quality so a quality can be selected reliably with --quality option
|
||||
# hianime usually provides subs in different languages and its servers are generally faster
|
||||
provider = {self.provider}
|
||||
|
||||
# Display language [english, romaji]
|
||||
# this is passed to anilist directly and is used to set the language which the anime titles will be in
|
||||
# when using the anilist interface
|
||||
preferred_language = {self.preferred_language}
|
||||
|
||||
# Download directory
|
||||
# where you will find your videos after downloading them with 'fastanime download' command
|
||||
downloads_dir = {self.downloads_dir}
|
||||
|
||||
# whether to show a preview window when using fzf or rofi [True/False]
|
||||
# the preview requires you have a commandline image viewer as documented in the README
|
||||
# this only applies when using fzf
|
||||
# if you don't care about image previews it doesn't matter
|
||||
# though its awesome
|
||||
# try it and you will see
|
||||
preview = {self.preview}
|
||||
|
||||
# the time to seek when using ffmpegthumbnailer [-1 to 100]
|
||||
# -1 means random and is the default
|
||||
# ffmpegthumbnailer is used to generate previews and you can select at what time in the video to extract an image
|
||||
# random makes things quite exciting because you never know at what time it will extract the image
|
||||
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
|
||||
|
||||
# whether to use fzf as the interface for the anilist command and others. [True/False]
|
||||
use_fzf = {self.use_fzf}
|
||||
|
||||
# whether to use rofi for the ui [True/False]
|
||||
# it's more useful if you want to create a desktop entry
|
||||
# which can be setup with 'fastanime config --desktop-entry'
|
||||
# though if you want it to be your sole interface even when fastanime is run directly from the terminal
|
||||
use_rofi = {self.use_rofi}
|
||||
|
||||
# rofi themes to use
|
||||
# the values of this option is the path to the rofi config files to use
|
||||
# i chose to split it into three since it gives the best look and feel
|
||||
# you can refer to the rofi demo on github to see for yourself
|
||||
# by the way i recommend getting the rofi themes from this project;
|
||||
rofi_theme = {self.rofi_theme}
|
||||
|
||||
rofi_theme_input = {self.rofi_theme_input}
|
||||
|
||||
rofi_theme_confirm = {self.rofi_theme_confirm}
|
||||
|
||||
# the duration in minutes a notification will stay on the screen
|
||||
# used by notifier command
|
||||
notification_duration = {self.notification_duration}
|
||||
|
||||
# used when the provider gives subs of different languages
|
||||
# currently its the case for:
|
||||
# hianime
|
||||
# the values for this option are the short names for countries
|
||||
# regex is used to determine what you selected
|
||||
sub_lang = {self.sub_lang}
|
||||
|
||||
# what is your default media list tracking [track/disabled/prompt]
|
||||
# only affects your anilist anime list
|
||||
# track - means your progress will always be reflected in your anilist anime list
|
||||
# disabled - means progress tracking will no longer be reflected in your anime list
|
||||
# prompt - means for every anime you will be prompted whether you want your progress to be tracked or not
|
||||
default_media_list_tracking = {self.default_media_list_tracking}
|
||||
|
||||
# whether media list tracking should only be updated when the next episode is greater than the previous
|
||||
# this affects only your anilist anime list
|
||||
force_forward_tracking = {self.force_forward_tracking}
|
||||
|
||||
# whether to cache requests [true/false]
|
||||
# this makes the experience better and faster
|
||||
# as data need not always be fetched from the web server
|
||||
# and instead can be gotten locally
|
||||
# from the cached_requests_db
|
||||
cache_requests = {self.cache_requests}
|
||||
|
||||
# whether to use a persistent store (basically a sqlitedb) for storing some data the provider requires
|
||||
# to enable a seamless experience [true/false]
|
||||
# this option exists primarily because i think it may help in the optimization
|
||||
# of fastanime as a library in a website project
|
||||
# for now i don't recommend changing it
|
||||
# leave it as is
|
||||
use_persistent_provider_store = {self.use_persistent_provider_store}
|
||||
|
||||
|
||||
[stream]
|
||||
# Auto continue from watch history [True/False]
|
||||
# this will make fastanime to choose the episode that you last watched to completion
|
||||
# and increment it by one
|
||||
# and use that to auto select the episode you want to watch
|
||||
continue_from_history = {self.continue_from_history}
|
||||
|
||||
# which history to use [local/remote]
|
||||
# local history means it will just use the watch history stored locally in your device
|
||||
# the file that stores it is called watch_history.json and is stored next to your config file
|
||||
# remote means it ignores the last episode stored locally and instead uses the one in your anilist anime list
|
||||
# this config option is useful if you want to overwrite your local history or import history covered from another device or platform
|
||||
# since remote history will take precedence over what's available locally
|
||||
preferred_history = {self.preferred_history}
|
||||
|
||||
# Preferred language for anime [dub/sub]
|
||||
translation_type = {self.translation_type}
|
||||
|
||||
# what server to use for a particular provider
|
||||
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
|
||||
# animepahe: [kwik]
|
||||
# hianime: [HD1, HD2, StreamSB, StreamTape]
|
||||
# 'top' can also be used as a value for this option
|
||||
# 'top' will cause fastanime to auto select the first server it sees
|
||||
# this saves on resources and is faster since not all servers are being fetched
|
||||
server = {self.server}
|
||||
|
||||
# Auto select next episode [True/False]
|
||||
# this makes fastanime increment the current episode number
|
||||
# and then use that value to fetch the next episode instead of prompting
|
||||
# this option is useful for binging
|
||||
auto_next = {self.auto_next}
|
||||
|
||||
# Auto select the anime provider results with fuzzy find. [True/False]
|
||||
# Note this won't always be correct
|
||||
# this is because the providers sometimes use non-standard names
|
||||
# that are their own preference rather than the official names
|
||||
# But 99% of the time will be accurate
|
||||
# if this happens just turn off auto_select in the menus or from the commandline and manually select the correct anime title
|
||||
# and then please open an issue at <> highlighting the normalized title and the title given by the provider for the anime you wished to watch
|
||||
# or even better edit this file <> and open a pull request
|
||||
auto_select = {self.auto_select}
|
||||
|
||||
# whether to skip the opening and ending theme songs [True/False]
|
||||
# NOTE: requires ani-skip to be in path
|
||||
# for python-mpv users i am planning to create this functionality in python without the use of an external script
|
||||
# so its disabled for now
|
||||
skip = {self.skip}
|
||||
|
||||
# at what percentage progress should the episode be considered as completed [0-100]
|
||||
# this value is used to determine whether to increment the current episode number and save it to your local list
|
||||
# so you can continue immediately to the next episode without selecting it the next time you decide to watch the anime
|
||||
# it is also used to determine whether your anilist anime list should be updated or not
|
||||
episode_complete_at = {self.episode_complete_at}
|
||||
|
||||
# whether to use python-mpv [True/False]
|
||||
# to enable superior control over the player
|
||||
# adding more options to it
|
||||
# Enable this one and you will wonder why you did not discover fastanime sooner
|
||||
# Since you basically don't have to close the player window
|
||||
# to go to the next or previous episode, switch servers,
|
||||
# change translation type or change to a given episode x
|
||||
# so try it if you haven't already
|
||||
# if you have any issues setting it up
|
||||
# don't be afraid to ask
|
||||
# especially on windows
|
||||
# honestly it can be a pain to set it up there
|
||||
# personally it took me quite some time to figure it out
|
||||
# this is because of how windows handles shared libraries
|
||||
# so just ask when you find yourself stuck
|
||||
# or just switch to arch linux
|
||||
use_python_mpv = {self.use_python_mpv}
|
||||
|
||||
# force mpv window
|
||||
# the default 'immediate' just makes mpv to open the window even if the video has not yet loaded
|
||||
# done for aesthetics
|
||||
# passed directly to mpv so values are same
|
||||
force_window = immediate
|
||||
|
||||
# the format of downloaded anime and trailer
|
||||
# based on yt-dlp format and passed directly to it
|
||||
# learn more by looking it up on their site
|
||||
# only works for downloaded anime if:
|
||||
# provider=allanime, server=gogoanime
|
||||
# provider=allanime, server=wixmp
|
||||
# provider=hianime
|
||||
# this is because they provide an m3u8 file that contains multiple quality streams
|
||||
format = {self.format}
|
||||
|
||||
# set the player to use for streaming [mpv/vlc]
|
||||
# while this option exists i will still recommend that you use mpv
|
||||
# since you will miss out on some features if you use the others
|
||||
player = {self.player}
|
||||
|
||||
# NOTE:
|
||||
# if you have any trouble setting up your config
|
||||
# please don't be afraid to ask in our discord
|
||||
# plus if there are any errors, improvements or suggestions please tell us in the discord
|
||||
# or help us by contributing
|
||||
# we appreciate all the help we can get
|
||||
# since we may not always have the time to immediately implement the changes
|
||||
#
|
||||
# HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
|
||||
#
|
||||
"""
|
||||
return current_config_state
|
||||
|
||||
def __str__(self):
|
||||
return self.__repr__()
|
||||
|
||||
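Since load_config leans on ConfigParser defaults shared across the general, stream and anilist sections, here is a small self-contained sketch of that pattern with a trimmed-down defaults dict (the values are just examples):

# Sketch of the ConfigParser pattern used by Config.load_config: defaults apply
# to every section, and the typed getters parse the stored strings.
from configparser import ConfigParser

defaults = {"auto_next": "False", "quality": "1080", "server": "top"}  # trimmed for the example

parser = ConfigParser(defaults)
for section in ("general", "stream", "anilist"):
    parser.add_section(section)

# a user config file can override any default per section
parser.read_string("[stream]\nauto_next = True\n")

print(parser.getboolean("stream", "auto_next"))  # True  (overridden in the stream section)
print(parser.getint("stream", "quality"))        # 1080  (falls back to the default)
print(parser.get("general", "server"))           # top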
File diff suppressed because it is too large
@@ -7,95 +7,28 @@ import textwrap
|
||||
from threading import Thread
|
||||
|
||||
import requests
|
||||
from yt_dlp.utils import clean_html, sanitize_filename
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ...libs.anilist.anilist_data_schema import AnilistBaseMediaDataSchema
|
||||
from ...libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
from ...Utility import anilist_data_helper
|
||||
from ...Utility.utils import remove_html_tags
|
||||
from ..utils.scripts import fzf_preview
|
||||
from ..utils.utils import get_true_fg
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
fzf_preview = r"""
|
||||
#
|
||||
# The purpose of this script is to demonstrate how to preview a file or an
|
||||
# image in the preview window of fzf.
|
||||
#
|
||||
# Dependencies:
|
||||
# - https://github.com/sharkdp/bat
|
||||
# - https://github.com/hpjansson/chafa
|
||||
# - https://iterm2.com/utilities/imgcat
|
||||
fzf-preview(){
|
||||
if [[ $# -ne 1 ]]; then
|
||||
>&2 echo "usage: $0 FILENAME"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
file=${1/#\~\//$HOME/}
|
||||
type=$(file --dereference --mime -- "$file")
|
||||
|
||||
if [[ ! $type =~ image/ ]]; then
|
||||
if [[ $type =~ =binary ]]; then
|
||||
file "$1"
|
||||
exit
|
||||
fi
|
||||
|
||||
# Sometimes bat is installed as batcat.
|
||||
if command -v batcat > /dev/null; then
|
||||
batname="batcat"
|
||||
elif command -v bat > /dev/null; then
|
||||
batname="bat"
|
||||
else
|
||||
cat "$1"
|
||||
exit
|
||||
fi
|
||||
|
||||
${batname} --style="${BAT_STYLE:-numbers}" --color=always --pager=never -- "$file"
|
||||
exit
|
||||
fi
|
||||
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [[ $dim = x ]]; then
|
||||
dim=$(stty size < /dev/tty | awk '{print $2 "x" $1}')
|
||||
elif ! [[ $KITTY_WINDOW_ID ]] && (( FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size < /dev/tty | awk '{print $1}') )); then
|
||||
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
|
||||
# * https://github.com/junegunn/fzf/issues/2544
|
||||
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
|
||||
fi
|
||||
|
||||
# 1. Use kitty icat on kitty terminal
|
||||
if [[ $KITTY_WINDOW_ID ]]; then
|
||||
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
|
||||
# you have to use 'stream'.
|
||||
#
|
||||
# 2. The last line of the output is the ANSI reset code without newline.
|
||||
# This confuses fzf and makes it render scroll offset indicator.
|
||||
# So we remove the last line and append the reset code to its previous line.
|
||||
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
|
||||
|
||||
# 2. Use chafa with Sixel output
|
||||
elif command -v chafa > /dev/null; then
|
||||
chafa -f sixel -s "$dim" "$file"
|
||||
# Add a new line character so that fzf can display multiple images in the preview window
|
||||
echo
|
||||
|
||||
# 3. If chafa is not found but imgcat is available, use it on iTerm2
|
||||
elif command -v imgcat > /dev/null; then
|
||||
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
|
||||
# user is running iTerm2. But for the sake of simplicity, we just assume
|
||||
# that's the case here.
|
||||
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
|
||||
|
||||
# 4. Cannot find any suitable method to preview the image
|
||||
else
|
||||
file "$file"
|
||||
fi
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
# ---- aniskip integration ----
|
||||
def aniskip(mal_id, episode):
|
||||
def aniskip(mal_id: int, episode: str):
|
||||
"""helper function to be used for setting and getting skip data
|
||||
|
||||
Args:
|
||||
mal_id: mal id of the anime
|
||||
episode: episode number
|
||||
|
||||
Returns:
|
||||
mpv chapter options
|
||||
"""
|
||||
ANISKIP = shutil.which("ani-skip")
|
||||
if not ANISKIP:
|
||||
print("Aniskip not found, please install and try again")
|
||||
@@ -111,62 +44,94 @@ def aniskip(mal_id, episode):
|
||||
# ---- preview stuff ----
|
||||
# import tempfile
|
||||
|
||||
# NOTE: May change this to a temp dir but there were issues so later
|
||||
WORKING_DIR = APP_CACHE_DIR # tempfile.gettempdir()
|
||||
IMAGES_DIR = os.path.join(WORKING_DIR, "images")
|
||||
if not os.path.exists(IMAGES_DIR):
|
||||
os.mkdir(IMAGES_DIR)
|
||||
INFO_DIR = os.path.join(WORKING_DIR, "info")
|
||||
if not os.path.exists(INFO_DIR):
|
||||
os.mkdir(INFO_DIR)
|
||||
|
||||
IMAGES_CACHE_DIR = os.path.join(WORKING_DIR, "images")
|
||||
if not os.path.exists(IMAGES_CACHE_DIR):
|
||||
os.mkdir(IMAGES_CACHE_DIR)
|
||||
ANIME_INFO_CACHE_DIR = os.path.join(WORKING_DIR, "info")
|
||||
if not os.path.exists(ANIME_INFO_CACHE_DIR):
|
||||
os.mkdir(ANIME_INFO_CACHE_DIR)
|
||||
|
||||
|
||||
def save_image_from_url(url: str, file_name: str):
|
||||
"""Helper function that downloads an image to the FastAnime images cache dir given its url and filename
|
||||
|
||||
Args:
|
||||
url: image url to download
|
||||
file_name: filename to use
|
||||
"""
|
||||
image = requests.get(url)
|
||||
with open(f"{IMAGES_DIR}/{file_name}", "wb") as f:
|
||||
with open(f"{IMAGES_CACHE_DIR}/{file_name}", "wb") as f:
|
||||
f.write(image.content)
|
||||
|
||||
|
||||
def save_info_from_str(info: str, file_name: str):
|
||||
with open(f"{INFO_DIR}/{file_name}", "w") as f:
|
||||
"""Helper function that writes text (anime details and info) to a file given its filename
|
||||
|
||||
Args:
|
||||
info: the information anilist has on the anime
|
||||
file_name: the filename to use
|
||||
"""
|
||||
with open(f"{ANIME_INFO_CACHE_DIR}/{file_name}", "w") as f:
|
||||
f.write(info)
|
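A quick note on why the titles used as cache filenames are sanitized first: the fzf preview script looks files up by the selected entry text, so the entry and the filename must match and must be safe as a path. For example:

# Why titles are sanitized before becoming cache filenames: the selected fzf entry
# must match the file on disk, and raw titles may contain path-unsafe characters.
from yt_dlp.utils import sanitize_filename

title = "Fate/stay night: Unlimited Blade Works"
print(sanitize_filename(title))  # path-unsafe characters such as '/' are replaced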
||||
|
||||
|
||||
def write_search_results(
|
||||
search_results: list[AnilistBaseMediaDataSchema],
|
||||
titles,
|
||||
workers=None,
|
||||
anilist_results: list[AnilistBaseMediaDataSchema],
|
||||
titles: list[str],
|
||||
workers: int | None = None,
|
||||
):
|
||||
H_COLOR = 215, 0, 95
|
||||
S_COLOR = 208, 208, 208
|
||||
S_WIDTH = 45
|
||||
"""A helper function used by and run in a background thread by get_fzf_preview function inorder to get the actual preview data to be displayed by fzf
|
||||
|
||||
Args:
|
||||
anilist_results: the anilist results from an anilist action
|
||||
titles: sanitized anime titles
|
||||
workers: number of threads to use; defaults to as many as possible
|
||||
"""
|
||||
# NOTE: Will probably make this a configurable option
|
||||
HEADER_COLOR = 215, 0, 95
|
||||
SEPARATOR_COLOR = 208, 208, 208
|
||||
SEPARATOR_WIDTH = 30
|
||||
# use concurrency to download and write as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
future_to_task = {}
|
||||
for anime, title in zip(search_results, titles):
|
||||
for anime, title in zip(anilist_results, titles):
|
||||
# actual image url
|
||||
image_url = anime["coverImage"]["large"]
|
||||
future_to_task[executor.submit(save_image_from_url, image_url, title)] = (
|
||||
image_url
|
||||
)
|
||||
|
||||
mediaListName = "Not in any of your lists"
|
||||
progress = "UNKNOWN"
|
||||
if anime_list := anime["mediaListEntry"]:
|
||||
mediaListName = anime_list["status"]
|
||||
progress = anime_list["progress"]
|
||||
# handle the text data
|
||||
template = f"""
|
||||
{get_true_fg("-"*S_WIDTH,*S_COLOR,bold=False)}
|
||||
{get_true_fg('Title(jp):',*H_COLOR)} {anime['title']['romaji']}
|
||||
{get_true_fg('Title(eng):',*H_COLOR)} {anime['title']['english']}
|
||||
{get_true_fg('Popularity:',*H_COLOR)} {anime['popularity']}
|
||||
{get_true_fg('Favourites:',*H_COLOR)} {anime['favourites']}
|
||||
{get_true_fg('Status:',*H_COLOR)} {anime['status']}
|
||||
{get_true_fg('Episodes:',*H_COLOR)} {anime['episodes']}
|
||||
{get_true_fg('Genres:',*H_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres'])}
|
||||
{get_true_fg('Next Episode:',*H_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode'])}
|
||||
{get_true_fg('Start Date:',*H_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate'])}
|
||||
{get_true_fg('End Date:',*H_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate'])}
|
||||
{get_true_fg("-"*S_WIDTH,*S_COLOR,bold=False)}
|
||||
{get_true_fg('Description:',*H_COLOR)}
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Title(jp):',*HEADER_COLOR)} {anime['title']['romaji']}
|
||||
{get_true_fg('Title(eng):',*HEADER_COLOR)} {anime['title']['english']}
|
||||
{get_true_fg('Popularity:',*HEADER_COLOR)} {anime['popularity']}
|
||||
{get_true_fg('Favourites:',*HEADER_COLOR)} {anime['favourites']}
|
||||
{get_true_fg('Status:',*HEADER_COLOR)} {anime['status']}
|
||||
{get_true_fg('Episodes:',*HEADER_COLOR)} {anime['episodes']}
|
||||
{get_true_fg('Genres:',*HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres'])}
|
||||
{get_true_fg('Next Episode:',*HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode'])}
|
||||
{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate'])}
|
||||
{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate'])}
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Media List:',*HEADER_COLOR)} {mediaListName}
|
||||
{get_true_fg('Progress:',*HEADER_COLOR)} {progress}
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Description:',*HEADER_COLOR)}
|
||||
"""
|
||||
template = textwrap.dedent(template)
|
||||
template = f"""
|
||||
{template}
|
||||
{textwrap.fill(remove_html_tags(
|
||||
{textwrap.fill(clean_html(
|
||||
str(anime['description'])), width=45)}
|
||||
"""
|
||||
future_to_task[executor.submit(save_info_from_str, template, title)] = title
|
||||
@@ -181,11 +146,22 @@ def write_search_results(
|
||||
|
||||
|
||||
# get rofi icons
|
||||
def get_icons(search_results: list[AnilistBaseMediaDataSchema], titles, workers=None):
|
||||
def get_rofi_icons(
|
||||
anilist_results: list[AnilistBaseMediaDataSchema], titles, workers=None
|
||||
):
|
||||
"""A helper function to make sure that the images are downloaded so they can be used as icons
|
||||
|
||||
Args:
|
||||
titles (list[str]): sanitized titles of the anime; NOTE: it's important that they are sanitized since they are used as the filenames of the images
|
||||
workers ([TODO:parameter]): Number of threads to use to download the images; defaults to as many as possible
|
||||
anilist_results: the anilist results from an anilist action
|
||||
"""
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for anime, title in zip(search_results, titles):
|
||||
for anime, title in zip(anilist_results, titles):
|
||||
# actual link to download image from
|
||||
image_url = anime["coverImage"]["large"]
|
||||
future_to_url[executor.submit(save_image_from_url, image_url, title)] = (
|
||||
image_url
|
||||
@@ -196,19 +172,127 @@ def get_icons(search_results: list[AnilistBaseMediaDataSchema], titles, workers=
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as exc:
|
||||
logger.error("%r generated an exception: %s" % (url, exc))
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
|
||||
def get_preview(search_results: list[AnilistBaseMediaDataSchema], titles, wait=False):
|
||||
# ensure images and info exists
|
||||
# get rofi icons
|
||||
def get_fzf_manga_preview(manga_results, workers=None, wait=False):
|
||||
"""A helper function to make sure that the images are downloaded so they can be used as icons
|
||||
|
||||
Args:
|
||||
manga_results: the manga search results whose poster images will be downloaded
|
||||
workers (int | None): number of threads to use to download the images; defaults to as many as possible
|
||||
wait (bool): whether to block until the preview data has been written; defaults to False
|
||||
"""
|
||||
|
||||
def _worker():
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for manga in manga_results:
|
||||
image_url = manga["poster"]
|
||||
future_to_url[
|
||||
executor.submit(
|
||||
save_image_from_url,
|
||||
image_url,
|
||||
sanitize_filename(manga["title"]),
|
||||
)
|
||||
] = image_url
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
background_worker = Thread(
|
||||
target=write_search_results, args=(search_results, titles)
|
||||
target=_worker,
|
||||
)
|
||||
background_worker.daemon = True
|
||||
# ensure images and info exists
|
||||
background_worker.start()
|
||||
|
||||
os.environ["SHELL"] = shutil.which("bash") or "sh"
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process it
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
)
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
|
||||
|
||||
|
||||
# get rofi icons
|
||||
def get_fzf_episode_preview(
|
||||
anilist_result: AnilistBaseMediaDataSchema, episodes, workers=None, wait=False
|
||||
):
|
||||
"""A helper function to make sure that the images are downloaded so they can be used as icons
|
||||
|
||||
Args:
|
||||
anilist_result: the anilist media object whose streamingEpisodes provide the episode titles and thumbnails
episodes: the episode numbers to build previews for
|
||||
workers (int | None): number of threads to use to download the images; defaults to as many as possible
|
||||
wait (bool): whether to block until the preview data has been written; defaults to False
|
||||
"""
|
||||
|
||||
HEADER_COLOR = 215, 0, 95
|
||||
import re
|
||||
|
||||
def _worker():
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for episode in episodes:
|
||||
episode_title = ""
|
||||
image_url = ""
|
||||
for episode_detail in anilist_result["streamingEpisodes"]:
|
||||
if re.match(f"Episode {episode} ", episode_detail["title"]):
|
||||
episode_title = episode_detail["title"]
|
||||
image_url = episode_detail["thumbnail"]
|
||||
|
||||
if episode_title and image_url:
|
||||
# actual link to download image from
|
||||
if not image_url:
|
||||
continue
|
||||
future_to_url[
|
||||
executor.submit(save_image_from_url, image_url, episode)
|
||||
] = image_url
|
||||
template = textwrap.dedent(
|
||||
f"""
|
||||
{get_true_fg('Anime Title:',*HEADER_COLOR)} {anilist_result['title']['romaji'] or anilist_result['title']['english']}
|
||||
{get_true_fg('Episode Title:',*HEADER_COLOR)} {episode_title}
|
||||
"""
|
||||
)
|
||||
future_to_url[
|
||||
executor.submit(save_info_from_str, template, episode)
|
||||
] = episode_title
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
background_worker = Thread(
|
||||
target=_worker,
|
||||
)
|
||||
# ensure images and info exists
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process it
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
@@ -219,12 +303,76 @@ def get_preview(search_results: list[AnilistBaseMediaDataSchema], titles, wait=F
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_DIR,
|
||||
IMAGES_DIR,
|
||||
INFO_DIR,
|
||||
INFO_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
# preview.replace("\n", ";")
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
|
||||
|
||||
|
||||
def get_fzf_anime_preview(
|
||||
anilist_results: list[AnilistBaseMediaDataSchema], titles, wait=False
|
||||
):
|
||||
"""A helper function that constructs data to be used for the fzf preview
|
||||
|
||||
Args:
|
||||
titles (list[str]): The sanitized titles to use; NOTE: it's important that they are sanitized since they will be used as filenames
|
||||
wait (bool): whether to block the UI while waiting for the preview; defaults to False
|
||||
anilist_results: the anilist results got from an anilist action
|
||||
|
||||
Returns:
|
||||
The fzf preview script to use
|
||||
"""
|
||||
# ensure images and info exists
|
||||
from ...constants import S_PLATFORM
|
||||
|
||||
background_worker = Thread(
|
||||
target=write_search_results, args=(anilist_results, titles)
|
||||
)
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process it
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
if S_PLATFORM == "win32":
|
||||
preview = """
|
||||
%s
|
||||
title={}
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [ -s "%s\\\\\\$title" ]; then
|
||||
if command -v chafa >/dev/null;then
|
||||
chafa -f kitty -s $dim "%s\\\\\\$title"
|
||||
fi
|
||||
else echo Loading...
|
||||
fi
|
||||
if [ -s "%s\\\\\\$title" ]; then cat "%s\\\\\\$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
)
|
||||
else:
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
if [ -s %s/{} ]; then cat %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
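# --- illustrative sketch, not part of the diff ---
# How the preview script returned above could be wired into fzf. The fzf
# binary and its --preview flag are real; the helper name
# run_fzf_with_preview and the exact option set are assumptions.
import subprocess


def run_fzf_with_preview(titles: list[str], preview_script: str) -> str:
    # fzf reads the choices from stdin and substitutes the selected line into {}
    # inside the preview command, which is exactly what the script expects.
    completed = subprocess.run(
        ["fzf", "--reverse", "--preview", preview_script],
        input="\n".join(titles),
        capture_output=True,
        text=True,
    )
    return completed.stdout.strip()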
|
||||
|
||||
12
fastanime/cli/utils/feh.py
Normal file
@@ -0,0 +1,12 @@
|
||||
import shutil
|
||||
import subprocess
|
||||
from sys import exit
|
||||
|
||||
|
||||
def feh_manga_viewer(image_links: list[str], window_title: str):
|
||||
FEH_EXECUTABLE = shutil.which("feh")
|
||||
if not FEH_EXECUTABLE:
|
||||
print("feh not found")
|
||||
exit(1)
|
||||
commands = [FEH_EXECUTABLE, *image_links, "--title", window_title]
|
||||
subprocess.run(commands)
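# --- illustrative usage, not part of the diff ---
# Minimal sketch of calling the viewer above; the URLs and title are placeholders.
# feh_manga_viewer(
#     ["https://example.com/page-001.png", "https://example.com/page-002.png"],
#     "Example Manga - Chapter 1",
# )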
|
||||
@@ -2,6 +2,8 @@ import re
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from fastanime.constants import S_PLATFORM
|
||||
|
||||
|
||||
def stream_video(MPV, url, mpv_args, custom_args):
|
||||
process = subprocess.Popen(
|
||||
@@ -48,66 +50,140 @@ def stream_video(MPV, url, mpv_args, custom_args):
|
||||
|
||||
def run_mpv(
|
||||
link: str,
|
||||
title: str | None = "",
|
||||
title: str = "",
|
||||
start_time: str = "0",
|
||||
ytdl_format="",
|
||||
custom_args=[],
|
||||
headers={},
|
||||
subtitles=[],
|
||||
player="",
|
||||
):
|
||||
# Determine if mpv is available
|
||||
MPV = shutil.which("mpv")
|
||||
|
||||
# If title is None, set a default value
|
||||
|
||||
# Regex to check if the link is a YouTube URL
|
||||
youtube_regex = r"(https?://)?(www\.)?(youtube|youtu|youtube-nocookie)\.(com|be)/.+"
|
||||
|
||||
if not MPV:
|
||||
# Determine if the link is a YouTube URL
|
||||
if re.match(youtube_regex, link):
|
||||
# Android specific commands to launch mpv with a YouTube URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"com.google.android.youtube/.UrlActivity",
|
||||
]
|
||||
if link.endswith(".torrent"):
|
||||
WEBTORRENT_CLI = shutil.which("webtorrent")
|
||||
if not WEBTORRENT_CLI:
|
||||
import time
|
||||
|
||||
print(
|
||||
"webtorrent cli is not installed which is required for downloading and streaming from nyaa\nplease install it or use another provider"
|
||||
)
|
||||
time.sleep(120)
|
||||
return "0", "0"
|
||||
cmd = [WEBTORRENT_CLI, link, f"--{player}"]
|
||||
subprocess.run(cmd)
|
||||
return "0", "0"
|
||||
if player == "vlc":
|
||||
VLC = shutil.which("vlc")
|
||||
if not VLC and not S_PLATFORM == "win32":
|
||||
# Determine if the link is a YouTube URL
|
||||
if re.match(youtube_regex, link):
|
||||
# Android specific commands to launch mpv with a YouTube URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"com.google.android.youtube/.UrlActivity",
|
||||
]
|
||||
return "0", "0"
|
||||
else:
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"org.videolan.vlc/org.videolan.vlc.gui.video.VideoPlayerActivity",
|
||||
"-e",
|
||||
"title",
|
||||
title,
|
||||
]
|
||||
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
else:
|
||||
# Android specific commands to launch mpv with a regular URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"is.xyz.mpv/.MPVActivity",
|
||||
]
|
||||
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
args = ["vlc", link]
|
||||
for subtitle in subtitles:
|
||||
args.append("--sub-file")
|
||||
args.append(subtitle["url"])
|
||||
break
|
||||
if title:
|
||||
args.append("--video-title")
|
||||
args.append(title)
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
else:
|
||||
# General mpv command with custom arguments
|
||||
mpv_args = []
|
||||
if start_time != "0":
|
||||
mpv_args.append(f"--start={start_time}")
|
||||
if title:
|
||||
mpv_args.append(f"--title={title}")
|
||||
if ytdl_format:
|
||||
mpv_args.append(f"--ytdl-format={ytdl_format}")
|
||||
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
|
||||
return stop_time, total_time
|
||||
# Determine if mpv is available
|
||||
MPV = shutil.which("mpv")
|
||||
if not MPV and not S_PLATFORM == "win32":
|
||||
# Determine if the link is a YouTube URL
|
||||
if re.match(youtube_regex, link):
|
||||
# Android specific commands to launch mpv with a YouTube URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"com.google.android.youtube/.UrlActivity",
|
||||
]
|
||||
return "0", "0"
|
||||
else:
|
||||
# Android specific commands to launch mpv with a regular URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"is.xyz.mpv/.MPVActivity",
|
||||
]
|
||||
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
else:
|
||||
# General mpv command with custom arguments
|
||||
mpv_args = []
|
||||
if headers:
|
||||
mpv_headers = "--http-header-fields="
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_args.append(mpv_headers)
|
||||
for subtitle in subtitles:
|
||||
mpv_args.append(f"--sub-file={subtitle['url']}")
|
||||
if start_time != "0":
|
||||
mpv_args.append(f"--start={start_time}")
|
||||
if title:
|
||||
mpv_args.append(f"--title={title}")
|
||||
if ytdl_format:
|
||||
mpv_args.append(f"--ytdl-format={ytdl_format}")
|
||||
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
|
||||
return stop_time, total_time
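# --- illustrative note, not part of the diff ---
# Example of the argument list the branch above builds before handing off to
# stream_video; the header and subtitle values are made up.
#   headers   = {"Referer": "https://example.com"}
#   subtitles = [{"url": "https://example.com/ep1.en.vtt", "language": "en"}]
# produces mpv_args roughly equal to:
#   ["--http-header-fields=Referer:https://example.com,",
#    "--sub-file=https://example.com/ep1.en.vtt",
#    "--title=Episode 1"]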
|
||||
|
||||
|
||||
# Example usage
|
||||
|
||||
@@ -3,12 +3,14 @@ from typing import TYPE_CHECKING
|
||||
import mpv
|
||||
|
||||
from ...anilist import AniList
|
||||
from .utils import filter_by_quality, move_preferred_subtitle_lang_to_top
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Literal
|
||||
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ..config import Config
|
||||
from .tools import FastAnimeRuntimeState
|
||||
|
||||
|
||||
def format_time(duration_in_secs: float):
|
||||
@@ -21,23 +23,32 @@ def format_time(duration_in_secs: float):
|
||||
class MpvPlayer(object):
|
||||
anime_provider: "AnimeProvider"
|
||||
config: "Config"
|
||||
subs = []
|
||||
mpv_player: "mpv.MPV"
|
||||
last_stop_time: str = "0"
|
||||
last_total_time: str = "0"
|
||||
last_stop_time_secs = 0
|
||||
last_total_time_secs = 0
|
||||
current_media_title = ""
|
||||
player_fetching = False
|
||||
|
||||
def get_episode(
|
||||
self, type: "Literal['next','previous','reload','custom']", ep_no=None
|
||||
self,
|
||||
type: "Literal['next','previous','reload','custom']",
|
||||
ep_no=None,
|
||||
server="top",
|
||||
):
|
||||
anilist_config = self.anilist_config
|
||||
fastanime_runtime_state = self.fastanime_runtime_state
|
||||
config = self.config
|
||||
episode_number: str = anilist_config.episode_number
|
||||
current_episode_number: str = (
|
||||
fastanime_runtime_state.provider_current_episode_number
|
||||
)
|
||||
quality = config.quality
|
||||
episodes: list = sorted(anilist_config.episodes, key=float)
|
||||
anime_id: int = anilist_config.anime_id
|
||||
anime = anilist_config.anime
|
||||
total_episodes: list = sorted(
|
||||
fastanime_runtime_state.provider_available_episodes, key=float
|
||||
)
|
||||
anime_id_anilist: int = fastanime_runtime_state.selected_anime_id_anilist
|
||||
provider_anime = fastanime_runtime_state.provider_anime
|
||||
translation_type = config.translation_type
|
||||
anime_provider = config.anime_provider
|
||||
self.last_stop_time: str = "0"
|
||||
@@ -48,70 +59,133 @@ class MpvPlayer(object):
|
||||
# next or prev
|
||||
if type == "next":
|
||||
self.mpv_player.show_text("Fetching next episode...")
|
||||
next_episode = episodes.index(episode_number) + 1
|
||||
if next_episode >= len(episodes):
|
||||
next_episode = len(episodes) - 1
|
||||
anilist_config.episode_number = episodes[next_episode]
|
||||
episode_number = anilist_config.episode_number
|
||||
config.update_watch_history(anime_id, str(episode_number))
|
||||
next_episode = total_episodes.index(current_episode_number) + 1
|
||||
if next_episode >= len(total_episodes):
|
||||
next_episode = len(total_episodes) - 1
|
||||
fastanime_runtime_state.provider_current_episode_number = total_episodes[
|
||||
next_episode
|
||||
]
|
||||
current_episode_number = (
|
||||
fastanime_runtime_state.provider_current_episode_number
|
||||
)
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=str(current_episode_number),
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
)
|
||||
elif type == "reload":
|
||||
if episode_number not in episodes:
|
||||
if current_episode_number not in total_episodes:
|
||||
self.mpv_player.show_text("Episode not available")
|
||||
return
|
||||
self.mpv_player.show_text("Replaying Episode...")
|
||||
elif type == "custom":
|
||||
if not ep_no or ep_no not in episodes:
|
||||
if not ep_no or ep_no not in total_episodes:
|
||||
self.mpv_player.show_text("Episode number not specified or invalid")
|
||||
self.mpv_player.show_text(f"Acceptable episodes are: {episodes}")
|
||||
self.mpv_player.show_text(
|
||||
f"Acceptable episodes are: {total_episodes}",
|
||||
)
|
||||
return
|
||||
|
||||
self.mpv_player.show_text(f"Fetching episode {ep_no}")
|
||||
episode_number = ep_no
|
||||
config.update_watch_history(anime_id, str(ep_no))
|
||||
anilist_config.episode_number = str(ep_no)
|
||||
current_episode_number = ep_no
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=str(ep_no),
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
)
|
||||
fastanime_runtime_state.provider_current_episode_number = str(ep_no)
|
||||
else:
|
||||
self.mpv_player.show_text("Fetching previous episode...")
|
||||
prev_episode = episodes.index(episode_number) - 1
|
||||
prev_episode = total_episodes.index(current_episode_number) - 1
|
||||
if prev_episode <= 0:
|
||||
prev_episode = 0
|
||||
anilist_config.episode_number = episodes[prev_episode]
|
||||
episode_number = anilist_config.episode_number
|
||||
config.update_watch_history(anime_id, str(episode_number))
|
||||
fastanime_runtime_state.provider_current_episode_number = total_episodes[
|
||||
prev_episode
|
||||
]
|
||||
current_episode_number = (
|
||||
fastanime_runtime_state.provider_current_episode_number
|
||||
)
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=str(current_episode_number),
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
)
|
||||
# update episode progress
|
||||
if config.user and episode_number:
|
||||
if config.user and current_episode_number:
|
||||
AniList.update_anime_list(
|
||||
{
|
||||
"mediaId": anime_id,
|
||||
"progress": episode_number,
|
||||
"mediaId": anime_id_anilist,
|
||||
"progress": int(float(current_episode_number)),
|
||||
}
|
||||
)
|
||||
# get them juicy streams
|
||||
episode_streams = anime_provider.get_episode_streams(
|
||||
anime,
|
||||
episode_number,
|
||||
provider_anime["id"],
|
||||
current_episode_number,
|
||||
translation_type,
|
||||
anilist_config.selected_anime_anilist,
|
||||
)
|
||||
if not episode_streams:
|
||||
self.mpv_player.show_text("No streams were found")
|
||||
return None
|
||||
return
|
||||
|
||||
# always select the first
|
||||
selected_server = next(episode_streams)
|
||||
if server == "top":
|
||||
selected_server = next(episode_streams, None)
|
||||
if not selected_server:
|
||||
self.mpv_player.show_text("Sth went wrong when loading the episode")
|
||||
return
|
||||
else:
|
||||
episode_streams_dict = {
|
||||
episode_stream["server"]: episode_stream
|
||||
for episode_stream in episode_streams
|
||||
}
|
||||
selected_server = episode_streams_dict.get(server)
|
||||
if selected_server is None:
|
||||
self.mpv_player.show_text(
|
||||
f"Invalid server!!; servers available are: {episode_streams_dict.keys()}",
|
||||
)
|
||||
return
|
||||
self.current_media_title = selected_server["episode_title"]
|
||||
if config.normalize_titles:
|
||||
import re
|
||||
|
||||
for episode_detail in fastanime_runtime_state.selected_anime_anilist[
|
||||
"streamingEpisodes"
|
||||
]:
|
||||
if re.match(
|
||||
f"Episode {current_episode_number} ", episode_detail["title"]
|
||||
):
|
||||
self.current_media_title = episode_detail["title"]
|
||||
break
|
||||
|
||||
links = selected_server["links"]
|
||||
if quality > len(links) - 1:
|
||||
quality = config.quality = len(links) - 1
|
||||
elif quality < 0:
|
||||
quality = config.quality = 0
|
||||
stream_link = links[quality]["link"]
|
||||
|
||||
stream_link_ = filter_by_quality(quality, links)
|
||||
if not stream_link_:
|
||||
self.mpv_player.show_text("Quality not found")
|
||||
return
|
||||
self.mpv_player._set_property("start", "0")
|
||||
stream_link = stream_link_["link"]
|
||||
fastanime_runtime_state.provider_current_episode_stream_link = stream_link
|
||||
self.subs = move_preferred_subtitle_lang_to_top(
|
||||
selected_server["subtitles"], config.sub_lang
|
||||
)
|
||||
return stream_link
|
||||
|
||||
def create_player(
|
||||
self, anime_provider: "AnimeProvider", anilist_config, config: "Config", title
|
||||
self,
|
||||
stream_link,
|
||||
anime_provider: "AnimeProvider",
|
||||
fastanime_runtime_state: "FastAnimeRuntimeState",
|
||||
config: "Config",
|
||||
title,
|
||||
start_time,
|
||||
headers={},
|
||||
subtitles=[],
|
||||
):
|
||||
self.subs = subtitles
|
||||
self.anime_provider = anime_provider
|
||||
self.anilist_config = anilist_config
|
||||
self.fastanime_runtime_state = fastanime_runtime_state
|
||||
self.config = config
|
||||
self.last_stop_time: str = "0"
|
||||
self.last_total_time: str = "0"
|
||||
@@ -120,32 +194,71 @@ class MpvPlayer(object):
|
||||
self.current_media_title = ""
|
||||
|
||||
mpv_player = mpv.MPV(
|
||||
log_handler=print,
|
||||
loglevel="error",
|
||||
config=True,
|
||||
input_default_bindings=True,
|
||||
input_vo_keyboard=True,
|
||||
osc=True,
|
||||
ytdl=True,
|
||||
)
|
||||
mpv_player.title = title
|
||||
|
||||
# -- events --
|
||||
@mpv_player.event_callback("file-loaded")
|
||||
def set_total_time(event, *args):
|
||||
d = mpv_player._get_property("duration")
|
||||
self.player_fetching = False
|
||||
if isinstance(d, float):
|
||||
self.last_total_time = format_time(d)
|
||||
try:
|
||||
if not mpv_player.core_shutdown:
|
||||
if self.subs:
|
||||
for i, subtitle in enumerate(self.subs):
|
||||
if i == 0:
|
||||
flag = "select"
|
||||
else:
|
||||
flag = "auto"
|
||||
mpv_player.sub_add(
|
||||
subtitle["url"], flag, None, subtitle["language"]
|
||||
)
|
||||
self.subs = []
|
||||
except mpv.ShutdownError:
|
||||
pass
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@mpv_player.property_observer("time-pos")
|
||||
def handle_time_start_update(*args):
|
||||
if len(args) > 1:
|
||||
value = args[1]
|
||||
if value is not None:
|
||||
self.last_stop_time = format_time(value)
|
||||
|
||||
@mpv_player.property_observer("time-remaining")
|
||||
def handle_time_remaining_update(
|
||||
property, time_remaining: float | None = None, *args
|
||||
):
|
||||
if time_remaining is not None:
|
||||
if time_remaining < 1 and config.auto_next and not self.player_fetching:
|
||||
print("Auto Fetching Next Episode")
|
||||
self.player_fetching = True
|
||||
url = self.get_episode("next")
|
||||
if url:
|
||||
mpv_player.loadfile(
|
||||
url,
|
||||
)
|
||||
mpv_player.title = self.current_media_title
|
||||
|
||||
# -- keybindings --
|
||||
@mpv_player.on_key_press("shift+n")
|
||||
def _next_episode():
|
||||
url = self.get_episode("next")
|
||||
if url:
|
||||
mpv_player.loadfile(url, options=f"title={self.current_media_title}")
|
||||
mpv_player.loadfile(
|
||||
url,
|
||||
)
|
||||
mpv_player.title = self.current_media_title
|
||||
|
||||
@mpv_player.event_callback("file-loaded")
|
||||
def set_total_time(event, *args):
|
||||
d = mpv_player._get_property("duration")
|
||||
if isinstance(d, float):
|
||||
self.last_total_time = format_time(d)
|
||||
|
||||
@mpv_player.event_callback("shutdown")
|
||||
def set_total_time_on_shutdown(event, *args):
|
||||
d = mpv_player._get_property("duration")
|
||||
if isinstance(d, float):
|
||||
self.last_total_time = format_time(d)
|
||||
|
||||
@mpv_player.on_key_press("shift+p")
|
||||
def _previous_episode():
|
||||
url = self.get_episode("previous")
|
||||
@@ -166,14 +279,16 @@ class MpvPlayer(object):
|
||||
@mpv_player.on_key_press("shift+t")
|
||||
def _toggle_translation_type():
|
||||
translation_type = "sub" if config.translation_type == "dub" else "dub"
|
||||
mpv_player.show_text("Changing translation type...")
|
||||
anime = anime_provider.get_anime(
|
||||
anilist_config._anime["id"],
|
||||
anilist_config.selected_anime_anilist,
|
||||
fastanime_runtime_state.provider_anime_search_result["id"],
|
||||
)
|
||||
if not anime:
|
||||
mpv_player.show_text("Failed to update translation type")
|
||||
return
|
||||
anilist_config.episodes = anime["availableEpisodesDetail"][translation_type]
|
||||
fastanime_runtime_state.provider_available_episodes = anime[
|
||||
"availableEpisodesDetail"
|
||||
][translation_type]
|
||||
config.translation_type = translation_type
|
||||
|
||||
if config.translation_type == "dub":
|
||||
@@ -190,31 +305,11 @@ class MpvPlayer(object):
|
||||
)
|
||||
mpv_player.title = self.current_media_title
|
||||
|
||||
@mpv_player.property_observer("time-pos")
|
||||
def handle_time_start_update(*args):
|
||||
if len(args) > 1:
|
||||
value = args[1]
|
||||
if value is not None:
|
||||
self.last_stop_time_secs = value
|
||||
self.last_stop_time = format_time(value)
|
||||
|
||||
@mpv_player.property_observer("time-remaining")
|
||||
def handle_time_remaining_update(*args):
|
||||
if len(args) > 1:
|
||||
value = args[1]
|
||||
if value is not None:
|
||||
rem_time = value
|
||||
if rem_time < 10 and config.auto_next:
|
||||
url = self.get_episode("next")
|
||||
if url:
|
||||
mpv_player.loadfile(
|
||||
url,
|
||||
)
|
||||
mpv_player.title = self.current_media_title
|
||||
|
||||
# -- script messages --
|
||||
@mpv_player.message_handler("select-episode")
|
||||
def select_episode(episode: bytes | None = None, *args):
|
||||
if not episode:
|
||||
mpv_player.show_text("No episode was selected")
|
||||
return
|
||||
url = self.get_episode("custom", episode.decode())
|
||||
if url:
|
||||
@@ -223,13 +318,69 @@ class MpvPlayer(object):
|
||||
)
|
||||
mpv_player.title = self.current_media_title
|
||||
|
||||
mpv_player.register_message_handler("select-episode", select_episode)
|
||||
@mpv_player.message_handler("select-server")
|
||||
def select_server(server: bytes | None = None, *args):
|
||||
if not server:
|
||||
mpv_player.show_text("No server was selected")
|
||||
return
|
||||
url = self.get_episode("reload", server=server.decode())
|
||||
if url:
|
||||
mpv_player.loadfile(
|
||||
url,
|
||||
)
|
||||
mpv_player.title = self.current_media_title
|
||||
else:
|
||||
pass
|
||||
|
||||
@mpv_player.message_handler("select-quality")
|
||||
def select_quality(quality_raw: bytes | None = None, *args):
|
||||
if not quality_raw:
|
||||
mpv_player.show_text("No quality was selected")
|
||||
return
|
||||
q = ["360", "720", "1080"]
|
||||
quality = quality_raw.decode()
|
||||
links: list = fastanime_runtime_state.provider_server_episode_streams
|
||||
q = [link["quality"] for link in links]
|
||||
if quality in q:
|
||||
config.quality = quality
|
||||
stream_link_ = filter_by_quality(quality, links)
|
||||
if not stream_link_:
|
||||
mpv_player.show_text("Quality not found")
|
||||
return
|
||||
mpv_player.show_text(f"Changing to stream of quality {quality}")
|
||||
stream_link = stream_link_["link"]
|
||||
mpv_player.loadfile(stream_link)
|
||||
else:
|
||||
mpv_player.show_text(f"invalid quality!! Valid quality includes: {q}")
|
||||
|
||||
# -- events --
|
||||
mpv_player.observe_property("time-pos", handle_time_start_update)
|
||||
mpv_player.register_event_callback(set_total_time)
|
||||
mpv_player.register_event_callback(set_total_time_on_shutdown)
|
||||
mpv_player.observe_property("time-remaining", handle_time_remaining_update)
|
||||
mpv_player.register_event_callback(set_total_time)
|
||||
|
||||
# --script-messages --
|
||||
mpv_player.register_message_handler("select-episode", select_episode)
|
||||
mpv_player.register_message_handler("select-server", select_server)
|
||||
mpv_player.register_message_handler("select-quality", select_quality)
|
||||
|
||||
self.mpv_player = mpv_player
|
||||
return mpv_player
|
||||
mpv_player.force_window = config.force_window
|
||||
# mpv_player.cache = "yes"
|
||||
# mpv_player.cache_pause = "no"
|
||||
mpv_player.title = title
|
||||
mpv_headers = ""
|
||||
if headers:
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_player.http_header_fields = mpv_headers
|
||||
|
||||
mpv_player.play(stream_link)
|
||||
|
||||
if not start_time == "0":
|
||||
mpv_player.start = start_time
|
||||
|
||||
mpv_player.wait_for_shutdown()
|
||||
mpv_player.terminate()
|
||||
|
||||
|
||||
player = MpvPlayer()
|
||||
|
||||
@@ -5,20 +5,23 @@ import requests
|
||||
|
||||
|
||||
def print_img(url: str):
|
||||
executable = shutil.which("chafa")
|
||||
curl = shutil.which("curl")
|
||||
# curl -sL "$1" | chafa /dev/stdin
|
||||
"""helper funtion to print an image given its url
|
||||
|
||||
if executable is None or curl is None:
|
||||
print("chafa or curl not found")
|
||||
return
|
||||
Args:
|
||||
url: [TODO:description]
|
||||
"""
|
||||
if EXECUTABLE := shutil.which("icat"):
|
||||
subprocess.run([EXECUTABLE, url])
|
||||
else:
|
||||
EXECUTABLE = shutil.which("chafa")
|
||||
|
||||
res = requests.get(url)
|
||||
if res.status_code != 200:
|
||||
print("Error fetching image")
|
||||
return
|
||||
img_bytes = res.content
|
||||
if not img_bytes:
|
||||
print("No image found")
|
||||
img_bytes = subprocess.check_output([curl, "-sL", url])
|
||||
subprocess.run([executable, url, "--size=15x15"], input=img_bytes)
|
||||
if EXECUTABLE is None:
|
||||
print("chafanot found")
|
||||
return
|
||||
|
||||
res = requests.get(url)
|
||||
if res.status_code != 200:
|
||||
print("Error fetching image")
|
||||
return
|
||||
img_bytes = res.content
|
||||
subprocess.run([EXECUTABLE, url, "--size=15x15"], input=img_bytes)
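# --- illustrative usage, not part of the diff ---
# print_img("https://example.com/cover.png")
# Renders the image with kitty's icat when available, otherwise downloads the
# bytes and feeds them to chafa as shown above.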
|
||||
|
||||
53
fastanime/cli/utils/scripts.py
Normal file
@@ -0,0 +1,53 @@
|
||||
fzf_preview = r"""
|
||||
#
|
||||
# Adapted from the preview script in the fzf repo
|
||||
#
|
||||
# Dependencies:
|
||||
# - https://github.com/hpjansson/chafa
|
||||
# - https://iterm2.com/utilities/imgcat
|
||||
#
|
||||
fzf-preview() {
|
||||
file=${1/#\~\//$HOME/}
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [[ $dim = x ]]; then
|
||||
dim=$(stty size </dev/tty | awk '{print $2 "x" $1}')
|
||||
elif ! [[ $KITTY_WINDOW_ID ]] && ((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size </dev/tty | awk '{print $1}'))); then
|
||||
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
|
||||
# * https://github.com/junegunn/fzf/issues/2544
|
||||
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
|
||||
fi
|
||||
|
||||
# 1. Use kitty icat on kitty terminal
|
||||
if [[ $KITTY_WINDOW_ID ]]; then
|
||||
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
|
||||
# you have to use 'stream'.
|
||||
#
|
||||
# 2. The last line of the output is the ANSI reset code without newline.
|
||||
# This confuses fzf and makes it render scroll offset indicator.
|
||||
# So we remove the last line and append the reset code to its previous line.
|
||||
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
|
||||
|
||||
# 2. Use chafa with Sixel output
|
||||
elif command -v chafa >/dev/null; then
|
||||
case "$(uname -a)" in
|
||||
# termux does not support sixel graphics
|
||||
# and produces weird output
|
||||
*ndroid*) chafa -s "$dim" "$file";;
|
||||
*) chafa -f sixel -s "$dim" "$file";;
|
||||
esac
|
||||
# Add a new line character so that fzf can display multiple images in the preview window
|
||||
echo
|
||||
|
||||
# 3. If chafa is not found but imgcat is available, use it on iTerm2
|
||||
elif command -v imgcat >/dev/null; then
|
||||
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
|
||||
# user is running iTerm2. But for the sake of simplicity, we just assume
|
||||
# that's the case here.
|
||||
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
|
||||
|
||||
# 4. Cannot find any suitable method to preview the image
|
||||
else
|
||||
echo install chafa or imgcat, or use the kitty terminal, so you can enjoy image previews
|
||||
fi
|
||||
}
|
||||
"""
|
||||
44
fastanime/cli/utils/syncplay.py
Normal file
@@ -0,0 +1,44 @@
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from .tools import exit_app
|
||||
|
||||
|
||||
def SyncPlayer(url: str, anime_title=None, headers={}, subtitles=[], *args):
|
||||
# TODO: handle m3u8 multi quality streams
|
||||
#
|
||||
# check for SyncPlay
|
||||
SYNCPLAY_EXECUTABLE = shutil.which("syncplay")
|
||||
if not SYNCPLAY_EXECUTABLE:
|
||||
print("Syncplay not found")
|
||||
exit_app(1)
|
||||
return "0", "0"
|
||||
# start SyncPlayer
|
||||
mpv_args = []
|
||||
if headers:
|
||||
mpv_headers = "--http-header-fields="
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_args.append(mpv_headers)
|
||||
for subtitle in subtitles:
|
||||
mpv_args.append(f"--sub-file={subtitle['url']}")
|
||||
if not anime_title:
|
||||
subprocess.run(
|
||||
[
|
||||
SYNCPLAY_EXECUTABLE,
|
||||
url,
|
||||
]
|
||||
)
|
||||
else:
|
||||
subprocess.run(
|
||||
[
|
||||
SYNCPLAY_EXECUTABLE,
|
||||
url,
|
||||
"--",
|
||||
f"--force-media-title={anime_title}",
|
||||
*mpv_args,
|
||||
]
|
||||
)
|
||||
|
||||
# for compatibility
|
||||
return "0", "0"
|
||||
@@ -1,39 +1,49 @@
|
||||
class QueryDict(dict):
|
||||
"""dot.notation access to dictionary attributes"""
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
return self.__getitem__(attr)
|
||||
except KeyError:
|
||||
raise AttributeError(
|
||||
"%r object has no attribute %r" % (self.__class__.__name__, attr)
|
||||
)
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
self.__setitem__(attr, value)
|
||||
from ...libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
from ...libs.anime_provider.types import Anime, EpisodeStream, SearchResult, Server
|
||||
|
||||
|
||||
def exit_app(*args):
|
||||
import os
|
||||
import shutil
|
||||
class FastAnimeRuntimeState(object):
|
||||
"""A class that manages fastanime runtime during anilist command runtime"""
|
||||
|
||||
provider_current_episode_stream_link: str
|
||||
provider_current_server: "Server"
|
||||
provider_current_server_name: str
|
||||
provider_available_episodes: list[str]
|
||||
provider_current_episode_number: str
|
||||
provider_server_episode_streams: list["EpisodeStream"]
|
||||
provider_anime_title: str
|
||||
provider_anime: "Anime"
|
||||
provider_anime_search_result: "SearchResult"
|
||||
progress_tracking: str = ""
|
||||
|
||||
selected_anime_anilist: "AnilistBaseMediaDataSchema"
|
||||
selected_anime_id_anilist: int
|
||||
selected_anime_title_anilist: str
|
||||
# current_anilist_data: "AnilistDataSchema | AnilistMediaList"
|
||||
anilist_results_data: "Any"
|
||||
|
||||
|
||||
def exit_app(exit_code=0, *args):
|
||||
import sys
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
from ...constants import APP_NAME, ICON_PATH, USER_NAME
|
||||
|
||||
def is_running_in_terminal():
|
||||
console = Console()
|
||||
if not console.is_terminal:
|
||||
try:
|
||||
shutil.get_terminal_size()
|
||||
return (
|
||||
sys.stdin.isatty()
|
||||
and sys.stdout.isatty()
|
||||
and os.getenv("TERM") is not None
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print(
|
||||
"Plyer is not installed; install it for desktop notifications to be enabled"
|
||||
)
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
if not is_running_in_terminal():
|
||||
from plyer import notification
|
||||
|
||||
exit(1)
|
||||
notification.notify(
|
||||
app_name=APP_NAME,
|
||||
app_icon=ICON_PATH,
|
||||
@@ -41,20 +51,6 @@ def exit_app(*args):
|
||||
title="Shutting down",
|
||||
) # pyright:ignore
|
||||
else:
|
||||
from rich import print
|
||||
|
||||
print("Have a good day :smile:", USER_NAME)
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def get_formatted_str(string: str, style):
|
||||
from rich.text import Text
|
||||
|
||||
# Create a Text object with desired style
|
||||
text = Text(string, style="bold red")
|
||||
|
||||
# Convert the Text object to an ANSI string
|
||||
ansi_output = text.__rich_console__(None, None) # pyright:ignore
|
||||
|
||||
# Join the ANSI strings to form the final output
|
||||
"".join(segment.text for segment in ansi_output)
|
||||
console.clear()
|
||||
console.print("Have a good day :smile:", USER_NAME)
|
||||
sys.exit(exit_code)
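# --- illustrative usage, not part of the diff ---
# With the new signature def exit_app(exit_code=0, *args):
#   exit_app()    # clean shutdown, exit code 0
#   exit_app(1)   # shutdown signalling failure; the goodbye message/notification still runs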
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from InquirerPy import inquirer
|
||||
from thefuzz import fuzz
|
||||
|
||||
from ...Utility.data import anime_normalizer
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...libs.anime_provider.types import EpisodeStream
|
||||
|
||||
# Define ANSI escape codes as constants
|
||||
RESET = "\033[0m"
|
||||
@@ -20,15 +19,109 @@ BG_GREEN = "\033[48;2;120;233;12;m"
|
||||
GREEN = "\033[38;2;45;24;45;m"
|
||||
|
||||
|
||||
def sizeof_fmt(num, suffix="B"):
|
||||
def get_requested_quality_or_default_to_first(url, quality):
|
||||
import yt_dlp
|
||||
|
||||
with yt_dlp.YoutubeDL({"quiet": True, "silent": True, "no_warnings": True}) as ydl:
|
||||
m3u8_info = ydl.extract_info(url, False)
|
||||
if not m3u8_info:
|
||||
return
|
||||
|
||||
m3u8_formats = m3u8_info["formats"]
|
||||
quality = int(quality)
|
||||
quality_u = quality - 80
|
||||
quality_l = quality + 80
|
||||
for m3u8_format in m3u8_formats:
|
||||
if m3u8_format["height"] == quality or (
|
||||
m3u8_format["height"] < quality_u and m3u8_format["height"] > quality_l
|
||||
):
|
||||
return m3u8_format["url"]
|
||||
else:
|
||||
return m3u8_formats[0]["url"]
|
||||
|
||||
|
||||
def move_preferred_subtitle_lang_to_top(sub_list, lang_str):
|
||||
"""Moves the dictionary with the given ID to the front of the list.
|
||||
|
||||
Args:
|
||||
sub_list: list of subs
|
||||
lang_str: the sub lang pref
|
||||
|
||||
Returns:
|
||||
The modified list.
|
||||
"""
|
||||
import re
|
||||
|
||||
for i, d in enumerate(sub_list):
|
||||
if re.search(lang_str, d["language"], re.IGNORECASE):
|
||||
sub_list.insert(0, sub_list.pop(i))
|
||||
break
|
||||
return sub_list
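# --- illustrative usage, not part of the diff ---
# subs = [{"language": "Spanish", "url": "s.vtt"}, {"language": "English", "url": "e.vtt"}]
# move_preferred_subtitle_lang_to_top(subs, "eng")
# -> English entry first, since the regex match on "language" is case-insensitive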
|
||||
|
||||
|
||||
def filter_by_quality(quality: str, stream_links: "list[EpisodeStream]", default=True):
|
||||
"""Helper function used to filter a list of EpisodeStream objects to one that has a corresponding quality
|
||||
|
||||
Args:
|
||||
quality: the quality to use
|
||||
stream_links: a list of EpisodeStream objects
|
||||
|
||||
Returns:
|
||||
an EpisodeStream object, or None in case the quality was not found
|
||||
"""
|
||||
for stream_link in stream_links:
|
||||
q = float(quality)
|
||||
Q = float(stream_link["quality"])
|
||||
# some providers have inaccurate/weird/non-standard qualities, e.g. 718 instead of 720
|
||||
if Q <= q + 80 and Q >= q - 80:
|
||||
return stream_link
|
||||
else:
|
||||
if stream_links and default:
|
||||
from rich import print
|
||||
|
||||
try:
|
||||
print("[yellow bold]WARNING Qualities were:[/] ", stream_links)
|
||||
print(
|
||||
"[cyan bold]Using default of quality:[/] ",
|
||||
stream_links[0]["quality"],
|
||||
)
|
||||
return stream_links[0]
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return
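# --- illustrative usage, not part of the diff ---
# streams = [{"quality": "718", "link": "a.m3u8"}, {"quality": "1080", "link": "b.m3u8"}]
# filter_by_quality("720", streams)                 -> the 718 entry (within the +/-80 window)
# filter_by_quality("480", streams, default=False)  -> None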
|
||||
|
||||
|
||||
def format_bytes_to_human(num_of_bytes: float, suffix: str = "B"):
|
||||
"""Helper function usedd to format bytes to human
|
||||
|
||||
Args:
|
||||
num_of_bytes: the number of bytes to format
|
||||
suffix: the suffix to use
|
||||
|
||||
Returns:
|
||||
the formatted byte string
|
||||
"""
|
||||
for unit in ("", "K", "M", "G", "T", "P", "E", "Z"):
|
||||
if abs(num) < 1024.0:
|
||||
return f"{num:3.1f}{unit}{suffix}"
|
||||
num /= 1024.0
|
||||
return f"{num:.1f}Yi{suffix}"
|
||||
if abs(num_of_bytes) < 1024.0:
|
||||
return f"{num_of_bytes:3.1f}{unit}{suffix}"
|
||||
num_of_bytes /= 1024.0
|
||||
return f"{num_of_bytes:.1f}Yi{suffix}"
|
||||
|
||||
|
||||
def get_true_fg(string: str, r: int, g: int, b: int, bold=True) -> str:
|
||||
def get_true_fg(string: str, r: int, g: int, b: int, bold: bool = True) -> str:
|
||||
"""Custom helper function that enables colored text in the terminal
|
||||
|
||||
Args:
|
||||
bold: whether to bolden the text
|
||||
string: string to color
|
||||
r: red
|
||||
g: green
|
||||
b: blue
|
||||
|
||||
Returns:
|
||||
colored string
|
||||
"""
|
||||
# NOTE: Currently only supports terminals that support true color
|
||||
if bold:
|
||||
return f"{BOLD}\033[38;2;{r};{g};{b};m{string}{RESET}"
|
||||
else:
|
||||
@@ -39,11 +132,21 @@ def get_true_bg(string, r: int, g: int, b: int) -> str:
|
||||
return f"\033[48;2;{r};{g};{b};m{string}{RESET}"
|
||||
|
||||
|
||||
def fuzzy_inquirer(prompt: str, choices, **kwargs):
|
||||
def fuzzy_inquirer(choices: list, prompt: str, **kwargs):
|
||||
"""helper function that enables easier interaction with InquirerPy lib
|
||||
|
||||
Args:
|
||||
choices: the choices to prompt
|
||||
prompt: the prompt string to use
|
||||
**kwargs: other options to pass to fuzzy_inquirer
|
||||
|
||||
Returns:
|
||||
a choice
|
||||
"""
|
||||
from click import clear
|
||||
|
||||
clear()
|
||||
action = inquirer.fuzzy(
|
||||
action = inquirer.fuzzy( # pyright:ignore
|
||||
prompt,
|
||||
choices,
|
||||
height="100%",
|
||||
@@ -52,29 +155,3 @@ def fuzzy_inquirer(prompt: str, choices, **kwargs):
|
||||
**kwargs,
|
||||
).execute()
|
||||
return action
|
||||
|
||||
|
||||
def anime_title_percentage_match(
|
||||
possible_user_requested_anime_title: str, title: tuple
|
||||
) -> float:
|
||||
"""Returns the percentage match between the possible title and user title
|
||||
|
||||
Args:
|
||||
possible_user_requested_anime_title (str): an Animdl search result title
|
||||
title (str): the anime title the user wants
|
||||
|
||||
Returns:
|
||||
float: the percentage match
|
||||
"""
|
||||
if normalized_anime_title := anime_normalizer.get(
|
||||
possible_user_requested_anime_title
|
||||
):
|
||||
possible_user_requested_anime_title = normalized_anime_title
|
||||
for key, value in locals().items():
|
||||
logger.info(f"{key}: {value}")
|
||||
# compares both the romaji and english names and takes the highest score
|
||||
percentage_ratio = max(
|
||||
fuzz.ratio(title[0].lower(), possible_user_requested_anime_title.lower()),
|
||||
fuzz.ratio(title[1].lower(), possible_user_requested_anime_title.lower()),
|
||||
)
|
||||
return percentage_ratio
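# --- illustrative usage, not part of the diff ---
# anime_title_percentage_match(
#     "Naruto Shippuden", ("Naruto: Shippuuden", "Naruto Shippuden")
# )
# -> 100, since the english title matches exactly after lowercasing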
|
||||
|
||||
@@ -1,17 +1,16 @@
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from platform import system
|
||||
|
||||
from platformdirs import PlatformDirs
|
||||
import click
|
||||
|
||||
from . import APP_NAME, AUTHOR
|
||||
from . import APP_NAME, __version__
|
||||
|
||||
PLATFORM = system()
|
||||
dirs = PlatformDirs(appname=APP_NAME, appauthor=AUTHOR, ensure_exists=True)
|
||||
|
||||
|
||||
# ---- app deps ----
|
||||
APP_DIR = os.path.abspath(os.path.dirname(__file__))
|
||||
CONFIGS_DIR = os.path.join(APP_DIR, "configs")
|
||||
ASSETS_DIR = os.path.join(APP_DIR, "assets")
|
||||
|
||||
|
||||
@@ -20,21 +19,68 @@ if PLATFORM == "Windows":
|
||||
ICON_PATH = os.path.join(ASSETS_DIR, "logo.ico")
|
||||
else:
|
||||
ICON_PATH = os.path.join(ASSETS_DIR, "logo.png")
|
||||
# PREVIEW_IMAGE = os.path.join(ASSETS_DIR, "preview")
|
||||
|
||||
|
||||
# ----- user configs and data -----
|
||||
APP_DATA_DIR = dirs.user_config_dir
|
||||
if not APP_DATA_DIR:
|
||||
APP_DATA_DIR = dirs.user_data_dir
|
||||
|
||||
S_PLATFORM = sys.platform
|
||||
APP_DATA_DIR = click.get_app_dir(APP_NAME, roaming=False)
|
||||
if S_PLATFORM == "win32":
|
||||
# app data
|
||||
# app_data_dir_base = os.getenv("LOCALAPPDATA")
|
||||
# if not app_data_dir_base:
|
||||
# raise RuntimeError("Could not determine app data dir please report to devs")
|
||||
# APP_DATA_DIR = os.path.join(app_data_dir_base, AUTHOR, APP_NAME)
|
||||
#
|
||||
# cache dir
|
||||
APP_CACHE_DIR = os.path.join(APP_DATA_DIR, "cache")
|
||||
|
||||
# videos dir
|
||||
video_dir_base = os.path.join(Path().home(), "Videos")
|
||||
USER_VIDEOS_DIR = os.path.join(video_dir_base, APP_NAME)
|
||||
|
||||
elif S_PLATFORM == "darwin":
|
||||
# app data
|
||||
# app_data_dir_base = os.path.expanduser("~/Library/Application Support")
|
||||
# APP_DATA_DIR = os.path.join(app_data_dir_base, APP_NAME, __version__)
|
||||
#
|
||||
# cache dir
|
||||
cache_dir_base = os.path.expanduser("~/Library/Caches")
|
||||
APP_CACHE_DIR = os.path.join(cache_dir_base, APP_NAME, __version__)
|
||||
|
||||
# videos dir
|
||||
video_dir_base = os.path.expanduser("~/Movies")
|
||||
USER_VIDEOS_DIR = os.path.join(video_dir_base, APP_NAME)
|
||||
else:
|
||||
# # app data
|
||||
# app_data_dir_base = os.environ.get("XDG_CONFIG_HOME", "")
|
||||
# if not app_data_dir_base.strip():
|
||||
# app_data_dir_base = os.path.expanduser("~/.config")
|
||||
# APP_DATA_DIR = os.path.join(app_data_dir_base, APP_NAME)
|
||||
#
|
||||
# cache dir
|
||||
cache_dir_base = os.environ.get("XDG_CACHE_HOME", "")
|
||||
if not cache_dir_base.strip():
|
||||
cache_dir_base = os.path.expanduser("~/.cache")
|
||||
APP_CACHE_DIR = os.path.join(cache_dir_base, APP_NAME)
|
||||
|
||||
# videos dir
|
||||
video_dir_base = os.environ.get("XDG_VIDEOS_DIR", "")
|
||||
if not video_dir_base.strip():
|
||||
video_dir_base = os.path.expanduser("~/Videos")
|
||||
USER_VIDEOS_DIR = os.path.join(video_dir_base, APP_NAME)
|
||||
|
||||
# ensure paths exist
|
||||
Path(APP_DATA_DIR).mkdir(parents=True, exist_ok=True)
|
||||
Path(APP_CACHE_DIR).mkdir(parents=True, exist_ok=True)
|
||||
Path(USER_VIDEOS_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# useful paths
|
||||
USER_DATA_PATH = os.path.join(APP_DATA_DIR, "user_data.json")
|
||||
USER_WATCH_HISTORY_PATH = os.path.join(APP_DATA_DIR, "watch_history.json")
|
||||
USER_CONFIG_PATH = os.path.join(APP_DATA_DIR, "config.ini")
|
||||
NOTIFIER_LOG_FILE_PATH = os.path.join(APP_DATA_DIR, "notifier.log")
|
||||
|
||||
# cache dir
|
||||
APP_CACHE_DIR = dirs.user_cache_dir
|
||||
|
||||
# video dir
|
||||
USER_VIDEOS_DIR = os.path.join(dirs.user_videos_dir, APP_NAME)
|
||||
LOG_FILE_PATH = os.path.join(APP_DATA_DIR, "fastanime.log")
|
||||
|
||||
|
||||
USER_NAME = os.environ.get("USERNAME", "Anime fun")
|
||||
|
||||
@@ -15,6 +15,7 @@ from .queries_graphql import (
|
||||
delete_list_entry_query,
|
||||
get_logged_in_user_query,
|
||||
get_medialist_item_query,
|
||||
get_user_info,
|
||||
media_list_mutation,
|
||||
media_list_query,
|
||||
most_favourite_query,
|
||||
@@ -29,13 +30,14 @@ from .queries_graphql import (
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .anilist_data_schema import (
|
||||
from .types import (
|
||||
AnilistDataSchema,
|
||||
AnilistMediaLists,
|
||||
AnilistMediaListStatus,
|
||||
AnilistNotifications,
|
||||
AnilistUser,
|
||||
AnilistUser_,
|
||||
AnilistUserData,
|
||||
AnilistViewerData,
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
@@ -77,7 +79,7 @@ class AniListApi:
|
||||
return
|
||||
if not success or not user:
|
||||
return
|
||||
user_info: AnilistUser = user["data"]["Viewer"]
|
||||
user_info: "AnilistUser_" = user["data"]["Viewer"]
|
||||
self.user_id = user_info["id"]
|
||||
return user_info
|
||||
|
||||
@@ -91,7 +93,7 @@ class AniListApi:
|
||||
"""
|
||||
return self._make_authenticated_request(notification_query)
|
||||
|
||||
def update_login_info(self, user: "AnilistUser", token: str):
|
||||
def update_login_info(self, user: "AnilistUser_", token: str):
|
||||
"""method used to login a user enabling authenticated requests
|
||||
|
||||
Args:
|
||||
@@ -103,7 +105,18 @@ class AniListApi:
|
||||
self.session.headers.update(self.headers)
|
||||
self.user_id = user["id"]
|
||||
|
||||
def get_logged_in_user(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
|
||||
def get_user_info(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
|
||||
"""get the details of the user who is currently logged in
|
||||
|
||||
Returns:
|
||||
an anilist user
|
||||
"""
|
||||
|
||||
return self._make_authenticated_request(get_user_info, {"userId": self.user_id})
|
||||
|
||||
def get_logged_in_user(
|
||||
self,
|
||||
) -> tuple[bool, "AnilistViewerData"] | tuple[bool, None]:
|
||||
"""get the details of the user who is currently logged in
|
||||
|
||||
Returns:
|
||||
@@ -126,7 +139,9 @@ class AniListApi:
|
||||
return self._make_authenticated_request(media_list_mutation, variables)
|
||||
|
||||
def get_anime_list(
|
||||
self, status: "AnilistMediaListStatus"
|
||||
self,
|
||||
status: "AnilistMediaListStatus",
|
||||
type="ANIME",
|
||||
) -> tuple[bool, "AnilistMediaLists"] | tuple[bool, None]:
|
||||
"""gets an anime list from your media list given the list status
|
||||
|
||||
@@ -136,7 +151,7 @@ class AniListApi:
|
||||
Returns:
|
||||
a media list
|
||||
"""
|
||||
variables = {"status": status, "userId": self.user_id}
|
||||
variables = {"status": status, "userId": self.user_id, "type": type}
|
||||
return self._make_authenticated_request(media_list_query, variables)
|
||||
|
||||
def get_medialist_entry(
|
||||
@@ -307,9 +322,15 @@ class AniListApi:
|
||||
status_not_in: list[str] | None = None,
|
||||
endDate_greater: int | None = None,
|
||||
endDate_lesser: int | None = None,
|
||||
start_greater: int | None = None,
|
||||
start_lesser: int | None = None,
|
||||
startDate_greater: int | None = None,
|
||||
startDate_lesser: int | None = None,
|
||||
startDate: str | None = None,
|
||||
seasonYear: str | None = None,
|
||||
page: int | None = None,
|
||||
season: str | None = None,
|
||||
format_in: list[str] | None = None,
|
||||
on_list: bool | None = None,
|
||||
type="ANIME",
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
@@ -317,7 +338,7 @@ class AniListApi:
|
||||
"""
|
||||
variables = {}
|
||||
for key, val in list(locals().items())[1:]:
|
||||
if val is not None and key not in ["variables"]:
|
||||
if (val or val is False) and key not in ["variables"]:
|
||||
variables[key] = val
|
||||
search_results = self.get_data(search_query, variables=variables)
|
||||
return search_results
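# --- illustrative note, not part of the diff ---
# With the filter above, an explicit False now survives into the request while
# None-valued arguments are still dropped, e.g. (names taken from the hunk above):
#   locals    -> {"status": None, "on_list": False, "type": "ANIME"}
#   variables -> {"on_list": False, "type": "ANIME"}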
|
||||
@@ -329,65 +350,74 @@ class AniListApi:
|
||||
variables = {"id": id}
|
||||
return self.get_data(anime_query, variables)
|
||||
|
||||
def get_trending(self, *_, **kwargs):
|
||||
def get_trending(self, type="ANIME", *_, **kwargs):
|
||||
"""
|
||||
Gets the currently trending anime
|
||||
"""
|
||||
trending = self.get_data(trending_query)
|
||||
variables = {"type": type}
|
||||
trending = self.get_data(trending_query, variables)
|
||||
return trending
|
||||
|
||||
def get_most_favourite(self, *_, **kwargs):
|
||||
def get_most_favourite(self, type="ANIME", *_, **kwargs):
|
||||
"""
|
||||
Gets the most favoured anime on anilist
|
||||
"""
|
||||
most_favourite = self.get_data(most_favourite_query)
|
||||
variables = {"type": type}
|
||||
most_favourite = self.get_data(most_favourite_query, variables)
|
||||
return most_favourite
|
||||
|
||||
def get_most_scored(self, *_, **kwargs):
|
||||
def get_most_scored(self, type="ANIME", *_, **kwargs):
|
||||
"""
|
||||
Gets most scored anime on anilist
|
||||
"""
|
||||
most_scored = self.get_data(most_scored_query)
|
||||
variables = {"type": type}
|
||||
most_scored = self.get_data(most_scored_query, variables)
|
||||
return most_scored
|
||||
|
||||
def get_most_recently_updated(self, *_, **kwargs):
|
||||
def get_most_recently_updated(self, type="ANIME", *_, **kwargs):
|
||||
"""
|
||||
Gets most recently updated anime from anilist
|
||||
"""
|
||||
most_recently_updated = self.get_data(most_recently_updated_query)
|
||||
variables = {"type": type}
|
||||
most_recently_updated = self.get_data(most_recently_updated_query, variables)
|
||||
return most_recently_updated
|
||||
|
||||
def get_most_popular(self):
|
||||
def get_most_popular(
|
||||
self,
|
||||
type="ANIME",
|
||||
):
|
||||
"""
|
||||
Gets most popular anime on anilist
|
||||
"""
|
||||
most_popular = self.get_data(most_popular_query)
|
||||
variables = {"type": type}
|
||||
most_popular = self.get_data(most_popular_query, variables)
|
||||
return most_popular
|
||||
|
||||
def get_upcoming_anime(self, page: int = 1, *_, **kwargs):
|
||||
def get_upcoming_anime(self, type="ANIME", page: int = 1, *_, **kwargs):
|
||||
"""
|
||||
Gets upcoming anime from anilist
|
||||
"""
|
||||
variables = {"page": page}
|
||||
variables = {"page": page, "type": type}
|
||||
upcoming_anime = self.get_data(upcoming_anime_query, variables)
|
||||
return upcoming_anime
|
||||
|
||||
# NOTE: The following methods will probably be scrapped soon
|
||||
def get_recommended_anime_for(self, id: int, *_, **kwargs):
|
||||
recommended_anime = self.get_data(recommended_query)
|
||||
def get_recommended_anime_for(self, id: int, type="ANIME", *_, **kwargs):
|
||||
variables = {"type": type}
|
||||
recommended_anime = self.get_data(recommended_query, variables)
|
||||
return recommended_anime
|
||||
|
||||
def get_charcters_of(self, id: int, *_, **kwargs):
|
||||
def get_charcters_of(self, id: int, type="ANIME", *_, **kwargs):
|
||||
variables = {"id": id}
|
||||
characters = self.get_data(anime_characters_query, variables)
|
||||
return characters
|
||||
|
||||
def get_related_anime_for(self, id: int, *_, **kwargs):
|
||||
def get_related_anime_for(self, id: int, type="ANIME", *_, **kwargs):
|
||||
variables = {"id": id}
|
||||
related_anime = self.get_data(anime_relations_query, variables)
|
||||
return related_anime
|
||||
|
||||
def get_airing_schedule_for(self, id: int, *_, **kwargs):
|
||||
def get_airing_schedule_for(self, id: int, type="ANIME", *_, **kwargs):
|
||||
variables = {"id": id}
|
||||
airing_schedule = self.get_data(airing_schedule_query, variables)
|
||||
return airing_schedule
|
||||
|
||||
@@ -3,7 +3,6 @@ This module contains all the preset queries for the sake of neatness and convini
|
||||
Mostly for internal usage
|
||||
"""
|
||||
|
||||
# TODO: Format the queries
|
||||
mark_as_read_mutation = """
|
||||
mutation{
|
||||
UpdateUser{
|
||||
@@ -17,7 +16,6 @@ query($id:Int){
|
||||
pageInfo{
|
||||
total
|
||||
}
|
||||
|
||||
reviews(mediaId:$id){
|
||||
summary
|
||||
user{
|
||||
@@ -35,50 +33,48 @@ query($id:Int){
|
||||
|
||||
"""
|
||||
notification_query = """
|
||||
query{
|
||||
Page(perPage:5){
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
notifications(resetNotificationCount:true,type:AIRING) {
|
||||
... on AiringNotification {
|
||||
id
|
||||
type
|
||||
episode
|
||||
contexts
|
||||
createdAt
|
||||
media {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
medium
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
query {
|
||||
Page(perPage: 5) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
notifications(resetNotificationCount: true, type: AIRING) {
|
||||
... on AiringNotification {
|
||||
id
|
||||
type
|
||||
episode
|
||||
contexts
|
||||
createdAt
|
||||
media {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage {
|
||||
medium
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
"""
|
||||
|
||||
get_medialist_item_query = """
|
||||
query($mediaId:Int){
|
||||
MediaList(mediaId:$mediaId){
|
||||
id
|
||||
}
|
||||
query ($mediaId: Int) {
|
||||
MediaList(mediaId: $mediaId) {
|
||||
id
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
delete_list_entry_query = """
|
||||
mutation($id:Int){
|
||||
DeleteMediaListEntry(id:$id){
|
||||
deleted
|
||||
|
||||
}
|
||||
mutation ($id: Int) {
|
||||
DeleteMediaListEntry(id: $id) {
|
||||
deleted
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -97,9 +93,85 @@ query{
|
||||
}
|
||||
"""
|
||||
|
||||
get_user_info = """
|
||||
query ($userId: Int) {
|
||||
User(id: $userId) {
|
||||
name
|
||||
about
|
||||
avatar {
|
||||
large
|
||||
medium
|
||||
}
|
||||
bannerImage
|
||||
statistics {
|
||||
anime {
|
||||
count
|
||||
minutesWatched
|
||||
episodesWatched
|
||||
genres {
|
||||
count
|
||||
meanScore
|
||||
genre
|
||||
}
|
||||
tags {
|
||||
tag {
|
||||
id
|
||||
}
|
||||
count
|
||||
meanScore
|
||||
}
|
||||
}
|
||||
manga {
|
||||
count
|
||||
meanScore
|
||||
chaptersRead
|
||||
volumesRead
|
||||
tags {
|
||||
count
|
||||
meanScore
|
||||
}
|
||||
genres {
|
||||
count
|
||||
meanScore
|
||||
}
|
||||
}
|
||||
}
|
||||
favourites {
|
||||
anime {
|
||||
nodes {
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
}
|
||||
manga {
|
||||
nodes {
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
media_list_mutation = """
|
||||
mutation($mediaId:Int,$scoreRaw:Int,$repeat:Int,$progress:Int,$status:MediaListStatus){
|
||||
SaveMediaListEntry(mediaId:$mediaId,scoreRaw:$scoreRaw,progress:$progress,repeat:$repeat,status:$status){
|
||||
mutation (
|
||||
$mediaId: Int
|
||||
$scoreRaw: Int
|
||||
$repeat: Int
|
||||
$progress: Int
|
||||
$status: MediaListStatus
|
||||
) {
|
||||
SaveMediaListEntry(
|
||||
mediaId: $mediaId
|
||||
scoreRaw: $scoreRaw
|
||||
progress: $progress
|
||||
repeat: $repeat
|
||||
status: $status
|
||||
) {
|
||||
id
|
||||
status
|
||||
mediaId
|
||||
@@ -116,21 +188,19 @@ mutation($mediaId:Int,$scoreRaw:Int,$repeat:Int,$progress:Int,$status:MediaListS
|
||||
month
|
||||
day
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
media_list_query = """
|
||||
query ($userId: Int, $status: MediaListStatus) {
|
||||
query ($userId: Int, $status: MediaListStatus, $type: MediaType) {
|
||||
Page {
|
||||
pageInfo {
|
||||
currentPage
|
||||
total
|
||||
currentPage
|
||||
total
|
||||
}
|
||||
mediaList(userId: $userId, status: $status, type: ANIME) {
|
||||
mediaList(userId: $userId, status: $status, type: $type) {
|
||||
mediaId
|
||||
|
||||
media {
|
||||
id
|
||||
idMal
|
||||
@@ -147,6 +217,10 @@ query ($userId: Int, $status: MediaListStatus) {
|
||||
id
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
averageScore
|
||||
episodes
|
||||
@@ -172,9 +246,10 @@ query ($userId: Int, $status: MediaListStatus) {
|
||||
}
|
||||
status
|
||||
description
|
||||
mediaListEntry{
|
||||
id
|
||||
progress
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
@@ -198,7 +273,6 @@ query ($userId: Int, $status: MediaListStatus) {
|
||||
day
|
||||
}
|
||||
createdAt
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -220,70 +294,83 @@ $popularity_greater:Int,\
|
||||
$popularity_lesser:Int,\
|
||||
$averageScore_greater:Int,\
|
||||
$averageScore_lesser:Int,\
|
||||
$seasonYear:Int,\
|
||||
$startDate_greater:FuzzyDateInt,\
|
||||
$startDate_lesser:FuzzyDateInt,\
|
||||
$startDate:FuzzyDateInt,\
|
||||
$endDate_greater:FuzzyDateInt,\
|
||||
$endDate_lesser:FuzzyDateInt\
|
||||
$endDate_lesser:FuzzyDateInt,\
|
||||
$format_in:[MediaFormat],\
|
||||
$type:MediaType\
|
||||
$season:MediaSeason\
|
||||
$on_list:Boolean\
|
||||
"
|
||||
# FuzzyDateInt = (yyyymmdd)
|
||||
# MediaStatus = (FINISHED,RELEASING,NOT_YET_RELEASED,CANCELLED,HIATUS)
|
||||
|
||||
search_query = (
|
||||
"""
|
||||
query($query:String,%s){
|
||||
Page(perPage:30,page:$page){
|
||||
pageInfo{
|
||||
Page(perPage: 50, page: $page) {
|
||||
pageInfo {
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(
|
||||
search:$query,
|
||||
id_in:$id_in,
|
||||
genre_in:$genre_in,
|
||||
genre_not_in:$genre_not_in,
|
||||
tag_in:$tag_in,
|
||||
tag_not_in:$tag_not_in,
|
||||
status_in:$status_in,
|
||||
status:$status,
|
||||
status_not_in:$status_not_in,
|
||||
popularity_greater:$popularity_greater,
|
||||
popularity_lesser:$popularity_lesser,
|
||||
averageScore_greater:$averageScore_greater,
|
||||
averageScore_lesser:$averageScore_lesser,
|
||||
startDate_greater:$startDate_greater,
|
||||
startDate_lesser:$startDate_lesser,
|
||||
endDate_greater:$endDate_greater,
|
||||
endDate_lesser:$endDate_lesser,
|
||||
sort:$sort,
|
||||
type:ANIME
|
||||
)
|
||||
{
|
||||
search: $query
|
||||
id_in: $id_in
|
||||
genre_in: $genre_in
|
||||
genre_not_in: $genre_not_in
|
||||
tag_in: $tag_in
|
||||
tag_not_in: $tag_not_in
|
||||
status_in: $status_in
|
||||
status: $status
|
||||
startDate: $startDate
|
||||
status_not_in: $status_not_in
|
||||
popularity_greater: $popularity_greater
|
||||
popularity_lesser: $popularity_lesser
|
||||
averageScore_greater: $averageScore_greater
|
||||
averageScore_lesser: $averageScore_lesser
|
||||
startDate_greater: $startDate_greater
|
||||
startDate_lesser: $startDate_lesser
|
||||
endDate_greater: $endDate_greater
|
||||
endDate_lesser: $endDate_lesser
|
||||
format_in: $format_in
|
||||
sort: $sort
|
||||
season: $season
|
||||
seasonYear: $seasonYear
|
||||
type: $type
|
||||
onList:$on_list
|
||||
) {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
trailer {
|
||||
site
|
||||
id
|
||||
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
averageScore
|
||||
episodes
|
||||
genres
|
||||
studios{
|
||||
nodes{
|
||||
studios {
|
||||
nodes {
|
||||
name
|
||||
isAnimationStudio
|
||||
}
|
||||
@@ -316,17 +403,16 @@ query($query:String,%s){
|
||||
)
|
||||
|
||||
trending_query = """
|
||||
query{
|
||||
Page(perPage:15){
|
||||
|
||||
media(sort:TRENDING_DESC,type:ANIME,genre_not_in:["hentai"]){
|
||||
query ($type: MediaType) {
|
||||
Page(perPage: 15) {
|
||||
media(sort: TRENDING_DESC, type: $type, genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
@@ -335,6 +421,10 @@ query{
|
||||
id
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
averageScore
|
||||
genres
|
||||
@@ -354,10 +444,11 @@ query{
|
||||
month
|
||||
day
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
endDate {
|
||||
year
|
||||
month
|
||||
@@ -376,29 +467,37 @@ query{
|
||||
|
||||
# mosts
|
||||
most_favourite_query = """
|
||||
query{
|
||||
Page(perPage:15){
|
||||
media(sort:FAVOURITES_DESC,type:ANIME,genre_not_in:["hentai"]){
|
||||
query ($type: MediaType) {
|
||||
Page(perPage: 15) {
|
||||
media(sort: FAVOURITES_DESC, type: $type, genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
trailer {
|
||||
site
|
||||
id
|
||||
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
averageScore
|
||||
episodes
|
||||
@@ -435,29 +534,33 @@ query{
|
||||
"""
|
||||
|
||||
most_scored_query = """
|
||||
query{
|
||||
Page(perPage:15){
|
||||
media(sort:SCORE_DESC,type:ANIME,genre_not_in:["hentai"]){
|
||||
query ($type: MediaType) {
|
||||
Page(perPage: 15) {
|
||||
media(sort: SCORE_DESC, type: $type, genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
trailer {
|
||||
site
|
||||
id
|
||||
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
episodes
|
||||
favourites
|
||||
averageScore
|
||||
@@ -494,34 +597,38 @@ query{
|
||||
"""
|
||||
|
||||
most_popular_query = """
|
||||
query{
|
||||
Page(perPage:15){
|
||||
media(sort:POPULARITY_DESC,type:ANIME,genre_not_in:["hentai"]){
|
||||
query ($type: MediaType) {
|
||||
Page(perPage: 15) {
|
||||
media(sort: POPULARITY_DESC, type: $type, genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
trailer {
|
||||
site
|
||||
id
|
||||
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
averageScore
|
||||
description
|
||||
episodes
|
||||
genres
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
studios {
|
||||
nodes {
|
||||
name
|
||||
@@ -553,16 +660,22 @@ query{
|
||||
"""
|
||||
|
||||
most_recently_updated_query = """
|
||||
query{
|
||||
Page(perPage:15){
|
||||
media(sort:UPDATED_AT_DESC,type:ANIME,averageScore_greater:50,genre_not_in:["hentai"],status:RELEASING){
|
||||
query ($type: MediaType) {
|
||||
Page(perPage: 15) {
|
||||
media(
|
||||
sort: UPDATED_AT_DESC
|
||||
type: $type
|
||||
averageScore_greater: 50
|
||||
genre_not_in: ["hentai"]
|
||||
status: RELEASING
|
||||
) {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
@@ -570,11 +683,17 @@ query{
|
||||
site
|
||||
id
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
|
||||
favourites
|
||||
averageScore
|
||||
description
|
||||
@@ -611,37 +730,41 @@ query{
|
||||
"""
|
||||
|
||||
recommended_query = """
|
||||
query {
|
||||
Page(perPage:15) {
|
||||
media( type: ANIME,genre_not_in:["hentai"]) {
|
||||
recommendations(sort:RATING_DESC){
|
||||
nodes{
|
||||
media{
|
||||
query ($type: MediaType) {
|
||||
Page(perPage: 15) {
|
||||
media(type: $type, genre_not_in: ["hentai"]) {
|
||||
recommendations(sort: RATING_DESC) {
|
||||
nodes {
|
||||
media {
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
idMal
|
||||
title {
|
||||
english
|
||||
romaji
|
||||
native
|
||||
}
|
||||
coverImage{
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
mediaListEntry{
|
||||
id
|
||||
progress
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
description
|
||||
episodes
|
||||
trailer{
|
||||
trailer {
|
||||
site
|
||||
id
|
||||
}
|
||||
|
||||
genres
|
||||
averageScore
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
tags {
|
||||
name
|
||||
@@ -671,9 +794,9 @@ query {
|
||||
"""
|
||||
|
||||
anime_characters_query = """
|
||||
query($id:Int){
|
||||
query ($id: Int, $type: MediaType) {
|
||||
Page {
|
||||
media(id:$id, type: ANIME) {
|
||||
media(id: $id, type: $type) {
|
||||
characters {
|
||||
nodes {
|
||||
name {
|
||||
@@ -706,13 +829,18 @@ query($id:Int){
|
||||
|
||||
|
||||
anime_relations_query = """
|
||||
query ($id: Int) {
|
||||
query ($id: Int, $type: MediaType) {
|
||||
Page(perPage: 20) {
|
||||
media(id: $id, sort: POPULARITY_DESC, type: ANIME,genre_not_in:["hentai"]) {
|
||||
media(
|
||||
id: $id
|
||||
sort: POPULARITY_DESC
|
||||
type: $type
|
||||
genre_not_in: ["hentai"]
|
||||
) {
|
||||
relations {
|
||||
nodes {
|
||||
id
|
||||
idMal
|
||||
idMal
|
||||
title {
|
||||
english
|
||||
romaji
|
||||
@@ -722,10 +850,11 @@ query ($id: Int) {
|
||||
medium
|
||||
large
|
||||
}
|
||||
mediaListEntry{
|
||||
id
|
||||
progress
|
||||
}
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
description
|
||||
episodes
|
||||
trailer {
|
||||
@@ -735,26 +864,30 @@ query ($id: Int) {
|
||||
genres
|
||||
averageScore
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
tags {
|
||||
name
|
||||
}
|
||||
startDate {
|
||||
year
|
||||
month
|
||||
day
|
||||
}
|
||||
endDate {
|
||||
year
|
||||
month
|
||||
day
|
||||
}
|
||||
status
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
year
|
||||
month
|
||||
day
|
||||
}
|
||||
endDate {
|
||||
year
|
||||
month
|
||||
day
|
||||
}
|
||||
status
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -763,9 +896,9 @@ query ($id: Int) {
|
||||
"""
|
||||
|
||||
airing_schedule_query = """
|
||||
query ($id: Int) {
|
||||
query ($id: Int,$type:MediaType) {
|
||||
Page {
|
||||
media(id: $id, sort: POPULARITY_DESC, type: ANIME) {
|
||||
media(id: $id, sort: POPULARITY_DESC, type: $type) {
|
||||
airingSchedule(notYetAired:true){
|
||||
nodes{
|
||||
airingAt
|
||||
@@ -780,7 +913,7 @@ query ($id: Int) {
|
||||
"""
|
||||
|
||||
upcoming_anime_query = """
|
||||
query ($page: Int) {
|
||||
query ($page: Int, $type: MediaType) {
|
||||
Page(page: $page) {
|
||||
pageInfo {
|
||||
total
|
||||
@@ -788,9 +921,14 @@ query ($page: Int) {
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(type: ANIME, status: NOT_YET_RELEASED,sort:POPULARITY_DESC,genre_not_in:["hentai"]) {
|
||||
media(
|
||||
type: $type
|
||||
status: NOT_YET_RELEASED
|
||||
sort: POPULARITY_DESC
|
||||
genre_not_in: ["hentai"]
|
||||
) {
|
||||
id
|
||||
idMal
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
@@ -803,11 +941,16 @@ query ($page: Int) {
|
||||
site
|
||||
id
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
favourites
|
||||
averageScore
|
||||
genres
|
||||
@@ -844,19 +987,20 @@ query ($page: Int) {
|
||||
"""
|
||||
|
||||
anime_query = """
|
||||
query($id:Int){
|
||||
Page{
|
||||
media(id:$id) {
|
||||
query ($id: Int) {
|
||||
Page {
|
||||
media(id: $id) {
|
||||
id
|
||||
idMal
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
mediaListEntry{
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
progress
|
||||
}
|
||||
}
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
@@ -870,7 +1014,6 @@ query($id:Int){
|
||||
node {
|
||||
name {
|
||||
full
|
||||
|
||||
}
|
||||
gender
|
||||
dateOfBirth {
|
||||
@@ -923,6 +1066,11 @@ query($id:Int){
|
||||
countryOfOrigin
|
||||
averageScore
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
title
|
||||
thumbnail
|
||||
}
|
||||
|
||||
favourites
|
||||
source
|
||||
hashtag
|
||||
|
||||
@@ -19,7 +19,7 @@ class AnilistImage(TypedDict):
|
||||
large: str
|
||||
|
||||
|
||||
class AnilistUser(TypedDict):
|
||||
class AnilistUser_(TypedDict):
|
||||
id: int
|
||||
name: str
|
||||
bannerImage: str | None
|
||||
@@ -28,11 +28,26 @@ class AnilistUser(TypedDict):
|
||||
|
||||
|
||||
class AnilistViewer(TypedDict):
|
||||
Viewer: AnilistUser
|
||||
Viewer: AnilistUser_
|
||||
|
||||
|
||||
class AnilistViewerData(TypedDict):
|
||||
data: AnilistViewer
|
||||
|
||||
|
||||
class AnilistUser(TypedDict):
|
||||
name: str
|
||||
about: str | None
|
||||
avatar: AnilistImage
|
||||
bannerImage: str | None
|
||||
|
||||
|
||||
class AnilistUserInfo(TypedDict):
|
||||
User: AnilistUser
|
||||
|
||||
|
||||
class AnilistUserData(TypedDict):
|
||||
data: AnilistViewer
|
||||
data: AnilistUserInfo
|
||||
|
||||
|
||||
class AnilistMediaTrailer(TypedDict):
|
||||
@@ -69,7 +84,7 @@ class AnilistMediaNextAiringEpisode(TypedDict):
|
||||
|
||||
class AnilistReview(TypedDict):
|
||||
summary: str
|
||||
user: AnilistUser
|
||||
user: AnilistUser_
|
||||
|
||||
|
||||
class AnilistReviewNodes(TypedDict):
|
||||
@@ -114,16 +129,17 @@ class AnilistCharactersEdges(TypedDict):
|
||||
edges: list[AnilistCharactersEdge]
|
||||
|
||||
|
||||
class AnilistMediaList_(TypedDict):
|
||||
id: int
|
||||
progress: int
|
||||
|
||||
|
||||
AnilistMediaListStatus = Literal[
|
||||
"CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"
|
||||
]
|
||||
|
||||
|
||||
class AnilistMediaList_(TypedDict):
|
||||
id: int
|
||||
progress: int
|
||||
status: AnilistMediaListStatus
|
||||
|
||||
|
||||
class AnilistMediaListProperties(TypedDict):
|
||||
status: AnilistMediaListStatus
|
||||
score: float
|
||||
@@ -136,6 +152,11 @@ class AnilistMediaListProperties(TypedDict):
|
||||
hiddenFromStatusLists: bool
|
||||
|
||||
|
||||
class StreamingEpisode(TypedDict):
|
||||
title: str
|
||||
thumbnail: str
|
||||
|
||||
|
||||
class AnilistBaseMediaDataSchema(TypedDict):
|
||||
"""
|
||||
This convenience class is used to type the received Anilist data to enhance the dev experience
|
||||
@@ -159,6 +180,8 @@ class AnilistBaseMediaDataSchema(TypedDict):
|
||||
status: str
|
||||
nextAiringEpisode: AnilistMediaNextAiringEpisode
|
||||
season: str
|
||||
streamingEpisodes: list[StreamingEpisode]
|
||||
chapters: int
|
||||
seasonYear: int
|
||||
duration: int
|
||||
synonyms: list[str]
|
||||
@@ -1,5 +1,11 @@
from .allanime.constants import SERVERS_AVAILABLE as ALLANIME_SERVERS
from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHE_SERVERS
from .hianime.constants import SERVERS_AVAILABLE as HIANIME_SERVERS

anime_sources = {
    "allanime": "api.AllAnimeAPI",
    "animepahe": "api.AnimePaheApi",
    "aniwatch": "api.AniWatchApi",
    "hianime": "api.HiAnimeApi",
    "nyaa": "api.NyaaApi",
}
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHE_SERVERS, *HIANIME_SERVERS]

@@ -7,24 +7,14 @@ import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from requests.exceptions import Timeout
|
||||
|
||||
from ...anime_provider.base_provider import AnimeProvider
|
||||
from ..utils import decode_hex_string
|
||||
from .constants import (
|
||||
ALLANIME_API_ENDPOINT,
|
||||
ALLANIME_BASE,
|
||||
ALLANIME_REFERER,
|
||||
USER_AGENT,
|
||||
)
|
||||
from ..decorators import debug_provider
|
||||
from ..utils import give_random_quality, one_digit_symmetric_xor
|
||||
from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
|
||||
from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL
|
||||
from .normalizer import normalize_anime, normalize_search_results
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterator
|
||||
|
||||
from ....libs.anime_provider.allanime.types import AllAnimeEpisode
|
||||
from ....libs.anime_provider.types import Anime, Server
|
||||
from .types import AllAnimeEpisode
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -36,7 +26,11 @@ class AllAnimeAPI(AnimeProvider):
|
||||
Provides a fast and effective interface to the AllAnime site.
|
||||
"""
|
||||
|
||||
PROVIDER = "allanime"
|
||||
api_endpoint = ALLANIME_API_ENDPOINT
|
||||
HEADERS = {
|
||||
"Referer": ALLANIME_REFERER,
|
||||
}
|
||||
|
||||
def _fetch_gql(self, query: str, variables: dict):
|
||||
"""main abstraction over all requests to the allanime api
|
||||
@@ -48,30 +42,21 @@ class AllAnimeAPI(AnimeProvider):
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
try:
|
||||
response = self.session.get(
|
||||
self.api_endpoint,
|
||||
params={
|
||||
"variables": json.dumps(variables),
|
||||
"query": query,
|
||||
},
|
||||
headers={"Referer": ALLANIME_REFERER, "User-Agent": USER_AGENT},
|
||||
timeout=10,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
return response.json()["data"]
|
||||
else:
|
||||
logger.error("allanime(ERROR): ", response.text)
|
||||
return {}
|
||||
except Timeout:
|
||||
logger.error(
|
||||
"allanime(Error):Timeout exceeded this could mean allanime is down or you have lost internet connection"
|
||||
)
|
||||
return {}
|
||||
except Exception as e:
|
||||
logger.error(f"allanime:Error: {e}")
|
||||
response = self.session.get(
|
||||
self.api_endpoint,
|
||||
params={
|
||||
"variables": json.dumps(variables),
|
||||
"query": query,
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
if response.ok:
|
||||
return response.json()["data"]
|
||||
else:
|
||||
logger.error("[ALLANIME-ERROR]: ", response.text)
|
||||
return {}
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(
|
||||
self,
|
||||
user_query: str,
|
||||
@@ -104,13 +89,25 @@ class AllAnimeAPI(AnimeProvider):
|
||||
"translationtype": translationtype,
|
||||
"countryorigin": countryorigin,
|
||||
}
|
||||
try:
|
||||
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
|
||||
return normalize_search_results(search_results) # pyright:ignore
|
||||
except Exception as e:
|
||||
logger.error(f"FA(AllAnime): {e}")
|
||||
return {}
|
||||
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
|
||||
page_info = search_results["shows"]["pageInfo"]
|
||||
results = []
|
||||
for result in search_results["shows"]["edges"]:
|
||||
normalized_result = {
|
||||
"id": result["_id"],
|
||||
"title": result["name"],
|
||||
"type": result["__typename"],
|
||||
"availableEpisodes": result["availableEpisodes"],
|
||||
}
|
||||
results.append(normalized_result)
|
||||
|
||||
normalized_search_results = {
|
||||
"pageInfo": page_info,
|
||||
"results": results,
|
||||
}
|
||||
return normalized_search_results
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, allanime_show_id: str):
|
||||
"""get an anime details given its id
|
||||
|
||||
@@ -121,15 +118,23 @@ class AllAnimeAPI(AnimeProvider):
|
||||
[TODO:return]
|
||||
"""
|
||||
variables = {"showId": allanime_show_id}
|
||||
try:
|
||||
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
|
||||
return normalize_anime(anime["show"])
|
||||
except Exception as e:
|
||||
logger.error(f"FA(AllAnime): {e}")
|
||||
return None
|
||||
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
|
||||
id: str = anime["show"]["_id"]
|
||||
title: str = anime["show"]["name"]
|
||||
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
|
||||
self.store.set(allanime_show_id, "anime_info", {"title": title})
|
||||
type = anime.get("__typename")
|
||||
normalized_anime = {
|
||||
"id": id,
|
||||
"title": title,
|
||||
"availableEpisodesDetail": availableEpisodesDetail,
|
||||
"type": type,
|
||||
}
|
||||
return normalized_anime
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def _get_anime_episode(
|
||||
self, allanime_show_id: str, episode_string: str, translation_type: str = "sub"
|
||||
self, allanime_show_id: str, episode, translation_type: str = "sub"
|
||||
) -> "AllAnimeEpisode | dict":
|
||||
"""get the episode details and sources info
|
||||
|
||||
@@ -144,18 +149,15 @@ class AllAnimeAPI(AnimeProvider):
|
||||
variables = {
|
||||
"showId": allanime_show_id,
|
||||
"translationType": translation_type,
|
||||
"episodeString": episode_string,
|
||||
"episodeString": episode,
|
||||
}
|
||||
try:
|
||||
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
|
||||
return episode["episode"]
|
||||
except Exception as e:
|
||||
logger.error(f"FA(AllAnime): {e}")
|
||||
return {}
|
||||
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
|
||||
return episode["episode"]
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(
|
||||
self, anime: "Anime", episode_number: str, translation_type="sub"
|
||||
) -> "Iterator[Server] | None":
|
||||
self, anime_id, episode_number: str, translation_type="sub"
|
||||
):
|
||||
"""get the streams of an episode
|
||||
|
||||
Args:
|
||||
@@ -166,7 +168,10 @@ class AllAnimeAPI(AnimeProvider):
|
||||
Yields:
|
||||
[TODO:description]
|
||||
"""
|
||||
anime_id = anime["id"]
|
||||
|
||||
anime_title = (self.store.get(anime_id, "anime_info", "") or {"title": ""})[
|
||||
"title"
|
||||
]
|
||||
allanime_episode = self._get_anime_episode(
|
||||
anime_id, episode_number, translation_type
|
||||
)
|
||||
@@ -174,177 +179,117 @@ class AllAnimeAPI(AnimeProvider):
|
||||
return []
|
||||
|
||||
embeds = allanime_episode["sourceUrls"]
|
||||
try:
|
||||
for embed in embeds:
|
||||
try:
|
||||
# filter the working streams no need to get all since the others are mostly hsl
|
||||
# TODO: should i just get all the servers and handle the hsl??
|
||||
if embed.get("sourceName", "") not in (
|
||||
"Sak",
|
||||
"Kir",
|
||||
"S-mp4",
|
||||
"Luf-mp4",
|
||||
"Default",
|
||||
):
|
||||
continue
|
||||
url = embed.get("sourceUrl")
|
||||
|
||||
if not url:
|
||||
continue
|
||||
if url.startswith("--"):
|
||||
url = url[2:]
|
||||
@debug_provider(self.PROVIDER.upper())
|
||||
def _get_server(embed):
|
||||
# filter to the working streams; no need to get all since the others are mostly hls
|
||||
# TODO: should I just get all the servers and handle the hls?
|
||||
if embed.get("sourceName", "") not in (
|
||||
# priorities based on death note
|
||||
"Sak", # 7
|
||||
"S-mp4", # 7.9
|
||||
"Luf-mp4", # 7.7
|
||||
"Default", # 8.5
|
||||
"Yt-mp4", # 7.9
|
||||
"Kir", # NA
|
||||
# "Vid-mp4" # 4
|
||||
# "Ok", # 3.5
|
||||
# "Ss-Hls", # 5.5
|
||||
# "Mp4", # 4
|
||||
):
|
||||
return
|
||||
url = embed.get("sourceUrl")
|
||||
#
|
||||
if not url:
|
||||
return
|
||||
if url.startswith("--"):
|
||||
url = url[2:]
|
||||
url = one_digit_symmetric_xor(56, url)
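# Hedged sketch (not part of this diff): one_digit_symmetric_xor is imported
# from ../utils.py and its definition is not shown here. Based on the call
# site above, a plausible implementation XORs each hex-encoded byte of the
# "--"-prefixed sourceUrl with the one-byte key (56) to recover the ASCII path:
#
#     def one_digit_symmetric_xor(key: int, encoded: str) -> str:
#         return "".join(
#             chr(int(encoded[i:i + 2], 16) ^ key)
#             for i in range(0, len(encoded), 2)
#         )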
|
||||
|
||||
# get the stream url for an episode of the defined source names
|
||||
parsed_url = decode_hex_string(url)
|
||||
embed_url = f"https://{ALLANIME_BASE}{parsed_url.replace('clock', 'clock.json')}"
|
||||
resp = self.session.get(
|
||||
embed_url,
|
||||
headers={
|
||||
"Referer": ALLANIME_REFERER,
|
||||
"User-Agent": USER_AGENT,
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
if resp.status_code == 200:
|
||||
match embed["sourceName"]:
|
||||
case "Luf-mp4":
|
||||
logger.debug("allanime:Found streams from gogoanime")
|
||||
yield {
|
||||
"server": "gogoanime",
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": resp.json()["links"],
|
||||
} # pyright:ignore
|
||||
case "Kir":
|
||||
logger.debug("allanime:Found streams from wetransfer")
|
||||
yield {
|
||||
"server": "wetransfer",
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": resp.json()["links"],
|
||||
} # pyright:ignore
|
||||
case "S-mp4":
|
||||
logger.debug("allanime:Found streams from sharepoint")
|
||||
yield {
|
||||
"server": "sharepoint",
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": resp.json()["links"],
|
||||
} # pyright:ignore
|
||||
case "Sak":
|
||||
logger.debug("allanime:Found streams from dropbox")
|
||||
yield {
|
||||
"server": "dropbox",
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": resp.json()["links"],
|
||||
} # pyright:ignore
|
||||
case "Default":
|
||||
logger.debug("allanime:Found streams from wixmp")
|
||||
yield {
|
||||
"server": "wixmp",
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": resp.json()["links"],
|
||||
} # pyright:ignore
|
||||
except Timeout:
|
||||
logger.error(
|
||||
"Timeout has been exceeded this could mean allanime is down or you have lost internet connection"
|
||||
)
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"FA(Allanime): {e}")
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"FA(Allanime): {e}")
|
||||
return []
|
||||
if "tools.fast4speed.rsvp" in url:
|
||||
return {
|
||||
"server": "Yt",
|
||||
"episode_title": f"{anime_title}; Episode {episode_number}",
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"links": [
|
||||
{
|
||||
"link": url,
|
||||
"quality": "1080",
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
# get the stream url for an episode of the defined source names
|
||||
embed_url = f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}"
|
||||
resp = self.session.get(
|
||||
embed_url,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
if __name__ == "__main__":
|
||||
anime_provider = AllAnimeAPI()
|
||||
# lets see if it works :)
|
||||
import subprocess
|
||||
import sys
|
||||
if resp.ok:
|
||||
match embed["sourceName"]:
|
||||
case "Luf-mp4":
|
||||
logger.debug("allanime:Found streams from gogoanime")
|
||||
return {
|
||||
"server": "gogoanime",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
}
|
||||
case "Kir":
|
||||
logger.debug("allanime:Found streams from wetransfer")
|
||||
return {
|
||||
"server": "wetransfer",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
}
|
||||
case "S-mp4":
|
||||
logger.debug("allanime:Found streams from sharepoint")
|
||||
return {
|
||||
"server": "sharepoint",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
}
|
||||
case "Sak":
|
||||
logger.debug("allanime:Found streams from dropbox")
|
||||
return {
|
||||
"server": "dropbox",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
}
|
||||
case "Default":
|
||||
logger.debug("allanime:Found streams from wixmp")
|
||||
return {
|
||||
"server": "wixmp",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
}
|
||||
|
||||
from InquirerPy import inquirer, validator
|
||||
|
||||
anime = input("Enter the anime name: ")
|
||||
translation = input("Enter the translation type: ")
|
||||
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime, translation_type=translation.strip()
|
||||
)
|
||||
|
||||
if not search_results:
|
||||
raise Exception("No results found")
|
||||
|
||||
search_results = search_results["results"]
|
||||
options = {show["title"]: show for show in search_results}
|
||||
anime = inquirer.fuzzy(
|
||||
"Enter the anime title",
|
||||
list(options.keys()),
|
||||
validate=validator.EmptyInputValidator(),
|
||||
).execute()
|
||||
if anime is None:
|
||||
print("No anime was selected")
|
||||
sys.exit(1)
|
||||
|
||||
anime_result = options[anime]
|
||||
anime_data = anime_provider.get_anime(anime_result["id"])
|
||||
if not anime_data:
|
||||
raise Exception("Anime not found")
|
||||
availableEpisodesDetail = anime_data["availableEpisodesDetail"]
|
||||
if not availableEpisodesDetail.get(translation.strip()):
|
||||
raise Exception("No episodes found")
|
||||
|
||||
stream_link = True
|
||||
while stream_link != "quit":
|
||||
print("select episode")
|
||||
episode = inquirer.fuzzy(
|
||||
"Choose an episode",
|
||||
availableEpisodesDetail[translation.strip()],
|
||||
validate=validator.EmptyInputValidator(),
|
||||
).execute()
|
||||
if episode is None:
|
||||
print("No episode was selected")
|
||||
sys.exit(1)
|
||||
|
||||
if not anime_data:
|
||||
print("Sth went wrong")
|
||||
break
|
||||
episode_streams_ = anime_provider.get_episode_streams(
|
||||
anime_data, episode, translation.strip()
|
||||
)
|
||||
if episode_streams_ is None:
|
||||
raise Exception("Episode not found")
|
||||
|
||||
episode_streams = list(episode_streams_)
|
||||
stream_links = []
|
||||
for server in episode_streams:
|
||||
stream_links.extend([link["link"] for link in server["links"]])
|
||||
stream_links.append("back")
|
||||
stream_link = inquirer.fuzzy(
|
||||
"Choose a link to stream",
|
||||
stream_links,
|
||||
validate=validator.EmptyInputValidator(),
|
||||
).execute()
|
||||
if stream_link == "quit":
|
||||
print("Have a nice day")
|
||||
sys.exit()
|
||||
if not stream_link:
|
||||
raise Exception("No stream was selected")
|
||||
|
||||
title = episode_streams[0].get(
|
||||
"episode_title", "%s: Episode %s" % (anime_data["title"], episode)
|
||||
)
|
||||
subprocess.run(["mpv", f"--title={title}", stream_link])
|
||||
for embed in embeds:
|
||||
if server := _get_server(embed):
|
||||
yield server
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
from yt_dlp.utils.networking import random_user_agent
|
||||
|
||||
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]
|
||||
ALLANIME_BASE = "allanime.day"
|
||||
ALLANIME_REFERER = "https://allanime.to/"
|
||||
ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE)
|
||||
USER_AGENT = random_user_agent()
|
||||
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp"]
|
||||
|
||||
@@ -1,56 +1,56 @@
|
||||
ALLANIME_SEARCH_GQL = """
|
||||
query(
|
||||
$search: SearchInput
|
||||
$limit: Int
|
||||
$page: Int
|
||||
$translationType: VaildTranslationTypeEnumType
|
||||
$countryOrigin: VaildCountryOriginEnumType
|
||||
) {
|
||||
shows(
|
||||
search: $search
|
||||
limit: $limit
|
||||
page: $page
|
||||
translationType: $translationType
|
||||
countryOrigin: $countryOrigin
|
||||
) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
edges {
|
||||
_id
|
||||
name
|
||||
availableEpisodes
|
||||
__typename
|
||||
}
|
||||
query (
|
||||
$search: SearchInput
|
||||
$limit: Int
|
||||
$page: Int
|
||||
$translationType: VaildTranslationTypeEnumType
|
||||
$countryOrigin: VaildCountryOriginEnumType
|
||||
) {
|
||||
shows(
|
||||
search: $search
|
||||
limit: $limit
|
||||
page: $page
|
||||
translationType: $translationType
|
||||
countryOrigin: $countryOrigin
|
||||
) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
edges {
|
||||
_id
|
||||
name
|
||||
availableEpisodes
|
||||
__typename
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
ALLANIME_EPISODES_GQL = """\
|
||||
query ($showId: String!, $translationType: VaildTranslationTypeEnumType!, $episodeString: String!) {
|
||||
episode(
|
||||
showId: $showId
|
||||
translationType: $translationType
|
||||
episodeString: $episodeString
|
||||
) {
|
||||
|
||||
episodeString
|
||||
sourceUrls
|
||||
notes
|
||||
}
|
||||
}"""
|
||||
query (
|
||||
$showId: String!
|
||||
$translationType: VaildTranslationTypeEnumType!
|
||||
$episodeString: String!
|
||||
) {
|
||||
episode(
|
||||
showId: $showId
|
||||
translationType: $translationType
|
||||
episodeString: $episodeString
|
||||
) {
|
||||
episodeString
|
||||
sourceUrls
|
||||
notes
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
ALLANIME_SHOW_GQL = """
|
||||
query ($showId: String!) {
|
||||
show(
|
||||
_id: $showId
|
||||
) {
|
||||
|
||||
_id
|
||||
name
|
||||
availableEpisodesDetail
|
||||
|
||||
}
|
||||
show(_id: $showId) {
|
||||
_id
|
||||
name
|
||||
availableEpisodesDetail
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
from ..types import Anime, EpisodesDetail, SearchResults
|
||||
from .types import AllAnimeEpisode, AllAnimeSearchResults, AllAnimeShow
|
||||
|
||||
|
||||
def normalize_search_results(search_results: AllAnimeSearchResults) -> SearchResults:
|
||||
page_info = search_results["shows"]["pageInfo"]
|
||||
results = []
|
||||
for result in search_results["shows"]["edges"]:
|
||||
normalized_result = {
|
||||
"id": result["_id"],
|
||||
"title": result["name"],
|
||||
"type": result["__typename"],
|
||||
"availableEpisodes": result["availableEpisodes"],
|
||||
}
|
||||
results.append(normalized_result)
|
||||
|
||||
normalized_search_results: SearchResults = {
|
||||
"pageInfo": page_info,
|
||||
"results": results,
|
||||
}
|
||||
|
||||
return normalized_search_results
|
||||
|
||||
|
||||
def normalize_anime(anime: AllAnimeShow) -> Anime:
|
||||
id: str = anime["_id"]
|
||||
title: str = anime["name"]
|
||||
availableEpisodesDetail: EpisodesDetail = anime["availableEpisodesDetail"]
|
||||
type = anime.get("__typename")
|
||||
normalized_anime: Anime = {
|
||||
"id": id,
|
||||
"title": title,
|
||||
"availableEpisodesDetail": availableEpisodesDetail,
|
||||
"type": type,
|
||||
}
|
||||
return normalized_anime
|
||||
|
||||
|
||||
def normalize_episode(episode: AllAnimeEpisode):
|
||||
pass
|
||||
@@ -1,63 +1,236 @@
|
||||
import requests
|
||||
import logging
|
||||
import random
|
||||
import re
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .constants import ANIMEPAHE_BASE, ANIMEPAHE_ENDPOINT, REQUEST_HEADERS
|
||||
from yt_dlp.utils import (
|
||||
extract_attributes,
|
||||
get_element_by_id,
|
||||
get_elements_html_by_class,
|
||||
)
|
||||
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from .constants import (
|
||||
ANIMEPAHE_BASE,
|
||||
ANIMEPAHE_ENDPOINT,
|
||||
REQUEST_HEADERS,
|
||||
SERVER_HEADERS,
|
||||
)
|
||||
from .utils import process_animepahe_embed_page
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimePaheSearchResult
|
||||
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
KWIK_RE = re.compile(r"Player\|(.+?)'")
|
||||
|
||||
|
||||
# TODO: hack this to completion
|
||||
class AnimePaheApi:
|
||||
def search_for_anime(self, user_query, *args):
|
||||
try:
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
|
||||
headers = {**REQUEST_HEADERS}
|
||||
response = requests.get(url, headers=headers)
|
||||
if not response.status_code == 200:
|
||||
return
|
||||
data = response.json()
|
||||
class AnimePaheApi(AnimeProvider):
|
||||
search_page: "AnimePaheSearchPage"
|
||||
anime: "AnimePaheAnimePage"
|
||||
HEADERS = REQUEST_HEADERS
|
||||
PROVIDER = "animepahe"
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(self, user_query: str, *args):
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
|
||||
response = self.session.get(
|
||||
url,
|
||||
)
|
||||
if not response.ok:
|
||||
return
|
||||
data: "AnimePaheSearchPage" = response.json()
|
||||
self.search_page = data
|
||||
for animepahe_search_result in data["data"]:
|
||||
self.store.set(
|
||||
str(animepahe_search_result["session"]),
|
||||
"search_result",
|
||||
animepahe_search_result,
|
||||
)
|
||||
|
||||
return {
|
||||
"pageInfo": {
|
||||
"total": data["total"],
|
||||
"perPage": data["per_page"],
|
||||
"currentPage": data["current_page"],
|
||||
},
|
||||
"results": [
|
||||
{
|
||||
"availableEpisodes": list(range(result["episodes"])),
|
||||
"id": result["session"],
|
||||
"title": result["title"],
|
||||
"type": result["type"],
|
||||
"year": result["year"],
|
||||
"score": result["score"],
|
||||
"status": result["status"],
|
||||
"season": result["season"],
|
||||
"poster": result["poster"],
|
||||
}
|
||||
for result in data["data"]
|
||||
],
|
||||
}
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, session_id: str, *args):
|
||||
page = 1
|
||||
if d := self.store.get(str(session_id), "search_result"):
|
||||
anime_result: "AnimePaheSearchResult" = d
|
||||
data: "AnimePaheAnimePage" = {} # pyright:ignore
|
||||
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
|
||||
def _pages_loader(
|
||||
url,
|
||||
page,
|
||||
):
|
||||
response = self.session.get(
|
||||
url,
|
||||
)
|
||||
if response.ok:
|
||||
if not data:
|
||||
data.update(response.json())
|
||||
else:
|
||||
if ep_data := response.json().get("data"):
|
||||
data["data"].extend(ep_data)
|
||||
if response.json()["next_page_url"]:
|
||||
# TODO: Refine this
|
||||
time.sleep(
|
||||
random.choice(
|
||||
[
|
||||
0.25,
|
||||
0.1,
|
||||
0.5,
|
||||
0.75,
|
||||
1,
|
||||
]
|
||||
)
|
||||
)
|
||||
page += 1
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
|
||||
if not data:
|
||||
return {}
|
||||
data["title"] = anime_result["title"] # pyright:ignore
|
||||
self.store.set(str(session_id), "anime_info", data)
|
||||
episodes = list(map(str, [episode["episode"] for episode in data["data"]]))
|
||||
title = ""
|
||||
return {
|
||||
"pageInfo": {"total": data["total"]},
|
||||
"results": [
|
||||
"id": session_id,
|
||||
"title": anime_result["title"],
|
||||
"year": anime_result["year"],
|
||||
"season": anime_result["season"],
|
||||
"poster": anime_result["poster"],
|
||||
"score": anime_result["score"],
|
||||
"availableEpisodesDetail": {
|
||||
"sub": episodes,
|
||||
"dub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"episodesInfo": [
|
||||
{
|
||||
"id": result["session"],
|
||||
"title": result["title"],
|
||||
"availableEpisodes": result["episodes"],
|
||||
"type": result["type"],
|
||||
"title": f"{episode['title'] or title};{episode['episode']}",
|
||||
"episode": episode["episode"],
|
||||
"id": episode["session"],
|
||||
"translation_type": episode["audio"],
|
||||
"duration": episode["duration"],
|
||||
"poster": episode["snapshot"],
|
||||
}
|
||||
for result in data["data"]
|
||||
for episode in data["data"]
|
||||
],
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
print(e)
|
||||
input()
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(
|
||||
self, anime_id, episode_number: str, translation_type, *args
|
||||
):
|
||||
anime_title = ""
|
||||
episode = None
|
||||
# extract episode details from memory
|
||||
if d := self.store.get(str(anime_id), "anime_info"):
|
||||
anime_title = d["title"]
|
||||
episode = [
|
||||
episode
|
||||
for episode in d["data"]
|
||||
if float(episode["episode"]) == float(episode_number)
|
||||
]
|
||||
|
||||
def get_anime(self, session_id: str, *args):
|
||||
url = "https://animepahe.ru/api?m=release&id=&sort=episode_asc&page=1"
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page=1"
|
||||
response = requests.get(url, headers=REQUEST_HEADERS)
|
||||
if not response.status_code == 200:
|
||||
return
|
||||
data = response.json()
|
||||
self.current = data
|
||||
episodes = list(map(str, range(data["total"])))
|
||||
return {
|
||||
"id": session_id,
|
||||
"title": "none",
|
||||
"availableEpisodesDetail": {
|
||||
"sub": episodes,
|
||||
"dub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
if not episode:
|
||||
logger.error(f"[ANIMEPAHE-ERROR]: episode {episode_number} doesn't exist")
|
||||
return []
|
||||
episode = episode[0]
|
||||
|
||||
# fetch the episode page
|
||||
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
|
||||
response = self.session.get(url)
|
||||
# get the element containing links to juicy streams
|
||||
c = get_element_by_id("resolutionMenu", response.text)
|
||||
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
|
||||
# convert the elements containing embed links to a neat dict containing:
|
||||
# data-src
|
||||
# data-audio
|
||||
# data-resolution
|
||||
res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
|
||||
|
||||
# get the episode title
|
||||
episode_title = (
|
||||
f"{episode['title'] or anime_title}; Episode {episode['episode']}"
|
||||
)
|
||||
# get all links
|
||||
streams = {
|
||||
"server": "kwik",
|
||||
"links": [],
|
||||
"episode_title": episode_title,
|
||||
"subtitles": [],
|
||||
"headers": {},
|
||||
}
|
||||
for res_dict in res_dicts:
|
||||
# get embed url
|
||||
embed_url = res_dict["data-src"]
|
||||
data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
|
||||
# filter streams by translation_type
|
||||
if data_audio != translation_type:
|
||||
continue
|
||||
|
||||
def get_episode_streams(self, anime, episode, *args):
|
||||
episode_id = self.current["data"][int(episode)]["session"]
|
||||
anime_id = anime["id"]
|
||||
url = f"{ANIMEPAHE_BASE}play/{anime_id}{episode_id}"
|
||||
response = requests.get(url, headers=REQUEST_HEADERS)
|
||||
print(response.status_code)
|
||||
input()
|
||||
if not response.status_code == 200:
|
||||
print(response.text)
|
||||
return
|
||||
print(response.text)
|
||||
input()
|
||||
if not embed_url:
|
||||
logger.warning(
|
||||
"[ANIMEPAHE-WARN]: embed url not found please report to the developers"
|
||||
)
|
||||
return []
|
||||
# get embed page
|
||||
embed_response = self.session.get(
|
||||
embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
|
||||
)
|
||||
if not response.ok:
|
||||
continue
|
||||
embed_page = embed_response.text
|
||||
|
||||
decoded_js = process_animepahe_embed_page(embed_page)
|
||||
if not decoded_js:
|
||||
logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
|
||||
return
|
||||
juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
|
||||
if not juicy_stream:
|
||||
logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
|
||||
return
|
||||
juicy_stream = juicy_stream.group(1)
|
||||
# add the link
|
||||
streams["links"].append(
|
||||
{
|
||||
"quality": res_dict["data-resolution"],
|
||||
"translation_type": data_audio,
|
||||
"link": juicy_stream,
|
||||
}
|
||||
)
|
||||
yield streams
|
||||
|
||||
@@ -1,18 +1,14 @@
|
||||
from yt_dlp.utils.networking import random_user_agent
|
||||
|
||||
USER_AGENT = random_user_agent()
|
||||
ANIMEPAHE = "animepahe.ru"
|
||||
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}/"
|
||||
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
|
||||
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
|
||||
|
||||
SERVERS_AVAILABLE = ["kwik"]
|
||||
REQUEST_HEADERS = {
|
||||
"Cookie": "__ddgid_=VvX0ebHrH2DsFZo4; __ddgmark_=3savRpSVFhvZcn5x; __ddg2_=buBJ3c4pNBYKFZNp; __ddg1_=rbVADKr9URtt55zoIGFa; SERVERID=janna; XSRF-TOKEN=eyJpdiI6IjV5bFNtd0phUHgvWGJxc25wL0VJSUE9PSIsInZhbHVlIjoicEJTZktlR2hxR2JZTWhnL0JzazlvZU5TQTR2bjBWZ2dDb0RwUXVUUWNSclhQWUhLRStYSmJmWmUxWkpiYkFRYU12RjFWejlSWHorME1wZG5qQ1U0TnFlNnBFR2laQjN1MjdyNjc5TjVPdXdJb2o5VkU1bEduRW9pRHNDTHh6Sy8iLCJtYWMiOiI0OTc0ZmNjY2UwMGJkOWY2MWNkM2NlMjk2ZGMyZGJmMWE0NTdjZTdkNGI2Y2IwNTIzZmFiZWU5ZTE2OTk0YmU4IiwidGFnIjoiIn0%3D; laravel_session=eyJpdiI6ImxvdlpqREFnTjdaeFJubUlXQWlJVWc9PSIsInZhbHVlIjoiQnE4R3VHdjZ4M1NDdEVWM1ZqMUxtNnVERnJCcmtCUHZKNzRPR2RFbzNFcStTL29xdnVTbWhsNVRBUXEybVZWNU1UYVlTazFqYlN5UjJva1k4czNGaXBTbkJJK01oTUd3VHRYVHBoc3dGUWxHYnFlS2NJVVNFbTFqMVBWdFpuVUgiLCJtYWMiOiI1NDdjZTVkYmNhNjUwZTMxZmRlZmVmMmRlMGNiYjAwYjlmYjFjY2U0MDc1YTQzZThiMTIxMjJlYTg1NTA4YjBmIiwidGFnIjoiIn0%3D; latest=5592 ",
|
||||
"Host": ANIMEPAHE,
|
||||
"User-Agent": USER_AGENT,
|
||||
"Accept": "application , text/javascript, */*; q=0.01",
|
||||
"Accept-Encoding": "gzip, deflate, br, zstd",
|
||||
"Accept-Encoding": "Utf-8",
|
||||
"Referer": ANIMEPAHE_BASE,
|
||||
"X-Requested-With": "XMLHttpRequest",
|
||||
"DNT": "1",
|
||||
"Connection": "keep-alive",
|
||||
"Sec-Fetch-Dest": "empty",
|
||||
@@ -20,3 +16,18 @@ REQUEST_HEADERS = {
|
||||
"Sec-Fetch-Mode": "cors",
|
||||
"TE": "trailers",
|
||||
}
|
||||
SERVER_HEADERS = {
|
||||
"Host": "kwik.si",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.5",
|
||||
"Accept-Encoding": "Utf-8",
|
||||
"DNT": "1",
|
||||
"Connection": "keep-alive",
|
||||
"Referer": "https://animepahe.ru/",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"Sec-Fetch-Dest": "iframe",
|
||||
"Sec-Fetch-Mode": "navigate",
|
||||
"Sec-Fetch-Site": "cross-site",
|
||||
"Priority": "u=4",
|
||||
"TE": "trailers",
|
||||
}
|
||||
|
||||
@@ -0,0 +1,61 @@
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
|
||||
class AnimePaheSearchResult(TypedDict):
|
||||
id: int
|
||||
title: str
|
||||
type: str
|
||||
episodes: int
|
||||
status: str
|
||||
season: str
|
||||
year: int
|
||||
score: int
|
||||
poster: str
|
||||
session: str
|
||||
|
||||
|
||||
class AnimePaheSearchPage(TypedDict):
|
||||
total: int
|
||||
per_page: int
|
||||
current_page: int
|
||||
last_page: int
|
||||
_from: int
|
||||
to: int
|
||||
data: list[AnimePaheSearchResult]
|
||||
|
||||
|
||||
class Episode(TypedDict):
|
||||
id: int
|
||||
anime_id: int
|
||||
episode: int
|
||||
episode2: int
|
||||
edition: str
|
||||
title: str
|
||||
snapshot: str # episode image
|
||||
disc: str
|
||||
audio: Literal["eng", "jpn"]
|
||||
duration: str # time 00:00:00
|
||||
session: str
|
||||
filler: int
|
||||
created_at: str
|
||||
|
||||
|
||||
class AnimePaheAnimePage(TypedDict):
|
||||
total: int
|
||||
per_page: int
|
||||
current_page: int
|
||||
last_page: int
|
||||
next_page_url: str | None
|
||||
prev_page_url: str | None
|
||||
_from: int
|
||||
to: int
|
||||
data: list[Episode]
|
||||
|
||||
|
||||
class Server:
|
||||
type: str
|
||||
data_src = "https://kwik.si/e/PImJ0u7Y3M0G"
|
||||
data_fansub: str
|
||||
data_resolution: Literal["360", "720", "1080"]
|
||||
data_audio: Literal["eng", "jpn"]
|
||||
data_av1: str
|
||||
|
||||
75 fastanime/libs/anime_provider/animepahe/utils.py Normal file
@@ -0,0 +1,75 @@
|
||||
# from ..utils import int2base
|
||||
import re
|
||||
|
||||
from yt_dlp.utils import encode_base_n, get_element_text_and_html_by_tag
|
||||
|
||||
|
||||
def animepahe_key_creator(c: int, a: int):
|
||||
if c < a:
|
||||
val_a = ""
|
||||
else:
|
||||
val_a = animepahe_key_creator(int(c / a), a)
|
||||
c = c % a
|
||||
if c > 35:
|
||||
val_b = chr(c + 29)
|
||||
else:
|
||||
val_b = encode_base_n(c, 36)
|
||||
return val_a + val_b
|
||||
|
||||
|
||||
def animepahe_embed_decoder(
|
||||
encoded_js_p: str,
|
||||
base_a: int,
|
||||
no_of_keys_c: int,
|
||||
values_to_replace_with_k: list,
|
||||
):
|
||||
decode_mapper_d: dict = {}
|
||||
for i in range(no_of_keys_c):
|
||||
key = animepahe_key_creator(i, base_a)
|
||||
val = values_to_replace_with_k[i] or key
|
||||
decode_mapper_d[key] = val
|
||||
return re.sub(
|
||||
r"\b\w+\b", lambda match: decode_mapper_d[match.group(0)], encoded_js_p
|
||||
)
|
||||
|
||||
|
||||
PARAMETERS_REGEX = re.compile(r"eval\(function\(p,a,c,k,e,d\)\{.*\}\((.*?)\)\)$")
|
||||
ENCODE_JS_REGEX = re.compile(r"'(.*?);',(\d+),(\d+),'(.*)'\.split")
|
||||
|
||||
|
||||
def process_animepahe_embed_page(embed_page: str):
|
||||
encoded_js_string = ""
|
||||
embed_page_content = embed_page
|
||||
for _ in range(8):
|
||||
text, html = get_element_text_and_html_by_tag("script", embed_page_content)
|
||||
if not text:
|
||||
embed_page_content = re.sub(html, "", embed_page_content)
|
||||
continue
|
||||
encoded_js_string = text.strip()
|
||||
break
|
||||
if not encoded_js_string:
|
||||
return
|
||||
obsfucated_js_parameter_match = PARAMETERS_REGEX.search(encoded_js_string)
|
||||
if not obsfucated_js_parameter_match:
|
||||
return
|
||||
parameter_string = obsfucated_js_parameter_match.group(1)
|
||||
encoded_js_parameter_string = ENCODE_JS_REGEX.search(parameter_string)
|
||||
if not encoded_js_parameter_string:
|
||||
return
|
||||
p: str = encoded_js_parameter_string.group(1)
|
||||
a: int = int(encoded_js_parameter_string.group(2))
|
||||
c: int = int(encoded_js_parameter_string.group(3))
|
||||
k: list = encoded_js_parameter_string.group(4).split("|")
|
||||
return animepahe_embed_decoder(p, a, c, k).replace("\\", "")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Testing time
|
||||
filepath = input("Enter file name: ")
|
||||
if filepath:
|
||||
with open(filepath, "r") as file:
|
||||
data = file.read()
|
||||
else:
|
||||
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))</script>"""
|
||||
|
||||
print(process_animepahe_embed_page(data))
|
||||
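To tie the two new animepahe files together, here is a compact sketch (not itself in the diff) of how api.py consumes this decoder, using only helpers defined above.

decoded_js = process_animepahe_embed_page(embed_page)  # embed_page: kwik embed HTML (assumed input)
if decoded_js and (match := JUICY_STREAM_REGEX.search(decoded_js)):
    juicy_stream = match.group(1)  # the direct stream URL (an .m3u8 in the sample above)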
@@ -1,49 +0,0 @@
|
||||
from ...anilist.anilist_data_schema import AnilistBaseMediaDataSchema
|
||||
from ..base_provider import AnimeProvider
|
||||
|
||||
"""
|
||||
"Zoro": {
|
||||
"27": {
|
||||
"identifier": "27",
|
||||
"image": "https://cdn.noitatnemucod.net/thumbnail/300x400/100/ce5e539af63e42431621fc66a47fbec1.jpg",
|
||||
"malId": 1,
|
||||
"aniId": 1,
|
||||
"page": "Zoro",
|
||||
"title": "Cowboy Bebop",
|
||||
"type": "anime",
|
||||
"url": "https://hianime.to/cowboy-bebop-27"
|
||||
}
|
||||
},
|
||||
|
||||
episode info = https://hianime.to/ajax/v2/episode/list/27
|
||||
"""
|
||||
|
||||
|
||||
# TODO: complete this
|
||||
class AniWatchApi(AnimeProvider):
|
||||
def search_for_anime(
|
||||
self, anilist_selected_anime: AnilistBaseMediaDataSchema, *args
|
||||
):
|
||||
return {
|
||||
"pageInfo": 1,
|
||||
"results": [
|
||||
{
|
||||
"id": anilist_selected_anime["id"],
|
||||
"title": anilist_selected_anime["title"],
|
||||
"availableEpisodes": [],
|
||||
}
|
||||
],
|
||||
}
|
||||
|
||||
def get_anime(self, id: int):
|
||||
url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{id}.json"
|
||||
response = self.session.get(url)
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
data["Sites"]["Zoro"]
|
||||
return {"id": ""}
|
||||
else:
|
||||
return {}
|
||||
|
||||
def get_episode_streams(self, id: int, episode: str, translation_type: str):
|
||||
pass
|
||||
@@ -1,8 +1,34 @@
import os

import requests
from yt_dlp.utils.networking import random_user_agent

from ...constants import APP_CACHE_DIR
from .providers_store import ProviderStore


class AnimeProvider:
    session: requests.Session

    def __init__(self) -> None:
        self.session = requests.session()
    PROVIDER = ""
    USER_AGENT = random_user_agent()
    HEADERS = {}

    def __init__(self, cache_requests, use_persistent_provider_store) -> None:
        if cache_requests.lower() == "true":
            from ..common.requests_cacher import CachedRequestsSession

            self.session = CachedRequestsSession(
                os.path.join(APP_CACHE_DIR, "cached_requests.db")
            )
        else:
            self.session = requests.session()
        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
        if use_persistent_provider_store.lower() == "true":
            self.store = ProviderStore(
                "persistent",
                self.PROVIDER,
                os.path.join(APP_CACHE_DIR, "anime_providers_store.db"),
            )
        else:
            self.store = ProviderStore("memory")

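A short illustration (not from the diff) of constructing a concrete provider against this base class; both flags are plain strings compared case-insensitively against "true".

# Hypothetical values; in FastAnime these would normally come from the user config.
provider = AllAnimeAPI(cache_requests="false", use_persistent_provider_store="true")
# -> a plain requests.Session carrying the provider's HEADERS and USER_AGENT,
#    plus a persistent ProviderStore backed by anime_providers_store.db in APP_CACHE_DIR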
15 fastanime/libs/anime_provider/common.py Normal file
@@ -0,0 +1,15 @@
import logging

from requests import get

logger = logging.getLogger(__name__)


def fetch_anime_info_from_bal(anilist_id):
    try:
        url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{anilist_id}.json"
        response = get(url, timeout=11)
        if response.status_code == 200:
            return response.json()
    except Exception as e:
        logger.error(e)
39 fastanime/libs/anime_provider/decorators.py Normal file
@@ -0,0 +1,39 @@
import functools
import logging
import os

logger = logging.getLogger(__name__)


def debug_provider(provider_name: str):
    def _provider_function_decorator(provider_function):
        @functools.wraps(provider_function)
        def _provider_function_wrapper(*args, **kwargs):
            if not os.environ.get("FASTANIME_DEBUG"):
                try:
                    return provider_function(*args, **kwargs)
                except Exception as e:
                    logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
            else:
                return provider_function(*args, **kwargs)

        return _provider_function_wrapper

    return _provider_function_decorator


def ensure_internet_connection(provider_function):
    @functools.wraps(provider_function)
    def _wrapper(*args, **kwargs):
        import requests

        try:
            requests.get("https://google.com", timeout=5)
        except requests.ConnectionError:
            from sys import exit

            print("You are not connected to the internet; aborting...")
            exit(1)
        return provider_function(*args, **kwargs)

    return _wrapper

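A minimal usage sketch (not part of the diff) showing what debug_provider does at call time:

from fastanime.libs.anime_provider.decorators import debug_provider  # path taken from the file header above

@debug_provider("DEMO")
def flaky():
    raise RuntimeError("boom")

# With FASTANIME_DEBUG unset, the exception is swallowed, logged as
# "[DEMO@flaky]: boom", and the call returns None; with it set, the error propagates.
flaky()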
0 fastanime/libs/anime_provider/hianime/__init__.py Normal file
243 fastanime/libs/anime_provider/hianime/api.py Normal file
@@ -0,0 +1,243 @@
|
||||
import logging
|
||||
import re
|
||||
from html.parser import HTMLParser
|
||||
from itertools import cycle
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from yt_dlp.utils import (
|
||||
clean_html,
|
||||
extract_attributes,
|
||||
get_element_by_class,
|
||||
get_element_html_by_class,
|
||||
get_elements_by_class,
|
||||
get_elements_html_by_class,
|
||||
)
|
||||
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from ..utils import give_random_quality
|
||||
from .constants import SERVERS_AVAILABLE
|
||||
from .types import HiAnimeStream
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
|
||||
IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
|
||||
|
||||
|
||||
class ParseAnchorAndImgTag(HTMLParser):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.img_tag = None
|
||||
self.a_tag = None
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
if tag == "img":
|
||||
self.img_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
if tag == "a":
|
||||
self.a_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
|
||||
|
||||
class HiAnimeApi(AnimeProvider):
|
||||
# HEADERS = {"Referer": "https://hianime.to/home"}
|
||||
|
||||
PROVIDER = "hianime"
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(self, anime_title: str, *args):
|
||||
query = quote_plus(anime_title)
|
||||
url = f"https://hianime.to/search?keyword={query}"
|
||||
response = self.session.get(url)
|
||||
if not response.ok:
|
||||
return
|
||||
search_page = response.text
|
||||
search_results_html_items = get_elements_by_class("flw-item", search_page)
|
||||
results = []
|
||||
for search_results_html_item in search_results_html_items:
|
||||
film_poster_html = get_element_by_class(
|
||||
"film-poster", search_results_html_item
|
||||
)
|
||||
|
||||
if not film_poster_html:
|
||||
continue
|
||||
# get availableEpisodes
|
||||
episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
|
||||
episodes = clean_html(episodes_html) or 12
|
||||
|
||||
# get anime id and poster image url
|
||||
parser = ParseAnchorAndImgTag()
|
||||
parser.feed(film_poster_html)
|
||||
image_data = parser.img_tag
|
||||
anime_link_data = parser.a_tag
|
||||
if not image_data or not anime_link_data:
|
||||
continue
|
||||
|
||||
episodes = int(episodes)
|
||||
|
||||
# finally!!
|
||||
image_link = image_data["data-src"]
|
||||
anime_id = anime_link_data["data-id"]
|
||||
title = anime_link_data["title"]
|
||||
|
||||
result = {
|
||||
"availableEpisodes": list(range(1, episodes)),
|
||||
"id": anime_id,
|
||||
"title": title,
|
||||
"poster": image_link,
|
||||
}
|
||||
|
||||
results.append(result)
|
||||
|
||||
self.store.set(result["id"], "search_result", result)
|
||||
return {"pageInfo": {}, "results": results}
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, hianime_id, *args):
|
||||
anime_result = {}
|
||||
if d := self.store.get(str(hianime_id), "search_result"):
|
||||
anime_result = d
|
||||
anime_url = f"https://hianime.to/ajax/v2/episode/list/{hianime_id}"
|
||||
response = self.session.get(anime_url, timeout=10)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
hianime_anime_page = response_json["html"]
|
||||
episodes_info_container_html = get_element_html_by_class(
|
||||
"ss-list", hianime_anime_page
|
||||
)
|
||||
episodes_info_html_list = get_elements_html_by_class(
|
||||
"ep-item", episodes_info_container_html
|
||||
)
|
||||
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
|
||||
episodes_info_dicts = [
|
||||
extract_attributes(episode_dict)
|
||||
for episode_dict in episodes_info_html_list
|
||||
]
|
||||
episodes = [episode["data-number"] for episode in episodes_info_dicts]
|
||||
episodes_info = [
|
||||
{
|
||||
"id": episode["data-id"],
|
||||
"title": (
|
||||
(episode["title"] or "").replace(
|
||||
f"Episode {episode['data-number']}", ""
|
||||
)
|
||||
or anime_result["title"]
|
||||
)
|
||||
+ f"; Episode {episode['data-number']}",
|
||||
"episode": episode["data-number"],
|
||||
}
|
||||
for episode in episodes_info_dicts
|
||||
]
|
||||
self.store.set(
|
||||
str(hianime_id),
|
||||
"anime_info",
|
||||
episodes_info,
|
||||
)
|
||||
return {
|
||||
"id": hianime_id,
|
||||
"availableEpisodesDetail": {
|
||||
"dub": episodes,
|
||||
"sub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"poster": anime_result["poster"],
|
||||
"title": anime_result["title"],
|
||||
"episodes_info": episodes_info,
|
||||
}
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(self, anime_id, episode, translation_type, *args):
|
||||
if d := self.store.get(str(anime_id), "anime_info"):
|
||||
episodes_info = d
|
||||
episode_details = [
|
||||
episode_details
|
||||
for episode_details in episodes_info
|
||||
if episode_details["episode"] == episode
|
||||
]
|
||||
if not episode_details:
|
||||
return
|
||||
episode_details = episode_details[0]
|
||||
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
|
||||
response = self.session.get(episode_url)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
episode_page_html = response_json["html"]
|
||||
servers_containers_html = get_elements_html_by_class(
|
||||
"ps__-list", episode_page_html
|
||||
)
|
||||
if not servers_containers_html:
|
||||
return
|
||||
# sub servers
|
||||
try:
|
||||
servers_html_sub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[0]
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: sub not found")
|
||||
servers_html_sub = None
|
||||
|
||||
# dub servers
|
||||
try:
|
||||
servers_html_dub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[1]
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: dub not found")
|
||||
servers_html_dub = None
|
||||
|
||||
if translation_type == "dub":
|
||||
servers_html = servers_html_dub
|
||||
else:
|
||||
servers_html = servers_html_sub
|
||||
if not servers_html:
|
||||
return
|
||||
|
||||
@debug_provider(self.PROVIDER.upper())
|
||||
def _get_server(server_name, server_html):
|
||||
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
|
||||
servers_info = extract_attributes(server_html)
|
||||
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
|
||||
embed_response = self.session.get(embed_url)
|
||||
if embed_response.ok:
|
||||
embed_json = embed_response.json()
|
||||
raw_link_to_streams = embed_json["link"]
|
||||
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
|
||||
if not match:
|
||||
return
|
||||
provider_domain = match.group(1)
|
||||
embed_type = match.group(2)
|
||||
episode_number = match.group(3)
|
||||
source_id = match.group(4)
|
||||
|
||||
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
|
||||
link_to_streams_response = self.session.get(link_to_streams)
|
||||
if link_to_streams_response.ok:
|
||||
juicy_streams_json: "HiAnimeStream" = (
|
||||
link_to_streams_response.json()
|
||||
)
|
||||
# TODO: HiAnime now encrypts its stream sources,
# so decryption still needs to be implemented
|
||||
return {
|
||||
"headers": {},
|
||||
"subtitles": [
|
||||
{
|
||||
"url": track["file"],
|
||||
"language": track["label"],
|
||||
}
|
||||
for track in juicy_streams_json["tracks"]
|
||||
if track["kind"] == "captions"
|
||||
],
|
||||
"server": server_name,
|
||||
"episode_title": episode_details["title"],
|
||||
"links": give_random_quality(
|
||||
[
|
||||
{"link": link["file"]}
|
||||
for link in juicy_streams_json["tracks"]
|
||||
]
|
||||
),
|
||||
}
|
||||
|
||||
for server_name, server_html in zip(
|
||||
cycle(SERVERS_AVAILABLE), servers_html
|
||||
):
|
||||
if server := _get_server(server_name, server_html):
|
||||
yield server
|
||||
1  fastanime/libs/anime_provider/hianime/constants.py  (Normal file)
@@ -0,0 +1 @@
SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]
26  fastanime/libs/anime_provider/hianime/types.py  (Normal file)
@@ -0,0 +1,26 @@
from typing import Literal, TypedDict


class HiAnimeSkipTime(TypedDict):
    start: int
    end: int


class HiAnimeSource(TypedDict):
    file: str
    type: str


class HiAnimeTrack(TypedDict):
    file: str
    label: str
    kind: Literal["captions", "thumbnails", "audio"]


class HiAnimeStream(TypedDict):
    sources: list[HiAnimeSource]
    tracks: list[HiAnimeTrack]
    encrypted: bool
    intro: HiAnimeSkipTime
    outro: HiAnimeSkipTime
    server: int
345  fastanime/libs/anime_provider/nyaa/api.py  (Normal file)
@@ -0,0 +1,345 @@
|
||||
import os
|
||||
import re
|
||||
from logging import getLogger
|
||||
|
||||
from yt_dlp.utils import (
|
||||
extract_attributes,
|
||||
get_element_html_by_attribute,
|
||||
get_element_html_by_class,
|
||||
get_element_text_and_html_by_tag,
|
||||
get_elements_html_by_class,
|
||||
)
|
||||
|
||||
from ...common.mini_anilist import search_for_anime_with_anilist
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from ..types import SearchResults
|
||||
from .constants import NYAA_ENDPOINT
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
EXTRACT_USEFUL_INFO_PATTERN_1 = re.compile(
|
||||
r"\[(\w+)\] (.+) - (\d+) [\[\(](\d+)p[\]\)].*"
|
||||
)
|
||||
|
||||
EXTRACT_USEFUL_INFO_PATTERN_2 = re.compile(
|
||||
r"\[(\w+)\] (.+)E(\d+) [\[\(]?(\d+)p.*[\]\)]?.*"
|
||||
)
|
||||
|
||||
|
||||
class NyaaApi(AnimeProvider):
|
||||
search_results: SearchResults
|
||||
PROVIDER = "nyaa"
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(self, user_query: str, *args, **_):
|
||||
self.search_results = search_for_anime_with_anilist(
|
||||
user_query, True
|
||||
) # pyright: ignore
|
||||
self.user_query = user_query
|
||||
return self.search_results
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, anilist_id: str, *_):
|
||||
for anime in self.search_results["results"]:
|
||||
if anime["id"] == anilist_id:
|
||||
self.titles = [anime["title"], *anime["otherTitles"], self.user_query]
|
||||
return {
|
||||
"id": anime["id"],
|
||||
"title": anime["title"],
|
||||
"poster": anime["poster"],
|
||||
"availableEpisodesDetail": {
|
||||
"dub": anime["availableEpisodes"],
|
||||
"sub": anime["availableEpisodes"],
|
||||
"raw": anime["availableEpisodes"],
|
||||
},
|
||||
}
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(
|
||||
self,
|
||||
anime_id: str,
|
||||
episode_number: str,
|
||||
translation_type: str,
|
||||
trusted_only=bool(int(os.environ.get("FA_NYAA_TRUSTED_ONLY", "0"))),
|
||||
allow_dangerous=bool(int(os.environ.get("FA_NYAA_ALLOW_DANGEROUS", "0"))),
|
||||
sort_by="seeders",
|
||||
*args,
|
||||
):
|
||||
anime_title = self.titles[0]
|
||||
logger.debug(f"Searching nyaa for query: '{anime_title} {episode_number}'")
|
||||
servers = {}
|
||||
|
||||
torrents_table = ""
|
||||
for title in self.titles:
|
||||
try:
|
||||
url_arguments: dict[str, str] = {
|
||||
"c": "1_2", # Language (English)
|
||||
"q": f"{title} {'0' if len(episode_number)==1 else ''}{episode_number}", # Search Query
|
||||
}
|
||||
# url_arguments["q"] = anime_title
|
||||
|
||||
# if trusted_only:
|
||||
# url_arguments["f"] = "2" # Trusted uploaders only
|
||||
|
||||
# What to sort torrents by
|
||||
if sort_by == "seeders":
|
||||
url_arguments["s"] = "seeders"
|
||||
elif sort_by == "date":
|
||||
url_arguments["s"] = "id"
|
||||
elif sort_by == "size":
|
||||
url_arguments["s"] = "size"
|
||||
elif sort_by == "comments":
|
||||
url_arguments["s"] = "comments"
|
||||
|
||||
logger.debug(f"URL Arguments: {url_arguments}")
|
||||
|
||||
response = self.session.get(NYAA_ENDPOINT, params=url_arguments)
|
||||
if not response.ok:
|
||||
logger.error(f"[NYAA]: {response.text}")
|
||||
return
|
||||
|
||||
try:
|
||||
torrents_table = get_element_text_and_html_by_tag(
|
||||
"table", response.text
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"[NYAA]: {e}")
|
||||
continue
|
||||
|
||||
if not torrents_table:
|
||||
continue
|
||||
|
||||
for anime_torrent in get_elements_html_by_class(
|
||||
"success", torrents_table[1]
|
||||
):
|
||||
td_title = get_element_html_by_attribute(
|
||||
"colspan", "2", anime_torrent
|
||||
)
|
||||
if not td_title:
|
||||
continue
|
||||
title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)
|
||||
|
||||
if not title_anchor_tag:
|
||||
continue
|
||||
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
|
||||
if not title_anchor_tag_attrs:
|
||||
continue
|
||||
if "class" in title_anchor_tag_attrs:
|
||||
td_title = td_title.replace(title_anchor_tag[1], "")
|
||||
title_anchor_tag = get_element_text_and_html_by_tag(
|
||||
"a", td_title
|
||||
)
|
||||
|
||||
if not title_anchor_tag:
|
||||
continue
|
||||
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
|
||||
if not title_anchor_tag_attrs:
|
||||
continue
|
||||
anime_title_info = title_anchor_tag_attrs["title"]
|
||||
if not anime_title_info:
|
||||
continue
|
||||
match = EXTRACT_USEFUL_INFO_PATTERN_1.search(
|
||||
anime_title_info.strip()
|
||||
)
|
||||
if not match:
|
||||
continue
|
||||
server = match[1]
|
||||
match[2]  # the release's anime title; not used here
|
||||
_episode_number = match[3]
|
||||
quality = match[4]
|
||||
if float(episode_number) != float(_episode_number):
|
||||
continue
|
||||
|
||||
links_td = get_element_html_by_class("text-center", anime_torrent)
|
||||
if not links_td:
|
||||
continue
|
||||
torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
|
||||
if not torrent_anchor_tag:
|
||||
continue
|
||||
torrent_anchor_tag_atrrs = extract_attributes(torrent_anchor_tag[1])
|
||||
if not torrent_anchor_tag_atrrs:
|
||||
continue
|
||||
torrent_file_url = (
|
||||
f'{NYAA_ENDPOINT}{torrent_anchor_tag_atrrs["href"]}'
|
||||
)
|
||||
if server in servers:
|
||||
link = {
|
||||
"translation_type": "sub",
|
||||
"link": torrent_file_url,
|
||||
"quality": quality,
|
||||
}
|
||||
if link not in servers[server]["links"]:
|
||||
servers[server]["links"].append(link)
|
||||
else:
|
||||
servers[server] = {
|
||||
"server": server,
|
||||
"headers": {},
|
||||
"episode_title": f"{anime_title}; Episode {episode_number}",
|
||||
"subtitles": [],
|
||||
"links": [
|
||||
{
|
||||
"translation_type": "sub",
|
||||
"link": torrent_file_url,
|
||||
"quality": quality,
|
||||
}
|
||||
],
|
||||
}
|
||||
for anime_torrent in get_elements_html_by_class(
|
||||
"default", torrents_table[1]
|
||||
):
|
||||
td_title = get_element_html_by_attribute(
|
||||
"colspan", "2", anime_torrent
|
||||
)
|
||||
if not td_title:
|
||||
continue
|
||||
title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)
|
||||
|
||||
if not title_anchor_tag:
|
||||
continue
|
||||
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
|
||||
if not title_anchor_tag_attrs:
|
||||
continue
|
||||
if "class" in title_anchor_tag_attrs:
|
||||
td_title = td_title.replace(title_anchor_tag[1], "")
|
||||
title_anchor_tag = get_element_text_and_html_by_tag(
|
||||
"a", td_title
|
||||
)
|
||||
|
||||
if not title_anchor_tag:
|
||||
continue
|
||||
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
|
||||
if not title_anchor_tag_attrs:
|
||||
continue
|
||||
anime_title_info = title_anchor_tag_attrs["title"]
|
||||
if not anime_title_info:
|
||||
continue
|
||||
match = EXTRACT_USEFUL_INFO_PATTERN_2.search(
|
||||
anime_title_info.strip()
|
||||
)
|
||||
if not match:
|
||||
continue
|
||||
server = match[1]
|
||||
match[2]  # the release's anime title; not used here
|
||||
_episode_number = match[3]
|
||||
quality = match[4]
|
||||
if float(episode_number) != float(_episode_number):
|
||||
continue
|
||||
|
||||
links_td = get_element_html_by_class("text-center", anime_torrent)
|
||||
if not links_td:
|
||||
continue
|
||||
torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
|
||||
if not torrent_anchor_tag:
|
||||
continue
|
||||
torrent_anchor_tag_atrrs = extract_attributes(torrent_anchor_tag[1])
|
||||
if not torrent_anchor_tag_atrrs:
|
||||
continue
|
||||
torrent_file_url = (
|
||||
f'{NYAA_ENDPOINT}{torrent_anchor_tag_atrrs["href"]}'
|
||||
)
|
||||
if server in servers:
|
||||
link = {
|
||||
"translation_type": "sub",
|
||||
"link": torrent_file_url,
|
||||
"quality": quality,
|
||||
}
|
||||
if link not in servers[server]["links"]:
|
||||
servers[server]["links"].append(link)
|
||||
else:
|
||||
servers[server] = {
|
||||
"server": server,
|
||||
"headers": {},
|
||||
"episode_title": f"{anime_title}; Episode {episode_number}",
|
||||
"subtitles": [],
|
||||
"links": [
|
||||
{
|
||||
"translation_type": "sub",
|
||||
"link": torrent_file_url,
|
||||
"quality": quality,
|
||||
}
|
||||
],
|
||||
}
|
||||
if not allow_dangerous:
|
||||
break
|
||||
for anime_torrent in get_elements_html_by_class(
|
||||
"danger", torrents_table[1]
|
||||
):
|
||||
td_title = get_element_html_by_attribute(
|
||||
"colspan", "2", anime_torrent
|
||||
)
|
||||
if not td_title:
|
||||
continue
|
||||
title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)
|
||||
|
||||
if not title_anchor_tag:
|
||||
continue
|
||||
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
|
||||
if not title_anchor_tag_attrs:
|
||||
continue
|
||||
if "class" in title_anchor_tag_attrs:
|
||||
td_title = td_title.replace(title_anchor_tag[1], "")
|
||||
title_anchor_tag = get_element_text_and_html_by_tag(
|
||||
"a", td_title
|
||||
)
|
||||
|
||||
if not title_anchor_tag:
|
||||
continue
|
||||
title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
|
||||
if not title_anchor_tag_attrs:
|
||||
continue
|
||||
anime_title_info = title_anchor_tag_attrs["title"]
|
||||
if not anime_title_info:
|
||||
continue
|
||||
match = EXTRACT_USEFUL_INFO_PATTERN_2.search(
|
||||
anime_title_info.strip()
|
||||
)
|
||||
if not match:
|
||||
continue
|
||||
server = match[1]
|
||||
match[2]  # the release's anime title; not used here
|
||||
_episode_number = match[3]
|
||||
quality = match[4]
|
||||
if float(episode_number) != float(_episode_number):
|
||||
continue
|
||||
|
||||
links_td = get_element_html_by_class("text-center", anime_torrent)
|
||||
if not links_td:
|
||||
continue
|
||||
torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
|
||||
if not torrent_anchor_tag:
|
||||
continue
|
||||
torrent_anchor_tag_atrrs = extract_attributes(torrent_anchor_tag[1])
|
||||
if not torrent_anchor_tag_atrrs:
|
||||
continue
|
||||
torrent_file_url = (
|
||||
f'{NYAA_ENDPOINT}{torrent_anchor_tag_atrrs["href"]}'
|
||||
)
|
||||
if server in servers:
|
||||
link = {
|
||||
"translation_type": "sub",
|
||||
"link": torrent_file_url,
|
||||
"quality": quality,
|
||||
}
|
||||
if link not in servers[server]["links"]:
|
||||
servers[server]["links"].append(link)
|
||||
else:
|
||||
servers[server] = {
|
||||
"server": server,
|
||||
"headers": {},
|
||||
"episode_title": f"{anime_title}; Episode {episode_number}",
|
||||
"subtitles": [],
|
||||
"links": [
|
||||
{
|
||||
"translation_type": "sub",
|
||||
"link": torrent_file_url,
|
||||
"quality": quality,
|
||||
}
|
||||
],
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"[NYAA]: {e}")
|
||||
continue
|
||||
|
||||
for server in servers:
|
||||
yield servers[server]
|
||||
1  fastanime/libs/anime_provider/nyaa/constants.py  (Normal file)
@@ -0,0 +1 @@
NYAA_ENDPOINT = "https://nyaa.si"
126  fastanime/libs/anime_provider/nyaa/utils.py  (Normal file)
@@ -0,0 +1,126 @@
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
import libtorrent # pyright: ignore
|
||||
from rich import print
|
||||
from rich.progress import (
|
||||
BarColumn,
|
||||
DownloadColumn,
|
||||
Progress,
|
||||
TextColumn,
|
||||
TimeRemainingColumn,
|
||||
TransferSpeedColumn,
|
||||
)
|
||||
|
||||
logger = logging.getLogger("nyaa")
|
||||
|
||||
|
||||
def download_torrent(
|
||||
filename: str,
|
||||
result_filename: str | None = None,
|
||||
show_progress: bool = True,
|
||||
base_path: str = "Anime",
|
||||
) -> str:
|
||||
session = libtorrent.session({"listen_interfaces": "0.0.0.0:6881"})
|
||||
logger.debug("Started libtorrent session")
|
||||
|
||||
base_path = os.path.expanduser(base_path)
|
||||
logger.debug(f"Downloading output to: '{base_path}'")
|
||||
|
||||
info = libtorrent.torrent_info(filename)
|
||||
|
||||
logger.debug("Started downloading torrent")
|
||||
handle: libtorrent.torrent_handle = session.add_torrent(
|
||||
{"ti": info, "save_path": base_path}
|
||||
)
|
||||
|
||||
status: libtorrent.session_status = handle.status()
|
||||
|
||||
progress_bar = Progress(
|
||||
"[progress.description]{task.description}",
|
||||
BarColumn(bar_width=None),
|
||||
"[progress.percentage]{task.percentage:>3.1f}%",
|
||||
"•",
|
||||
DownloadColumn(),
|
||||
"•",
|
||||
TransferSpeedColumn(),
|
||||
"•",
|
||||
TimeRemainingColumn(),
|
||||
"•",
|
||||
TextColumn("[green]Peers: {task.fields[peers]}[/green]"),
|
||||
)
|
||||
|
||||
if show_progress:
|
||||
with progress_bar:
|
||||
download_task = progress_bar.add_task(
|
||||
"downloading",
|
||||
filename=status.name,
|
||||
total=status.total_wanted,
|
||||
peers=0,
|
||||
start=False,
|
||||
)
|
||||
|
||||
while not status.total_done:
|
||||
# Checking files
|
||||
status = handle.status()
|
||||
description = "[bold yellow]Checking files[/bold yellow]"
|
||||
progress_bar.update(
|
||||
download_task,
|
||||
completed=status.total_done,
|
||||
peers=status.num_peers,
|
||||
description=description,
|
||||
)
|
||||
|
||||
# Started download
|
||||
progress_bar.start_task(download_task)
|
||||
description = f"[bold blue]Downloading[/bold blue] [bold yellow]{result_filename}[/bold yellow]"
|
||||
|
||||
while not status.is_seeding:
|
||||
status = handle.status()
|
||||
|
||||
progress_bar.update(
|
||||
download_task,
|
||||
completed=status.total_done,
|
||||
peers=status.num_peers,
|
||||
description=description,
|
||||
)
|
||||
|
||||
alerts = session.pop_alerts()
|
||||
|
||||
alert: libtorrent.alert
|
||||
for alert in alerts:
|
||||
if (
|
||||
alert.category()
|
||||
& libtorrent.alert.category_t.error_notification
|
||||
):
|
||||
logger.debug(f"[Alert] {alert}")
|
||||
|
||||
time.sleep(1)
|
||||
|
||||
progress_bar.update(
|
||||
download_task,
|
||||
description=f"[bold blue]Finished Downloading[/bold blue] [bold green]{result_filename}[/bold green]",
|
||||
completed=status.total_wanted,
|
||||
)
|
||||
|
||||
if result_filename:
|
||||
old_name = f"{base_path}/{status.name}"
|
||||
new_name = f"{base_path}/{result_filename}"
|
||||
|
||||
os.rename(old_name, new_name)
|
||||
|
||||
logger.debug(f"Finished torrent download, renamed '{old_name}' to '{new_name}'")
|
||||
|
||||
return new_name
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if len(sys.argv) < 2:
|
||||
print("You need to pass in the .torrent file path.")
|
||||
sys.exit(1)
|
||||
|
||||
download_torrent(sys.argv[1])
|
||||
114  fastanime/libs/anime_provider/providers_store.py  (Normal file)
@@ -0,0 +1,114 @@
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ProviderStoreDB:
|
||||
def __init__(
|
||||
self,
|
||||
provider_name,
|
||||
cache_db_path: str,
|
||||
max_lifetime: int = 604800,
|
||||
max_size: int = (1024**2) * 10,
|
||||
table_name: str = "fastanime_providers_store",
|
||||
clean_db=False,
|
||||
):
|
||||
from ..common.sqlitedb_helper import SqliteDB
|
||||
|
||||
self.cache_db_path = cache_db_path
|
||||
self.clean_db = clean_db
|
||||
self.provider_name = provider_name
|
||||
self.max_lifetime = max_lifetime
|
||||
self.max_size = max_size
|
||||
self.table_name = table_name
|
||||
self.sqlite_db_connection = SqliteDB(self.cache_db_path)
|
||||
|
||||
# Prepare the cache table if it doesn't exist
|
||||
self._create_store_table()
|
||||
|
||||
def _create_store_table(self):
|
||||
"""Create cache table if it doesn't exist."""
|
||||
with self.sqlite_db_connection as conn:
|
||||
conn.execute(
|
||||
f"""
|
||||
CREATE TABLE IF NOT EXISTS {self.table_name} (
|
||||
id TEXT,
|
||||
data_type TEXT,
|
||||
provider_name TEXT,
|
||||
data TEXT,
|
||||
cache_expiry INTEGER
|
||||
)"""
|
||||
)
|
||||
|
||||
def get(self, id: str, data_type: str, default=None):
|
||||
with self.sqlite_db_connection as conn:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(
|
||||
f"""
|
||||
SELECT
|
||||
data
|
||||
FROM {self.table_name}
|
||||
WHERE
|
||||
id = ?
|
||||
AND data_type = ?
|
||||
AND provider_name = ?
|
||||
AND cache_expiry > ?
|
||||
""",
|
||||
(id, data_type, self.provider_name, int(time.time())),
|
||||
)
|
||||
cached_data = cursor.fetchone()
|
||||
|
||||
if cached_data:
|
||||
logger.debug("Found existing request in cache")
|
||||
(json_data,) = cached_data
|
||||
return json.loads(json_data)
|
||||
return default
|
||||
|
||||
def set(self, id: str, data_type: str, data):
|
||||
with self.sqlite_db_connection as connection:
|
||||
cursor = connection.cursor()
|
||||
cursor.execute(
|
||||
f"""
|
||||
INSERT INTO {self.table_name}
|
||||
VALUES ( ?, ?,?, ?, ?)
|
||||
""",
|
||||
(
|
||||
id,
|
||||
data_type,
|
||||
self.provider_name,
|
||||
json.dumps(data),
|
||||
int(time.time()) + self.max_lifetime,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class ProviderStoreMem:
|
||||
def __init__(self) -> None:
|
||||
from collections import defaultdict
|
||||
|
||||
self._store = defaultdict(dict)
|
||||
|
||||
def get(self, id: str, data_type: str, default=None):
|
||||
return self._store[id].get(data_type, default)
|
||||
|
||||
def set(self, id: str, data_type: str, data):
|
||||
self._store[id][data_type] = data
|
||||
|
||||
|
||||
def ProviderStore(store_type, *args, **kwargs):
|
||||
if store_type == "persistent":
|
||||
return ProviderStoreDB(*args, **kwargs)
|
||||
else:
|
||||
return ProviderStoreMem()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
store = ProviderStore("persistent", "test_provider", "provider_store")
|
||||
store.set("123", "test", {"hello": "world"})
|
||||
print(store.get("123", "test"))
|
||||
print("-------------------------------")
|
||||
store = ProviderStore("memory")
|
||||
store.set("1", "test", {"hello": "world"})
|
||||
print(store.get("1", "test"))
|
||||
@@ -1,16 +1,31 @@
|
||||
from typing import TypedDict
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
|
||||
class PageInfo(TypedDict):
|
||||
total: int
|
||||
perPage: int
|
||||
currentPage: int
|
||||
|
||||
|
||||
#
|
||||
# class EpisodesDetail(TypedDict):
|
||||
# dub: int
|
||||
# sub: int
|
||||
# raw: int
|
||||
#
|
||||
|
||||
|
||||
# search data
|
||||
class SearchResult(TypedDict):
|
||||
id: str
|
||||
title: str
|
||||
otherTitles: list[str]
|
||||
availableEpisodes: list[str]
|
||||
type: str
|
||||
score: int
|
||||
status: str
|
||||
season: str
|
||||
poster: str
|
||||
|
||||
|
||||
class SearchResults(TypedDict):
|
||||
@@ -19,30 +34,57 @@ class SearchResults(TypedDict):
|
||||
|
||||
|
||||
# anime data
|
||||
class EpisodesDetail(TypedDict):
|
||||
dub: int
|
||||
sub: int
|
||||
raw: int
|
||||
class AnimeEpisodeDetails(TypedDict):
|
||||
dub: list[str]
|
||||
sub: list[str]
|
||||
raw: list[str]
|
||||
|
||||
|
||||
#
|
||||
# class AnimeEpisode(TypedDict):
|
||||
# id: str
|
||||
# title: str
|
||||
#
|
||||
|
||||
|
||||
class AnimeEpisodeInfo(TypedDict):
|
||||
id: str
|
||||
title: str
|
||||
episode: str
|
||||
poster: str | None
|
||||
duration: str | None
|
||||
translation_type: str | None
|
||||
|
||||
|
||||
class Anime(TypedDict):
|
||||
id: str
|
||||
title: str
|
||||
availableEpisodesDetail: EpisodesDetail
|
||||
availableEpisodesDetail: AnimeEpisodeDetails
|
||||
type: str | None
|
||||
episodesInfo: list[AnimeEpisodeInfo] | None
|
||||
poster: str
|
||||
year: str
|
||||
|
||||
|
||||
class EpisodeStream(TypedDict):
|
||||
resolution: str
|
||||
resolution: str | None
|
||||
link: str
|
||||
hls: bool | None
|
||||
mp4: bool
|
||||
priority: int
|
||||
headers: dict
|
||||
fromCache: str
|
||||
mp4: bool | None
|
||||
priority: int | None
|
||||
quality: Literal["360", "720", "1080", "unknown"]
|
||||
translation_type: Literal["dub", "sub"]
|
||||
|
||||
|
||||
class Subtitle(TypedDict):
|
||||
url: str
|
||||
language: str
|
||||
|
||||
|
||||
class Server(TypedDict):
|
||||
headers: dict
|
||||
subtitles: list[Subtitle]
|
||||
audio: list
|
||||
server: str
|
||||
episode_title: str | None
|
||||
links: list
|
||||
episode_title: str
|
||||
links: list[EpisodeStream]
|
||||
|
||||
@@ -1,4 +1,5 @@
import re
from itertools import cycle

# Dictionary to map hex values to characters
hex_to_char = {
@@ -34,6 +35,23 @@ hex_to_char = {
}


def give_random_quality(links):
    qualities = cycle(["1080", "720", "480", "360"])

    return [
        {**episode_stream, "quality": quality}
        for episode_stream, quality in zip(links, qualities)
    ]


def one_digit_symmetric_xor(password: int, target: str):
    def genexp():
        for segment in bytearray.fromhex(target):
            yield segment ^ password

    return bytes(genexp()).decode("utf-8")


def decode_hex_string(hex_string):
    """Some sources encode their stream URLs as hex strings; this function decodes them
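A quick round-trip check of one_digit_symmetric_xor (the key and payload below are invented for illustration): XOR-ing each byte of the hex payload with the same one-byte key recovers the original text.

# Illustrative round trip: "hello" XOR-ed byte-wise with key 7 encodes to hex "6f626b6b68".
payload = bytes(b ^ 7 for b in b"hello").hex()  # -> "6f626b6b68"
assert one_digit_symmetric_xor(7, payload) == "hello"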
15  fastanime/libs/common/common.py  (Normal file)
@@ -0,0 +1,15 @@
import logging

from requests import get

logger = logging.getLogger(__name__)


def fetch_anime_info_from_bal(anilist_id):
    try:
        url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/anime/{anilist_id}.json"
        response = get(url, timeout=11)
        if response.status_code == 200:
            return response.json()
    except Exception as e:
        logger.error(e)
323  fastanime/libs/common/mini_anilist.py  (Normal file)
@@ -0,0 +1,323 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from requests import post
|
||||
from thefuzz import fuzz
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..anilist.types import AnilistDataSchema
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
"""
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(search: $query, type: ANIME) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
episodes
|
||||
status
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def search_for_manga_with_anilist(manga_title: str):
|
||||
query = """
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
currentPage
|
||||
}
|
||||
media(search: $query, type: MANGA,genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
chapters
|
||||
status
|
||||
coverImage {
|
||||
medium
|
||||
large
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": {"query": manga_title}},
|
||||
timeout=10,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
anilist_data: "AnilistDataSchema" = response.json()
|
||||
return {
|
||||
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
|
||||
"results": [
|
||||
{
|
||||
"id": anime_result["id"],
|
||||
"poster": anime_result["coverImage"]["large"],
|
||||
"title": (
|
||||
anime_result["title"]["romaji"]
|
||||
or anime_result["title"]["english"]
|
||||
)
|
||||
+ f" [Chapters: {anime_result['chapters']}]",
|
||||
"type": "manga",
|
||||
"availableChapters": list(
|
||||
range(
|
||||
1,
|
||||
(
|
||||
anime_result["chapters"]
|
||||
if anime_result["chapters"]
|
||||
else 0
|
||||
),
|
||||
)
|
||||
),
|
||||
}
|
||||
for anime_result in anilist_data["data"]["Page"]["media"]
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def search_for_anime_with_anilist(anime_title: str, prefer_eng_titles=False):
|
||||
query = """
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(search: $query, type: ANIME, genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
episodes
|
||||
status
|
||||
synonyms
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
coverImage {
|
||||
large
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": {"query": anime_title}},
|
||||
timeout=10,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
anilist_data: "AnilistDataSchema" = response.json()
|
||||
return {
|
||||
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
|
||||
"results": [
|
||||
{
|
||||
"id": str(anime_result["id"]),
|
||||
"title": (
|
||||
(
|
||||
anime_result["title"]["english"]
|
||||
or anime_result["title"]["romaji"]
|
||||
)
|
||||
if prefer_eng_titles
|
||||
else (
|
||||
anime_result["title"]["romaji"]
|
||||
or anime_result["title"]["english"]
|
||||
)
|
||||
),
|
||||
"otherTitles": [
|
||||
(
|
||||
(
|
||||
anime_result["title"]["romaji"]
|
||||
or anime_result["title"]["english"]
|
||||
)
|
||||
if prefer_eng_titles
|
||||
else (
|
||||
anime_result["title"]["english"]
|
||||
or anime_result["title"]["romaji"]
|
||||
)
|
||||
),
|
||||
*(anime_result["synonyms"] or []),
|
||||
],
|
||||
"type": "anime",
|
||||
"poster": anime_result["coverImage"]["large"],
|
||||
"availableEpisodes": list(
|
||||
map(
|
||||
str,
|
||||
range(
|
||||
1,
|
||||
(
|
||||
anime_result["episodes"]
|
||||
if not anime_result["status"] == "RELEASING"
|
||||
and anime_result["episodes"]
|
||||
else (
|
||||
(
|
||||
anime_result["nextAiringEpisode"]["episode"]
|
||||
- 1
|
||||
if anime_result["nextAiringEpisode"]
|
||||
else 0
|
||||
)
|
||||
if not anime_result["episodes"]
|
||||
else anime_result["episodes"]
|
||||
)
|
||||
)
|
||||
+ 1,
|
||||
),
|
||||
)
|
||||
),
|
||||
}
|
||||
for anime_result in anilist_data["data"]["Page"]["media"]
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
|
||||
"""the abstraction over all none authenticated requests and that returns data of a similar type
|
||||
|
||||
Args:
|
||||
query: the anilist query
|
||||
variables: the anilist api variables
|
||||
|
||||
Returns:
|
||||
a boolean indicating success and none or an anilist object depending on success
|
||||
"""
|
||||
query = """
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(search: $query, type: ANIME) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
try:
|
||||
variables = {"query": anime_title}
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": variables},
|
||||
timeout=10,
|
||||
)
|
||||
anilist_data: "AnilistDataSchema" = response.json()
|
||||
if response.status_code == 200:
|
||||
anime = max(
|
||||
anilist_data["data"]["Page"]["media"],
|
||||
key=lambda anime: max(
|
||||
(
|
||||
fuzz.ratio(anime_title, str(anime["title"]["romaji"])),
|
||||
fuzz.ratio(anime_title, str(anime["title"]["english"])),
|
||||
)
|
||||
),
|
||||
)
|
||||
return {"id_anilist": anime["id"], "id_mal": anime["idMal"]}
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
|
||||
|
||||
def get_basic_anime_info_by_title(anime_title: str):
|
||||
"""the abstraction over all none authenticated requests and that returns data of a similar type
|
||||
|
||||
Args:
|
||||
query: the anilist query
|
||||
variables: the anilist api variables
|
||||
|
||||
Returns:
|
||||
a boolean indicating success and none or an anilist object depending on success
|
||||
"""
|
||||
query = """
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
media(search: $query, type: ANIME,genre_not_in: ["hentai"]) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
streamingEpisodes {
|
||||
title
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
from ...Utility.data import anime_normalizer
|
||||
|
||||
# normalize the title
|
||||
anime_title = anime_normalizer.get(anime_title, anime_title)
|
||||
try:
|
||||
variables = {"query": anime_title}
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": variables},
|
||||
timeout=10,
|
||||
)
|
||||
anilist_data: "AnilistDataSchema" = response.json()
|
||||
if response.status_code == 200:
|
||||
anime = max(
|
||||
anilist_data["data"]["Page"]["media"],
|
||||
key=lambda anime: max(
|
||||
(
|
||||
fuzz.ratio(
|
||||
anime_title.lower(), str(anime["title"]["romaji"]).lower()
|
||||
),
|
||||
fuzz.ratio(
|
||||
anime_title.lower(), str(anime["title"]["english"]).lower()
|
||||
),
|
||||
)
|
||||
),
|
||||
)
|
||||
return {
|
||||
"idAnilist": anime["id"],
|
||||
"idMal": anime["idMal"],
|
||||
"title": {
|
||||
"english": anime["title"]["english"],
|
||||
"romaji": anime["title"]["romaji"],
|
||||
},
|
||||
"episodes": [
|
||||
{"title": episode["title"]}
|
||||
for episode in anime["streamingEpisodes"]
|
||||
if episode
|
||||
],
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
214  fastanime/libs/common/requests_cacher.py  (Normal file)
@@ -0,0 +1,214 @@
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from datetime import datetime
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
|
||||
from .sqlitedb_helper import SqliteDB
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
caching_mimetypes = {
|
||||
"application": {
|
||||
"json",
|
||||
"xml",
|
||||
"x-www-form-urlencoded",
|
||||
"x-javascript",
|
||||
"javascript",
|
||||
},
|
||||
"text": {"html", "css", "javascript", "plain", "xml", "xsl", "x-javascript"},
|
||||
}
|
||||
|
||||
|
||||
class CachedRequestsSession(requests.Session):
|
||||
__request_functions__ = (
|
||||
"get",
|
||||
"options",
|
||||
"head",
|
||||
"post",
|
||||
"put",
|
||||
"patch",
|
||||
"delete",
|
||||
)
|
||||
|
||||
def __new__(cls, *args, **kwargs):
|
||||
def caching_params(name: str):
|
||||
def wrapper(self, *args, **kwargs):
|
||||
return cls.request(self, name, *args, **kwargs)
|
||||
|
||||
return wrapper
|
||||
|
||||
for func in cls.__request_functions__:
|
||||
setattr(cls, func, caching_params(func))
|
||||
|
||||
return super().__new__(cls)
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cache_db_path: str,
|
||||
max_lifetime: int = 604800,
|
||||
max_size: int = (1024**2) * 10,
|
||||
table_name: str = "fastanime_requests_cache",
|
||||
clean_db=False,
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
self.cache_db_path = cache_db_path
|
||||
self.max_lifetime = max_lifetime
|
||||
self.max_size = max_size
|
||||
self.table_name = table_name
|
||||
self.sqlite_db_connection = SqliteDB(self.cache_db_path)
|
||||
|
||||
# Prepare the cache table if it doesn't exist
|
||||
self._create_cache_table()
|
||||
|
||||
def _create_cache_table(self):
|
||||
"""Create cache table if it doesn't exist."""
|
||||
with self.sqlite_db_connection as conn:
|
||||
conn.execute(
|
||||
f"""
|
||||
CREATE TABLE IF NOT EXISTS {self.table_name} (
|
||||
url TEXT,
|
||||
status_code INTEGER,
|
||||
request_headers TEXT,
|
||||
response_headers TEXT,
|
||||
data BLOB,
|
||||
redirection_policy INT,
|
||||
cache_expiry INTEGER
|
||||
)"""
|
||||
)
|
||||
|
||||
def request(
|
||||
self,
|
||||
method,
|
||||
url,
|
||||
params=None,
|
||||
force_caching=False,
|
||||
fresh=0,
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
# TODO: improve the caching functionality and add a layer to auto delete
|
||||
# expired requests
|
||||
if fresh:
|
||||
logger.debug("Executing fresh request")
|
||||
return super().request(method, url, params=params, *args, **kwargs)
|
||||
|
||||
if params:
|
||||
url += "?" + urlencode(params)
|
||||
|
||||
redirection_policy = int(kwargs.get("force_redirects", False))
|
||||
|
||||
with self.sqlite_db_connection as conn:
|
||||
cursor = conn.cursor()
|
||||
time_before_access_db = datetime.now()
|
||||
|
||||
logger.debug("Checking for existing request in cache")
|
||||
cursor.execute(
|
||||
f"""
|
||||
SELECT
|
||||
status_code,
|
||||
request_headers,
|
||||
response_headers,
|
||||
data,
|
||||
redirection_policy
|
||||
FROM {self.table_name}
|
||||
WHERE
|
||||
url = ?
|
||||
AND redirection_policy = ?
|
||||
AND cache_expiry > ?
|
||||
""",
|
||||
(url, redirection_policy, int(time.time())),
|
||||
)
|
||||
cached_request = cursor.fetchone()
|
||||
time_after_access_db = datetime.now()
|
||||
|
||||
if cached_request:
|
||||
logger.debug("Found existing request in cache")
|
||||
(
|
||||
status_code,
|
||||
request_headers,
|
||||
response_headers,
|
||||
data,
|
||||
redirection_policy,
|
||||
) = cached_request
|
||||
|
||||
response = requests.Response()
|
||||
response.headers.update(json.loads(response_headers))
|
||||
response.status_code = status_code
|
||||
response._content = data
|
||||
|
||||
if "timeout" in kwargs:
|
||||
kwargs.pop("timeout")
|
||||
if "headers" in kwargs:
|
||||
kwargs.pop("headers")
|
||||
_request = requests.Request(
|
||||
method, url, headers=json.loads(request_headers), *args, **kwargs
|
||||
)
|
||||
response.request = _request.prepare()
|
||||
response.elapsed = time_after_access_db - time_before_access_db
|
||||
|
||||
return response
|
||||
|
||||
# Perform the request and cache it
|
||||
response = super().request(method, url, *args, **kwargs)
|
||||
if response.ok and (
|
||||
force_caching
|
||||
or self.is_content_type_cachable(
|
||||
response.headers.get("content-type"), caching_mimetypes
|
||||
)
|
||||
and len(response.content) < self.max_size
|
||||
):
|
||||
logger.debug("Caching the current request")
|
||||
cursor.execute(
|
||||
f"""
|
||||
INSERT INTO {self.table_name}
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
url,
|
||||
response.status_code,
|
||||
json.dumps(dict(response.request.headers)),
|
||||
json.dumps(dict(response.headers)),
|
||||
response.content,
|
||||
redirection_policy,
|
||||
int(time.time()) + self.max_lifetime,
|
||||
),
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
@staticmethod
|
||||
def is_content_type_cachable(content_type, caching_mimetypes):
|
||||
"""Checks whether the given encoding is supported by the cacher"""
|
||||
if content_type is None:
|
||||
return True
|
||||
|
||||
mime, contents = content_type.split("/")
|
||||
|
||||
contents = re.sub(r";.*$", "", contents)
|
||||
|
||||
return mime in caching_mimetypes and any(
|
||||
content in caching_mimetypes[mime] for content in contents.split("+")
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
with CachedRequestsSession("cache.db") as session:
|
||||
response = session.get(
|
||||
"https://google.com",
|
||||
)
|
||||
|
||||
response_b = session.get(
|
||||
"https://google.com",
|
||||
)
|
||||
|
||||
print("A: ", response.elapsed)
|
||||
print("B: ", response_b.elapsed)
|
||||
|
||||
print(response_b.text[0:30])
|
||||
34  fastanime/libs/common/sqlitedb_helper.py  (Normal file)
@@ -0,0 +1,34 @@
import logging
import sqlite3
import time

logger = logging.getLogger(__name__)


class SqliteDB:
    def __init__(self, db_path: str) -> None:
        self.db_path = db_path
        self.connection = sqlite3.connect(self.db_path)
        logger.debug("Enabling WAL mode for concurrent access")
        self.connection.execute("PRAGMA journal_mode=WAL;")
        self.connection.close()
        self.connection = None

    def __enter__(self):
        logger.debug("Starting new connection...")
        start_time = time.time()
        self.connection = sqlite3.connect(self.db_path)
        logger.debug(
            "Successfully got a new connection in {} seconds".format(
                time.time() - start_time
            )
        )
        return self.connection

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.connection:
            logger.debug("Closing connection to cache db")
            self.connection.commit()
            self.connection.close()
            self.connection = None
            logger.debug("Successfully closed connection to cache db")
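A minimal usage sketch of the SqliteDB helper (the file name and table below are invented for illustration): each with-block opens a fresh connection and commits and closes it on exit.

# Illustrative only -- "example.db" and the "demo" table are made-up names.
db = SqliteDB("example.db")
with db as conn:
    conn.execute("CREATE TABLE IF NOT EXISTS demo (k TEXT, v TEXT)")
    conn.execute("INSERT INTO demo VALUES (?, ?)", ("hello", "world"))
with db as conn:
    print(conn.execute("SELECT v FROM demo WHERE k = ?", ("hello",)).fetchone())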
@@ -5,8 +5,6 @@ import subprocess
|
||||
import sys
|
||||
from typing import Callable, List
|
||||
|
||||
# TODO: will probably scrap art not to useful
|
||||
from art import text2art
|
||||
from click import clear
|
||||
from rich import print
|
||||
|
||||
@@ -22,6 +20,17 @@ FZF_DEFAULT_OPTS = """
|
||||
--marker=">" --pointer="◆" --separator="─" --scrollbar="│"
|
||||
"""
|
||||
|
||||
HEADER = """
|
||||
|
||||
███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗
|
||||
██╔════╝██╔══██╗██╔════╝╚══██╔══╝██╔══██╗████╗░██║██║████╗░████║██╔════╝
|
||||
█████╗░░███████║╚█████╗░░░░██║░░░███████║██╔██╗██║██║██╔████╔██║█████╗░░
|
||||
██╔══╝░░██╔══██║░╚═══██╗░░░██║░░░██╔══██║██║╚████║██║██║╚██╔╝██║██╔══╝░░
|
||||
██║░░░░░██║░░██║██████╔╝░░░██║░░░██║░░██║██║░╚███║██║██║░╚═╝░██║███████╗
|
||||
╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class FZF:
|
||||
"""an abstraction over the fzf commandline utility
|
||||
@@ -113,7 +122,9 @@ class FZF:
|
||||
[self.FZF_EXECUTABLE, *commands],
|
||||
input=fzf_input,
|
||||
stdout=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
text=True,
|
||||
encoding="utf-8",
|
||||
)
|
||||
if not result or result.returncode != 0 or not result.stdout:
|
||||
print("sth went wrong:confused:")
|
||||
@@ -128,7 +139,7 @@ class FZF:
|
||||
self,
|
||||
fzf_input: list[str],
|
||||
prompt: str,
|
||||
header: str,
|
||||
header: str = HEADER,
|
||||
preview: str | None = None,
|
||||
expect: str | None = None,
|
||||
validator: Callable | None = None,
|
||||
@@ -149,10 +160,10 @@ class FZF:
|
||||
_commands = [
|
||||
*self.default_options,
|
||||
"--header",
|
||||
text2art(header),
|
||||
HEADER,
|
||||
"--header-first",
|
||||
"--prompt",
|
||||
prompt.title(),
|
||||
f"{prompt.title()}: ",
|
||||
] # pyright:ignore
|
||||
|
||||
if preview:
|
||||
|
||||
1  fastanime/libs/manga_provider/__init__.py  (Normal file)
@@ -0,0 +1 @@
manga_sources = {"mangadex": "api.MangaDexApi"}
13  fastanime/libs/manga_provider/base_provider.py  (Normal file)
@@ -0,0 +1,13 @@
import requests
from yt_dlp.utils.networking import random_user_agent


class MangaProvider:
    session: requests.Session

    USER_AGENT = random_user_agent()
    HEADERS = {}

    def __init__(self) -> None:
        self.session = requests.session()
        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
15  fastanime/libs/manga_provider/common.py  (Normal file)
@@ -0,0 +1,15 @@
import logging

from requests import get

logger = logging.getLogger(__name__)


def fetch_manga_info_from_bal(anilist_id):
    try:
        url = f"https://raw.githubusercontent.com/bal-mackup/mal-backup/master/anilist/manga/{anilist_id}.json"
        response = get(url, timeout=11)
        if response.ok:
            return response.json()
    except Exception as e:
        logger.error(e)
0  fastanime/libs/manga_provider/mangadex/__init__.py  (Normal file)
51  fastanime/libs/manga_provider/mangadex/api.py  (Normal file)
@@ -0,0 +1,51 @@
import logging

from ...common.mini_anilist import search_for_manga_with_anilist
from ..base_provider import MangaProvider
from ..common import fetch_manga_info_from_bal

logger = logging.getLogger(__name__)


class MangaDexApi(MangaProvider):
    def search_for_manga(self, title: str, *args):
        try:
            search_results = search_for_manga_with_anilist(title)
            return search_results
        except Exception as e:
            logger.error(f"[MANGADEX-ERROR]: {e}")

    def get_manga(self, anilist_manga_id: str):
        bal_data = fetch_manga_info_from_bal(anilist_manga_id)
        if not bal_data:
            return
        manga_id, MangaDexManga = next(iter(bal_data["Sites"]["Mangadex"].items()))
        return {
            "id": manga_id,
            "title": MangaDexManga["title"],
            "poster": MangaDexManga["image"],
            "availableChapters": [],
        }

    def get_chapter_thumbnails(self, manga_id, chapter):
        chapter_info_url = f"https://api.mangadex.org/chapter?manga={manga_id}&translatedLanguage[]=en&chapter={chapter}&includeEmptyPages=0"
        chapter_info_response = self.session.get(chapter_info_url)
        if not chapter_info_response.ok:
            return
        chapter_info = next(iter(chapter_info_response.json()["data"]))
        chapters_thumbnails_url = (
            f"https://api.mangadex.org/at-home/server/{chapter_info['id']}"
        )
        chapter_thumbnails_response = self.session.get(chapters_thumbnails_url)
        if not chapter_thumbnails_response.ok:
            return
        chapter_thumbnails_info = chapter_thumbnails_response.json()
        base_url = chapter_thumbnails_info["baseUrl"]
        hash = chapter_thumbnails_info["chapter"]["hash"]
        return {
            "thumbnails": [
                f"{base_url}/data/{hash}/{chapter_thumbnail}"
                for chapter_thumbnail in chapter_thumbnails_info["chapter"]["data"]
            ],
            "title": chapter_info["attributes"]["title"],
        }
@@ -2,8 +2,6 @@ import subprocess
|
||||
from shutil import which
|
||||
from sys import exit
|
||||
|
||||
from plyer import notification
|
||||
|
||||
from fastanime import APP_NAME
|
||||
|
||||
from ...constants import ICON_PATH
|
||||
@@ -25,7 +23,7 @@ class RofiApi:
|
||||
args = [self.ROFI_EXECUTABLE]
|
||||
if self.rofi_theme:
|
||||
args.extend(["-no-config", "-theme", self.rofi_theme])
|
||||
args.extend(["-p", prompt_text, "-i", "-show-icons", "-dmenu"])
|
||||
args.extend(["-p", f"{prompt_text.title()}", "-i", "-show-icons", "-dmenu"])
|
||||
result = subprocess.run(
|
||||
args,
|
||||
input=rofi_input,
|
||||
@@ -35,6 +33,13 @@ class RofiApi:
|
||||
|
||||
choice = result.stdout.strip()
|
||||
if not choice:
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print(
|
||||
"Plyer is not installed; install it for desktop notifications to be enabled"
|
||||
)
|
||||
exit(1)
|
||||
notification.notify(
|
||||
app_name=APP_NAME,
|
||||
app_icon=ICON_PATH,
|
||||
@@ -64,6 +69,13 @@ class RofiApi:
|
||||
|
||||
choice = result.stdout.strip()
|
||||
if not choice or choice not in options:
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print(
|
||||
"Plyer is not installed; install it for desktop notifications to be enabled"
|
||||
)
|
||||
exit(1)
|
||||
notification.notify(
|
||||
app_name=APP_NAME,
|
||||
app_icon=ICON_PATH,
|
||||
@@ -91,6 +103,13 @@ class RofiApi:
|
||||
|
||||
choice = result.stdout.strip()
|
||||
if not choice:
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print(
|
||||
"Plyer is not installed; install it for desktop notifications to be enabled"
|
||||
)
|
||||
exit(1)
|
||||
notification.notify(
|
||||
app_name=APP_NAME,
|
||||
app_icon=ICON_PATH,
|
||||
@@ -120,6 +139,13 @@ class RofiApi:
|
||||
|
||||
user_input = result.stdout.strip()
|
||||
if not user_input:
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print(
|
||||
"Plyer is not installed; install it for desktop notifications to be enabled"
|
||||
)
|
||||
exit(1)
|
||||
notification.notify(
|
||||
app_name=APP_NAME,
|
||||
app_icon=ICON_PATH,
|
||||
|
||||
793  poetry.lock  (generated)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "fastanime"
|
||||
version = "0.60.1.dev1"
|
||||
version = "2.6.0"
|
||||
description = "A browser anime site experience from the terminal"
|
||||
authors = ["Benextempest <benextempest@gmail.com>"]
|
||||
license = "UNLICENSE"
|
||||
@@ -9,18 +9,20 @@ readme = "README.md"
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.10"
|
||||
yt-dlp = "^2024.5.27"
|
||||
rich = "^13.7.1"
|
||||
click = "^8.1.7"
|
||||
inquirerpy = "^0.3.4"
|
||||
platformdirs = "^4.2.2"
|
||||
art = "^6.2"
|
||||
python-dotenv = "^1.0.1"
|
||||
thefuzz = "^0.22.1"
|
||||
requests = "^2.32.3"
|
||||
plyer = "^2.1.0"
|
||||
pyshortcuts = "^1.9.0"
|
||||
rich = { version = "^13.7.1", optional = false }
|
||||
click = { version = "^8.1.7", optional = false }
|
||||
inquirerpy = { version = "^0.3.4", optional = false }
|
||||
mpv = { version = "^1.0.7", optional = true }
|
||||
plyer = { version = "^2.1.0", optional = true }
|
||||
|
||||
[tool.poetry.extras]
|
||||
full = ["plyer", "mpv"]
|
||||
# cli = ["rich", "click", "inquirerpy"]
|
||||
mpv = ["mpv"]
|
||||
notifications = ["plyer"]
|
||||
|
||||
mpv = "^1.0.7"
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
black = "^24.4.2"
|
||||
isort = "^5.13.2"
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{
|
||||
"typeCheckingMode": "standard",
|
||||
"reportPrivateImportUsage": false
|
||||
"venvPath": ".",
|
||||
"venv": ".venv",
|
||||
"pythonVersion": "3.10"
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
# TODO: Write tests to make sure all click commands work
|
||||
import pytest
|
||||
from click.testing import CliRunner
|
||||
|
||||
@@ -7,7 +6,7 @@ from fastanime.cli import run_cli
|
||||
|
||||
@pytest.fixture
|
||||
def runner():
|
||||
return CliRunner()
|
||||
return CliRunner(env={"FASTANIME_CACHE_REQUESTS": "false"})
|
||||
|
||||
|
||||
def test_main_help(runner: CliRunner):
|
||||
@@ -45,6 +44,26 @@ def test_search_help(runner: CliRunner):
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_cache_help(runner: CliRunner):
|
||||
result = runner.invoke(run_cli, ["cache", "--help"])
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_completions_help(runner: CliRunner):
|
||||
result = runner.invoke(run_cli, ["completions", "--help"])
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_update_help(runner: CliRunner):
|
||||
result = runner.invoke(run_cli, ["update", "--help"])
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_grab_help(runner: CliRunner):
|
||||
result = runner.invoke(run_cli, ["grab", "--help"])
|
||||
assert result.exit_code == 0
|
||||
|
||||
|
||||
def test_anilist_help(runner: CliRunner):
|
||||
result = runner.invoke(run_cli, ["anilist", "--help"])
|
||||
assert result.exit_code == 0
|
||||
|
||||
6  tox.ini
@@ -7,7 +7,7 @@ env_list = lint, pyright, py{310,311}
|
||||
description = run unit tests
|
||||
deps =poetry
|
||||
commands =
|
||||
poetry install
|
||||
poetry install --all-extras
|
||||
poetry run pytest
|
||||
|
||||
[testenv:lint]
|
||||
@@ -15,7 +15,7 @@ description = run linters
|
||||
skip_install = true
|
||||
deps =poetry
|
||||
commands =
|
||||
poetry install
|
||||
poetry install --all-extras
|
||||
poetry run black .
|
||||
|
||||
[testenv:pyright]
|
||||
@@ -23,5 +23,5 @@ description = run type checking
|
||||
skip_install = true
|
||||
deps =poetry
|
||||
commands =
|
||||
poetry install --no-root
|
||||
poetry install --no-root --all-extras
|
||||
poetry run pyright
|
||||
|
||||