Mirror of https://github.com/Benexl/FastAnime.git (synced 2025-12-07 05:10:39 -08:00)

Compare commits: 431 commits
From `c78a759aa1` (oldest) through `06506fb47f` (newest). Only the SHA1 column of the commit table survives in this mirror; author, message, and date are not preserved.
.github/FUNDING.yml (vendored, new file, +15)

@@ -0,0 +1,15 @@
# These are supported funding model platforms

github: benexl # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
polar: # Replace with a single Polar username
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
thanks_dev: # Replace with a single thanks.dev username
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
.github/workflows/build.yml (vendored, 29 changed lines)

@@ -8,31 +8,24 @@ jobs:
debug_build:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- name: Install Python

- name: "Set up Python"
uses: actions/setup-python@v5
- name: Install poetry
uses: abatilo/actions-poetry@v2
- name: Setup a local virtual environment (if no poetry.toml file)
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v3
name: Define a cache for the virtual environment based on the dependencies lock file

- name: Install uv
uses: astral-sh/setup-uv@v3
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install
- name: build app
run: poetry build
enable-cache: true

- name: Build fastanime
run: uv build

- name: Archive production artifacts
uses: actions/upload-artifact@v4
with:
name: fastanime_debug_build
path: |
dist
!dist/*.whl
# - name: Run the automated tests (for example)
# run: poetry run pytest -v
.github/workflows/publish.yml (vendored, 12 changed lines)

@@ -27,11 +27,13 @@ jobs:
with:
python-version: "3.10"

- name: Build release distributions
run: |
# NOTE: put your own distribution build steps here.
python -m pip install build
python -m build
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true

- name: Build fastanime
run: uv build

- name: Upload distributions
uses: actions/upload-artifact@v4
.github/workflows/test.yml (vendored, 40 changed lines)

@@ -6,37 +6,35 @@ on:
pull_request:
branches:
- master

jobs:
test:
runs-on: ubuntu-latest

strategy:
matrix:
python-version: ["3.10", "3.11"] # List the Python versions you want to test

steps:
- uses: actions/checkout@v4
- name: Install Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install poetry
uses: abatilo/actions-poetry@v2
- name: Setup a local virtual environment (if no poetry.toml file)
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v3
name: Define a cache for the virtual environment based on the dependencies lock file

- name: Install uv
uses: astral-sh/setup-uv@v3
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install
- name: run linter, formatters and sort imports
run: |
poetry run black .
poetry run ruff check --output-format=github . --fix
poetry run isort . --profile black
- name: run type checking
run: poetry run pyright
- name: run tests
run: poetry run pytest
enable-cache: true

- name: Install the project
run: uv sync --all-extras --dev

- name: Run linter and formater
run: uv run ruff check --output-format=github

- name: Run type checking
run: uv run pyright

- name: Run tests
run: uv run pytest tests
.gitignore (vendored, 2 changed lines)

@@ -176,3 +176,5 @@ app/View/SearchScreen/.search_screen.py.un~
app/View/SearchScreen/search_screen.py~
app/user_data.json
.buildozer
result
repomix-output.xml
(pre-commit configuration; file name not shown in this view)

@@ -1,5 +1,5 @@
default_language_version:
python: python3.10
python: python3.12

repos:
- repo: https://github.com/pycqa/isort
@@ -7,7 +7,7 @@ repos:
hooks:
- id: isort
name: isort (python)
args: ["--profile", "black"] # Ensure compatibility with Black
args: ["--profile", "black"]

- repo: https://github.com/PyCQA/autoflake
rev: v2.2.1
@@ -19,17 +19,15 @@ repos:
"--remove-unused-variables",
"--remove-all-unused-imports",
]
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.4.10
hooks:
# Run the linter.
- id: ruff
args: [--fix]
# - repo: https://github.com/astral-sh/ruff-pre-commit
# rev: v0.4.10
# hooks:
# - id: ruff
# args: [--fix]

- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.4.2
hooks:
- id: black
name: black
language_version: python3.10 # to ensure compatibilty
#language_version: python3.10
.vscode/settings.json (vendored, new file, +3)

@@ -0,0 +1,3 @@
{
  "python.analysis.autoImportCompletions": true
}
DISCLAIMER.md (new file, +40)

@@ -0,0 +1,40 @@
<h1 align="center">Disclaimer</h1>

<div align="center">

<h2>This project: fastanime</h2>

<br>

The core aim of this project is to combine automation and efficiency to extract what is already provided to a user on the internet. All content available through the project is hosted by external, non-affiliated sources.

<br>

<b>All content served through this project is publicly accessible. If your site is listed in this project, the code that accesses it is public. Take the necessary measures to counter the exploits used to extract content from your site.</b>

Think of this project as a normal browser, only more straightforward and specific. While an average browser makes hundreds of requests to fetch everything from a site, this project only makes the requests needed to get the content the sites serve.

<b>

This project is to be used at the user's own risk, subject to their government and laws.

This project has no control over the content it serves; use of copyrighted content from the providers will not be accounted for by the developer. It is the user's own risk.

</b>


<br>

<h2>DMCA and Copyright Infringements</h2>

<br>

<b>

A browser is a tool, and the maliciousness of the tool depends directly on the user.
</b>


This project uses client-side content access mechanisms. Hence, copyright infringement or DMCA claims regarding this project should be forwarded to the associated site by whoever raises such claims. This is one of the main reasons the sites are listed in this project.

<b>Do not harass the developer. Any personal information about the developer is intentionally not made public. Exploiting such information without consent in regard to this topic will lead to legal action by the developer.</b>
Dockerfile (11 changed lines)

@@ -1,10 +1,7 @@
FROM ubuntu
RUN apt-get update
RUN apt-get -y install python3
RUN apt-get update
RUN apt-get -y install pipx
RUN pipx ensurepath
FROM python:3.12-slim-bookworm
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
COPY . /fastanime
ENV PATH=/root/.local/bin:$PATH
WORKDIR /fastanime
RUN pipx install .
RUN uv tool install .
CMD ["bash"]
README.md (434 changed lines)
@@ -1,79 +1,116 @@
|
||||
# FastAnime
|
||||
|
||||
Welcome to **FastAnime**, an anime site experience from the terminal.
|
||||
|
||||
**fzf mode**
|
||||
|
||||
[fa_fzf_demo.webm](https://github.com/user-attachments/assets/b1fecf25-e358-4e8b-a144-bcb7947210cf)
|
||||
|
||||
**other modes:**
|
||||
|
||||
<details>
|
||||
<summary><b>rofi mode</b></summary>
|
||||
|
||||
[fa_rofi_mode.webm](https://github.com/user-attachments/assets/2ce669bf-b62f-4c44-bd79-cf0dcaddf37a)
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><b>Default mode</b></summary>
|
||||
<p align="center">
|
||||
<h1 align="center">FastAnime</h1>
|
||||
</p>
|
||||
<p align="center">
|
||||
<sup>
|
||||
Browse anime from the terminal
|
||||
</sup>
|
||||
</p>
|
||||
<div align="center">
|
||||
|
||||
[fa_default_mode.webm](https://github.com/user-attachments/assets/1ce3a23d-f4a0-4bc1-8518-426ec7b3b69e)
|
||||
 
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
</div>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://discord.gg/HBEmAwvbHV">
|
||||
<img src="https://invidget.switchblade.xyz/C4rhMA4mmK">
|
||||
</a>
|
||||
</p>
|
||||
|
||||

|
||||
|
||||
<details>
|
||||
<summary>
|
||||
<b>Riced</b>
|
||||
</summary>
|
||||
|
||||
**Anilist results menu:**
|
||||

|
||||
|
||||
**Episodes menu preview:**
|
||||

|
||||
|
||||
**Without preview images enabled:**
|
||||

|
||||
|
||||
**Desktop notifications + episodes menu without image preview:**
|
||||

|
||||
|
||||
</details>
|
||||
|
||||
Heavily inspired by [animdl](https://github.com/justfoolingaround/animdl), [magic-tape](https://gitlab.com/christosangel/magic-tape/-/tree/main?ref_type=heads) and [ani-cli](https://github.com/pystardust/ani-cli).
|
||||
|
||||
<!--toc:start-->
|
||||
|
||||
- [FastAnime](#fastanime)
|
||||
- [Installation](#installation)
|
||||
- [Installation using your favourite package manager](#installation-using-your-favourite-package-manager)
|
||||
- [Using pipx](#using-pipx)
|
||||
- [Using pip](#using-pip)
|
||||
- [Installing the bleeding edge version](#installing-the-bleeding-edge-version)
|
||||
- [Building from the source](#building-from-the-source)
|
||||
- [External Dependencies](#external-dependencies)
|
||||
- [Usage](#usage)
|
||||
- [The Commandline interface :fire:](#the-commandline-interface-fire)
|
||||
- [The anilist command :fire: :fire: :fire:](#the-anilist-command-fire-fire-fire)
|
||||
- [Running without any subcommand](#running-without-any-subcommand)
|
||||
- [Subcommands](#subcommands)
|
||||
- [download subcommand](#download-subcommand)
|
||||
- [search subcommand](#search-subcommand)
|
||||
- [grab subcommand](#grab-subcommand)
|
||||
- [downloads subcommand](#downloads-subcommand)
|
||||
- [config subcommand](#config-subcommand)
|
||||
- [cache subcommand](#cache-subcommand)
|
||||
- [update subcommand](#update-subcommand)
|
||||
- [completions subcommand](#completions-subcommand)
|
||||
- [MPV specific commands](#mpv-specific-commands)
|
||||
- [Key Bindings](#key-bindings)
|
||||
- [Script Messages](#script-messages)
|
||||
- [Configuration](#configuration)
|
||||
- [Contributing](#contributing)
|
||||
- [Receiving Support](#receiving-support)
|
||||
- [Supporting the Project](#supporting-the-project)
|
||||
<!--toc:end-->
|
||||
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
> This project currently scrapes allanime, aniwatch, and animepahe. These sites are publicly accessible and can be reached by anyone with a browser.
|
||||
|
||||
## Installation
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
The app can run wherever Python runs, so all you need is Python installed on your device.
|
||||
On android you can use [termux](https://github.com/termux/termux-app).
|
||||
If you have any difficulty consult for help on the [discord channel](https://discord.gg/HRjySFjQ)
|
||||
If you have any difficulty consult for help on the [discord channel](https://discord.gg/HBEmAwvbHV)
|
||||
|
||||
### Installation on nixos
|
||||
|
||||

|
||||
|
||||
```bash
|
||||
nix profile install github:Benexl/fastanime
|
||||
```
|
||||
|
||||
### Installation on Arch
|
||||
|
||||

|
||||
|
||||
Install from the AUR using an AUR helper such as [yay](https://github.com/Jguer/yay) or [paru](https://github.com/Morganamilo/paru), either the git version, which uses the latest commit:
|
||||
|
||||

|
||||
|
||||
```bash
|
||||
yay -S fastanime-git
|
||||
```
|
||||
|
||||
or the stable version, which uses a tagged release:
|
||||
|
||||

|
||||
|
||||
```bash
|
||||
yay -S fastanime
|
||||
```
|
||||
|
||||
### Installation using your favourite package manager
|
||||
|
||||
Currently the app is only published on [pypi](https://pypi.org/project/fastanime/).
|
||||
With the following extras available:
|
||||
|
||||
- standard - which installs all dependencies
|
||||
- api - which installs dependencies required to use `fastanime serve`
|
||||
- mpv - which installs python mpv
|
||||
- notifications - which installs plyer required for desktop notifications
|
||||
|
||||
#### Using uv
|
||||
|
||||
Recommended method of installation is using [uv](https://docs.astral.sh/uv/).
|
||||
|
||||
```bash
|
||||
# generally:
|
||||
uv tool install "fastanime[standard]"
|
||||
|
||||
# or stripped down installations:
|
||||
uv tool install fastanime
|
||||
uv tool install "fastanime[api]"
|
||||
uv tool install "fastanime[mpv]"
|
||||
uv tool install "fastanime[notifications]"
|
||||
|
||||
```
|
||||
|
||||
#### Using pipx
|
||||
|
||||
Preferred method of installation since [Pipx](https://github.com/pypa/pipx) creates an isolated environment for each app it installs.
|
||||
|
||||
```bash
|
||||
|
||||
pipx install fastanime
|
||||
@@ -96,7 +133,7 @@ pip install 'fastanime==<latest-pre-release-tag>.dev1'
|
||||
|
||||
### Installing the bleeding edge version
|
||||
|
||||
To install the latest build which are created on every push by GitHub actions, download the [fastanime_debug_build](https://github.com/Benex254/FastAnime/actions) of your choosing from the GitHub actions page.
|
||||
To install the latest build which are created on every push by GitHub actions, download the [fastanime_debug_build](https://github.com/FastAnime/FastAnime/actions) of your choosing from the GitHub actions page.
|
||||
Then:
|
||||
|
||||
```bash
|
||||
@@ -117,24 +154,17 @@ Requirements:
|
||||
|
||||
- [git](https://git-scm.com/)
|
||||
- [python 3.10 and above](https://www.python.org/)
|
||||
- [poetry](https://python-poetry.org/docs/#installation)
|
||||
- [uv](https://astral.sh/blog/uv)
|
||||
|
||||
To build from the source, follow these steps:
|
||||
|
||||
1. Clone the repository: `git clone https://github.com/Benex254/FastAnime.git --depth 1`
|
||||
1. Clone the repository: `git clone https://github.com/Benexl/FastAnime.git --depth 1`
|
||||
2. Navigate into the folder: `cd FastAnime`
|
||||
3. Then build and Install the app:
|
||||
|
||||
```bash
|
||||
# Normal Installation
|
||||
poetry build
|
||||
cd dist
|
||||
pip install fastanime<version>.whl
|
||||
|
||||
# Editable installation (easiest for updates)
|
||||
# just do a git pull in the Project dir
|
||||
# the latter will require rebuilding the app
|
||||
pip install -e .
|
||||
# build and install fastanime with uv
|
||||
uv tool install .
|
||||
```
|
||||
|
||||
4. Enjoy! Verify installation with:
|
||||
@@ -145,12 +175,13 @@ fastanime --version
|
||||
|
||||
> [!Tip]
|
||||
>
|
||||
> Download the completions from [here](https://github.com/Benex254/FastAnime/tree/master/completions) for your shell.
|
||||
> Download the completions from [here](https://github.com/FastAnime/FastAnime/tree/master/completions) for your shell.
|
||||
> To add completions:
|
||||
>
|
||||
> - Fish Users: `cp $FASTANIME_PATH/completions/fastanime.fish ~/.config/fish/completions/`
|
||||
> - Bash Users: Add `source $FASTANIME_PATH/completions/fastanime.bash` to your `.bashrc`
|
||||
> - Zsh Users: Add `source $FASTANIME_PATH/completions/fastanime.zsh` to your `.zshrc`
|
||||
> or using the built in command `fastanime completions`
|
||||
|
||||
### External Dependencies
|
||||
|
||||
@@ -162,26 +193,31 @@ The only required external dependency, unless you won't be streaming, is [MPV](h
|
||||
> player because we believe nothing beats **MPV** and it provides
|
||||
> everything you could ever need with a small footprint.
|
||||
> But if you have a reason feel free to encourage as to do so.
|
||||
> However, on android this is not the case so vlc is also supported
|
||||
|
||||
**Other external dependencies that will just make your experience better:**
|
||||
|
||||
- [webtorrent-cli](https://github.com/webtorrent/webtorrent-cli) used when the provider is nyaa
|
||||
- [ffmpeg](https://www.ffmpeg.org/) is required to be in your path environment variables to properly download [hls](https://www.cloudflare.com/en-gb/learning/video/what-is-http-live-streaming/) streams.
|
||||
- [fzf](https://github.com/junegunn/fzf) 🔥 which is used as a better alternative to the ui.
|
||||
- [rofi](https://github.com/davatorium/rofi) 🔥 which is used as another alternative ui + the the desktop entry ui
|
||||
- [rofi](https://github.com/davatorium/rofi) 🔥 which is used as another alternative ui + the desktop entry ui
|
||||
- [chafa](https://github.com/hpjansson/chafa) currently the best cross platform and cross terminal image viewer for the terminal.
|
||||
- [icat](https://sw.kovidgoyal.net/kitty/kittens/icat/) an image viewer that only works in [kitty terminal](https://sw.kovidgoyal.net/kitty/), which is currently the best terminal in my opinion, and by far the best image renderer for the terminal thanks to kitty's terminal graphics protocol. Its terminal graphics is so op that you can [run a browser on it](https://github.com/chase/awrit?tab=readme-ov-file)!!
|
||||
- [bash](https://www.gnu.org/software/bash/) is used as the preview script language.
|
||||
- [ani-skip](https://github.com/synacktraa/ani-skip) used for skipping the opening and ending theme songs
|
||||
- [ffmpegthumbnailer](https://github.com/dirkvdb/ffmpegthumbnailer) used for local previews of downloaded anime
|
||||
- [syncplay](https://syncplay.pl/) to enable watch together.
|
||||
- [feh](https://github.com/derf/feh) used in manga mode
|
||||
|
||||
## Usage
|
||||
|
||||
The project offers a featureful command-line interface and MPV interface through the use of python-mpv.
|
||||
The project also offers subs in different languages thanks to hianime provider.
|
||||
|
||||
### The Commandline interface :fire:
|
||||
|
||||
Designed for efficiency and automation. Plus has a beautiful pseudo-TUI in some of the commands.
|
||||
If you are stuck anywhere just use `--help` before the command you would like to get help on
|
||||
|
||||
**Overview of main commands:**
|
||||
|
||||
@@ -220,7 +256,7 @@ Available options for the fastanime include:
|
||||
- `--default` use the default ui
|
||||
- `--preview` show a preview when using fzf
|
||||
- `--no-preview` dont show a preview when using fzf
|
||||
- `--format <yt-dlp format string>` or `-f <yt-dlp format string>` set the format of anime downloaded and streamed based on yt-dlp format. Works when `--server gogoanime`
|
||||
- `--format <yt-dlp format string>` or `-f <yt-dlp format string>` set the format of anime downloaded and streamed based on [yt-dlp format](https://github.com/yt-dlp/yt-dlp#format-selection). Works when `--server gogoanime` or on providers that provide multi quality streams eg hianime
|
||||
- `--icons/--no-icons` toggle the visibility of the icons
|
||||
- `--skip/--no-skip` whether to skip the opening and ending theme songs.
|
||||
- `--rofi` use rofi for the ui
|
||||
@@ -231,9 +267,11 @@ Available options for the fastanime include:
|
||||
- `--log-file` allow logging to a file
|
||||
- `--rich-traceback` allow rich traceback
|
||||
- `--use-mpv-mod/--use-default-player` whether to use python-mpv
|
||||
- `--provider <allanime/animepahe>` anime site of choice to scrape from
|
||||
- `--provider <allanime/animepahe/hianime/nyaa>` anime site of choice to scrape from
|
||||
- `--sync-play` or `-sp` use syncplay for streaming anime so you can watch with your friends
|
||||
- `--sub_lang <en/or any other common shortform for country>` regex is used to determine the appropriate. Only works when provider is aniwatch.
|
||||
- `--sub-lang <en/or any other common shortform for country>` a regex is used to determine the appropriate subtitle language. Only works when the provider is hianime.
|
||||
- `--normalize-titles/--no-normalize-titles` whether to normalize provider titles
|
||||
- `--manga` toggle experimental manga mode
|
||||
|
||||
Example usage of the above options
|
||||
|
||||
@@ -247,18 +285,21 @@ fastanime --sync-play --server sharepoint search -t <anime-title>
|
||||
fastanime --sync-play --server sharepoint anilist
|
||||
|
||||
# downloading dubbed anime
|
||||
fastanime --dub download <anime>
|
||||
fastanime --dub download -t <anime>
|
||||
|
||||
# use icons and fzf for a more elegant ui with preview
|
||||
fastanime --icons --preview --fzf anilist
|
||||
|
||||
# use icons with default ui
|
||||
fastanime --icons --default anilist
|
||||
|
||||
# viewing manga
|
||||
fastanime --manga search -t <manga-title>
|
||||
```
|
||||
|
||||
#### The anilist command :fire: :fire: :fire:
|
||||
|
||||
Stream, browse, and discover anime efficiently from the terminal using the [AniList API](https://github.com/AniList/ApiV2-GraphQL-Docs).
|
||||
Uses the [AniList API](https://github.com/AniList/ApiV2-GraphQL-Docs) to create a terminal AniList client, which is then integrated with the scraping capabilities of the project.
|
||||
|
||||
##### Running without any subcommand
|
||||
|
||||
@@ -267,7 +308,7 @@ Run `fastanime anilist` to access the main interface.
|
||||
##### Subcommands
|
||||
|
||||
The subcommands exist mainly as a convenience, since all the features are already available in the main interface.
|
||||
Most of the subcommands share the common option `--dump-json` or `-d` which will print only the json data and supress the ui.
|
||||
Most of the subcommands share the common option `--dump-json` or `-d` which will print only the json data and suppress the ui.
|
||||
|
||||
- `fastanime anilist trending`: Top 15 trending anime.
|
||||
- `fastanime anilist recent`: Top 15 recently updated anime.
|
||||
@@ -277,28 +318,76 @@ Most of the subcommands share the common option `--dump-json` or `-d` which will
|
||||
- `fastanime anilist favourites`: Top 15 favorite anime.
|
||||
- `fastanime anilist random`: get random anime
|
||||
|
||||
**FastAnime Anilist Search subcommand**
|
||||
**FastAnime Anilist Search subcommand** 🔥 🔥 🔥
|
||||
|
||||
It is by far one of the most powerful commands.
|
||||
It offers the following options:
|
||||
|
||||
- `--sort <MediaSort>` or `-s <MediaSort>`
|
||||
- `--title <anime-title>` or `-t <anime-title>`
|
||||
- `--tags <tag>` or `-t <tag>` can be specified multiple times for different tags to filter by.
|
||||
- `--tags <tag>` or `-T <tag>` can be specified multiple times for different tags to filter by.
|
||||
- `--year <year>` or `-y <year>`
|
||||
- `--status <MediaStatus>` or `-S <MediaStatus>`
|
||||
- `--status <MediaStatus>` or `-S <MediaStatus>` can be specified multiple times
|
||||
- `--media-format <MediaFormat>` or `-f <MediaFormat>`
|
||||
- `--season <MediaSeason>`
|
||||
- `--genres <genre>` or `-g <genre>` can be specified multiple times.
|
||||
- `--on-list/--not-on-list`
|
||||
|
||||
Example:
|
||||
|
||||
```bash
|
||||
fastanime anilist search -t isekai
|
||||
# get anime with the tag of isekai
|
||||
fastanime anilist search -T isekai
|
||||
|
||||
# get anime of 2024 and sort by popularity
|
||||
# that has already finished airing or is releasing
|
||||
# and is not in your anime lists
|
||||
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
|
||||
|
||||
# get anime of 2024 season WINTER
|
||||
fastanime anilist search -y 2024 --season WINTER
|
||||
|
||||
# get anime genre action and tag isekai,magic
|
||||
fastanime anilist search -g Action -T Isekai -T Magic
|
||||
|
||||
# get anime of 2024 thats finished airing
|
||||
fastanime anilist search -y 2024 -S FINISHED
|
||||
|
||||
# get the most favourite anime movies
|
||||
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
|
||||
```
|
||||
|
||||
For more details visit the anilist docs or just get the completions which will improve the experience.
|
||||
|
||||
Like seriously **[get the completions](https://github.com/FastAnime/FastAnime#completions-subcommand)** and the experience will be a 💯 💯 better.
|
||||
|
||||
**Fastanime anilist download:**
|
||||
Supports all the options for search, except it is used for downloading.
|
||||
it also supports all options for `fastanime download`
|
||||
Example:
|
||||
|
||||
```bash
|
||||
# get anime with the tag of isekai
|
||||
fastanime anilist download -T isekai
|
||||
|
||||
# get anime of 2024 and sort by popularity
|
||||
# that has already finished airing or is releasing
|
||||
# and is not in your anime lists
|
||||
fastanime anilist download -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
|
||||
|
||||
# get anime of 2024 season WINTER
|
||||
fastanime anilist download -y 2024 --season WINTER
|
||||
|
||||
# get anime genre action and tag isekai,magic
|
||||
fastanime anilist download -g Action -T Isekai -T Magic
|
||||
|
||||
# get anime of 2024 thats finished airing
|
||||
fastanime anilist download -y 2024 -S FINISHED
|
||||
|
||||
# get the most favourite anime movies
|
||||
fastanime anilist download -f MOVIE -s FAVOURITES_DESC
|
||||
```
|
||||
|
||||
The following are commands you can only run if you are signed in to your AniList account:
|
||||
|
||||
- `fastanime anilist watching`
|
||||
@@ -308,7 +397,7 @@ The following are commands you can only run if you are signed in to your AniList
|
||||
- `fastanime anilist paused`
|
||||
- `fastanime anilist completed`
|
||||
|
||||
Plus: `fastanime anilist notifier` :fire:
|
||||
Plus: `fastanime anilist notifier` 🔥
|
||||
|
||||
```bash
|
||||
# basic form
|
||||
@@ -385,7 +474,7 @@ fastanime download -t <anime-title> -r ':<episodes-end>'
|
||||
# remember python indexing starts at 0
|
||||
fastanime download -t <anime-title> -r '<episode-1>:<episode>'
|
||||
|
||||
# merge subtitles with ffmpeg to mkv format; aniwatch tends to give subs as separate files
|
||||
# merge subtitles with ffmpeg to mkv format; hianime tends to give subs as separate files
|
||||
# and dont prompt for anything
|
||||
# eg existing file in destination instead remove
|
||||
# and clean
|
||||
@@ -506,6 +595,10 @@ fastanime downloads --time-to-seek <intRange(-1,100)>
|
||||
# --- or ---
|
||||
fastanime downloads -t <intRange(-1,100)>
|
||||
|
||||
# to watch a specific title
|
||||
# be sure to get the completions for the best experience
|
||||
fastanime downloads --title <title>
|
||||
|
||||
# to get the path to the downloads folder set
|
||||
fastanime downloads --path
|
||||
# useful when you want to use the value for other programs
|
||||
@@ -533,7 +626,7 @@ fastanime config --view
|
||||
|
||||
> [!Note]
|
||||
>
|
||||
> If it opens [vim](https://www.vim.org/download.php) you can exit by typing `:q` .
|
||||
> If it opens [vim](https://www.vim.org/download.php) you can exit by typing `:q` 😉.
|
||||
|
||||
#### cache subcommand
|
||||
|
||||
@@ -586,6 +679,19 @@ fastanime completions --bash
|
||||
fastanime completions --zsh
|
||||
```
|
||||
|
||||
#### fastanime serve
|
||||
|
||||
Helper command that starts a rest server.
|
||||
This requires you to install fastanime with the api extra or standard extra.
|
||||
|
||||
```bash
|
||||
# default options
|
||||
fastanime serve
|
||||
|
||||
# specify host and port
|
||||
fastanime serve --host <host> --port <port>
|
||||
```
|
||||
|
||||
### MPV specific commands
|
||||
|
||||
The project now allows on-the-fly media controls directly from MPV. This means you can go to the next or previous episode without the window ever closing, offering a seamless experience.
|
||||
@@ -620,97 +726,115 @@ script-message select-server <server-name>
|
||||
script-message select-quality <1080/720/480/360>
|
||||
```
|
||||
|
||||
## Styling the default interface
|
||||
|
||||
The default interface uses InquirerPy, which is customizable. Read more here: <https://inquirerpy.readthedocs.io/en/latest/pages/env.html>
|
||||
|
||||
## Configuration
|
||||
|
||||
The app includes sensible defaults but can be customized extensively. Configuration is stored in `.ini` format at `~/.config/FastAnime/config.ini` on arch linux; for the other operating systems you can check by running `fastanime config --path`.
|
||||
|
||||
> [!TIP]
|
||||
> You can now use the option `--update` to update your config file from the command-line
|
||||
> For Example:
|
||||
> `fastanime --icons --fzf --preview config --update`
|
||||
> the above will set icons to true, use_fzf to true and preview to true in your config file
|
||||
|
||||
By default, if a config file does not exist, it will be auto-created with comments explaining each and every option.
|
||||
The default config:
|
||||
|
||||
```ini
|
||||
[stream]
|
||||
continue_from_history = True # Auto continue from watch history
|
||||
|
||||
# which history to use [local/remote]
|
||||
preferred_history = local
|
||||
|
||||
# force mpv window
|
||||
# passed directly to mpv so values are same
|
||||
force_window = immediate
|
||||
|
||||
translation_type = sub # Preferred language for anime (options: dub, sub)
|
||||
|
||||
server = top # Default server (options: dropbox, sharepoint, wetransfer.gogoanime, top, wixmp)
|
||||
|
||||
auto_next = False # Auto-select next episode
|
||||
|
||||
# Auto select the anime provider results with fuzzy find.
|
||||
# Note this wont always be correct.But 99% of the time will be.
|
||||
auto_select=True
|
||||
|
||||
# whether to skip the opening and ending theme songs
|
||||
# note requires ani-skip to be in path
|
||||
skip=false
|
||||
|
||||
# the maximum delta time in minutes after which the episode should be considered as completed
|
||||
# used in the continue from time stamp
|
||||
error=3
|
||||
|
||||
use_mpv_mod=False
|
||||
|
||||
# the format of downloaded anime and trailer
|
||||
# based on yt-dlp format and passed directly to it
|
||||
# learn more by looking it up on their site
|
||||
# only works for downloaded anime if server=gogoanime
|
||||
# since its the only one that offers different formats
|
||||
# the others tend not to
|
||||
format=best[height<=1080]/bestvideo[height<=1080]+bestaudio/best # default
|
||||
|
||||
[general]
|
||||
# can be [allanime,animepahe]
|
||||
icons = False
|
||||
|
||||
quality = 1080
|
||||
|
||||
normalize_titles = True
|
||||
|
||||
provider = allanime
|
||||
|
||||
preferred_language = romaji # Display language (options: english, romaji)
|
||||
preferred_language = english
|
||||
|
||||
downloads_dir = <Default-videos-dir>/FastAnime # Download directory
|
||||
downloads_dir = ~/Videos/FastAnime
|
||||
|
||||
preview=false # whether to show a preview window when using fzf or rofi
|
||||
preview = False
|
||||
|
||||
use_fzf=False # whether to use fzf as the interface for the anilist command and others.
|
||||
ffmpegthumbnailer_seek_time = -1
|
||||
|
||||
use_rofi=false # whether to use rofi for the ui
|
||||
use_fzf = False
|
||||
|
||||
rofi_theme=<path-to-rofi-theme-file>
|
||||
use_rofi = False
|
||||
|
||||
rofi_theme_input=<path-to-rofi-theme-file>
|
||||
rofi_theme =
|
||||
|
||||
rofi_theme_confirm=<path-to-rofi-theme-file>
|
||||
rofi_theme_input =
|
||||
|
||||
rofi_theme_confirm =
|
||||
|
||||
notification_duration = 120
|
||||
|
||||
sub_lang = eng
|
||||
|
||||
default_media_list_tracking = None
|
||||
|
||||
force_forward_tracking = True
|
||||
|
||||
cache_requests = True
|
||||
|
||||
use_persistent_provider_store = False
|
||||
|
||||
recent = 50
|
||||
|
||||
|
||||
# whether to show the icons
|
||||
icons=false
|
||||
[stream]
|
||||
continue_from_history = True
|
||||
|
||||
# the duration in minutes a notification will stay in the screen
|
||||
# used by notifier command
|
||||
notification_duration=2
|
||||
preferred_history = local
|
||||
|
||||
[anilist]
|
||||
# Not implemented yet
|
||||
translation_type = sub
|
||||
|
||||
server = top
|
||||
|
||||
auto_next = False
|
||||
|
||||
auto_select = True
|
||||
|
||||
skip = False
|
||||
|
||||
episode_complete_at = 80
|
||||
|
||||
use_python_mpv = False
|
||||
|
||||
force_window = immediate
|
||||
|
||||
format = best[height<=1080]/bestvideo[height<=1080]+bestaudio/best
|
||||
|
||||
player = mpv
|
||||
```
|
||||
|
||||
### Other Terminal Browsers I Made
|
||||
[yt-x](https://github.com/Benexl/yt-x) - browse youtube and other yt-dlp sites from the terminal
|
||||
|
||||
[lib-x](https://github.com/Benexl/lib-x) - browse your calibre library from the terminal
|
||||
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
We welcome your issues and feature requests. However, due to time constraints, we currently do not plan to add another provider.
|
||||
PRs are highly welcome.
|
||||
|
||||
If you wish to contribute directly, please first open an issue describing your proposed changes so they can be discussed; if you are in a rush to get the feature merged, just open a PR.
|
||||
|
||||
## Receiving Support
|
||||
|
||||
For inquiries, join our [Discord Server](https://discord.gg/C4rhMA4mmK).
|
||||
|
||||
<p align="center">
|
||||
<a href="https://discord.gg/C4rhMA4mmK">
|
||||
<img src="https://invidget.switchblade.xyz/C4rhMA4mmK">
|
||||
</a>
|
||||
</p>
|
||||
If you find an anime title that does not correspond with a provider, or is just weird, just [edit the data file](https://github.com/Benexl/FastAnime/blob/master/fastanime/Utility/data.py) and open a PR; issues will be ignored 😝.
|
||||
|
||||
## Supporting the Project
|
||||
|
||||
Show your support by starring our GitHub repository or [buying us a coffee](https://ko-fi.com/benex254).
|
||||
More PRs, fewer issues 🙃
|
||||
|
||||
Show your support by starring the GitHub repository.
|
||||
|
||||
## Disclaimer
|
||||
|
||||
> [!IMPORTANT]
|
||||
>
|
||||
> This project currently scrapes allanime, hianime, nyaa and animepahe.
|
||||
> The developer(s) of this application does not have any affiliation with the content providers available, and this application hosts zero content.
|
||||
> [DISCLAIMER](https://github.com/Benexl/FastAnime/blob/master/DISCLAIMER.md)
|
||||
|
||||
fa (5 changed lines)

@@ -1,4 +1,3 @@
#!/usr/bin/env sh
# exec "${PYTHON:-python3}" -Werror -Xdev -m "$(dirname "$(realpath "$0")")/fastanime" "$@"
cd "$(dirname "$(realpath "$0")")" || exit 1
exec python -m fastanime "$@"
CLI_DIR="$(dirname "$(realpath "$0")")"
exec uv run --directory "$CLI_DIR/../" fastanime "$@"
@@ -1,10 +1,8 @@
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api
|
||||
|
||||
[TODO:description]
|
||||
"""
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
import os
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .libs.anime_provider import anime_sources
|
||||
@@ -12,13 +10,12 @@ from .libs.anime_provider import anime_sources
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterator
|
||||
|
||||
from .libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
from .libs.anime_provider.types import Anime, SearchResults, Server
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# TODO: improve performance of this class and add cool features like auto retry
|
||||
# TODO: add cool features like auto retry
|
||||
class AnimeProvider:
|
||||
"""Class that manages all anime sources adding some extra functionality to them.
|
||||
Attributes:
|
||||
@@ -33,27 +30,39 @@ class AnimeProvider:
|
||||
PROVIDERS = list(anime_sources.keys())
|
||||
provider = PROVIDERS[0]
|
||||
|
||||
def __init__(self, provider, dynamic=False, retries=0) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
provider,
|
||||
cache_requests=os.environ.get("FASTANIME_CACHE_REQUESTS", "false"),
|
||||
use_persistent_provider_store=os.environ.get(
|
||||
"FASTANIME_USE_PERSISTENT_PROVIDER_STORE", "false"
|
||||
),
|
||||
dynamic=False,
|
||||
retries=0,
|
||||
) -> None:
|
||||
self.provider = provider
|
||||
self.dynamic = dynamic
|
||||
self.retries = retries
|
||||
self.cache_requests = cache_requests
|
||||
self.use_persistent_provider_store = use_persistent_provider_store
|
||||
self.lazyload_provider(self.provider)
|
||||
|
||||
def lazyload_provider(self, provider):
|
||||
"""updates the current provider being used"""
|
||||
try:
|
||||
self.anime_provider.session.kill_connection_to_db()
|
||||
except Exception:
|
||||
pass
|
||||
_, anime_provider_cls_name = anime_sources[provider].split(".", 1)
|
||||
package = f"fastanime.libs.anime_provider.{provider}"
|
||||
provider_api = importlib.import_module(".api", package)
|
||||
anime_provider = getattr(provider_api, anime_provider_cls_name)
|
||||
self.anime_provider = anime_provider()
|
||||
self.anime_provider = anime_provider(
|
||||
self.cache_requests, self.use_persistent_provider_store
|
||||
)
|
||||
|
||||
def search_for_anime(
|
||||
self,
|
||||
user_query,
|
||||
translation_type,
|
||||
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
|
||||
nsfw=True,
|
||||
unknown=True,
|
||||
self, search_keywords, translation_type, **kwargs
|
||||
) -> "SearchResults | None":
|
||||
"""core abstraction over all providers search functionality
|
||||
|
||||
@@ -68,19 +77,16 @@ class AnimeProvider:
|
||||
[TODO:return]
|
||||
"""
|
||||
anime_provider = self.anime_provider
|
||||
try:
|
||||
results = anime_provider.search_for_anime(
|
||||
user_query, translation_type, nsfw, unknown
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
results = anime_provider.search_for_anime(
|
||||
search_keywords, translation_type, **kwargs
|
||||
)
|
||||
|
||||
return results
|
||||
|
||||
def get_anime(
|
||||
self,
|
||||
anime_id: str,
|
||||
anilist_obj: "AnilistBaseMediaDataSchema | None" = None,
|
||||
**kwargs,
|
||||
) -> "Anime | None":
|
||||
"""core abstraction over getting info of an anime from all providers
|
||||
|
||||
@@ -92,19 +98,16 @@ class AnimeProvider:
|
||||
[TODO:return]
|
||||
"""
|
||||
anime_provider = self.anime_provider
|
||||
try:
|
||||
results = anime_provider.get_anime(anime_id)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
results = anime_provider.get_anime(anime_id, **kwargs)
|
||||
|
||||
return results
|
||||
|
||||
def get_episode_streams(
|
||||
self,
|
||||
anime,
|
||||
anime_id,
|
||||
episode: str,
|
||||
translation_type: str,
|
||||
anilist_obj: "AnilistBaseMediaDataSchema|None" = None,
|
||||
**kwargs,
|
||||
) -> "Iterator[Server] | None":
|
||||
"""core abstractions for getting juicy streams from all providers
|
||||
|
||||
@@ -118,11 +121,7 @@ class AnimeProvider:
|
||||
[TODO:return]
|
||||
"""
|
||||
anime_provider = self.anime_provider
|
||||
try:
|
||||
results = anime_provider.get_episode_streams(
|
||||
anime, episode, translation_type
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results # pyright:ignore
|
||||
results = anime_provider.get_episode_streams(
|
||||
anime_id, episode, translation_type, **kwargs
|
||||
)
|
||||
return results
|
||||
|
||||
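Taken together, the hunks above rework the `AnimeProvider` constructor and its pass-through methods. Below is a minimal usage sketch based only on the signature shown in the diff; the import path and the argument values are assumptions, while the `FASTANIME_*` environment-variable fallbacks come from the diff itself.

```python
# Hypothetical usage sketch; not taken verbatim from the repository's own call sites.
from fastanime.AnimeProvider import AnimeProvider  # assumed module path

provider = AnimeProvider(
    "allanime",                             # any key from AnimeProvider.PROVIDERS
    cache_requests="true",                  # falls back to FASTANIME_CACHE_REQUESTS
    use_persistent_provider_store="false",  # falls back to FASTANIME_USE_PERSISTENT_PROVIDER_STORE
)

# search_for_anime now forwards extra keyword arguments to the provider and no
# longer swallows exceptions internally, so errors surface to the caller.
results = provider.search_for_anime("one piece", "sub")
print(results)
```

Note that the environment-variable defaults are evaluated when the module is imported, so exporting `FASTANIME_CACHE_REQUESTS` before launching is how the fallback takes effect if the values are not passed explicitly.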
fastanime/MangaProvider.py (new file, +105)
@@ -0,0 +1,105 @@
|
||||
"""An abstraction over all providers offering added features with a simple and well typed api
|
||||
|
||||
[TODO:description]
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .libs.manga_provider import manga_sources
|
||||
|
||||
if TYPE_CHECKING:
|
||||
pass
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MangaProvider:
|
||||
"""Class that manages all anime sources adding some extra functionality to them.
|
||||
Attributes:
|
||||
PROVIDERS: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
dynamic: [TODO:attribute]
|
||||
retries: [TODO:attribute]
|
||||
manga_provider: [TODO:attribute]
|
||||
"""
|
||||
|
||||
PROVIDERS = list(manga_sources.keys())
|
||||
provider = PROVIDERS[0]
|
||||
|
||||
def __init__(self, provider="mangadex", dynamic=False, retries=0) -> None:
|
||||
self.provider = provider
|
||||
self.dynamic = dynamic
|
||||
self.retries = retries
|
||||
self.lazyload_provider(self.provider)
|
||||
|
||||
def lazyload_provider(self, provider):
|
||||
"""updates the current provider being used"""
|
||||
_, anime_provider_cls_name = manga_sources[provider].split(".", 1)
|
||||
package = f"fastanime.libs.manga_provider.{provider}"
|
||||
provider_api = importlib.import_module(".api", package)
|
||||
manga_provider = getattr(provider_api, anime_provider_cls_name)
|
||||
self.manga_provider = manga_provider()
|
||||
|
||||
def search_for_manga(
|
||||
self,
|
||||
user_query,
|
||||
nsfw=True,
|
||||
unknown=True,
|
||||
):
|
||||
"""core abstraction over all providers search functionality
|
||||
|
||||
Args:
|
||||
user_query ([TODO:parameter]): [TODO:description]
|
||||
translation_type ([TODO:parameter]): [TODO:description]
|
||||
nsfw ([TODO:parameter]): [TODO:description]
|
||||
manga_provider ([TODO:parameter]): [TODO:description]
|
||||
anilist_obj: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.search_for_manga(user_query, nsfw, unknown)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results
|
||||
|
||||
def get_manga(
|
||||
self,
|
||||
anime_id: str,
|
||||
):
|
||||
"""core abstraction over getting info of an anime from all providers
|
||||
|
||||
Args:
|
||||
anime_id: [TODO:description]
|
||||
anilist_obj: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.get_manga(anime_id)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results
|
||||
|
||||
def get_chapter_thumbnails(
|
||||
self,
|
||||
manga_id: str,
|
||||
chapter: str,
|
||||
):
|
||||
manga_provider = self.manga_provider
|
||||
try:
|
||||
results = manga_provider.get_chapter_thumbnails(manga_id, chapter)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
results = None
|
||||
return results # pyright:ignore
|
||||
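For orientation, a hypothetical usage sketch of the new `MangaProvider` class added above; the import path mirrors the file location in the diff and the query string is just an example.

```python
from fastanime.MangaProvider import MangaProvider

manga_provider = MangaProvider()  # defaults to the "mangadex" provider

# Unlike the reworked AnimeProvider, these methods still catch exceptions and
# return None on failure (the error is logged), so callers should check for None.
results = manga_provider.search_for_manga("berserk")
if results:
    print(results)

# get_chapter_thumbnails takes the provider-specific manga id and a chapter
# identifier; the placeholders below are not real MangaDex ids.
# thumbnails = manga_provider.get_chapter_thumbnails("<manga-id>", "<chapter>")
```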
(formatter utilities; file name not shown in this view)

@@ -1,13 +1,16 @@
import re
from datetime import datetime
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from ..libs.anilist.types import AnilistDateObject, AnilistMediaNextAiringEpisode

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")


# TODO: Add formating options for the final date
def format_anilist_date_object(anilist_date_object: "AnilistDateObject"):
    if anilist_date_object:
    if anilist_date_object and anilist_date_object["day"]:
        return f"{anilist_date_object['day']}/{anilist_date_object['month']}/{anilist_date_object['year']}"
    else:
        return "Unknown"
@@ -27,6 +30,12 @@ def format_list_data_with_comma(data: list | None):
        return "None"


def format_number_with_commas(number: int | None):
    if not number:
        return "0"
    return COMMA_REGEX.sub(lambda match: f"{match.group(1)},", str(number)[::-1])[::-1]


def extract_next_airing_episode(airing_episode: "AnilistMediaNextAiringEpisode"):
    if airing_episode:
        return f"{airing_episode['episode']} on {format_anilist_timestamp(airing_episode['airingAt'])}"
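A quick worked example of the reverse-substitute-reverse trick that `format_number_with_commas` uses above; this is a standalone sketch that simply re-applies the same `COMMA_REGEX`.

```python
import re

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")  # three digits that still have a digit after them


def format_number_with_commas(number: int | None) -> str:
    if not number:
        return "0"
    # Reverse the digits, add a comma after every complete group of three,
    # then reverse again so the grouping is counted from the right.
    return COMMA_REGEX.sub(lambda m: f"{m.group(1)},", str(number)[::-1])[::-1]


assert format_number_with_commas(1234567) == "1,234,567"
assert format_number_with_commas(987) == "987"
assert format_number_with_commas(None) == "0"
```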
(likely fastanime/Utility/data.py; file name not shown in this view)

@@ -3,12 +3,31 @@ Just contains some useful data used across the codebase
"""

# useful incases where the anilist title is too different from the provider title
anime_normalizer = {
    "1P": "one piece",
    "Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
    "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
    'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
anime_normalizer_raw = {
    "allanime": {
        "1P": "one piece",
        "Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
        "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
        'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
        "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
    },
    "hianime": {"My Star": "Oshi no Ko"},
    "animepahe": {
        "Azumanga Daiou The Animation": "Azumanga Daioh",
        "Mairimashita! Iruma-kun 2nd Season": "Mairimashita! Iruma-kun 2",
        "Mairimashita! Iruma-kun 3rd Season": "Mairimashita! Iruma-kun 3",
    },
    "nyaa": {},
    "yugen": {},
}


anilist_sort_normalizer = {"search match": "SEARCH_MATCH"}
def get_anime_normalizer():
    """Used because there are different providers"""
    import os

    current_provider = os.environ.get("FASTANIME_PROVIDER", "allanime")
    return anime_normalizer_raw[current_provider]


anime_normalizer = get_anime_normalizer()
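To show how the provider-keyed normalizer is selected, here is a small self-contained sketch mirroring the structure above; the mapping is truncated to two providers and the `FASTANIME_PROVIDER` value is just an example.

```python
import os

# Truncated copy of anime_normalizer_raw, for illustration only.
anime_normalizer_raw = {
    "allanime": {"1P": "one piece"},
    "animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
}


def get_anime_normalizer():
    """Pick the normalizer map for whichever provider is currently active."""
    current_provider = os.environ.get("FASTANIME_PROVIDER", "allanime")
    return anime_normalizer_raw[current_provider]


os.environ["FASTANIME_PROVIDER"] = "animepahe"
assert get_anime_normalizer() == {"Azumanga Daiou The Animation": "Azumanga Daioh"}
```

Since the real module assigns `anime_normalizer = get_anime_normalizer()` at import time, `FASTANIME_PROVIDER` has to be set before the module is imported for the selection to take effect.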
fastanime/Utility/downloader/_yt_dlp.py (new file, +6)

@@ -0,0 +1,6 @@
from yt_dlp import YoutubeDL


# TODO: create a class that makes yt-dlp's YoutubeDL fit in more with fastanime
class YtDlp(YoutubeDL):
    pass
@@ -16,6 +16,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
class YtDLPDownloader:
|
||||
downloads_queue = Queue()
|
||||
_thread = None
|
||||
|
||||
def _worker(self):
|
||||
while True:
|
||||
@@ -26,13 +27,6 @@ class YtDLPDownloader:
|
||||
logger.error(f"Something went wrong {e}")
|
||||
self.downloads_queue.task_done()
|
||||
|
||||
def __init__(self):
|
||||
self._thread = Thread(target=self._worker)
|
||||
self._thread.daemon = True
|
||||
self._thread.start()
|
||||
|
||||
# Function to download the file
|
||||
# TODO: unpack the title into its actual values episode_title and anime_title
|
||||
def _download_file(
|
||||
self,
|
||||
url: str,
|
||||
@@ -40,6 +34,7 @@ class YtDLPDownloader:
|
||||
episode_title: str,
|
||||
download_dir: str,
|
||||
silent: bool,
|
||||
progress_hooks=[],
|
||||
vid_format: str = "best",
|
||||
force_unknown_ext=False,
|
||||
verbose=False,
|
||||
@@ -48,6 +43,10 @@ class YtDLPDownloader:
|
||||
merge=False,
|
||||
clean=False,
|
||||
prompt=True,
|
||||
force_ffmpeg=False,
|
||||
hls_use_mpegts=False,
|
||||
hls_use_h264=False,
|
||||
nocheckcertificate=False,
|
||||
):
|
||||
"""Helper function that downloads anime given url and path details
|
||||
|
||||
@@ -61,6 +60,25 @@ class YtDLPDownloader:
|
||||
"""
|
||||
anime_title = sanitize_filename(anime_title)
|
||||
episode_title = sanitize_filename(episode_title)
|
||||
if url.endswith(".torrent"):
|
||||
WEBTORRENT_CLI = shutil.which("webtorrent")
|
||||
if not WEBTORRENT_CLI:
|
||||
import time
|
||||
|
||||
print(
|
||||
"webtorrent cli is not installed which is required for downloading and streaming from nyaa\nplease install it or use another provider"
|
||||
)
|
||||
time.sleep(120)
|
||||
return
|
||||
cmd = [
|
||||
WEBTORRENT_CLI,
|
||||
"download",
|
||||
url,
|
||||
"--out",
|
||||
os.path.join(download_dir, anime_title, episode_title),
|
||||
]
|
||||
subprocess.run(cmd)
|
||||
return
|
||||
ydl_opts = {
|
||||
# Specify the output path and template
|
||||
"http_headers": headers,
|
||||
@@ -69,6 +87,8 @@ class YtDLPDownloader:
|
||||
"verbose": verbose,
|
||||
"format": vid_format,
|
||||
"compat_opts": ("allow-unsafe-ext",) if force_unknown_ext else tuple(),
|
||||
"progress_hooks": progress_hooks,
|
||||
"nocheckcertificate": nocheckcertificate,
|
||||
}
|
||||
urls = [url]
|
||||
if sub:
|
||||
@@ -76,12 +96,47 @@ class YtDLPDownloader:
|
||||
vid_path = ""
|
||||
sub_path = ""
|
||||
for i, url in enumerate(urls):
|
||||
with yt_dlp.YoutubeDL(ydl_opts) as ydl:
|
||||
options = ydl_opts
|
||||
if i == 0:
|
||||
if force_ffmpeg:
|
||||
options = options | {
|
||||
"external_downloader": {"default": "ffmpeg"},
|
||||
"external_downloader_args": {
|
||||
"ffmpeg_i1": ["-v", "error", "-stats"],
|
||||
},
|
||||
}
|
||||
if hls_use_mpegts:
|
||||
options = options | {
|
||||
"hls_use_mpegts": True,
|
||||
"outtmpl": ".".join(options["outtmpl"].split(".")[:-1]) + ".ts", # force .ts extension
|
||||
}
|
||||
elif hls_use_h264:
|
||||
options = options | {
|
||||
"external_downloader_args": options["external_downloader_args"] | {
|
||||
"ffmpeg_o1": [
|
||||
"-c:v", "copy",
|
||||
"-c:a", "aac",
|
||||
"-bsf:a", "aac_adtstoasc",
|
||||
"-q:a", "1",
|
||||
"-ac", "2",
|
||||
"-af", "loudnorm=I=-22:TP=-2.5:LRA=11,alimiter=limit=-1.5dB", # prevent clipping from HE-AAC to AAC convertion
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
with yt_dlp.YoutubeDL(options) as ydl:
|
||||
info = ydl.extract_info(url, download=True)
|
||||
if not info:
|
||||
continue
|
||||
if i == 0:
|
||||
vid_path = info["requested_downloads"][0]["filepath"]
|
||||
vid_path: str = info["requested_downloads"][0]["filepath"]
|
||||
if vid_path.endswith(".unknown_video"):
|
||||
print("Normalizing path...")
|
||||
_vid_path = vid_path.replace(".unknown_video", ".mp4")
|
||||
shutil.move(vid_path, _vid_path)
|
||||
vid_path = _vid_path
|
||||
print("successfully normalized path")
|
||||
|
||||
else:
|
||||
sub_path = info["requested_downloads"][0]["filepath"]
|
||||
if sub_path and vid_path and merge:
|
||||
@@ -150,8 +205,15 @@ class YtDLPDownloader:
|
||||
except Exception as e:
|
||||
print(f"[red bold]An error[/] occurred: {e}")
|
||||
|
||||
# WARN: May remove this legacy functionality
|
||||
def download_file(self, url: str, title, silent=True):
|
||||
def download_file(
|
||||
self,
|
||||
url: str,
|
||||
anime_title: str,
|
||||
episode_title: str,
|
||||
download_dir: str,
|
||||
silent: bool = True,
|
||||
**kwargs,
|
||||
):
|
||||
"""A helper that just does things in the background
|
||||
|
||||
Args:
|
||||
@@ -159,7 +221,17 @@ class YtDLPDownloader:
|
||||
silent ([TODO:parameter]): [TODO:description]
|
||||
url: [TODO:description]
|
||||
"""
|
||||
self.downloads_queue.put((self._download_file, (url, title, silent)))
|
||||
if not self._thread:
|
||||
self._thread = Thread(target=self._worker)
|
||||
self._thread.daemon = True
|
||||
self._thread.start()
|
||||
|
||||
self.downloads_queue.put(
|
||||
(
|
||||
self._download_file,
|
||||
(url, anime_title, episode_title, download_dir, silent),
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
downloader = YtDLPDownloader()
|
||||
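With the expanded download_file signature above, queueing a background download becomes a single call; a hedged usage sketch (the URL and paths are placeholders):

# the worker thread is started lazily the first time something is queued
downloader.download_file(
    "https://example.com/episode-1.m3u8",  # placeholder stream URL
    "Some Anime",       # anime_title
    "Episode 1",        # episode_title
    "/tmp/fastanime",   # download_dir
    silent=True,
)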
|
||||
@@ -30,14 +30,17 @@ def anime_title_percentage_match(
|
||||
Returns:
|
||||
int: the percentage match
|
||||
"""
|
||||
if normalized_anime_title := anime_normalizer.get(
|
||||
possible_user_requested_anime_title
|
||||
):
|
||||
possible_user_requested_anime_title = normalized_anime_title
|
||||
possible_user_requested_anime_title = anime_normalizer.get(
|
||||
possible_user_requested_anime_title, possible_user_requested_anime_title
|
||||
)
|
||||
# compares both the romaji and english names and gets highest Score
|
||||
title_a = str(anime["title"]["romaji"])
|
||||
title_b = str(anime["title"]["english"])
|
||||
percentage_ratio = max(
|
||||
*[
|
||||
fuzz.ratio(title.lower(), possible_user_requested_anime_title.lower())
|
||||
for title in anime["synonyms"]
|
||||
],
|
||||
fuzz.ratio(title_a.lower(), possible_user_requested_anime_title.lower()),
|
||||
fuzz.ratio(title_b.lower(), possible_user_requested_anime_title.lower()),
|
||||
)
|
||||
|
||||
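The .get(key, default) form above keeps the user's original title whenever there is no normalizer entry for it, so ranking results against a query stays a one-liner; a sketch assuming the signature shown in this hunk:

# pick the AniList entry whose romaji/english titles or synonyms best match the query
best_match = max(
    anilist_results,  # hypothetical list of AniList media dicts
    key=lambda anime: anime_title_percentage_match("one piece", anime),
)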
@@ -2,14 +2,14 @@ import sys
|
||||
|
||||
if sys.version_info < (3, 10):
|
||||
raise ImportError(
|
||||
"You are using an unsupported version of Python. Only Python versions 3.8 and above are supported by yt-dlp"
|
||||
"You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by FastAnime"
|
||||
) # noqa: F541
|
||||
|
||||
|
||||
__version__ = "v2.3.8"
|
||||
__version__ = "v2.9.9"
|
||||
|
||||
APP_NAME = "FastAnime"
|
||||
AUTHOR = "Benex254"
|
||||
AUTHOR = "Benexl"
|
||||
GIT_REPO = "github.com"
|
||||
REPO = f"{GIT_REPO}/{AUTHOR}/{APP_NAME}"
|
||||
|
||||
|
||||
93
fastanime/api/__init__.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from typing import Literal
|
||||
|
||||
from fastapi import FastAPI
|
||||
from requests import post
|
||||
from thefuzz import fuzz
|
||||
|
||||
from ..AnimeProvider import AnimeProvider
|
||||
from ..Utility.data import anime_normalizer
|
||||
|
||||
app = FastAPI()
|
||||
anime_provider = AnimeProvider("allanime", "true", "true")
|
||||
ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
|
||||
|
||||
@app.get("/search")
|
||||
def search_for_anime(title: str, translation_type: Literal["dub", "sub"] = "sub"):
|
||||
return anime_provider.search_for_anime(title, translation_type)
|
||||
|
||||
|
||||
@app.get("/anime/{anime_id}")
|
||||
def get_anime(anime_id: str):
|
||||
return anime_provider.get_anime(anime_id)
|
||||
|
||||
|
||||
@app.get("/anime/{anime_id}/watch")
|
||||
def get_episode_streams(
|
||||
anime_id: str, episode: str, translation_type: Literal["sub", "dub"]
|
||||
):
|
||||
return anime_provider.get_episode_streams(anime_id, episode, translation_type)
|
||||
|
||||
|
||||
def get_anime_by_anilist_id(anilist_id: int):
|
||||
query = f"""
|
||||
query {{
|
||||
Media(id: {anilist_id}) {{
|
||||
id
|
||||
title {{
|
||||
romaji
|
||||
english
|
||||
native
|
||||
}}
|
||||
synonyms
|
||||
episodes
|
||||
duration
|
||||
}}
|
||||
}}
|
||||
"""
|
||||
response = post(ANILIST_ENDPOINT, json={"query": query}).json()
|
||||
return response["data"]["Media"]
|
||||
|
||||
|
||||
@app.get("/watch/{anilist_id}")
|
||||
def get_episode_streams_by_anilist_id(
|
||||
anilist_id: int, episode: str, translation_type: Literal["sub", "dub"]
|
||||
):
|
||||
anime = get_anime_by_anilist_id(anilist_id)
|
||||
if not anime:
|
||||
return
|
||||
if search_results := anime_provider.search_for_anime(
|
||||
str(anime["title"]["romaji"] or anime["title"]["english"]), translation_type
|
||||
):
|
||||
if not search_results["results"]:
|
||||
return
|
||||
|
||||
def match_title(possible_user_requested_anime_title):
|
||||
possible_user_requested_anime_title = anime_normalizer.get(
|
||||
possible_user_requested_anime_title, possible_user_requested_anime_title
|
||||
)
|
||||
title_a = str(anime["title"]["romaji"])
|
||||
title_b = str(anime["title"]["english"])
|
||||
percentage_ratio = max(
|
||||
*[
|
||||
fuzz.ratio(
|
||||
title.lower(), possible_user_requested_anime_title.lower()
|
||||
)
|
||||
for title in anime["synonyms"]
|
||||
],
|
||||
fuzz.ratio(
|
||||
title_a.lower(), possible_user_requested_anime_title.lower()
|
||||
),
|
||||
fuzz.ratio(
|
||||
title_b.lower(), possible_user_requested_anime_title.lower()
|
||||
),
|
||||
)
|
||||
return percentage_ratio
|
||||
|
||||
provider_anime = max(
|
||||
search_results["results"], key=lambda x: match_title(x["title"])
|
||||
)
|
||||
anime_provider.get_anime(provider_anime["id"])
|
||||
return anime_provider.get_episode_streams(
|
||||
provider_anime["id"], episode, translation_type
|
||||
)
|
||||
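A minimal way to exercise the new HTTP API locally (a sketch; uvicorn is assumed to be available alongside fastapi, and the host, port, and IDs are arbitrary example values):

import uvicorn

# serve the FastAPI app defined in fastanime/api/__init__.py
uvicorn.run("fastanime.api:app", host="127.0.0.1", port=8000)

# then, from another session:
#   GET http://127.0.0.1:8000/search?title=one%20piece&translation_type=sub
#   GET http://127.0.0.1:8000/watch/21?episode=1&translation_type=sub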
80
fastanime/assets/rofi_theme.rasi
Normal file
@@ -0,0 +1,80 @@
|
||||
configuration {
|
||||
font: "Sans 12";
|
||||
line-margin: 10;
|
||||
display-drun: "";
|
||||
}
|
||||
|
||||
* {
|
||||
background: #000000; /* Black background for everything */
|
||||
background-alt: #000000; /* Ensures no alternation */
|
||||
foreground: #CCCCCC;
|
||||
selected: #3584E4;
|
||||
active: #2E7D32;
|
||||
urgent: #C62828;
|
||||
}
|
||||
|
||||
window {
|
||||
fullscreen: false;
|
||||
background-color: rgba(0, 0, 0, 0.8); /* Solid black transparent background */
|
||||
border-radius: 50px;
|
||||
}
|
||||
|
||||
mainbox {
|
||||
padding: 50px 50px;
|
||||
background-color: transparent; /* Ensures black background fills entire main area */
|
||||
children: [inputbar, listview];
|
||||
spacing: 20px;
|
||||
}
|
||||
|
||||
inputbar {
|
||||
background-color: #333333; /* Dark gray background for input bar */
|
||||
padding: 8px;
|
||||
border-radius: 8px;
|
||||
children: [prompt, entry];
|
||||
}
|
||||
|
||||
prompt {
|
||||
enabled: true;
|
||||
padding: 8px;
|
||||
background-color: @selected;
|
||||
text-color: #000000;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
entry {
|
||||
padding: 8px;
|
||||
background-color: transparent; /* Slightly lighter gray for visibility */
|
||||
text-color: #FFFFFF; /* White text to make typing visible */
|
||||
placeholder: "Search...";
|
||||
placeholder-color: rgba(255, 255, 255, 0.5);
|
||||
border-radius: 6px;
|
||||
}
|
||||
|
||||
listview {
|
||||
layout: vertical;
|
||||
spacing: 8px;
|
||||
lines: 9;
|
||||
background-color: transparent; /* Consistent black background for list items */
|
||||
}
|
||||
|
||||
element {
|
||||
padding: 12px;
|
||||
border-radius: 4px;
|
||||
background-color: transparent; /* Uniform color for each list item */
|
||||
text-color: @foreground;
|
||||
}
|
||||
|
||||
element normal.normal {
|
||||
background-color: transparent; /* Ensures no alternating color */
|
||||
}
|
||||
|
||||
element selected.normal {
|
||||
background-color: @selected;
|
||||
text-color: #FFFFFF;
|
||||
}
|
||||
|
||||
element-text {
|
||||
background-color: transparent;
|
||||
text-color: inherit;
|
||||
vertical-align: 0.5;
|
||||
}
|
||||
55
fastanime/assets/rofi_theme_confirm.rasi
Normal file
@@ -0,0 +1,55 @@
|
||||
configuration {
|
||||
font: "Sans 12";
|
||||
}
|
||||
|
||||
* {
|
||||
background-color: rgba(0, 0, 0, 0.7);
|
||||
text-color: #FFFFFF;
|
||||
}
|
||||
|
||||
window {
|
||||
fullscreen: true;
|
||||
transparency: "real";
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
mainbox {
|
||||
children: [ message, listview, inputbar ];
|
||||
padding: 40% 30%;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
message {
|
||||
border: 0;
|
||||
padding: 10px;
|
||||
border-radius:20px;
|
||||
margin: 0 0 20px 0;
|
||||
font: "Sans Bold 24"; /* Increased font size and made it bold */
|
||||
}
|
||||
|
||||
inputbar {
|
||||
children: [ prompt, entry ];
|
||||
background-color: rgba(255, 255, 255, 0.1);
|
||||
padding: 8px;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
prompt {
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
entry {
|
||||
padding: 8px;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
listview {
|
||||
lines: 0;
|
||||
}
|
||||
|
||||
/* Style for the message text specifically */
|
||||
textbox {
|
||||
horizontal-align: 0.5; /* Center the text */
|
||||
font: "Sans Bold 24"; /* Match message font */
|
||||
background-color: transparent;
|
||||
}
|
||||
55
fastanime/assets/rofi_theme_input.rasi
Normal file
@@ -0,0 +1,55 @@
|
||||
configuration {
|
||||
font: "Sans 12";
|
||||
}
|
||||
|
||||
* {
|
||||
background-color: rgba(0, 0, 0, 0.7);
|
||||
text-color: #FFFFFF;
|
||||
}
|
||||
|
||||
window {
|
||||
fullscreen: true;
|
||||
transparency: "real";
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
mainbox {
|
||||
children: [ message, listview, inputbar ];
|
||||
padding: 40% 30%;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
message {
|
||||
border: 0;
|
||||
padding: 10px;
|
||||
border-radius:20px;
|
||||
margin: 0 0 20px 0;
|
||||
font: "Sans Bold 24"; /* Increased font size and made it bold */
|
||||
}
|
||||
|
||||
inputbar {
|
||||
children: [ prompt, entry ];
|
||||
background-color: rgba(255, 255, 255, 0.1);
|
||||
padding: 8px;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
prompt {
|
||||
padding: 8px;
|
||||
}
|
||||
|
||||
entry {
|
||||
padding: 8px;
|
||||
background-color: transparent;
|
||||
}
|
||||
|
||||
listview {
|
||||
lines: 0;
|
||||
}
|
||||
|
||||
/* Style for the message text specifically */
|
||||
textbox {
|
||||
horizontal-align: 0.5; /* Center the text */
|
||||
font: "Sans Bold 24"; /* Match message font */
|
||||
background-color: transparent;
|
||||
}
|
||||
120
fastanime/assets/rofi_theme_preview.rasi
Normal file
@@ -0,0 +1,120 @@
|
||||
// Colours
|
||||
* {
|
||||
background-color: transparent; /* Transparent background for the global UI */
|
||||
background: #000000; /* Solid black background */
|
||||
background-transparent: #1D2330A0; /* Semi-transparent background */
|
||||
text-color: #BBBBBB; /* Default text color (light gray) */
|
||||
text-color-selected: #FFFFFF; /* Text color when selected (white) */
|
||||
primary: rgba(53, 132, 228, 0.75); /* Bluish primary color */
|
||||
important: rgba(53, 132, 228, 0.75); /* Bluish primary color */
|
||||
}
|
||||
|
||||
configuration {
|
||||
font: "Roboto 14"; /* Sets the global font to Roboto, size 14 */
|
||||
show-icons: true; /* Option to display icons in the UI */
|
||||
}
|
||||
|
||||
window {
|
||||
fullscreen: true; /* The window will open in fullscreen */
|
||||
height: 100%; /* Full window height */
|
||||
width: 100%; /* Full window width */
|
||||
transparency: "real"; /* Real transparency effect */
|
||||
background-color: @background-transparent; /* Transparent background */
|
||||
border: 0px; /* No border around the window */
|
||||
border-color: @primary; /* Border color set to the primary color */
|
||||
}
|
||||
|
||||
mainbox {
|
||||
children: [prompt, inputbar-box, listview]; /* Main box contains prompt, input bar, and list view */
|
||||
padding: 0px; /* No padding around the main box */
|
||||
}
|
||||
|
||||
prompt {
|
||||
width: 100%; /* Prompt takes full width */
|
||||
margin: 10px 0px 0px 30px; /* Margin around the prompt */
|
||||
text-color: @important; /* Text color for prompt (important color) */
|
||||
font: "Roboto Bold 27"; /* Bold Roboto font, size 27 */
|
||||
}
|
||||
|
||||
listview {
|
||||
layout: vertical; /* Vertical layout for list items */
|
||||
padding: 10px; /* Padding inside the list view */
|
||||
spacing: 20px; /* Space between items in the list */
|
||||
columns: 8; /* Maximum 8 items per row */
|
||||
dynamic: true; /* Allows the list to dynamically adjust */
|
||||
orientation: horizontal; /* Horizontal orientation for list items */
|
||||
}
|
||||
|
||||
inputbar-box {
|
||||
children: [dummy, inputbar, dummy]; /* Input bar is centered with dummy placeholders */
|
||||
orientation: horizontal; /* Horizontal layout for input bar */
|
||||
expand: false; /* Does not expand to fill the space */
|
||||
}
|
||||
|
||||
inputbar {
|
||||
children: [textbox-prompt, entry]; /* Contains a prompt and an entry field */
|
||||
margin: 0px; /* No margin around the input bar */
|
||||
background-color: @primary; /* Background color set to the primary color */
|
||||
border: 4px; /* Border thickness around the input bar */
|
||||
border-color: @primary; /* Border color matches the primary color */
|
||||
border-radius: 8px; /* Rounded corners for the input bar */
|
||||
}
|
||||
|
||||
textbox-prompt {
|
||||
text-color: @background; /* Text color inside prompt matches the background color */
|
||||
horizontal-align: 0.5; /* Horizontally centered */
|
||||
vertical-align: 0.5; /* Vertically centered */
|
||||
expand: false; /* Does not expand to fill available space */
|
||||
}
|
||||
|
||||
entry {
|
||||
expand: false; /* Entry field does not expand */
|
||||
padding: 8px; /* Padding inside the entry field */
|
||||
margin: -6px; /* Negative margin to position entry properly */
|
||||
horizontal-align: 0; /* Left-aligned text inside the entry field */
|
||||
width: 300; /* Fixed width for the entry field */
|
||||
background-color: @background; /* Entry background color matches the global background */
|
||||
border: 6px; /* Border thickness around the entry field */
|
||||
border-color: @primary; /* Border color matches the primary color */
|
||||
border-radius: 8px; /* Rounded corners for the entry field */
|
||||
cursor: text; /* Cursor changes to text input cursor inside the entry field */
|
||||
}
|
||||
|
||||
element {
|
||||
children: [dummy, element-box, dummy]; /* Contains an element box with dummy placeholders */
|
||||
padding: 5px; /* Padding around the element */
|
||||
orientation: vertical; /* Vertical layout for element content */
|
||||
border: 0px; /* No border around the element */
|
||||
border-radius: 16px; /* Rounded corners for the element */
|
||||
background-color: transparent; /* Transparent background */
|
||||
width: 100px; /* Width of each element */
|
||||
height: 50px; /* Height of each element */
|
||||
}
|
||||
|
||||
element selected {
|
||||
background-color: @primary; /* Background color of the element when selected */
|
||||
}
|
||||
|
||||
element-box {
|
||||
children: [element-icon, element-text]; /* Element box contains an icon and text */
|
||||
orientation: vertical; /* Vertical layout for icon and text */
|
||||
expand: false; /* Does not expand to fill available space */
|
||||
cursor: pointer; /* Cursor changes to a pointer when hovering over the element */
|
||||
}
|
||||
|
||||
element-icon {
|
||||
padding: 10px; /* Padding inside the icon */
|
||||
cursor: inherit; /* Inherits cursor style from the parent */
|
||||
size: 33%; /* Icon size is set to 33% of the parent element */
|
||||
margin: 10px; /* Margin around the icon */
|
||||
}
|
||||
|
||||
element-text {
|
||||
horizontal-align: 0.5; /* Horizontally center-aligns the text */
|
||||
cursor: inherit; /* Inherits cursor style from the parent */
|
||||
text-color: @text-color; /* Text color for element text */
|
||||
}
|
||||
|
||||
element-text selected {
|
||||
text-color: @text-color-selected; /* Text color when the element is selected */
|
||||
}
|
||||
@@ -4,7 +4,6 @@ import click
|
||||
|
||||
from .. import __version__
|
||||
from ..libs.anime_provider import SERVERS_AVAILABLE, anime_sources
|
||||
from ..Utility.data import anilist_sort_normalizer
|
||||
from .commands import LazyGroup
|
||||
|
||||
commands = {
|
||||
@@ -17,6 +16,7 @@ commands = {
|
||||
"completions": "completions.completions",
|
||||
"update": "update.update",
|
||||
"grab": "grab.grab",
|
||||
"serve": "serve.serve",
|
||||
}
|
||||
|
||||
|
||||
@@ -39,8 +39,32 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
cls=LazyGroup,
|
||||
help="A command line application for streaming anime that provides a complete and featureful interface",
|
||||
short_help="Stream Anime",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# example of syncplay integration
|
||||
fastanime --sync-play --server sharepoint search -t <anime-title>
|
||||
\b
|
||||
# --- or ---
|
||||
\b
|
||||
# to watch with anilist integration
|
||||
fastanime --sync-play --server sharepoint anilist
|
||||
\b
|
||||
# downloading dubbed anime
|
||||
fastanime --dub download -t <anime>
|
||||
\b
|
||||
# use icons and fzf for a more elegant ui with preview
|
||||
fastanime --icons --preview --fzf anilist
|
||||
\b
|
||||
# use icons with default ui
|
||||
fastanime --icons --default anilist
|
||||
\b
|
||||
# viewing manga
|
||||
fastanime --manga search -t <manga-title>
|
||||
""",
|
||||
)
|
||||
@click.version_option(__version__, "--version")
|
||||
@click.option("--manga", "-m", help="Enable manga mode", is_flag=True)
|
||||
@click.option("--log", help="Allow logging to stdout", is_flag=True)
|
||||
@click.option("--log-file", help="Allow logging to a file", is_flag=True)
|
||||
@click.option("--rich-traceback", help="Use rich to output tracebacks", is_flag=True)
|
||||
@@ -116,9 +140,9 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
help="Auto select anime title?",
|
||||
)
|
||||
@click.option(
|
||||
"-S",
|
||||
"--sort-by",
|
||||
type=click.Choice(anilist_sort_normalizer.keys()), # pyright: ignore
|
||||
"--normalize-titles/--no-normalize-titles",
|
||||
type=bool,
|
||||
help="whether to normalize anime and episode titles given by providers",
|
||||
)
|
||||
@click.option("-d", "--downloads-dir", type=click.Path(), help="Downloads location")
|
||||
@click.option("--fzf", is_flag=True, help="Use fzf for the ui")
|
||||
@@ -134,6 +158,9 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
@click.option("--sub", help="Set the translation type to sub", is_flag=True)
|
||||
@click.option("--rofi", help="Use rofi for the ui", is_flag=True)
|
||||
@click.option("--rofi-theme", help="Rofi theme to use", type=click.Path())
|
||||
@click.option(
|
||||
"--rofi-theme-preview", help="Rofi theme to use for previews", type=click.Path()
|
||||
)
|
||||
@click.option(
|
||||
"--rofi-theme-confirm",
|
||||
help="Rofi theme to use for the confirm prompt",
|
||||
@@ -145,12 +172,23 @@ signal.signal(signal.SIGINT, handle_exit)
|
||||
type=click.Path(),
|
||||
)
|
||||
@click.option(
|
||||
"--use-mpv-mod/--use-default-player", help="Whether to use python-mpv", type=bool
|
||||
"--use-python-mpv/--use-default-player", help="Whether to use python-mpv", type=bool
|
||||
)
|
||||
@click.option("--sync-play", "-sp", help="Use sync play", is_flag=True)
|
||||
@click.option(
|
||||
"--player",
|
||||
"-P",
|
||||
help="the player to use when streaming",
|
||||
type=click.Choice(["mpv", "vlc"]),
|
||||
)
|
||||
@click.option(
|
||||
"--fresh-requests", is_flag=True, help="Force the requests cache to be updated"
|
||||
)
|
||||
@click.option("--no-config", is_flag=True, help="Don't load the user config")
|
||||
@click.pass_context
|
||||
def run_cli(
|
||||
ctx: click.Context,
|
||||
manga,
|
||||
log,
|
||||
log_file,
|
||||
rich_traceback,
|
||||
@@ -165,7 +203,7 @@ def run_cli(
|
||||
quality,
|
||||
auto_next,
|
||||
auto_select,
|
||||
sort_by,
|
||||
normalize_titles,
|
||||
downloads_dir,
|
||||
fzf,
|
||||
default,
|
||||
@@ -176,14 +214,73 @@ def run_cli(
|
||||
sub,
|
||||
rofi,
|
||||
rofi_theme,
|
||||
rofi_theme_preview,
|
||||
rofi_theme_confirm,
|
||||
rofi_theme_input,
|
||||
use_mpv_mod,
|
||||
use_python_mpv,
|
||||
sync_play,
|
||||
player,
|
||||
fresh_requests,
|
||||
no_config,
|
||||
):
|
||||
import os
|
||||
import sys
|
||||
|
||||
from .config import Config
|
||||
|
||||
ctx.obj = Config()
|
||||
ctx.obj = Config(no_config)
|
||||
if (
|
||||
ctx.obj.check_for_updates
|
||||
and ctx.invoked_subcommand != "completions"
|
||||
and "notifier" not in sys.argv
|
||||
):
|
||||
import time
|
||||
|
||||
last_update = ctx.obj.user_data["meta"]["last_updated"]
|
||||
now = time.time()
|
||||
# checks after every 12 hours
|
||||
if (now - last_update) > 43200:
|
||||
ctx.obj.user_data["meta"]["last_updated"] = now
|
||||
ctx.obj._update_user_data()
|
||||
|
||||
from .app_updater import check_for_updates
|
||||
|
||||
print("Checking for updates...", file=sys.stderr)
|
||||
print("So you can enjoy the latest features and bug fixes", file=sys.stderr)
|
||||
print(
|
||||
"You can disable this by setting check_for_updates to False in the config",
|
||||
file=sys.stderr,
|
||||
)
|
||||
is_latest, github_release_data = check_for_updates()
|
||||
if not is_latest:
|
||||
from rich.console import Console
|
||||
from rich.markdown import Markdown
|
||||
from rich.prompt import Confirm
|
||||
|
||||
from .app_updater import update_app
|
||||
|
||||
def _print_release(release_data):
|
||||
console = Console()
|
||||
body = Markdown(release_data["body"])
|
||||
tag = github_release_data["tag_name"]
|
||||
tag_title = release_data["name"]
|
||||
github_page_url = release_data["html_url"]
|
||||
console.print(f"Release Page: {github_page_url}")
|
||||
console.print(f"Tag: {tag}")
|
||||
console.print(f"Title: {tag_title}")
|
||||
console.print(body)
|
||||
|
||||
if Confirm.ask(
|
||||
"A new version of fastanime is available, would you like to update?"
|
||||
):
|
||||
_, release_json = update_app()
|
||||
print("Successfully updated")
|
||||
_print_release(release_json)
|
||||
exit(0)
|
||||
else:
|
||||
print("You are using the latest version of fastanime", file=sys.stderr)
|
||||
|
||||
ctx.obj.manga = manga
|
||||
if log:
|
||||
import logging
|
||||
|
||||
@@ -218,6 +315,8 @@ def run_cli(
|
||||
|
||||
install()
|
||||
|
||||
if fresh_requests:
|
||||
os.environ["FASTANIME_FRESH_REQUESTS"] = "1"
|
||||
if sync_play:
|
||||
ctx.obj.sync_play = sync_play
|
||||
if provider:
|
||||
@@ -230,8 +329,15 @@ def run_cli(
|
||||
ctx.obj.sub_lang = sub_lang
|
||||
if ctx.get_parameter_source("continue_") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.continue_from_history = continue_
|
||||
if ctx.get_parameter_source("player") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.player = player
|
||||
if ctx.get_parameter_source("skip") == click.core.ParameterSource.COMMANDLINE:
|
||||
ctx.obj.skip = skip
|
||||
if (
|
||||
ctx.get_parameter_source("normalize_titles")
|
||||
== click.core.ParameterSource.COMMANDLINE
|
||||
):
|
||||
ctx.obj.normalize_titles = normalize_titles
|
||||
|
||||
if quality:
|
||||
ctx.obj.quality = quality
|
||||
@@ -250,20 +356,19 @@ def run_cli(
|
||||
):
|
||||
ctx.obj.auto_select = auto_select
|
||||
if (
|
||||
ctx.get_parameter_source("use_mpv_mod")
|
||||
ctx.get_parameter_source("use_python_mpv")
|
||||
== click.core.ParameterSource.COMMANDLINE
|
||||
):
|
||||
ctx.obj.use_mpv_mod = use_mpv_mod
|
||||
if sort_by:
|
||||
ctx.obj.sort_by = sort_by
|
||||
ctx.obj.use_python_mpv = use_python_mpv
|
||||
if downloads_dir:
|
||||
ctx.obj.downloads_dir = downloads_dir
|
||||
if translation_type:
|
||||
ctx.obj.translation_type = translation_type
|
||||
if fzf:
|
||||
ctx.obj.use_fzf = True
|
||||
if default:
|
||||
ctx.obj.use_fzf = False
|
||||
ctx.obj.use_rofi = False
|
||||
if fzf:
|
||||
ctx.obj.use_fzf = True
|
||||
if preview:
|
||||
ctx.obj.preview = True
|
||||
if no_preview:
|
||||
@@ -278,6 +383,10 @@ def run_cli(
|
||||
if rofi:
|
||||
from ..libs.rofi import Rofi
|
||||
|
||||
if rofi_theme_preview:
|
||||
ctx.obj.rofi_theme_preview = rofi_theme_preview
|
||||
Rofi.rofi_theme_preview = rofi_theme_preview
|
||||
|
||||
if rofi_theme:
|
||||
ctx.obj.rofi_theme = rofi_theme
|
||||
Rofi.rofi_theme = rofi_theme
|
||||
@@ -289,3 +398,4 @@ def run_cli(
|
||||
if rofi_theme_confirm:
|
||||
ctx.obj.rofi_theme_confirm = rofi_theme_confirm
|
||||
Rofi.rofi_theme_confirm = rofi_theme_confirm
|
||||
ctx.obj.set_fastanime_config_environs()
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import shlex
|
||||
@@ -15,14 +16,18 @@ API_URL = f"https://api.{GIT_REPO}/repos/{AUTHOR}/{APP_NAME}/releases/latest"
|
||||
|
||||
def check_for_updates():
|
||||
USER_AGENT = f"{APP_NAME} user"
|
||||
request = requests.get(
|
||||
API_URL,
|
||||
headers={
|
||||
"User-Agent": USER_AGENT,
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
"Accept": "application/vnd.github+json",
|
||||
},
|
||||
)
|
||||
try:
|
||||
request = requests.get(
|
||||
API_URL,
|
||||
headers={
|
||||
"User-Agent": USER_AGENT,
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
"Accept": "application/vnd.github+json",
|
||||
},
|
||||
)
|
||||
except Exception:
|
||||
print("You are not connected to the internet")
|
||||
return True, {}
|
||||
|
||||
if request.status_code == 200:
|
||||
release_json = request.json()
|
||||
@@ -45,8 +50,9 @@ def check_for_updates():
|
||||
|
||||
return (is_latest, release_json)
|
||||
else:
|
||||
print("Failed to check for updates")
|
||||
print(request.text)
|
||||
return (False, {})
|
||||
return (True, {})
|
||||
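The try/except wrapper above means callers always get back a (bool, dict) pair, even when offline; a usage sketch:

is_latest, release = check_for_updates()
if not is_latest and release:
    # release is the GitHub "latest release" JSON when the request succeeded
    print(release.get("tag_name", "unknown tag"))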
|
||||
|
||||
def is_git_repo(author, repository):
|
||||
@@ -75,15 +81,22 @@ def is_git_repo(author, repository):
|
||||
return bool(match) and match.group(1) == f"{author}/{repository}"
|
||||
|
||||
|
||||
def update_app():
|
||||
def update_app(force=False):
|
||||
is_latest, release_json = check_for_updates()
|
||||
if is_latest:
|
||||
if is_latest and not force:
|
||||
print("[green]App is up to date[/]")
|
||||
return False, release_json
|
||||
tag_name = release_json["tag_name"]
|
||||
|
||||
print("[cyan]Updating app to version %s[/]" % tag_name)
|
||||
if is_git_repo(AUTHOR, APP_NAME):
|
||||
if os.path.exists("/nix/store") and os.path.exists("/run/current-system"):
|
||||
NIX = shutil.which("nix")
|
||||
if not NIX:
|
||||
print("[red]Cannot find nix, it looks like your system is broken.[/]")
|
||||
return False, release_json
|
||||
|
||||
process = subprocess.run([NIX, "profile", "upgrade", APP_NAME.lower()])
|
||||
elif is_git_repo(AUTHOR, APP_NAME):
|
||||
GIT_EXECUTABLE = shutil.which("git")
|
||||
args = [
|
||||
GIT_EXECUTABLE,
|
||||
@@ -101,8 +114,10 @@ def update_app():
|
||||
)
|
||||
|
||||
else:
|
||||
if PIPX_EXECUTABLE := shutil.which("pipx"):
|
||||
process = subprocess.run([PIPX_EXECUTABLE, "upgrade", APP_NAME])
|
||||
if UV := shutil.which("uv"):
|
||||
process = subprocess.run([UV, "tool", "upgrade", APP_NAME])
|
||||
elif PIPX := shutil.which("pipx"):
|
||||
process = subprocess.run([PIPX, "upgrade", APP_NAME])
|
||||
else:
|
||||
PYTHON_EXECUTABLE = sys.executable
|
||||
|
||||
@@ -112,11 +127,20 @@ def update_app():
|
||||
"pip",
|
||||
"install",
|
||||
APP_NAME,
|
||||
"--user",
|
||||
"-U",
|
||||
"--no-warn-script-location",
|
||||
]
|
||||
if sys.prefix == sys.base_prefix:
|
||||
# ensure NOT in a venv, where --user flag can cause an error.
|
||||
# TODO: Get value of 'include-system-site-packages' in pyvenv.cfg.
|
||||
args.append("--user")
|
||||
|
||||
process = subprocess.run(args)
|
||||
if process.returncode == 0:
|
||||
print(
|
||||
"[green]Its recommended to run the following after updating:\n\tfastanime config --update (to get the latest config docs)\n\tfastanime cache --clean (to get rid of any potential issues)[/]",
|
||||
file=sys.stderr,
|
||||
)
|
||||
return True, release_json
|
||||
else:
|
||||
return False, release_json
|
||||
|
||||
@@ -20,6 +20,9 @@ commands = {
|
||||
"completed": "completed.completed",
|
||||
"planning": "planning.planning",
|
||||
"notifier": "notifier.notifier",
|
||||
"stats": "stats.stats",
|
||||
"download": "download.download",
|
||||
"downloads": "downloads.downloads",
|
||||
}
|
||||
|
||||
|
||||
@@ -29,16 +32,61 @@ commands = {
|
||||
invoke_without_command=True,
|
||||
help="A beautiful interface that gives you access to a commplete streaming experience",
|
||||
short_help="Access all streaming options",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# ---- search ----
|
||||
\b
|
||||
# get anime with the tag of isekai
|
||||
fastanime anilist search -T isekai
|
||||
\b
|
||||
# get anime of 2024 and sort by popularity
|
||||
# that has already finished airing or is releasing
|
||||
# and is not in your anime lists
|
||||
fastanime anilist search -y 2024 -s POPULARITY_DESC --status RELEASING --status FINISHED --not-on-list
|
||||
\b
|
||||
# get anime of 2024 season WINTER
|
||||
fastanime anilist search -y 2024 --season WINTER
|
||||
\b
|
||||
# get anime genre action and tag isekai,magic
|
||||
fastanime anilist search -g Action -T Isekai -T Magic
|
||||
\b
|
||||
# get anime of 2024 thats finished airing
|
||||
fastanime anilist search -y 2024 -S FINISHED
|
||||
\b
|
||||
# get the most favourite anime movies
|
||||
fastanime anilist search -f MOVIE -s FAVOURITES_DESC
|
||||
\b
|
||||
# ---- login ----
|
||||
\b
|
||||
# To sign in just run
|
||||
fastanime anilist login
|
||||
\b
|
||||
# To view your login status
|
||||
fastanime anilist login --status
|
||||
\b
|
||||
# To erase login data
|
||||
fastanime anilist login --erase
|
||||
\b
|
||||
# ---- notifier ----
|
||||
\b
|
||||
# basic form
|
||||
fastanime anilist notifier
|
||||
\b
|
||||
# with logging to stdout
|
||||
fastanime --log anilist notifier
|
||||
\b
|
||||
# with logging to a file. stored in the same place as your config
|
||||
fastanime --log-file anilist notifier
|
||||
""",
|
||||
)
|
||||
@click.option("--resume", is_flag=True, help="Resume from the last session")
|
||||
@click.pass_context
|
||||
def anilist(ctx: click.Context):
|
||||
def anilist(ctx: click.Context, resume: bool):
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ....anilist import AniList
|
||||
from ....AnimeProvider import AnimeProvider
|
||||
from ...interfaces.anilist_interfaces import (
|
||||
fastanime_main_menu as anilist_interface,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...config import Config
|
||||
@@ -48,4 +96,33 @@ def anilist(ctx: click.Context):
|
||||
AniList.update_login_info(user, user["token"])
|
||||
if ctx.invoked_subcommand is None:
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
anilist_interface(ctx.obj, fastanime_runtime_state)
|
||||
if resume:
|
||||
from ...interfaces.anilist_interfaces import (
|
||||
anime_provider_search_results_menu,
|
||||
)
|
||||
|
||||
if not config.user_data["recent_anime"]:
|
||||
click.echo("No recent anime found", err=True, color=True)
|
||||
return
|
||||
fastanime_runtime_state.anilist_results_data = {
|
||||
"data": {"Page": {"media": config.user_data["recent_anime"]}}
|
||||
}
|
||||
|
||||
fastanime_runtime_state.selected_anime_anilist = config.user_data[
|
||||
"recent_anime"
|
||||
][0]
|
||||
fastanime_runtime_state.selected_anime_id_anilist = config.user_data[
|
||||
"recent_anime"
|
||||
][0]["id"]
|
||||
fastanime_runtime_state.selected_anime_title_anilist = (
|
||||
config.user_data["recent_anime"][0]["title"]["romaji"]
|
||||
or config.user_data["recent_anime"][0]["title"]["english"]
|
||||
)
|
||||
anime_provider_search_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
else:
|
||||
from ...interfaces.anilist_interfaces import (
|
||||
fastanime_main_menu as anilist_interface,
|
||||
)
|
||||
|
||||
anilist_interface(ctx.obj, fastanime_runtime_state)
|
||||
|
||||
@@ -34,7 +34,7 @@ class LazyGroup(click.Group):
|
||||
# get the Command object from that module
|
||||
cmd_object = getattr(mod, cmd_object_name)
|
||||
# check the result to make debugging easier
|
||||
if not isinstance(cmd_object, click.BaseCommand):
|
||||
if not isinstance(cmd_object, click.Command):
|
||||
raise ValueError(
|
||||
f"Lazy loading of {import_path} failed by returning "
|
||||
"a non-command object"
|
||||
|
||||
@@ -42,5 +42,12 @@ def completed(config: "Config", dump_json):
|
||||
from ...interfaces import anilist_interfaces
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_list[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda config, **kwargs: anilist_interfaces._handle_animelist(
|
||||
config, fastanime_runtime_state, "Completed", **kwargs
|
||||
)
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
477
fastanime/cli/commands/anilist/data.py
Normal file
@@ -0,0 +1,477 @@
|
||||
sorts_available = [
|
||||
"ID",
|
||||
"ID_DESC",
|
||||
"TITLE_ROMAJI",
|
||||
"TITLE_ROMAJI_DESC",
|
||||
"TITLE_ENGLISH",
|
||||
"TITLE_ENGLISH_DESC",
|
||||
"TITLE_NATIVE",
|
||||
"TITLE_NATIVE_DESC",
|
||||
"TYPE",
|
||||
"TYPE_DESC",
|
||||
"FORMAT",
|
||||
"FORMAT_DESC",
|
||||
"START_DATE",
|
||||
"START_DATE_DESC",
|
||||
"END_DATE",
|
||||
"END_DATE_DESC",
|
||||
"SCORE",
|
||||
"SCORE_DESC",
|
||||
"POPULARITY",
|
||||
"POPULARITY_DESC",
|
||||
"TRENDING",
|
||||
"TRENDING_DESC",
|
||||
"EPISODES",
|
||||
"EPISODES_DESC",
|
||||
"DURATION",
|
||||
"DURATION_DESC",
|
||||
"STATUS",
|
||||
"STATUS_DESC",
|
||||
"CHAPTERS",
|
||||
"CHAPTERS_DESC",
|
||||
"VOLUMES",
|
||||
"VOLUMES_DESC",
|
||||
"UPDATED_AT",
|
||||
"UPDATED_AT_DESC",
|
||||
"SEARCH_MATCH",
|
||||
"FAVOURITES",
|
||||
"FAVOURITES_DESC",
|
||||
]
|
||||
|
||||
media_statuses_available = [
|
||||
"FINISHED",
|
||||
"RELEASING",
|
||||
"NOT_YET_RELEASED",
|
||||
"CANCELLED",
|
||||
"HIATUS",
|
||||
]
|
||||
seasons_available = ["WINTER", "SPRING", "SUMMER", "FALL"]
|
||||
genres_available = [
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Comedy",
|
||||
"Drama",
|
||||
"Ecchi",
|
||||
"Fantasy",
|
||||
"Horror",
|
||||
"Mahou Shoujo",
|
||||
"Mecha",
|
||||
"Music",
|
||||
"Mystery",
|
||||
"Psychological",
|
||||
"Romance",
|
||||
"Sci-Fi",
|
||||
"Slice of Life",
|
||||
"Sports",
|
||||
"Supernatural",
|
||||
"Thriller",
|
||||
"Hentai",
|
||||
]
|
||||
media_formats_available = [
|
||||
"TV",
|
||||
"TV_SHORT",
|
||||
"MOVIE",
|
||||
"SPECIAL",
|
||||
"OVA",
|
||||
"MUSIC",
|
||||
"NOVEL",
|
||||
"ONE_SHOT",
|
||||
]
|
||||
years_available = [
|
||||
"1900",
|
||||
"1910",
|
||||
"1920",
|
||||
"1930",
|
||||
"1940",
|
||||
"1950",
|
||||
"1960",
|
||||
"1970",
|
||||
"1980",
|
||||
"1990",
|
||||
"2000",
|
||||
"2004",
|
||||
"2005",
|
||||
"2006",
|
||||
"2007",
|
||||
"2008",
|
||||
"2009",
|
||||
"2010",
|
||||
"2011",
|
||||
"2012",
|
||||
"2013",
|
||||
"2014",
|
||||
"2015",
|
||||
"2016",
|
||||
"2017",
|
||||
"2018",
|
||||
"2019",
|
||||
"2020",
|
||||
"2021",
|
||||
"2022",
|
||||
"2023",
|
||||
"2024",
|
||||
"2025",
|
||||
]
|
||||
|
||||
tags_available = {
|
||||
"Cast": ["Polyamorous"],
|
||||
"Cast Main Cast": [
|
||||
"Anti-Hero",
|
||||
"Elderly Protagonist",
|
||||
"Ensemble Cast",
|
||||
"Estranged Family",
|
||||
"Female Protagonist",
|
||||
"Male Protagonist",
|
||||
"Primarily Adult Cast",
|
||||
"Primarily Animal Cast",
|
||||
"Primarily Child Cast",
|
||||
"Primarily Female Cast",
|
||||
"Primarily Male Cast",
|
||||
"Primarily Teen Cast",
|
||||
],
|
||||
"Cast Traits": [
|
||||
"Age Regression",
|
||||
"Agender",
|
||||
"Aliens",
|
||||
"Amnesia",
|
||||
"Angels",
|
||||
"Anthropomorphism",
|
||||
"Aromantic",
|
||||
"Arranged Marriage",
|
||||
"Artificial Intelligence",
|
||||
"Asexual",
|
||||
"Butler",
|
||||
"Centaur",
|
||||
"Chimera",
|
||||
"Chuunibyou",
|
||||
"Clone",
|
||||
"Cosplay",
|
||||
"Cowboys",
|
||||
"Crossdressing",
|
||||
"Cyborg",
|
||||
"Delinquents",
|
||||
"Demons",
|
||||
"Detective",
|
||||
"Dinosaurs",
|
||||
"Disability",
|
||||
"Dissociative Identities",
|
||||
"Dragons",
|
||||
"Dullahan",
|
||||
"Elf",
|
||||
"Fairy",
|
||||
"Femboy",
|
||||
"Ghost",
|
||||
"Goblin",
|
||||
"Gods",
|
||||
"Gyaru",
|
||||
"Hikikomori",
|
||||
"Homeless",
|
||||
"Idol",
|
||||
"Kemonomimi",
|
||||
"Kuudere",
|
||||
"Maids",
|
||||
"Mermaid",
|
||||
"Monster Boy",
|
||||
"Monster Girl",
|
||||
"Nekomimi",
|
||||
"Ninja",
|
||||
"Nudity",
|
||||
"Nun",
|
||||
"Office Lady",
|
||||
"Oiran",
|
||||
"Ojou-sama",
|
||||
"Orphan",
|
||||
"Pirates",
|
||||
"Robots",
|
||||
"Samurai",
|
||||
"Shrine Maiden",
|
||||
"Skeleton",
|
||||
"Succubus",
|
||||
"Tanned Skin",
|
||||
"Teacher",
|
||||
"Tomboy",
|
||||
"Transgender",
|
||||
"Tsundere",
|
||||
"Twins",
|
||||
"Vampire",
|
||||
"Veterinarian",
|
||||
"Vikings",
|
||||
"Villainess",
|
||||
"VTuber",
|
||||
"Werewolf",
|
||||
"Witch",
|
||||
"Yandere",
|
||||
"Zombie",
|
||||
],
|
||||
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
|
||||
"Setting": ["Matriarchy"],
|
||||
"Setting Scene": [
|
||||
"Bar",
|
||||
"Boarding School",
|
||||
"Circus",
|
||||
"Coastal",
|
||||
"College",
|
||||
"Desert",
|
||||
"Dungeon",
|
||||
"Foreign",
|
||||
"Inn",
|
||||
"Konbini",
|
||||
"Natural Disaster",
|
||||
"Office",
|
||||
"Outdoor",
|
||||
"Prison",
|
||||
"Restaurant",
|
||||
"Rural",
|
||||
"School",
|
||||
"School Club",
|
||||
"Snowscape",
|
||||
"Urban",
|
||||
"Work",
|
||||
],
|
||||
"Setting Time": [
|
||||
"Achronological Order",
|
||||
"Anachronism",
|
||||
"Ancient China",
|
||||
"Dystopian",
|
||||
"Historical",
|
||||
"Time Skip",
|
||||
],
|
||||
"Setting Universe": [
|
||||
"Afterlife",
|
||||
"Alternate Universe",
|
||||
"Augmented Reality",
|
||||
"Omegaverse",
|
||||
"Post-Apocalyptic",
|
||||
"Space",
|
||||
"Urban Fantasy",
|
||||
"Virtual World",
|
||||
],
|
||||
"Technical": [
|
||||
"4-koma",
|
||||
"Achromatic",
|
||||
"Advertisement",
|
||||
"Anthology",
|
||||
"CGI",
|
||||
"Episodic",
|
||||
"Flash",
|
||||
"Full CGI",
|
||||
"Full Color",
|
||||
"No Dialogue",
|
||||
"Non-fiction",
|
||||
"POV",
|
||||
"Puppetry",
|
||||
"Rotoscoping",
|
||||
"Stop Motion",
|
||||
],
|
||||
"Theme Action": [
|
||||
"Archery",
|
||||
"Battle Royale",
|
||||
"Espionage",
|
||||
"Fugitive",
|
||||
"Guns",
|
||||
"Martial Arts",
|
||||
"Spearplay",
|
||||
"Swordplay",
|
||||
],
|
||||
"Theme Arts": [
|
||||
"Acting",
|
||||
"Calligraphy",
|
||||
"Classic Literature",
|
||||
"Drawing",
|
||||
"Fashion",
|
||||
"Food",
|
||||
"Makeup",
|
||||
"Photography",
|
||||
"Rakugo",
|
||||
"Writing",
|
||||
],
|
||||
"Theme Arts-Music": [
|
||||
"Band",
|
||||
"Classical Music",
|
||||
"Dancing",
|
||||
"Hip-hop Music",
|
||||
"Jazz Music",
|
||||
"Metal Music",
|
||||
"Musical Theater",
|
||||
"Rock Music",
|
||||
],
|
||||
"Theme Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
|
||||
"Theme Drama": [
|
||||
"Bullying",
|
||||
"Class Struggle",
|
||||
"Coming of Age",
|
||||
"Conspiracy",
|
||||
"Eco-Horror",
|
||||
"Fake Relationship",
|
||||
"Kingdom Management",
|
||||
"Rehabilitation",
|
||||
"Revenge",
|
||||
"Suicide",
|
||||
"Tragedy",
|
||||
],
|
||||
"Theme Fantasy": [
|
||||
"Alchemy",
|
||||
"Body Swapping",
|
||||
"Cultivation",
|
||||
"Fairy Tale",
|
||||
"Henshin",
|
||||
"Isekai",
|
||||
"Kaiju",
|
||||
"Magic",
|
||||
"Mythology",
|
||||
"Necromancy",
|
||||
"Shapeshifting",
|
||||
"Steampunk",
|
||||
"Super Power",
|
||||
"Superhero",
|
||||
"Wuxia",
|
||||
"Youkai",
|
||||
],
|
||||
"Theme Game": ["Board Game", "E-Sports", "Video Games"],
|
||||
"Theme Game-Card & Board Game": [
|
||||
"Card Battle",
|
||||
"Go",
|
||||
"Karuta",
|
||||
"Mahjong",
|
||||
"Poker",
|
||||
"Shogi",
|
||||
],
|
||||
"Theme Game-Sport": [
|
||||
"Acrobatics",
|
||||
"Airsoft",
|
||||
"American Football",
|
||||
"Athletics",
|
||||
"Badminton",
|
||||
"Baseball",
|
||||
"Basketball",
|
||||
"Bowling",
|
||||
"Boxing",
|
||||
"Cheerleading",
|
||||
"Cycling",
|
||||
"Fencing",
|
||||
"Fishing",
|
||||
"Fitness",
|
||||
"Football",
|
||||
"Golf",
|
||||
"Handball",
|
||||
"Ice Skating",
|
||||
"Judo",
|
||||
"Lacrosse",
|
||||
"Parkour",
|
||||
"Rugby",
|
||||
"Scuba Diving",
|
||||
"Skateboarding",
|
||||
"Sumo",
|
||||
"Surfing",
|
||||
"Swimming",
|
||||
"Table Tennis",
|
||||
"Tennis",
|
||||
"Volleyball",
|
||||
"Wrestling",
|
||||
],
|
||||
"Theme Other": [
|
||||
"Adoption",
|
||||
"Animals",
|
||||
"Astronomy",
|
||||
"Autobiographical",
|
||||
"Biographical",
|
||||
"Body Horror",
|
||||
"Cannibalism",
|
||||
"Chibi",
|
||||
"Cosmic Horror",
|
||||
"Crime",
|
||||
"Crossover",
|
||||
"Death Game",
|
||||
"Denpa",
|
||||
"Drugs",
|
||||
"Economics",
|
||||
"Educational",
|
||||
"Environmental",
|
||||
"Ero Guro",
|
||||
"Filmmaking",
|
||||
"Found Family",
|
||||
"Gambling",
|
||||
"Gender Bending",
|
||||
"Gore",
|
||||
"Language Barrier",
|
||||
"LGBTQ+ Themes",
|
||||
"Lost Civilization",
|
||||
"Marriage",
|
||||
"Medicine",
|
||||
"Memory Manipulation",
|
||||
"Meta",
|
||||
"Mountaineering",
|
||||
"Noir",
|
||||
"Otaku Culture",
|
||||
"Pandemic",
|
||||
"Philosophy",
|
||||
"Politics",
|
||||
"Proxy Battle",
|
||||
"Psychosexual",
|
||||
"Reincarnation",
|
||||
"Religion",
|
||||
"Royal Affairs",
|
||||
"Slavery",
|
||||
"Software Development",
|
||||
"Survival",
|
||||
"Terrorism",
|
||||
"Torture",
|
||||
"Travel",
|
||||
"War",
|
||||
],
|
||||
"Theme Other-Organisations": [
|
||||
"Assassins",
|
||||
"Criminal Organization",
|
||||
"Cult",
|
||||
"Firefighters",
|
||||
"Gangs",
|
||||
"Mafia",
|
||||
"Military",
|
||||
"Police",
|
||||
"Triads",
|
||||
"Yakuza",
|
||||
],
|
||||
"Theme Other-Vehicle": [
|
||||
"Aviation",
|
||||
"Cars",
|
||||
"Mopeds",
|
||||
"Motorcycles",
|
||||
"Ships",
|
||||
"Tanks",
|
||||
"Trains",
|
||||
],
|
||||
"Theme Romance": [
|
||||
"Age Gap",
|
||||
"Bisexual",
|
||||
"Boys' Love",
|
||||
"Female Harem",
|
||||
"Heterosexual",
|
||||
"Love Triangle",
|
||||
"Male Harem",
|
||||
"Matchmaking",
|
||||
"Mixed Gender Harem",
|
||||
"Teens' Love",
|
||||
"Unrequited Love",
|
||||
"Yuri",
|
||||
],
|
||||
"Theme Sci Fi": [
|
||||
"Cyberpunk",
|
||||
"Space Opera",
|
||||
"Time Loop",
|
||||
"Time Manipulation",
|
||||
"Tokusatsu",
|
||||
],
|
||||
"Theme Sci Fi-Mecha": ["Real Robot", "Super Robot"],
|
||||
"Theme Slice of Life": [
|
||||
"Agriculture",
|
||||
"Cute Boys Doing Cute Things",
|
||||
"Cute Girls Doing Cute Things",
|
||||
"Family Life",
|
||||
"Horticulture",
|
||||
"Iyashikei",
|
||||
"Parenthood",
|
||||
],
|
||||
}
|
||||
tags_available_list = []
|
||||
for tag_category, tags_in_category in tags_available.items():
|
||||
tags_available_list.extend(tags_in_category)
|
||||
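The flattening loop above is equivalent to a single comprehension over the category values; a sketch:

# every tag from every category, in declaration order
tags_available_list = [tag for tags in tags_available.values() for tag in tags]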
408
fastanime/cli/commands/anilist/download.py
Normal file
@@ -0,0 +1,408 @@
|
||||
import click
|
||||
|
||||
from ...completion_functions import anime_titles_shell_complete
|
||||
from .data import (
|
||||
genres_available,
|
||||
media_formats_available,
|
||||
media_statuses_available,
|
||||
seasons_available,
|
||||
sorts_available,
|
||||
tags_available_list,
|
||||
years_available,
|
||||
)
|
||||
|
||||
|
||||
@click.command(
|
||||
help="download anime using anilists api to get the titles",
|
||||
short_help="download anime with anilist intergration",
|
||||
)
|
||||
@click.option("--title", "-t", shell_complete=anime_titles_shell_complete)
|
||||
@click.option(
|
||||
"--season",
|
||||
help="The season the media was released",
|
||||
type=click.Choice(seasons_available),
|
||||
)
|
||||
@click.option(
|
||||
"--status",
|
||||
"-S",
|
||||
help="The media status of the anime",
|
||||
multiple=True,
|
||||
type=click.Choice(media_statuses_available),
|
||||
)
|
||||
@click.option(
|
||||
"--sort",
|
||||
"-s",
|
||||
help="What to sort the search results on",
|
||||
type=click.Choice(sorts_available),
|
||||
)
|
||||
@click.option(
|
||||
"--genres",
|
||||
"-g",
|
||||
multiple=True,
|
||||
help="the genres to filter by",
|
||||
type=click.Choice(genres_available),
|
||||
)
|
||||
@click.option(
|
||||
"--tags",
|
||||
"-T",
|
||||
multiple=True,
|
||||
help="the tags to filter by",
|
||||
type=click.Choice(tags_available_list),
|
||||
)
|
||||
@click.option(
|
||||
"--media-format",
|
||||
"-f",
|
||||
multiple=True,
|
||||
help="Media format",
|
||||
type=click.Choice(media_formats_available),
|
||||
)
|
||||
@click.option(
|
||||
"--year",
|
||||
"-y",
|
||||
type=click.Choice(years_available),
|
||||
help="the year the media was released",
|
||||
)
|
||||
@click.option(
|
||||
"--on-list/--not-on-list",
|
||||
"-L/-no-L",
|
||||
help="Whether the anime should be in your list or not",
|
||||
type=bool,
|
||||
)
|
||||
@click.option(
|
||||
"--episode-range",
|
||||
"-r",
|
||||
help="A range of episodes to download (start-end)",
|
||||
)
|
||||
@click.option(
|
||||
"--force-unknown-ext",
|
||||
"-F",
|
||||
help="This option forces yt-dlp to download extensions its not aware of",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--silent/--no-silent",
|
||||
"-q/-V",
|
||||
type=bool,
|
||||
help="Download silently (during download)",
|
||||
default=True,
|
||||
)
|
||||
@click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)")
|
||||
@click.option(
|
||||
"--merge", "-m", is_flag=True, help="Merge the subfile with video using ffmpeg"
|
||||
)
|
||||
@click.option(
|
||||
"--clean",
|
||||
"-c",
|
||||
is_flag=True,
|
||||
help="After merging delete the original files",
|
||||
)
|
||||
@click.option(
|
||||
"--wait-time",
|
||||
"-w",
|
||||
type=int,
|
||||
help="The amount of time to wait after downloading is complete before the screen is completely cleared",
|
||||
default=60,
|
||||
)
|
||||
@click.option(
|
||||
"--prompt/--no-prompt",
|
||||
help="Whether to prompt for anything instead just do the best thing",
|
||||
default=True,
|
||||
)
|
||||
@click.option(
|
||||
"--force-ffmpeg",
|
||||
is_flag=True,
|
||||
help="Force the use of FFmpeg for downloading (supports large variety of streams but slower)",
|
||||
)
|
||||
@click.option(
|
||||
"--hls-use-mpegts",
|
||||
is_flag=True,
|
||||
help="Use mpegts for hls streams, resulted in .ts file (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
|
||||
)
|
||||
@click.option(
|
||||
"--hls-use-h264",
|
||||
is_flag=True,
|
||||
help="Use H.264 (MP4) for hls streams, resulted in .mp4 file (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
|
||||
)
|
||||
@click.option(
|
||||
"--no-check-certificates",
|
||||
is_flag=True,
|
||||
help="Suppress HTTPS certificate validation",
|
||||
)
|
||||
@click.option(
|
||||
"--max-results", "-M", type=int, help="The maximum number of results to show"
|
||||
)
|
||||
@click.pass_obj
|
||||
def download(
|
||||
config,
|
||||
title,
|
||||
season,
|
||||
status,
|
||||
sort,
|
||||
genres,
|
||||
tags,
|
||||
media_format,
|
||||
year,
|
||||
on_list,
|
||||
episode_range,
|
||||
force_unknown_ext,
|
||||
silent,
|
||||
verbose,
|
||||
merge,
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
force_ffmpeg,
|
||||
hls_use_mpegts,
|
||||
hls_use_h264,
|
||||
no_check_certificates,
|
||||
max_results,
|
||||
):
|
||||
from rich import print
|
||||
|
||||
from ....anilist import AniList
|
||||
|
||||
force_ffmpeg |= (hls_use_mpegts or hls_use_h264)
|
||||
|
||||
success, anilist_search_results = AniList.search(
|
||||
query=title,
|
||||
sort=sort,
|
||||
status_in=list(status),
|
||||
genre_in=list(genres),
|
||||
season=season,
|
||||
tag_in=list(tags),
|
||||
seasonYear=year,
|
||||
format_in=list(media_format),
|
||||
on_list=on_list,
|
||||
max_results=max_results,
|
||||
)
|
||||
if success:
|
||||
import time
|
||||
|
||||
from rich.progress import Progress
|
||||
from thefuzz import fuzz
|
||||
|
||||
from ....AnimeProvider import AnimeProvider
|
||||
from ....libs.anime_provider.types import Anime
|
||||
from ....libs.fzf import fzf
|
||||
from ....Utility.data import anime_normalizer
|
||||
from ....Utility.downloader.downloader import downloader
|
||||
from ...utils.tools import exit_app
|
||||
from ...utils.utils import (
|
||||
filter_by_quality,
|
||||
fuzzy_inquirer,
|
||||
move_preferred_subtitle_lang_to_top,
|
||||
)
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
anilist_anime_info = None
|
||||
|
||||
translation_type = config.translation_type
|
||||
download_dir = config.downloads_dir
|
||||
anime_titles = [
|
||||
(anime["title"]["romaji"] or anime["title"]["english"])
|
||||
for anime in anilist_search_results["data"]["Page"]["media"]
|
||||
]
|
||||
print(f"[green bold]Queued:[/] {anime_titles}")
|
||||
for i, anime_title in enumerate(anime_titles):
|
||||
print(f"[green bold]Now Downloading: [/] {anime_title}")
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, translation_type=translation_type
|
||||
)
|
||||
if not search_results:
|
||||
print(
|
||||
"No search results found from provider for {}".format(anime_title)
|
||||
)
|
||||
continue
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
print("Nothing muches your search term")
|
||||
continue
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
selected_anime_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(
|
||||
anime_normalizer.get(title, title), anime_title
|
||||
),
|
||||
)
|
||||
print("[cyan]Auto selecting:[/] ", selected_anime_title)
|
||||
else:
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
selected_anime_title = fzf.run(
|
||||
choices, "Please Select title", "FastAnime"
|
||||
)
|
||||
else:
|
||||
selected_anime_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Please Select title",
|
||||
)
|
||||
|
||||
# ---- fetch anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[selected_anime_title]["id"]
|
||||
)
|
||||
if not anime:
|
||||
print("Failed to fetch anime {}".format(selected_anime_title))
|
||||
continue
|
||||
|
||||
episodes = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
# where the magic happens
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end)
|
||||
]
|
||||
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
print(f"[green bold]Downloading: [/] {episodes_range}")
|
||||
|
||||
else:
|
||||
episodes_range = sorted(episodes, key=float)
|
||||
|
||||
if config.normalize_titles:
|
||||
anilist_anime_info = anilist_search_results["data"]["Page"]["media"][i]
|
||||
|
||||
# lets download em
|
||||
for episode in episodes_range:
|
||||
try:
|
||||
episode = str(episode)
|
||||
if episode not in episodes:
|
||||
print(
|
||||
f"[cyan]Warning[/]: Episode {episode} not found, skipping"
|
||||
)
|
||||
continue
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
print("No streams skipping")
|
||||
continue
|
||||
# ---- fetch servers ----
|
||||
if config.server == "top":
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching top server...", total=None)
|
||||
server_name = next(streams, None)
|
||||
if not server_name:
|
||||
print("Sth went wrong when fetching the server")
|
||||
continue
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, server_name["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("[yellow bold]WARNING:[/] No streams found")
|
||||
time.sleep(1)
|
||||
print("Continuing...")
|
||||
continue
|
||||
link = stream_link["link"]
|
||||
provider_headers = server_name["headers"]
|
||||
episode_title = server_name["episode_title"]
|
||||
subtitles = server_name["subtitles"]
|
||||
else:
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching servers", total=None)
|
||||
# prompt for server selection
|
||||
servers = {server["server"]: server for server in streams}
|
||||
servers_names = list(servers.keys())
|
||||
if config.server in servers_names:
|
||||
server_name = config.server
|
||||
else:
|
||||
if config.use_fzf:
|
||||
server_name = fzf.run(servers_names, "Select an link")
|
||||
else:
|
||||
server_name = fuzzy_inquirer(
|
||||
servers_names,
|
||||
"Select link",
|
||||
)
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, servers[server_name]["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("[yellow bold]WARNING:[/] No streams found")
|
||||
time.sleep(1)
|
||||
print("Continuing...")
|
||||
continue
|
||||
link = stream_link["link"]
|
||||
provider_headers = servers[server_name]["headers"]
|
||||
|
||||
subtitles = servers[server_name]["subtitles"]
|
||||
episode_title = servers[server_name]["episode_title"]
|
||||
|
||||
if anilist_anime_info:
|
||||
selected_anime_title = (
|
||||
anilist_anime_info["title"][config.preferred_language]
|
||||
or anilist_anime_info["title"]["romaji"]
|
||||
or anilist_anime_info["title"]["english"]
|
||||
)
|
||||
import re
|
||||
|
||||
for episode_detail in anilist_anime_info["streamingEpisodes"]:
|
||||
if re.match(
|
||||
f".*Episode {episode} .*", episode_detail["title"]
|
||||
):
|
||||
episode_title = episode_detail["title"]
|
||||
break
|
||||
print(f"[purple]Now Downloading:[/] {episode_title}")
|
||||
subtitles = move_preferred_subtitle_lang_to_top(
|
||||
subtitles, config.sub_lang
|
||||
)
|
||||
downloader._download_file(
|
||||
link,
|
||||
selected_anime_title,
|
||||
episode_title,
|
||||
download_dir,
|
||||
silent,
|
||||
vid_format=config.format,
|
||||
force_unknown_ext=force_unknown_ext,
|
||||
verbose=verbose,
|
||||
headers=provider_headers,
|
||||
sub=subtitles[0]["url"] if subtitles else "",
|
||||
merge=merge,
|
||||
clean=clean,
|
||||
prompt=prompt,
|
||||
force_ffmpeg=force_ffmpeg,
|
||||
hls_use_mpegts=hls_use_mpegts,
|
||||
hls_use_h264=hls_use_h264,
|
||||
nocheckcertificate=no_check_certificates,
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
time.sleep(1)
|
||||
print("Continuing...")
|
||||
print("Done Downloading")
|
||||
time.sleep(wait_time)
|
||||
exit_app()
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
print("Failed to search for anime", anilist_search_results)
|
||||
exit(1)
|
||||
fastanime/cli/commands/anilist/downloads.py (new file, 358 lines)
@@ -0,0 +1,358 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ...completion_functions import downloaded_anime_titles
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
if TYPE_CHECKING:
|
||||
from ..config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="View and watch your downloads using mpv",
|
||||
short_help="Watch downloads",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
fastanime downloads
|
||||
\b
|
||||
# view individual episodes
|
||||
fastanime downloads --view-episodes
|
||||
# --- or ---
|
||||
fastanime downloads -v
|
||||
\b
|
||||
# to set seek time when using ffmpegthumbnailer for local previews
|
||||
# -1 means random and is the default
|
||||
fastanime downloads --time-to-seek <intRange(-1,100)>
|
||||
# --- or ---
|
||||
fastanime downloads -t <intRange(-1,100)>
|
||||
\b
|
||||
# to watch a specific title
|
||||
# be sure to get the completions for the best experience
|
||||
fastanime downloads --title <title>
|
||||
\b
|
||||
# to get the path to the downloads folder set
|
||||
fastanime downloads --path
|
||||
# useful when you want to use the value for other programs
|
||||
""",
|
||||
)
|
||||
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
|
||||
@click.option(
|
||||
"--title",
|
||||
"-T",
|
||||
shell_complete=downloaded_anime_titles,
|
||||
help="watch a specific title",
|
||||
)
|
||||
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
|
||||
@click.option(
|
||||
"--ffmpegthumbnailer-seek-time",
|
||||
"--time-to-seek",
|
||||
"-t",
|
||||
type=click.IntRange(-1, 100),
|
||||
help="ffmpegthumbnailer seek time",
|
||||
)
|
||||
@click.pass_obj
|
||||
def downloads(
|
||||
config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
|
||||
):
|
||||
import os
|
||||
|
||||
from ....cli.utils.mpv import run_mpv
|
||||
from ....libs.fzf import fzf
|
||||
from ....libs.rofi import Rofi
|
||||
from ....Utility.utils import sort_by_episode_number
|
||||
from ...utils.tools import exit_app
|
||||
from ...utils.utils import fuzzy_inquirer
|
||||
|
||||
if not ffmpegthumbnailer_seek_time:
|
||||
ffmpegthumbnailer_seek_time = config.ffmpegthumbnailer_seek_time
|
||||
USER_VIDEOS_DIR = config.downloads_dir
|
||||
if path:
|
||||
print(USER_VIDEOS_DIR)
|
||||
return
|
||||
if not os.path.exists(USER_VIDEOS_DIR):
|
||||
print("Downloads directory specified does not exist")
|
||||
return
|
||||
anime_downloads = sorted(
|
||||
os.listdir(USER_VIDEOS_DIR),
|
||||
)
|
||||
anime_downloads.append("Exit")
|
||||
|
||||
def create_thumbnails(video_path, anime_title, downloads_thumbnail_cache_dir):
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
FFMPEG_THUMBNAILER = shutil.which("ffmpegthumbnailer")
|
||||
if not FFMPEG_THUMBNAILER:
|
||||
return
|
||||
|
||||
out = os.path.join(downloads_thumbnail_cache_dir, anime_title)
|
||||
if ffmpegthumbnailer_seek_time == -1:
|
||||
import random
|
||||
|
||||
seektime = str(random.randrange(0, 100))
|
||||
else:
|
||||
seektime = str(ffmpegthumbnailer_seek_time)
|
||||
_ = subprocess.run(
|
||||
[
|
||||
FFMPEG_THUMBNAILER,
|
||||
"-i",
|
||||
video_path,
|
||||
"-o",
|
||||
out,
|
||||
"-s",
|
||||
"0",
|
||||
"-t",
|
||||
seektime,
|
||||
],
|
||||
stderr=subprocess.PIPE,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
|
||||
def get_previews_anime(workers=None, bg=True):
|
||||
import concurrent.futures
|
||||
import random
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
if not shutil.which("ffmpegthumbnailer"):
|
||||
print("ffmpegthumbnailer not found")
|
||||
logger.error("ffmpegthumbnailer not found")
|
||||
return
|
||||
|
||||
from ....constants import APP_CACHE_DIR
|
||||
from ...utils.scripts import bash_functions
|
||||
|
||||
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
|
||||
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _worker():
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for anime_title in anime_downloads:
|
||||
anime_path = os.path.join(USER_VIDEOS_DIR, anime_title)
|
||||
if not os.path.isdir(anime_path):
|
||||
continue
|
||||
playlist = [
|
||||
anime
|
||||
for anime in sorted(
|
||||
os.listdir(anime_path),
|
||||
)
|
||||
if "mp4" in anime
|
||||
]
|
||||
if playlist:
|
||||
# actual link to download image from
|
||||
video_path = os.path.join(anime_path, random.choice(playlist))
|
||||
future_to_url[
|
||||
executor.submit(
|
||||
create_thumbnails,
|
||||
video_path,
|
||||
anime_title,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
] = anime_title
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
if bg:
|
||||
from threading import Thread
|
||||
|
||||
worker = Thread(target=_worker)
|
||||
worker.daemon = True
|
||||
worker.start()
|
||||
else:
|
||||
_worker()
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then
|
||||
if ! fzf-preview %s/{} 2>/dev/null; then
|
||||
echo Loading...
|
||||
fi
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
downloads_thumbnail_cache_dir,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
return preview
|
||||
|
||||
def get_previews_episodes(anime_playlist_path, workers=None, bg=True):
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from ....constants import APP_CACHE_DIR
|
||||
from ...utils.scripts import bash_functions
|
||||
|
||||
if not shutil.which("ffmpegthumbnailer"):
|
||||
print("ffmpegthumbnailer not found")
|
||||
logger.error("ffmpegthumbnailer not found")
|
||||
return
|
||||
|
||||
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
|
||||
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def _worker():
|
||||
import concurrent.futures
|
||||
|
||||
# use concurrency to download the images as fast as possible
|
||||
# anime_playlist_path = os.path.join(USER_VIDEOS_DIR, anime_playlist_path)
|
||||
if not os.path.isdir(anime_playlist_path):
|
||||
return
|
||||
anime_episodes = sorted(
|
||||
os.listdir(anime_playlist_path), key=sort_by_episode_number
|
||||
)
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for episode_title in anime_episodes:
|
||||
episode_path = os.path.join(anime_playlist_path, episode_title)
|
||||
|
||||
# actual link to download image from
|
||||
future_to_url[
|
||||
executor.submit(
|
||||
create_thumbnails,
|
||||
episode_path,
|
||||
episode_title,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
] = episode_title
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
if bg:
|
||||
from threading import Thread
|
||||
|
||||
worker = Thread(target=_worker)
|
||||
worker.daemon = True
|
||||
worker.start()
|
||||
else:
|
||||
_worker()
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then
|
||||
if ! fzf-preview %s/{} 2>/dev/null; then
|
||||
echo Loading...
|
||||
fi
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
downloads_thumbnail_cache_dir,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
return preview
|
||||
|
||||
def stream_episode(
|
||||
anime_playlist_path,
|
||||
):
|
||||
if view_episodes:
|
||||
if not os.path.isdir(anime_playlist_path):
|
||||
print(anime_playlist_path, "is not dir")
|
||||
exit_app(1)
|
||||
return
|
||||
episodes = sorted(
|
||||
os.listdir(anime_playlist_path), key=sort_by_episode_number
|
||||
)
|
||||
downloaded_episodes = [*episodes, "Back"]
|
||||
|
||||
if config.use_fzf:
|
||||
if not config.preview:
|
||||
episode_title = fzf.run(
|
||||
downloaded_episodes,
|
||||
"Enter Episode ",
|
||||
)
|
||||
else:
|
||||
preview = get_previews_episodes(anime_playlist_path)
|
||||
episode_title = fzf.run(
|
||||
downloaded_episodes,
|
||||
"Enter Episode ",
|
||||
preview=preview,
|
||||
)
|
||||
elif config.use_rofi:
|
||||
episode_title = Rofi.run(downloaded_episodes, "Enter Episode")
|
||||
else:
|
||||
episode_title = fuzzy_inquirer(
|
||||
downloaded_episodes,
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
if episode_title == "Back":
|
||||
stream_anime()
|
||||
return
|
||||
episode_path = os.path.join(anime_playlist_path, episode_title)
|
||||
if config.sync_play:
|
||||
from ...utils.syncplay import SyncPlayer
|
||||
|
||||
SyncPlayer(episode_path)
|
||||
else:
|
||||
run_mpv(
|
||||
episode_path,
|
||||
player=config.player,
|
||||
)
|
||||
stream_episode(anime_playlist_path)
|
||||
|
||||
def stream_anime(title=None):
|
||||
if title:
|
||||
from thefuzz import fuzz
|
||||
|
||||
playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
|
||||
elif config.use_fzf:
|
||||
if not config.preview:
|
||||
playlist_name = fzf.run(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
else:
|
||||
preview = get_previews_anime()
|
||||
playlist_name = fzf.run(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name",
|
||||
preview=preview,
|
||||
)
|
||||
elif config.use_rofi:
|
||||
playlist_name = Rofi.run(anime_downloads, "Enter Playlist Name")
|
||||
else:
|
||||
playlist_name = fuzzy_inquirer(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
if playlist_name == "Exit":
|
||||
exit_app()
|
||||
return
|
||||
playlist = os.path.join(USER_VIDEOS_DIR, playlist_name)
|
||||
if view_episodes:
|
||||
stream_episode(
|
||||
playlist,
|
||||
)
|
||||
else:
|
||||
if config.sync_play:
|
||||
from ...utils.syncplay import SyncPlayer
|
||||
|
||||
SyncPlayer(playlist)
|
||||
else:
|
||||
run_mpv(
|
||||
playlist,
|
||||
player=config.player,
|
||||
)
|
||||
stream_anime()
|
||||
|
||||
stream_anime(title)
|
||||
@@ -42,5 +42,12 @@ def dropped(config: "Config", dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_list[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda config, **kwargs: anilist_interfaces._handle_animelist(
|
||||
config, fastanime_runtime_state, "Dropped", **kwargs
|
||||
)
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -26,7 +26,10 @@ def favourites(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_data[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = AniList.get_most_favourite
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
@@ -11,15 +11,20 @@ if TYPE_CHECKING:
|
||||
@click.option("--erase", "-e", help="Erase your login details", is_flag=True)
|
||||
@click.pass_obj
|
||||
def login(config: "Config", status, erase):
|
||||
from os import path
|
||||
from sys import exit
|
||||
|
||||
from rich import print
|
||||
from rich.prompt import Confirm, Prompt
|
||||
|
||||
from ....constants import S_PLATFORM
|
||||
|
||||
if status:
|
||||
is_logged_in = True if config.user else False
|
||||
message = (
|
||||
"You are logged in :smile:" if is_logged_in else "You arent logged in :cry:"
|
||||
"You are logged in :smile:"
|
||||
if is_logged_in
|
||||
else "You aren't logged in :cry:"
|
||||
)
|
||||
print(message)
|
||||
print(config.user)
|
||||
@@ -46,9 +51,19 @@ def login(config: "Config", status, erase):
|
||||
print(
|
||||
f"A browser session will be opened ( [link]{config.fastanime_anilist_app_login_url}[/link] )",
|
||||
)
|
||||
launch(config.fastanime_anilist_app_login_url, wait=True)
|
||||
print("Please paste the token provided here")
|
||||
token = Prompt.ask("Enter token")
|
||||
token = ""
|
||||
if S_PLATFORM.startswith("darwin"):
|
||||
anilist_key_file_path = path.expanduser("~") + "/Downloads/anilist_key.txt"
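# on macOS the token is read from this file in ~/Downloads instead of being pasted into the terminal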
|
||||
launch(config.fastanime_anilist_app_login_url, wait=False)
|
||||
Prompt.ask(
|
||||
"MacOS detected.\nPress any key once the token provided has been pasted into "
|
||||
+ anilist_key_file_path
|
||||
)
|
||||
with open(anilist_key_file_path, "r") as key_file:
|
||||
token = key_file.read().strip()
|
||||
else:
|
||||
launch(config.fastanime_anilist_app_login_url, wait=False)
|
||||
token = Prompt.ask("Enter token")
|
||||
user = AniList.login_user(token)
|
||||
if not user:
|
||||
print("Sth went wrong", user)
|
||||
|
||||
@@ -16,7 +16,12 @@ def notifier(config: "Config"):
|
||||
from sys import exit
|
||||
|
||||
import requests
|
||||
from plyer import notification
|
||||
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print("Please install plyer to use this command")
|
||||
exit(1)
|
||||
|
||||
from ....anilist import AniList
|
||||
from ....constants import APP_CACHE_DIR, APP_DATA_DIR, APP_NAME, ICON_PATH, PLATFORM
|
||||
@@ -25,12 +30,12 @@ def notifier(config: "Config"):
|
||||
|
||||
notified = os.path.join(APP_DATA_DIR, "last_notification.json")
|
||||
anime_image_path = os.path.join(APP_CACHE_DIR, "notification_image")
|
||||
notification_duration = config.notification_duration * 60
|
||||
notification_duration = config.notification_duration
|
||||
notification_image_path = ""
|
||||
|
||||
if not config.user:
|
||||
print("Not Authenticated")
|
||||
print("Run the following to get started: fastanime anilist loggin")
|
||||
print("Run the following to get started: fastanime anilist login")
|
||||
exit(1)
|
||||
run = True
|
||||
# WARNING: Mess around with this value at your own risk
|
||||
|
||||
@@ -41,6 +41,12 @@ def paused(config: "Config", dump_json):
|
||||
from ...interfaces import anilist_interfaces
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
anilist_config = FastAnimeRuntimeState()
|
||||
anilist_config.data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, anilist_config)
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda config, **kwargs: anilist_interfaces._handle_animelist(
|
||||
config, fastanime_runtime_state, "Paused", **kwargs
|
||||
)
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -42,5 +42,12 @@ def planning(config: "Config", dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_list[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda config, **kwargs: anilist_interfaces._handle_animelist(
|
||||
config, fastanime_runtime_state, "Planned", **kwargs
|
||||
)
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -25,7 +25,10 @@ def popular(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_data[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = AniList.get_most_popular
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
@@ -2,7 +2,7 @@ import click
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Get random anime from anilist based on a range of anilist anime ids that are seected at random",
|
||||
help="Get random anime from anilist based on a range of anilist anime ids that are selected at random",
|
||||
short_help="View random anime",
|
||||
)
|
||||
@click.option(
|
||||
@@ -33,7 +33,7 @@ def random_anime(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_data[1]
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
exit(1)
|
||||
|
||||
@@ -26,7 +26,12 @@ def recent(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_data[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
AniList.get_most_recently_updated
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
@@ -42,5 +42,12 @@ def rewatching(config: "Config", dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_list[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda config, **kwargs: anilist_interfaces._handle_animelist(
|
||||
config, fastanime_runtime_state, "Rewatching", **kwargs
|
||||
)
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -25,7 +25,10 @@ def scores(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.data = anime_data[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = AniList.get_most_scored
|
||||
fastanime_runtime_state.anilist_results_data = anime_data[1]
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
@@ -1,369 +1,15 @@
|
||||
import click
|
||||
|
||||
from ...completion_functions import anime_titles_shell_complete
|
||||
|
||||
tags_available = {
|
||||
"Cast": ["Polyamorous"],
|
||||
"Cast / Main Cast": [
|
||||
"Anti-Hero",
|
||||
"Elderly Protagonist",
|
||||
"Ensemble Cast",
|
||||
"Estranged Family",
|
||||
"Female Protagonist",
|
||||
"Male Protagonist",
|
||||
"Primarily Adult Cast",
|
||||
"Primarily Animal Cast",
|
||||
"Primarily Child Cast",
|
||||
"Primarily Female Cast",
|
||||
"Primarily Male Cast",
|
||||
"Primarily Teen Cast",
|
||||
],
|
||||
"Cast / Traits": [
|
||||
"Age Regression",
|
||||
"Agender",
|
||||
"Aliens",
|
||||
"Amnesia",
|
||||
"Angels",
|
||||
"Anthropomorphism",
|
||||
"Aromantic",
|
||||
"Arranged Marriage",
|
||||
"Artificial Intelligence",
|
||||
"Asexual",
|
||||
"Butler",
|
||||
"Centaur",
|
||||
"Chimera",
|
||||
"Chuunibyou",
|
||||
"Clone",
|
||||
"Cosplay",
|
||||
"Cowboys",
|
||||
"Crossdressing",
|
||||
"Cyborg",
|
||||
"Delinquents",
|
||||
"Demons",
|
||||
"Detective",
|
||||
"Dinosaurs",
|
||||
"Disability",
|
||||
"Dissociative Identities",
|
||||
"Dragons",
|
||||
"Dullahan",
|
||||
"Elf",
|
||||
"Fairy",
|
||||
"Femboy",
|
||||
"Ghost",
|
||||
"Goblin",
|
||||
"Gods",
|
||||
"Gyaru",
|
||||
"Hikikomori",
|
||||
"Homeless",
|
||||
"Idol",
|
||||
"Kemonomimi",
|
||||
"Kuudere",
|
||||
"Maids",
|
||||
"Mermaid",
|
||||
"Monster Boy",
|
||||
"Monster Girl",
|
||||
"Nekomimi",
|
||||
"Ninja",
|
||||
"Nudity",
|
||||
"Nun",
|
||||
"Office Lady",
|
||||
"Oiran",
|
||||
"Ojou-sama",
|
||||
"Orphan",
|
||||
"Pirates",
|
||||
"Robots",
|
||||
"Samurai",
|
||||
"Shrine Maiden",
|
||||
"Skeleton",
|
||||
"Succubus",
|
||||
"Tanned Skin",
|
||||
"Teacher",
|
||||
"Tomboy",
|
||||
"Transgender",
|
||||
"Tsundere",
|
||||
"Twins",
|
||||
"Vampire",
|
||||
"Veterinarian",
|
||||
"Vikings",
|
||||
"Villainess",
|
||||
"VTuber",
|
||||
"Werewolf",
|
||||
"Witch",
|
||||
"Yandere",
|
||||
"Zombie",
|
||||
],
|
||||
"Demographic": ["Josei", "Kids", "Seinen", "Shoujo", "Shounen"],
|
||||
"Setting": ["Matriarchy"],
|
||||
"Setting / Scene": [
|
||||
"Bar",
|
||||
"Boarding School",
|
||||
"Circus",
|
||||
"Coastal",
|
||||
"College",
|
||||
"Desert",
|
||||
"Dungeon",
|
||||
"Foreign",
|
||||
"Inn",
|
||||
"Konbini",
|
||||
"Natural Disaster",
|
||||
"Office",
|
||||
"Outdoor",
|
||||
"Prison",
|
||||
"Restaurant",
|
||||
"Rural",
|
||||
"School",
|
||||
"School Club",
|
||||
"Snowscape",
|
||||
"Urban",
|
||||
"Work",
|
||||
],
|
||||
"Setting / Time": [
|
||||
"Achronological Order",
|
||||
"Anachronism",
|
||||
"Ancient China",
|
||||
"Dystopian",
|
||||
"Historical",
|
||||
"Time Skip",
|
||||
],
|
||||
"Setting / Universe": [
|
||||
"Afterlife",
|
||||
"Alternate Universe",
|
||||
"Augmented Reality",
|
||||
"Omegaverse",
|
||||
"Post-Apocalyptic",
|
||||
"Space",
|
||||
"Urban Fantasy",
|
||||
"Virtual World",
|
||||
],
|
||||
"Technical": [
|
||||
"4-koma",
|
||||
"Achromatic",
|
||||
"Advertisement",
|
||||
"Anthology",
|
||||
"CGI",
|
||||
"Episodic",
|
||||
"Flash",
|
||||
"Full CGI",
|
||||
"Full Color",
|
||||
"No Dialogue",
|
||||
"Non-fiction",
|
||||
"POV",
|
||||
"Puppetry",
|
||||
"Rotoscoping",
|
||||
"Stop Motion",
|
||||
],
|
||||
"Theme / Action": [
|
||||
"Archery",
|
||||
"Battle Royale",
|
||||
"Espionage",
|
||||
"Fugitive",
|
||||
"Guns",
|
||||
"Martial Arts",
|
||||
"Spearplay",
|
||||
"Swordplay",
|
||||
],
|
||||
"Theme / Arts": [
|
||||
"Acting",
|
||||
"Calligraphy",
|
||||
"Classic Literature",
|
||||
"Drawing",
|
||||
"Fashion",
|
||||
"Food",
|
||||
"Makeup",
|
||||
"Photography",
|
||||
"Rakugo",
|
||||
"Writing",
|
||||
],
|
||||
"Theme / Arts-Music": [
|
||||
"Band",
|
||||
"Classical Music",
|
||||
"Dancing",
|
||||
"Hip-hop Music",
|
||||
"Jazz Music",
|
||||
"Metal Music",
|
||||
"Musical Theater",
|
||||
"Rock Music",
|
||||
],
|
||||
"Theme / Comedy": ["Parody", "Satire", "Slapstick", "Surreal Comedy"],
|
||||
"Theme / Drama": [
|
||||
"Bullying",
|
||||
"Class Struggle",
|
||||
"Coming of Age",
|
||||
"Conspiracy",
|
||||
"Eco-Horror",
|
||||
"Fake Relationship",
|
||||
"Kingdom Management",
|
||||
"Rehabilitation",
|
||||
"Revenge",
|
||||
"Suicide",
|
||||
"Tragedy",
|
||||
],
|
||||
"Theme / Fantasy": [
|
||||
"Alchemy",
|
||||
"Body Swapping",
|
||||
"Cultivation",
|
||||
"Fairy Tale",
|
||||
"Henshin",
|
||||
"Isekai",
|
||||
"Kaiju",
|
||||
"Magic",
|
||||
"Mythology",
|
||||
"Necromancy",
|
||||
"Shapeshifting",
|
||||
"Steampunk",
|
||||
"Super Power",
|
||||
"Superhero",
|
||||
"Wuxia",
|
||||
"Youkai",
|
||||
],
|
||||
"Theme / Game": ["Board Game", "E-Sports", "Video Games"],
|
||||
"Theme / Game-Card & Board Game": [
|
||||
"Card Battle",
|
||||
"Go",
|
||||
"Karuta",
|
||||
"Mahjong",
|
||||
"Poker",
|
||||
"Shogi",
|
||||
],
|
||||
"Theme / Game-Sport": [
|
||||
"Acrobatics",
|
||||
"Airsoft",
|
||||
"American Football",
|
||||
"Athletics",
|
||||
"Badminton",
|
||||
"Baseball",
|
||||
"Basketball",
|
||||
"Bowling",
|
||||
"Boxing",
|
||||
"Cheerleading",
|
||||
"Cycling",
|
||||
"Fencing",
|
||||
"Fishing",
|
||||
"Fitness",
|
||||
"Football",
|
||||
"Golf",
|
||||
"Handball",
|
||||
"Ice Skating",
|
||||
"Judo",
|
||||
"Lacrosse",
|
||||
"Parkour",
|
||||
"Rugby",
|
||||
"Scuba Diving",
|
||||
"Skateboarding",
|
||||
"Sumo",
|
||||
"Surfing",
|
||||
"Swimming",
|
||||
"Table Tennis",
|
||||
"Tennis",
|
||||
"Volleyball",
|
||||
"Wrestling",
|
||||
],
|
||||
"Theme / Other": [
|
||||
"Adoption",
|
||||
"Animals",
|
||||
"Astronomy",
|
||||
"Autobiographical",
|
||||
"Biographical",
|
||||
"Body Horror",
|
||||
"Cannibalism",
|
||||
"Chibi",
|
||||
"Cosmic Horror",
|
||||
"Crime",
|
||||
"Crossover",
|
||||
"Death Game",
|
||||
"Denpa",
|
||||
"Drugs",
|
||||
"Economics",
|
||||
"Educational",
|
||||
"Environmental",
|
||||
"Ero Guro",
|
||||
"Filmmaking",
|
||||
"Found Family",
|
||||
"Gambling",
|
||||
"Gender Bending",
|
||||
"Gore",
|
||||
"Language Barrier",
|
||||
"LGBTQ+ Themes",
|
||||
"Lost Civilization",
|
||||
"Marriage",
|
||||
"Medicine",
|
||||
"Memory Manipulation",
|
||||
"Meta",
|
||||
"Mountaineering",
|
||||
"Noir",
|
||||
"Otaku Culture",
|
||||
"Pandemic",
|
||||
"Philosophy",
|
||||
"Politics",
|
||||
"Proxy Battle",
|
||||
"Psychosexual",
|
||||
"Reincarnation",
|
||||
"Religion",
|
||||
"Royal Affairs",
|
||||
"Slavery",
|
||||
"Software Development",
|
||||
"Survival",
|
||||
"Terrorism",
|
||||
"Torture",
|
||||
"Travel",
|
||||
"War",
|
||||
],
|
||||
"Theme / Other-Organisations": [
|
||||
"Assassins",
|
||||
"Criminal Organization",
|
||||
"Cult",
|
||||
"Firefighters",
|
||||
"Gangs",
|
||||
"Mafia",
|
||||
"Military",
|
||||
"Police",
|
||||
"Triads",
|
||||
"Yakuza",
|
||||
],
|
||||
"Theme / Other-Vehicle": [
|
||||
"Aviation",
|
||||
"Cars",
|
||||
"Mopeds",
|
||||
"Motorcycles",
|
||||
"Ships",
|
||||
"Tanks",
|
||||
"Trains",
|
||||
],
|
||||
"Theme / Romance": [
|
||||
"Age Gap",
|
||||
"Bisexual",
|
||||
"Boys' Love",
|
||||
"Female Harem",
|
||||
"Heterosexual",
|
||||
"Love Triangle",
|
||||
"Male Harem",
|
||||
"Matchmaking",
|
||||
"Mixed Gender Harem",
|
||||
"Teens' Love",
|
||||
"Unrequited Love",
|
||||
"Yuri",
|
||||
],
|
||||
"Theme / Sci Fi": [
|
||||
"Cyberpunk",
|
||||
"Space Opera",
|
||||
"Time Loop",
|
||||
"Time Manipulation",
|
||||
"Tokusatsu",
|
||||
],
|
||||
"Theme / Sci Fi-Mecha": ["Real Robot", "Super Robot"],
|
||||
"Theme / Slice of Life": [
|
||||
"Agriculture",
|
||||
"Cute Boys Doing Cute Things",
|
||||
"Cute Girls Doing Cute Things",
|
||||
"Family Life",
|
||||
"Horticulture",
|
||||
"Iyashikei",
|
||||
"Parenthood",
|
||||
],
|
||||
}
|
||||
tags_available_list = []
|
||||
for tag_category, tags_in_category in tags_available.items():
|
||||
tags_available_list.extend(tags_in_category)
|
||||
from .data import (
|
||||
genres_available,
|
||||
media_formats_available,
|
||||
media_statuses_available,
|
||||
seasons_available,
|
||||
sorts_available,
|
||||
tags_available_list,
|
||||
years_available,
|
||||
)
|
||||
|
||||
|
||||
@click.command(
|
||||
@@ -380,90 +26,27 @@ for tag_category, tags_in_category in tags_available.items():
|
||||
@click.option(
|
||||
"--season",
|
||||
help="The season the media was released",
|
||||
type=click.Choice(["WINTER", "SPRING", "SUMMER", "FALL"]),
|
||||
type=click.Choice(seasons_available),
|
||||
)
|
||||
@click.option(
|
||||
"--status",
|
||||
"-S",
|
||||
help="The media status of the anime",
|
||||
type=click.Choice(
|
||||
["FINISHED", "RELEASING", "NOT_YET_RELEASED", "CANCELLED", "HIATUS"]
|
||||
),
|
||||
multiple=True,
|
||||
type=click.Choice(media_statuses_available),
|
||||
)
|
||||
@click.option(
|
||||
"--sort",
|
||||
"-s",
|
||||
help="What to sort the search results on",
|
||||
type=click.Choice(
|
||||
[
|
||||
"ID",
|
||||
"ID_DESC",
|
||||
"TITLE_ROMAJI",
|
||||
"TITLE_ROMAJI_DESC",
|
||||
"TITLE_ENGLISH",
|
||||
"TITLE_ENGLISH_DESC",
|
||||
"TITLE_NATIVE",
|
||||
"TITLE_NATIVE_DESC",
|
||||
"TYPE",
|
||||
"TYPE_DESC",
|
||||
"FORMAT",
|
||||
"FORMAT_DESC",
|
||||
"START_DATE",
|
||||
"START_DATE_DESC",
|
||||
"END_DATE",
|
||||
"END_DATE_DESC",
|
||||
"SCORE",
|
||||
"SCORE_DESC",
|
||||
"POPULARITY",
|
||||
"POPULARITY_DESC",
|
||||
"TRENDING",
|
||||
"TRENDING_DESC",
|
||||
"EPISODES",
|
||||
"EPISODES_DESC",
|
||||
"DURATION",
|
||||
"DURATION_DESC",
|
||||
"STATUS",
|
||||
"STATUS_DESC",
|
||||
"CHAPTERS",
|
||||
"CHAPTERS_DESC",
|
||||
"VOLUMES",
|
||||
"VOLUMES_DESC",
|
||||
"UPDATED_AT",
|
||||
"UPDATED_AT_DESC",
|
||||
"SEARCH_MATCH",
|
||||
"FAVOURITES",
|
||||
"FAVOURITES_DESC",
|
||||
]
|
||||
),
|
||||
type=click.Choice(sorts_available),
|
||||
)
|
||||
@click.option(
|
||||
"--genres",
|
||||
"-g",
|
||||
multiple=True,
|
||||
help="the genres to filter by",
|
||||
type=click.Choice(
|
||||
[
|
||||
"Action",
|
||||
"Adventure",
|
||||
"Comedy",
|
||||
"Drama",
|
||||
"Ecchi",
|
||||
"Fantasy",
|
||||
"Horror",
|
||||
"Mahou Shoujo",
|
||||
"Mecha",
|
||||
"Music",
|
||||
"Mystery",
|
||||
"Psychological",
|
||||
"Romance",
|
||||
"Sci-Fi",
|
||||
"Slice of Life",
|
||||
"Sports",
|
||||
"Supernatural",
|
||||
"Thriller",
|
||||
"Hentai",
|
||||
]
|
||||
),
|
||||
type=click.Choice(genres_available),
|
||||
)
|
||||
@click.option(
|
||||
"--tags",
|
||||
@@ -477,66 +60,46 @@ for tag_category, tags_in_category in tags_available.items():
|
||||
"-f",
|
||||
multiple=True,
|
||||
help="Media format",
|
||||
type=click.Choice(
|
||||
["TV", "TV_SHORT", "MOVIE", "SPECIAL", "OVA", "MUSIC", "NOVEL", "ONE_SHOT"]
|
||||
),
|
||||
type=click.Choice(media_formats_available),
|
||||
)
|
||||
@click.option(
|
||||
"--year",
|
||||
"-y",
|
||||
type=click.Choice(
|
||||
[
|
||||
"2024",
|
||||
"2023",
|
||||
"2022",
|
||||
"2021",
|
||||
"2020",
|
||||
"2019",
|
||||
"2018",
|
||||
"2017",
|
||||
"2016",
|
||||
"2015",
|
||||
"2014",
|
||||
"2013",
|
||||
"2012",
|
||||
"2011",
|
||||
"2010",
|
||||
"2009",
|
||||
"2008",
|
||||
"2007",
|
||||
"2006",
|
||||
"2005",
|
||||
"2004",
|
||||
"2000",
|
||||
"1990",
|
||||
"1980",
|
||||
"1970",
|
||||
"1960",
|
||||
"1950",
|
||||
"1940",
|
||||
"1930",
|
||||
"1920",
|
||||
"1910",
|
||||
"1900",
|
||||
]
|
||||
),
|
||||
type=click.Choice(years_available),
|
||||
help="the year the media was released",
|
||||
)
|
||||
@click.option(
|
||||
"--on-list/--not-on-list",
|
||||
"-L/-no-L",
|
||||
help="Whether the anime should be in your list or not",
|
||||
type=bool,
|
||||
)
|
||||
@click.pass_obj
|
||||
def search(
|
||||
config, title, dump_json, season, status, sort, genres, tags, media_format, year
|
||||
config,
|
||||
title,
|
||||
dump_json,
|
||||
season,
|
||||
status,
|
||||
sort,
|
||||
genres,
|
||||
tags,
|
||||
media_format,
|
||||
year,
|
||||
on_list,
|
||||
):
|
||||
from ....anilist import AniList
|
||||
|
||||
success, search_results = AniList.search(
|
||||
query=title,
|
||||
sort=sort,
|
||||
status=status,
|
||||
status_in=list(status),
|
||||
genre_in=list(genres),
|
||||
season=season,
|
||||
tag_in=list(tags),
|
||||
seasonYear=year,
|
||||
format_in=list(media_format),
|
||||
on_list=on_list,
|
||||
)
|
||||
if success:
|
||||
if dump_json:
|
||||
@@ -548,7 +111,23 @@ def search(
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = search_results
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda page=1, **kwargs: AniList.search(
|
||||
query=title,
|
||||
sort=sort,
|
||||
status_in=list(status),
|
||||
genre_in=list(genres),
|
||||
season=season,
|
||||
tag_in=list(tags),
|
||||
seasonYear=year,
|
||||
format_in=list(media_format),
|
||||
on_list=on_list,
|
||||
page=page,
|
||||
)
|
||||
)
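# this loader re-runs the same search for a requested page so the results menu can fetch further pages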
|
||||
fastanime_runtime_state.anilist_results_data = search_results
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
fastanime/cli/commands/anilist/stats.py (new file, 63 lines)
@@ -0,0 +1,63 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...config import Config
|
||||
|
||||
|
||||
@click.command(help="Print out your anilist stats")
|
||||
@click.pass_obj
|
||||
def stats(
|
||||
config: "Config",
|
||||
):
|
||||
import shutil
|
||||
import subprocess
|
||||
from sys import exit
|
||||
|
||||
from rich.console import Console
|
||||
|
||||
console = Console()
|
||||
from rich.markdown import Markdown
|
||||
from rich.panel import Panel
|
||||
|
||||
from ....anilist import AniList
|
||||
|
||||
user_data = AniList.get_user_info()
|
||||
if not user_data[0] or not user_data[1]:
|
||||
print("Failed to get user info")
|
||||
print(user_data[1])
|
||||
exit(1)
|
||||
|
||||
KITTEN_EXECUTABLE = shutil.which("kitten")
|
||||
if not KITTEN_EXECUTABLE:
|
||||
print("Kitten not found")
|
||||
exit(1)
|
||||
|
||||
image_url = user_data[1]["data"]["User"]["avatar"]["medium"]
|
||||
user_name = user_data[1]["data"]["User"]["name"]
|
||||
about = user_data[1]["data"]["User"]["about"] or ""
|
||||
console.clear()
|
||||
image_x = int(console.size.width * 0.1)
|
||||
image_y = int(console.size.height * 0.1)
|
||||
img_w = console.size.width // 3
|
||||
img_h = console.size.height // 3
|
||||
image_process = subprocess.run(
|
||||
[
|
||||
KITTEN_EXECUTABLE,
|
||||
"icat",
|
||||
"--clear",
|
||||
"--place",
|
||||
f"{img_w}x{img_h}@{image_x}x{image_y}",
|
||||
image_url,
|
||||
],
|
||||
)
|
||||
if not image_process.returncode == 0:
|
||||
print("failed to get image from icat")
|
||||
exit(1)
|
||||
console.print(
|
||||
Panel(
|
||||
Markdown(about),
|
||||
title=user_name,
|
||||
)
|
||||
)
|
||||
@@ -26,7 +26,10 @@ def trending(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = data
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = AniList.get_trending
|
||||
fastanime_runtime_state.anilist_results_data = data
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
@@ -2,7 +2,7 @@ import click
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Fetch the 15 most anticipited anime", short_help="View upcoming anime"
|
||||
help="Fetch the 15 most anticipated anime", short_help="View upcoming anime"
|
||||
)
|
||||
@click.option(
|
||||
"--dump-json",
|
||||
@@ -25,7 +25,10 @@ def upcoming(config, dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = data
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = AniList.get_upcoming_anime
|
||||
fastanime_runtime_state.anilist_results_data = data
|
||||
anilist_results_menu(config, fastanime_runtime_state)
|
||||
else:
|
||||
from sys import exit
|
||||
|
||||
@@ -42,5 +42,12 @@ def watching(config: "Config", dump_json):
|
||||
from ...utils.tools import FastAnimeRuntimeState
|
||||
|
||||
fastanime_runtime_state = FastAnimeRuntimeState()
|
||||
fastanime_runtime_state.anilist_data = anime_list[1]
|
||||
|
||||
fastanime_runtime_state.current_page = 1
|
||||
fastanime_runtime_state.current_data_loader = (
|
||||
lambda config, **kwargs: anilist_interfaces._handle_animelist(
|
||||
config, fastanime_runtime_state, "Watching", **kwargs
|
||||
)
|
||||
)
|
||||
fastanime_runtime_state.anilist_results_data = anime_list[1]
|
||||
anilist_interfaces.anilist_results_menu(config, fastanime_runtime_state)
|
||||
|
||||
@@ -1,7 +1,24 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(help="Helper command to manage cache")
|
||||
@click.command(
|
||||
help="Helper command to manage cache",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# delete everything in the cache dir
|
||||
fastanime cache --clean
|
||||
\b
|
||||
# print the path to the cache dir and exit
|
||||
fastanime cache --path
|
||||
\b
|
||||
# print the current size of the cache dir and exit
|
||||
fastanime cache --size
|
||||
\b
|
||||
# open the cache dir and exit
|
||||
fastanime cache
|
||||
""",
|
||||
)
|
||||
@click.option("--clean", help="Clean the cache dir", is_flag=True)
|
||||
@click.option("--path", help="The path to the cache dir", is_flag=True)
|
||||
@click.option("--size", help="The size of the cache dir", is_flag=True)
|
||||
|
||||
@@ -1,7 +1,24 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(help="Helper command to get shell completions")
|
||||
@click.command(
|
||||
help="Helper command to get shell completions",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# try to detect your shell and print completions
|
||||
fastanime completions
|
||||
\b
|
||||
# print fish completions
|
||||
fastanime completions --fish
|
||||
\b
|
||||
# print bash completions
|
||||
fastanime completions --bash
|
||||
\b
|
||||
# print zsh completions
|
||||
fastanime completions --zsh
|
||||
""",
|
||||
)
|
||||
@click.option("--fish", is_flag=True, help="print fish completions")
|
||||
@click.option("--zsh", is_flag=True, help="print zsh completions")
|
||||
@click.option("--bash", is_flag=True, help="print bash completions")
|
||||
|
||||
@@ -7,8 +7,27 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Opens up your fastanime config in your preferred editor",
|
||||
help="Manage your config with ease",
|
||||
short_help="Edit your config",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# Edit your config in your default editor
|
||||
# NB: If it opens vim or vi exit with `:q`
|
||||
fastanime config
|
||||
\b
|
||||
# get the path of the config file
|
||||
fastanime config --path
|
||||
\b
|
||||
# print desktop entry info
|
||||
fastanime config --desktop-entry
|
||||
\b
|
||||
# update your config without opening an editor
|
||||
fastanime --icons --fzf --preview config --update
|
||||
\b
|
||||
# view the current contents of your config
|
||||
fastanime config --view
|
||||
""",
|
||||
)
|
||||
@click.option("--path", "-p", help="Print the config location and exit", is_flag=True)
|
||||
@click.option(
|
||||
@@ -20,8 +39,14 @@ if TYPE_CHECKING:
|
||||
help="Configure the desktop entry of fastanime",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--update",
|
||||
"-u",
|
||||
help="Persist all the config options passed to fastanime to your config file",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.pass_obj
|
||||
def config(config: "Config", path, view, desktop_entry):
|
||||
def config(user_config: "Config", path, view, desktop_entry, update):
|
||||
import sys
|
||||
|
||||
from rich import print
|
||||
@@ -32,7 +57,7 @@ def config(config: "Config", path, view, desktop_entry):
|
||||
if path:
|
||||
print(USER_CONFIG_PATH)
|
||||
elif view:
|
||||
print(config)
|
||||
print(user_config)
|
||||
elif desktop_entry:
|
||||
import os
|
||||
import shutil
|
||||
@@ -87,7 +112,9 @@ def config(config: "Config", path, view, desktop_entry):
|
||||
with open(desktop_entry_path) as f:
|
||||
print(f"Successfully wrote \n{f.read()}")
|
||||
exit_app(0)
|
||||
elif update:
|
||||
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as file:
|
||||
file.write(user_config.__str__())
|
||||
print("update successfull")
|
||||
else:
|
||||
import click
|
||||
|
||||
click.edit(filename=USER_CONFIG_PATH)
|
||||
|
||||
@@ -11,6 +11,53 @@ if TYPE_CHECKING:
|
||||
@click.command(
|
||||
help="Download anime using the anime provider for a specified range",
|
||||
short_help="Download anime",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# Download all available episodes
|
||||
# multiple titles can be specified with -t option
|
||||
fastanime download -t <anime-title> -t <anime-title>
|
||||
# -- or --
|
||||
fastanime download -t <anime-title> -t <anime-title> -r ':'
|
||||
\b
|
||||
# download latest episode for the two anime titles
|
||||
# the number can be any number of latest episodes, but a minus sign must be present
|
||||
fastanime download -t <anime-title> -t <anime-title> -r '-1'
|
||||
\b
|
||||
# latest 5
|
||||
fastanime download -t <anime-title> -t <anime-title> -r '-5'
|
||||
\b
|
||||
# Download specific episode range
|
||||
# be sure to observe the range syntax
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>:<step>'
|
||||
\b
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:<episodes-end>'
|
||||
\b
|
||||
fastanime download -t <anime-title> -r '<episodes-start>:'
|
||||
\b
|
||||
fastanime download -t <anime-title> -r ':<episodes-end>'
|
||||
\b
|
||||
# download specific episode
|
||||
# remember python indexing starts at 0
|
||||
fastanime download -t <anime-title> -r '<episode-1>:<episode>'
|
||||
\b
|
||||
# merge subtitles into mkv format with ffmpeg; hianime tends to give subs as separate files
# and don't prompt for anything
# e.g. if a file already exists in the destination, remove it instead of asking
# and clean, i.e. remove the original files (sub file and vid file)
# and only keep the merged files
|
||||
fastanime download -t <anime-title> --merge --clean --no-prompt
|
||||
\b
|
||||
# EOF is used since -t always expects a title
|
||||
# you can supply anime titles from file or -t at the same time
|
||||
# from stdin
|
||||
echo -e "<anime-title>\\n<anime-title>\\n<anime-title>" | fastanime download -t "EOF" -r <range> -f -
|
||||
\b
|
||||
# from file
|
||||
fastanime download -t "EOF" -r <range> -f <file-path>
|
||||
""",
|
||||
)
|
||||
@click.option(
|
||||
"--anime-titles",
|
||||
@@ -64,9 +111,29 @@ if TYPE_CHECKING:
|
||||
)
|
||||
@click.option(
|
||||
"--prompt/--no-prompt",
|
||||
help="Dont prompt for anything instead just do the best thing",
|
||||
help="Whether to prompt for anything instead just do the best thing",
|
||||
default=True,
|
||||
)
|
||||
@click.option(
|
||||
"--force-ffmpeg",
|
||||
is_flag=True,
|
||||
help="Force the use of FFmpeg for downloading (supports large variety of streams but slower)",
|
||||
)
|
||||
@click.option(
|
||||
"--hls-use-mpegts",
|
||||
is_flag=True,
|
||||
help="Use mpegts for hls streams, resulted in .ts file (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
|
||||
)
|
||||
@click.option(
|
||||
"--hls-use-h264",
|
||||
is_flag=True,
|
||||
help="Use H.264 (MP4) for hls streams, resulted in .mp4 file (useful for some streams: see Docs) (this option forces --force-ffmpeg to be True)",
|
||||
)
|
||||
@click.option(
|
||||
"--no-check-certificates",
|
||||
is_flag=True,
|
||||
help="Suppress HTTPS certificate validation",
|
||||
)
|
||||
@click.pass_obj
|
||||
def download(
|
||||
config: "Config",
|
||||
@@ -80,6 +147,10 @@ def download(
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
force_ffmpeg,
|
||||
hls_use_mpegts,
|
||||
hls_use_h264,
|
||||
no_check_certificates,
|
||||
):
|
||||
import time
|
||||
|
||||
@@ -90,6 +161,7 @@ def download(
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ...libs.anime_provider.types import Anime
|
||||
from ...libs.fzf import fzf
|
||||
from ...Utility.data import anime_normalizer
|
||||
from ...Utility.downloader.downloader import downloader
|
||||
from ..utils.tools import exit_app
|
||||
from ..utils.utils import (
|
||||
@@ -98,7 +170,10 @@ def download(
|
||||
move_preferred_subtitle_lang_to_top,
|
||||
)
|
||||
|
||||
force_ffmpeg |= hls_use_mpegts or hls_use_h264
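# remuxing to mpegts or h264 goes through ffmpeg, so either flag implies --force-ffmpeg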
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
anilist_anime_info = None
|
||||
|
||||
translation_type = config.translation_type
|
||||
download_dir = config.downloads_dir
|
||||
@@ -136,6 +211,10 @@ def download(
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
force_ffmpeg,
|
||||
hls_use_mpegts,
|
||||
hls_use_h264,
|
||||
no_check_certificates,
|
||||
)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
@@ -147,16 +226,21 @@ def download(
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
search_result = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
selected_anime_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(
|
||||
anime_normalizer.get(title, title), anime_title
|
||||
),
|
||||
)
|
||||
print("[cyan]Auto selecting:[/] ", search_result)
|
||||
print("[cyan]Auto selecting:[/] ", selected_anime_title)
|
||||
else:
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
search_result = fzf.run(choices, "Please Select title: ", "FastAnime")
|
||||
selected_anime_title = fzf.run(
|
||||
choices, "Please Select title", "FastAnime"
|
||||
)
|
||||
else:
|
||||
search_result = fuzzy_inquirer(
|
||||
selected_anime_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Please Select title",
|
||||
)
|
||||
@@ -165,7 +249,7 @@ def download(
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[search_result]["id"]
|
||||
search_results_[selected_anime_title]["id"]
|
||||
)
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
@@ -182,6 +266,10 @@ def download(
|
||||
clean,
|
||||
wait_time,
|
||||
prompt,
|
||||
force_ffmpeg,
|
||||
hls_use_mpegts,
|
||||
hls_use_h264,
|
||||
no_check_certificates,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -214,6 +302,12 @@ def download(
|
||||
|
||||
else:
|
||||
episodes_range = sorted(episodes, key=float)
|
||||
print(f"[green bold]Downloading: [/] {episodes_range}")
|
||||
|
||||
if config.normalize_titles:
|
||||
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
|
||||
|
||||
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
|
||||
|
||||
# let's download them
|
||||
for episode in episodes_range:
|
||||
@@ -225,7 +319,7 @@ def download(
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime, episode, config.translation_type
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
print("No streams skipping")
|
||||
@@ -260,7 +354,7 @@ def download(
|
||||
server_name = config.server
|
||||
else:
|
||||
if config.use_fzf:
|
||||
server_name = fzf.run(servers_names, "Select an link: ")
|
||||
server_name = fzf.run(servers_names, "Select an link")
|
||||
else:
|
||||
server_name = fuzzy_inquirer(
|
||||
servers_names,
|
||||
@@ -279,24 +373,41 @@ def download(
|
||||
|
||||
subtitles = servers[server_name]["subtitles"]
|
||||
episode_title = servers[server_name]["episode_title"]
|
||||
print(f"[purple]Now Downloading:[/] {search_result} Episode {episode}")
|
||||
|
||||
if anilist_anime_info:
|
||||
selected_anime_title = (
|
||||
anilist_anime_info["title"][config.preferred_language]
|
||||
or anilist_anime_info["title"]["romaji"]
|
||||
or anilist_anime_info["title"]["english"]
|
||||
)
|
||||
import re
|
||||
|
||||
for episode_detail in anilist_anime_info["episodes"]:
|
||||
if re.match(f"Episode {episode} ", episode_detail["title"]):
|
||||
episode_title = episode_detail["title"]
|
||||
break
|
||||
print(f"[purple]Now Downloading:[/] {episode_title}")
|
||||
subtitles = move_preferred_subtitle_lang_to_top(
|
||||
subtitles, config.sub_lang
|
||||
)
|
||||
downloader._download_file(
|
||||
link,
|
||||
search_result,
|
||||
selected_anime_title,
|
||||
episode_title,
|
||||
download_dir,
|
||||
silent,
|
||||
config.format,
|
||||
force_unknown_ext,
|
||||
verbose,
|
||||
vid_format=config.format,
|
||||
force_unknown_ext=force_unknown_ext,
|
||||
verbose=verbose,
|
||||
headers=provider_headers,
|
||||
sub=subtitles[0]["url"] if subtitles else "",
|
||||
merge=merge,
|
||||
clean=clean,
|
||||
prompt=prompt,
|
||||
force_ffmpeg=force_ffmpeg,
|
||||
hls_use_mpegts=hls_use_mpegts,
|
||||
hls_use_h264=hls_use_h264,
|
||||
nocheckcertificate=no_check_certificates,
|
||||
)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
|
||||
@@ -3,25 +3,60 @@ from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ..completion_functions import downloaded_anime_titles
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
if TYPE_CHECKING:
|
||||
from ..config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="View and watch your downloads using mpv", short_help="Watch downloads"
|
||||
help="View and watch your downloads using mpv",
|
||||
short_help="Watch downloads",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
fastanime downloads
|
||||
\b
|
||||
# view individual episodes
|
||||
fastanime downloads --view-episodes
|
||||
# --- or ---
|
||||
fastanime downloads -v
|
||||
\b
|
||||
# to set seek time when using ffmpegthumbnailer for local previews
|
||||
# -1 means random and is the default
|
||||
fastanime downloads --time-to-seek <intRange(-1,100)>
|
||||
# --- or ---
|
||||
fastanime downloads -t <intRange(-1,100)>
|
||||
\b
|
||||
# to watch a specific title
|
||||
# be sure to get the completions for the best experience
|
||||
fastanime downloads --title <title>
|
||||
\b
|
||||
# to get the path to the downloads folder set
|
||||
fastanime downloads --path
|
||||
# useful when you want to use the value for other programs
|
||||
""",
|
||||
)
|
||||
@click.option("--path", "-p", help="print the downloads folder and exit", is_flag=True)
|
||||
@click.option(
|
||||
"--title",
|
||||
"-T",
|
||||
shell_complete=downloaded_anime_titles,
|
||||
help="watch a specific title",
|
||||
)
|
||||
@click.option("--view-episodes", "-v", help="View individual episodes", is_flag=True)
|
||||
@click.option(
|
||||
"--ffmpegthumbnailer-seek-time",
|
||||
"--time-to-seek",
|
||||
"-t",
|
||||
type=click.IntRange(-1, 100),
|
||||
help="ffmpegthumbnailer seek time [0-100]",
|
||||
help="ffmpegthumbnailer seek time",
|
||||
)
|
||||
@click.pass_obj
|
||||
def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_seek_time):
|
||||
def downloads(
|
||||
config: "Config", path: bool, title, view_episodes, ffmpegthumbnailer_seek_time
|
||||
):
|
||||
import os
|
||||
|
||||
from ...cli.utils.mpv import run_mpv
|
||||
@@ -89,7 +124,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
return
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ..utils.scripts import fzf_preview
|
||||
from ..utils.scripts import bash_functions
|
||||
|
||||
downloads_thumbnail_cache_dir = os.path.join(APP_CACHE_DIR, "video_thumbnails")
|
||||
Path(downloads_thumbnail_cache_dir).mkdir(parents=True, exist_ok=True)
|
||||
@@ -148,7 +183,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
bash_functions,
|
||||
downloads_thumbnail_cache_dir,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
@@ -159,7 +194,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
from pathlib import Path
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ..utils.scripts import fzf_preview
|
||||
from ..utils.scripts import bash_functions
|
||||
|
||||
if not shutil.which("ffmpegthumbnailer"):
|
||||
print("ffmpegthumbnailer not found")
|
||||
@@ -221,7 +256,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
bash_functions,
|
||||
downloads_thumbnail_cache_dir,
|
||||
downloads_thumbnail_cache_dir,
|
||||
)
|
||||
@@ -239,6 +274,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
os.listdir(anime_playlist_path), key=sort_by_episode_number
|
||||
)
|
||||
downloaded_episodes = [*episodes, "Back"]
|
||||
|
||||
if config.use_fzf:
|
||||
if not config.preview:
|
||||
episode_title = fzf.run(
|
||||
@@ -257,7 +293,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
else:
|
||||
episode_title = fuzzy_inquirer(
|
||||
downloaded_episodes,
|
||||
"Enter Playlist Name: ",
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
if episode_title == "Back":
|
||||
stream_anime()
|
||||
@@ -268,11 +304,18 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
|
||||
SyncPlayer(episode_path)
|
||||
else:
|
||||
run_mpv(episode_path)
|
||||
run_mpv(
|
||||
episode_path,
|
||||
player=config.player,
|
||||
)
|
||||
stream_episode(anime_playlist_path)
|
||||
|
||||
def stream_anime():
|
||||
if config.use_fzf:
|
||||
def stream_anime(title=None):
|
||||
if title:
|
||||
from thefuzz import fuzz
|
||||
|
||||
playlist_name = max(anime_downloads, key=lambda t: fuzz.ratio(title, t))
|
||||
elif config.use_fzf:
|
||||
if not config.preview:
|
||||
playlist_name = fzf.run(
|
||||
anime_downloads,
|
||||
@@ -290,7 +333,7 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
else:
|
||||
playlist_name = fuzzy_inquirer(
|
||||
anime_downloads,
|
||||
"Enter Playlist Name: ",
|
||||
"Enter Playlist Name",
|
||||
)
|
||||
if playlist_name == "Exit":
|
||||
exit_app()
|
||||
@@ -306,7 +349,10 @@ def downloads(config: "Config", path: bool, view_episodes, ffmpegthumbnailer_see
|
||||
|
||||
SyncPlayer(playlist)
|
||||
else:
|
||||
run_mpv(playlist)
|
||||
run_mpv(
|
||||
playlist,
|
||||
player=config.player,
|
||||
)
|
||||
stream_anime()
|
||||
|
||||
stream_anime()
|
||||
stream_anime(title)
|
||||
|
||||
@@ -11,6 +11,41 @@ if TYPE_CHECKING:
|
||||
@click.command(
|
||||
help="Helper command to get streams for anime to use externally in a non-python application",
|
||||
short_help="Print anime streams to standard out",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# --- print anime info + episode streams ---
|
||||
\b
|
||||
# multiple titles can be specified with the -t option
|
||||
fastanime grab -t <anime-title> -t <anime-title>
|
||||
# -- or --
|
||||
# print all available episodes
|
||||
fastanime grab -t <anime-title> -r ':'
|
||||
\b
|
||||
# print the latest episode
|
||||
fastanime grab -t <anime-title> -r '-1'
|
||||
\b
|
||||
# print a specific episode range
|
||||
# be sure to observe the range syntax
|
||||
fastanime grab -t <anime-title> -r '<start>:<stop>'
|
||||
\b
|
||||
fastanime grab -t <anime-title> -r '<start>:<stop>:<step>'
|
||||
\b
|
||||
fastanime grab -t <anime-title> -r '<start>:'
|
||||
\b
|
||||
fastanime grab -t <anime-title> -r ':<end>'
|
||||
\b
|
||||
# --- grab options ---
|
||||
\b
|
||||
# print search results only
|
||||
fastanime grab -t <anime-title> -r <range> --search-results-only
|
||||
\b
|
||||
# print anime info only
|
||||
fastanime grab -t <anime-title> -r <range> --anime-info-only
|
||||
\b
|
||||
# print episode streams only
|
||||
fastanime grab -t <anime-title> -r <range> --episode-streams-only
|
||||
""",
|
||||
)
|
||||
@click.option(
|
||||
"--anime-titles",
|
||||
@@ -56,26 +91,19 @@ def grab(
|
||||
|
||||
from thefuzz import fuzz
|
||||
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
|
||||
logger = getLogger(__name__)
|
||||
if config.manga:
|
||||
manga_title = anime_titles[0]
|
||||
from ...MangaProvider import MangaProvider
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
|
||||
grabbed_animes = []
|
||||
for anime_title in anime_titles:
|
||||
# ---- search for anime ----
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, translation_type=config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
manga_provider = MangaProvider()
|
||||
search_data = manga_provider.search_for_manga(manga_title)
|
||||
if not search_data:
|
||||
exit(1)
|
||||
if search_results_only:
|
||||
# grab only search results skipping all lines after this
|
||||
grabbed_animes.append(search_results)
|
||||
continue
|
||||
|
||||
search_results = search_results["results"]
|
||||
print(json.dumps(search_data))
|
||||
exit(0)
|
||||
search_results = search_data["results"]
|
||||
if not search_results:
|
||||
logger.error("no results for your search")
|
||||
exit(1)
|
||||
@@ -83,63 +111,112 @@ def grab(
|
||||
search_result["title"]: search_result for search_result in search_results
|
||||
}
|
||||
|
||||
search_result = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
search_result_anime_title = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_titles[0])
|
||||
)
|
||||
|
||||
# ---- fetch anime ----
|
||||
anime = anime_provider.get_anime(search_results_[search_result]["id"])
|
||||
if not anime:
|
||||
exit(1)
|
||||
manga_info = manga_provider.get_manga(
|
||||
search_results_[search_result_anime_title]["id"]
|
||||
)
|
||||
if not manga_info:
|
||||
return
|
||||
if anime_info_only:
|
||||
# grab only the anime data skipping all lines after this
|
||||
grabbed_animes.append(anime)
|
||||
continue
|
||||
episodes = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
print(json.dumps(manga_info))
|
||||
exit(0)
|
||||
|
||||
chapter_info = manga_provider.get_chapter_thumbnails(
|
||||
manga_info["id"], str(episode_range)
|
||||
)
|
||||
if not chapter_info:
|
||||
exit(1)
|
||||
print(json.dumps(chapter_info))
|
||||
|
||||
# where the magic happens
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
|
||||
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
|
||||
grabbed_animes = []
|
||||
for anime_title in anime_titles:
|
||||
# ---- search for anime ----
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, translation_type=config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
exit(1)
|
||||
if search_results_only:
|
||||
# grab only search results skipping all lines after this
|
||||
grabbed_animes.append(search_results)
|
||||
continue
|
||||
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
logger.error("no results for your search")
|
||||
exit(1)
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
search_result_anime_title = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
)
|
||||
|
||||
# ---- fetch anime ----
|
||||
anime = anime_provider.get_anime(
|
||||
search_results_[search_result_anime_title]["id"]
|
||||
)
|
||||
if not anime:
|
||||
exit(1)
|
||||
if anime_info_only:
|
||||
# grab only the anime data skipping all lines after this
|
||||
grabbed_animes.append(anime)
|
||||
continue
|
||||
episodes = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
|
||||
# where the magic happens
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end)
|
||||
]
|
||||
elif len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
episodes_range = sorted(episodes, key=float)
|
||||
|
||||
else:
|
||||
episodes_range = sorted(episodes, key=float)
|
||||
if not episode_streams_only:
|
||||
grabbed_anime = dict(anime)
|
||||
grabbed_anime["requested_episodes"] = episodes_range
|
||||
grabbed_anime["translation_type"] = config.translation_type
|
||||
grabbed_anime["episodes_streams"] = {}
|
||||
else:
|
||||
grabbed_anime = {}
|
||||
|
||||
if not episode_streams_only:
|
||||
grabbed_anime = dict(anime)
|
||||
grabbed_anime["requested_episodes"] = episodes_range
|
||||
grabbed_anime["translation_type"] = config.translation_type
|
||||
grabbed_anime["episodes_streams"] = {}
|
||||
else:
|
||||
grabbed_anime = {}
|
||||
|
||||
# let's download them
|
||||
for episode in episodes_range:
|
||||
try:
|
||||
# let's download them
|
||||
for episode in episodes_range:
|
||||
if episode not in episodes:
|
||||
continue
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime, episode, config.translation_type
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
continue
|
||||
@@ -152,14 +229,11 @@ def grab(
|
||||
episode
|
||||
] = episode_streams
|
||||
|
||||
except Exception as e:
logger.error(e)

# grab the full data for a single title and append it to the final result or episode streams
grabbed_animes.append(grabbed_anime)

# print out the final result, either {} or [], depending on whether more than one title was requested
if len(grabbed_animes) == 1:
print(json.dumps(grabbed_animes[0]))
else:
print(json.dumps(grabbed_animes))
|
||||
|
||||
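Note: both `grab` and the `search` command below interpret the -r/--episode-range value as a Python-style slice over the sorted episode list. A minimal self-contained sketch of that mapping (the function name and its lenient handling of empty fields are illustrative, not the exact logic above):

```python
def resolve_episode_range(episodes: list[str], episode_range: str | None) -> list[str]:
    """Map an '-r <start>:<stop>:<step>' value onto a sorted episode list."""
    episodes = sorted(episodes, key=float)
    if not episode_range:
        return episodes
    if ":" not in episode_range:
        # a bare number behaves like '<start>:'
        return episodes[int(episode_range):]
    parts = episode_range.split(":")
    # empty fields become None so ':', '5:' and ':5' all slice as expected
    start, stop, step = (int(p) if p.strip() else None for p in (parts + [""])[:3])
    return episodes[start:stop:step]


# resolve_episode_range(eps, ":")     -> every episode
# resolve_episode_range(eps, "-1")    -> just the latest episode
# resolve_episode_range(eps, "2:8:2") -> every second episode in the 2..8 slice
```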
@@ -1,12 +1,39 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import click
|
||||
|
||||
from ...cli.config import Config
|
||||
from ..completion_functions import anime_titles_shell_complete
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...cli.config import Config
|
||||
|
||||
|
||||
@click.command(
|
||||
help="This subcommand directly interacts with the provider to enable basic streaming. Useful for binging anime.",
|
||||
short_help="Binge anime",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# basic form where you will still be prompted for the episode number
|
||||
# multiple titles can be specified with the -t option
|
||||
fastanime search -t <anime-title> -t <anime-title>
|
||||
\b
|
||||
# binge all episodes with this command
|
||||
fastanime search -t <anime-title> -r ':'
|
||||
\b
|
||||
# watch latest episode
|
||||
fastanime search -t <anime-title> -r '-1'
|
||||
\b
|
||||
# binge a specific episode range with this command
|
||||
# be sure to observe the range syntax
|
||||
fastanime search -t <anime-title> -r '<start>:<stop>'
|
||||
\b
|
||||
fastanime search -t <anime-title> -r '<start>:<stop>:<step>'
|
||||
\b
|
||||
fastanime search -t <anime-title> -r '<start>:'
|
||||
\b
|
||||
fastanime search -t <anime-title> -r ':<end>'
|
||||
""",
|
||||
)
|
||||
@click.option(
|
||||
"--anime-titles",
|
||||
@@ -23,215 +50,343 @@ from ..completion_functions import anime_titles_shell_complete
|
||||
help="A range of episodes to binge (start-end)",
|
||||
)
|
||||
@click.pass_obj
|
||||
def search(config: Config, anime_titles: str, episode_range: str):
|
||||
def search(config: "Config", anime_titles: str, episode_range: str):
|
||||
from click import clear
|
||||
from rich import print
|
||||
from rich.progress import Progress
|
||||
from thefuzz import fuzz
|
||||
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ...libs.anime_provider.types import Anime
|
||||
from ...libs.fzf import fzf
|
||||
from ...libs.rofi import Rofi
|
||||
from ..utils.mpv import run_mpv
|
||||
from ..utils.tools import exit_app
|
||||
from ..utils.utils import (
|
||||
filter_by_quality,
|
||||
fuzzy_inquirer,
|
||||
move_preferred_subtitle_lang_to_top,
|
||||
)
|
||||
from ..utils.utils import fuzzy_inquirer
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
if config.manga:
|
||||
from InquirerPy.prompts.number import NumberPrompt
|
||||
from yt_dlp.utils import sanitize_filename
|
||||
|
||||
from ...MangaProvider import MangaProvider
|
||||
|
||||
from ..utils.feh import feh_manga_viewer
|
||||
from ..utils.icat import icat_manga_viewer
|
||||
|
||||
manga_title = anime_titles[0]
|
||||
|
||||
manga_provider = MangaProvider()
|
||||
search_data = manga_provider.search_for_manga(manga_title)
|
||||
if not search_data:
|
||||
print("No search results")
|
||||
exit(1)
|
||||
|
||||
search_results = search_data["results"]
|
||||
|
||||
print(f"[green bold]Streaming:[/] {anime_titles}")
|
||||
for anime_title in anime_titles:
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
print("Search results not found")
|
||||
input("Enter to retry")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
print("Anime not found :cry:")
|
||||
exit_app()
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result for search_result in search_results
|
||||
sanitize_filename(search_result["title"]): search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
search_result = max(
|
||||
search_results_.keys(), key=lambda title: fuzz.ratio(title, anime_title)
|
||||
search_result_manga_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(title, manga_title),
|
||||
)
|
||||
print("[cyan]Auto Selecting:[/] ", search_result)
|
||||
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
|
||||
|
||||
else:
|
||||
choices = list(search_results_.keys())
|
||||
preview = None
|
||||
if config.preview:
|
||||
from ..interfaces.utils import get_fzf_manga_preview
|
||||
|
||||
preview = get_fzf_manga_preview(search_results)
|
||||
if config.use_fzf:
|
||||
search_result = fzf.run(choices, "Please Select title: ", "FastAnime")
|
||||
search_result_manga_title = fzf.run(
|
||||
choices, "Please Select title", preview=preview
|
||||
)
|
||||
elif config.use_rofi:
|
||||
search_result = Rofi.run(choices, "Please Select Title")
|
||||
search_result_manga_title = Rofi.run(choices, "Please Select Title")
|
||||
else:
|
||||
search_result = fuzzy_inquirer(
|
||||
search_result_manga_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Please Select Title",
|
||||
)
|
||||
|
||||
# ---- fetch selected anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[search_result]["id"]
|
||||
anilist_id = search_results_[search_result_manga_title]["id"]
|
||||
manga_info = manga_provider.get_manga(anilist_id)
|
||||
if not manga_info:
|
||||
print("No manga info")
|
||||
exit(1)
|
||||
|
||||
anilist_helper = None
|
||||
if config.user:
|
||||
from ...anilist import AniList
|
||||
|
||||
AniList.login_user(config.user["token"])
|
||||
anilist_helper = AniList
|
||||
|
||||
def _manga_viewer():
|
||||
chapter_number = NumberPrompt("Select a chapter number").execute()
|
||||
chapter_info = manga_provider.get_chapter_thumbnails(
|
||||
manga_info["id"], str(chapter_number)
|
||||
)
|
||||
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
input("Enter to continue...")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
episodes_range = []
|
||||
episodes: list[str] = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
if not chapter_info:
|
||||
print("No chapter info")
|
||||
input("Enter to retry...")
|
||||
_manga_viewer()
|
||||
return
|
||||
print(
|
||||
f"[purple bold]Now Reading: [/] {search_result_manga_title} [cyan bold]Chapter:[/] {chapter_info['title']}"
|
||||
)
|
||||
if config.manga_viewer == "feh":
|
||||
feh_manga_viewer(chapter_info["thumbnails"], str(chapter_info["title"]))
|
||||
elif config.manga_viewer == "icat":
|
||||
icat_manga_viewer(
|
||||
chapter_info["thumbnails"], str(chapter_info["title"])
|
||||
)
|
||||
if anilist_helper:
|
||||
anilist_helper.update_anime_list(
|
||||
{"mediaId": anilist_id, "progress": chapter_number}
|
||||
)
|
||||
_manga_viewer()
|
||||
|
||||
_manga_viewer()
|
||||
else:
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ...libs.anime_provider.types import Anime
|
||||
from ...Utility.data import anime_normalizer
|
||||
from ..utils.mpv import run_mpv
|
||||
from ..utils.utils import filter_by_quality, move_preferred_subtitle_lang_to_top
|
||||
|
||||
anime_provider = AnimeProvider(config.provider)
|
||||
anilist_anime_info = None
|
||||
|
||||
print(f"[green bold]Streaming:[/] {anime_titles}")
|
||||
for anime_title in anime_titles:
|
||||
# ---- search for anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Search Results...", total=None)
|
||||
search_results = anime_provider.search_for_anime(
|
||||
anime_title, config.translation_type
|
||||
)
|
||||
if not search_results:
|
||||
print("Search results not found")
|
||||
input("Enter to retry")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
search_results = search_results["results"]
|
||||
if not search_results:
|
||||
print("Anime not found :cry:")
|
||||
exit_app()
|
||||
search_results_ = {
|
||||
search_result["title"]: search_result
|
||||
for search_result in search_results
|
||||
}
|
||||
|
||||
if config.auto_select:
|
||||
search_result_manga_title = max(
|
||||
search_results_.keys(),
|
||||
key=lambda title: fuzz.ratio(
|
||||
anime_normalizer.get(title, title), anime_title
|
||||
),
|
||||
)
|
||||
print("[cyan]Auto Selecting:[/] ", search_result_manga_title)
|
||||
|
||||
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[int(episodes_start) : int(episodes_end)]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
|
||||
episodes_range = iter(episodes_range)
|
||||
|
||||
def stream_anime():
|
||||
clear()
|
||||
episode = None
|
||||
|
||||
if episodes_range:
|
||||
try:
|
||||
episode = next(episodes_range) # pyright:ignore
|
||||
print(
|
||||
f"[cyan]Auto selecting:[/] {search_result} [cyan]Episode:[/] {episode}"
|
||||
)
|
||||
except StopIteration:
|
||||
print("[green]Completed binge sequence[/]:smile:")
|
||||
return
|
||||
|
||||
if not episode or episode not in episodes:
|
||||
choices = [*episodes, "end"]
|
||||
choices = list(search_results_.keys())
|
||||
if config.use_fzf:
|
||||
episode = fzf.run(
|
||||
choices, "Select an episode: ", header=search_result
|
||||
search_result_manga_title = fzf.run(
|
||||
choices, "Please Select title", "FastAnime"
|
||||
)
|
||||
elif config.use_rofi:
|
||||
episode = Rofi.run(choices, "Select an episode")
|
||||
search_result_manga_title = Rofi.run(choices, "Please Select Title")
|
||||
else:
|
||||
episode = fuzzy_inquirer(
|
||||
search_result_manga_title = fuzzy_inquirer(
|
||||
choices,
|
||||
"Select episode",
|
||||
"Please Select Title",
|
||||
)
|
||||
if episode == "end":
|
||||
return
|
||||
|
||||
# ---- fetch streams ----
|
||||
# ---- fetch selected anime ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime, episode, config.translation_type
|
||||
progress.add_task("Fetching Anime...", total=None)
|
||||
anime: Anime | None = anime_provider.get_anime(
|
||||
search_results_[search_result_manga_title]["id"]
|
||||
)
|
||||
if not streams:
|
||||
print("Failed to get streams")
|
||||
|
||||
if not anime:
|
||||
print("Sth went wring anime no found")
|
||||
input("Enter to continue...")
|
||||
search(config, anime_title, episode_range)
|
||||
return
|
||||
episodes_range = []
|
||||
episodes: list[str] = sorted(
|
||||
anime["availableEpisodesDetail"][config.translation_type], key=float
|
||||
)
|
||||
if episode_range:
|
||||
if ":" in episode_range:
|
||||
ep_range_tuple = episode_range.split(":")
|
||||
if len(ep_range_tuple) == 3 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end, step = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end) : int(step)
|
||||
]
|
||||
|
||||
elif len(ep_range_tuple) == 2 and all(ep_range_tuple):
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
episodes_range = episodes[
|
||||
int(episodes_start) : int(episodes_end)
|
||||
]
|
||||
else:
|
||||
episodes_start, episodes_end = ep_range_tuple
|
||||
if episodes_start.strip():
|
||||
episodes_range = episodes[int(episodes_start) :]
|
||||
elif episodes_end.strip():
|
||||
episodes_range = episodes[: int(episodes_end)]
|
||||
else:
|
||||
episodes_range = episodes
|
||||
else:
|
||||
episodes_range = episodes[int(episode_range) :]
|
||||
|
||||
episodes_range = iter(episodes_range)
|
||||
|
||||
if config.normalize_titles:
|
||||
from ...libs.common.mini_anilist import get_basic_anime_info_by_title
|
||||
|
||||
anilist_anime_info = get_basic_anime_info_by_title(anime["title"])
|
||||
|
||||
def stream_anime(anime: "Anime"):
|
||||
clear()
|
||||
episode = None
|
||||
|
||||
if episodes_range:
|
||||
try:
|
||||
episode = next(episodes_range) # pyright:ignore
|
||||
print(
|
||||
f"[cyan]Auto selecting:[/] {search_result_manga_title} [cyan]Episode:[/] {episode}"
|
||||
)
|
||||
except StopIteration:
|
||||
print("[green]Completed binge sequence[/]:smile:")
|
||||
return
|
||||
|
||||
if not episode or episode not in episodes:
|
||||
choices = [*episodes, "end"]
|
||||
if config.use_fzf:
|
||||
episode = fzf.run(
|
||||
choices,
|
||||
"Select an episode",
|
||||
header=search_result_manga_title,
|
||||
)
|
||||
elif config.use_rofi:
|
||||
episode = Rofi.run(choices, "Select an episode")
|
||||
else:
|
||||
episode = fuzzy_inquirer(
|
||||
choices,
|
||||
"Select episode",
|
||||
)
|
||||
if episode == "end":
|
||||
return
|
||||
|
||||
try:
|
||||
# ---- fetch servers ----
|
||||
if config.server == "top":
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching top server...", total=None)
|
||||
server = next(streams, None)
|
||||
if not server:
|
||||
print("Sth went wrong when fetching the episode")
|
||||
# ---- fetch streams ----
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
streams = anime_provider.get_episode_streams(
|
||||
anime["id"], episode, config.translation_type
|
||||
)
|
||||
if not streams:
|
||||
print("Failed to get streams")
|
||||
return
|
||||
|
||||
try:
|
||||
# ---- fetch servers ----
|
||||
if config.server == "top":
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching top server...", total=None)
|
||||
server = next(streams, None)
|
||||
if not server:
|
||||
print("Sth went wrong when fetching the episode")
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
return
|
||||
stream_link = filter_by_quality(config.quality, server["links"])
|
||||
if not stream_link:
|
||||
print("Quality not found")
|
||||
input("Enter to continue")
|
||||
stream_anime()
|
||||
stream_anime(anime)
|
||||
return
|
||||
stream_link = filter_by_quality(config.quality, server["links"])
|
||||
if not stream_link:
|
||||
print("Quality not found")
|
||||
input("Enter to continue")
|
||||
stream_anime()
|
||||
return
|
||||
link = stream_link["link"]
|
||||
subtitles = server["subtitles"]
|
||||
stream_headers = server["headers"]
|
||||
episode_title = server["episode_title"]
|
||||
else:
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching servers", total=None)
|
||||
# prompt for server selection
|
||||
servers = {server["server"]: server for server in streams}
|
||||
servers_names = list(servers.keys())
|
||||
if config.server in servers_names:
|
||||
server = config.server
|
||||
link = stream_link["link"]
|
||||
subtitles = server["subtitles"]
|
||||
stream_headers = server["headers"]
|
||||
episode_title = server["episode_title"]
|
||||
else:
|
||||
if config.use_fzf:
|
||||
server = fzf.run(servers_names, "Select an link: ")
|
||||
elif config.use_rofi:
|
||||
server = Rofi.run(servers_names, "Select an link")
|
||||
with Progress() as progress:
|
||||
progress.add_task("Fetching servers", total=None)
|
||||
# prompt for server selection
|
||||
servers = {server["server"]: server for server in streams}
|
||||
servers_names = list(servers.keys())
|
||||
if config.server in servers_names:
|
||||
server = config.server
|
||||
else:
|
||||
server = fuzzy_inquirer(
|
||||
servers_names,
|
||||
"Select link",
|
||||
)
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, servers[server]["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("Quality not found")
|
||||
input("Enter to continue")
|
||||
stream_anime()
|
||||
return
|
||||
link = stream_link["link"]
|
||||
stream_headers = servers[server]["headers"]
|
||||
subtitles = servers[server]["subtitles"]
|
||||
episode_title = servers[server]["episode_title"]
|
||||
print(f"[purple]Now Playing:[/] {search_result} Episode {episode}")
|
||||
if config.use_fzf:
|
||||
server = fzf.run(servers_names, "Select an link")
|
||||
elif config.use_rofi:
|
||||
server = Rofi.run(servers_names, "Select an link")
|
||||
else:
|
||||
server = fuzzy_inquirer(
|
||||
servers_names,
|
||||
"Select link",
|
||||
)
|
||||
stream_link = filter_by_quality(
|
||||
config.quality, servers[server]["links"]
|
||||
)
|
||||
if not stream_link:
|
||||
print("Quality not found")
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
return
|
||||
link = stream_link["link"]
|
||||
stream_headers = servers[server]["headers"]
|
||||
subtitles = servers[server]["subtitles"]
|
||||
episode_title = servers[server]["episode_title"]
|
||||
|
||||
subtitles = move_preferred_subtitle_lang_to_top(
|
||||
subtitles, config.sub_lang
|
||||
)
|
||||
if config.sync_play:
|
||||
from ..utils.syncplay import SyncPlayer
|
||||
selected_anime_title = search_result_manga_title
|
||||
if anilist_anime_info:
|
||||
selected_anime_title = (
|
||||
anilist_anime_info["title"][config.preferred_language]
|
||||
or anilist_anime_info["title"]["romaji"]
|
||||
or anilist_anime_info["title"]["english"]
|
||||
)
|
||||
import re
|
||||
|
||||
SyncPlayer(
|
||||
link, episode_title, headers=stream_headers, subtitles=subtitles
|
||||
for episode_detail in anilist_anime_info["episodes"]:
|
||||
if re.match(f"Episode {episode} ", episode_detail["title"]):
|
||||
episode_title = episode_detail["title"]
|
||||
break
|
||||
print(
|
||||
f"[purple]Now Playing:[/] {selected_anime_title} Episode {episode}"
|
||||
)
|
||||
else:
|
||||
run_mpv(
|
||||
link, episode_title, headers=stream_headers, subtitles=subtitles
|
||||
subtitles = move_preferred_subtitle_lang_to_top(
|
||||
subtitles, config.sub_lang
|
||||
)
|
||||
except IndexError as e:
|
||||
print(e)
|
||||
input("Enter to continue")
|
||||
stream_anime()
|
||||
if config.sync_play:
|
||||
from ..utils.syncplay import SyncPlayer
|
||||
|
||||
stream_anime()
|
||||
SyncPlayer(
|
||||
link,
|
||||
episode_title,
|
||||
headers=stream_headers,
|
||||
subtitles=subtitles,
|
||||
)
|
||||
else:
|
||||
run_mpv(
|
||||
link,
|
||||
episode_title,
|
||||
headers=stream_headers,
|
||||
subtitles=subtitles,
|
||||
player=config.player,
|
||||
)
|
||||
except IndexError as e:
|
||||
print(e)
|
||||
input("Enter to continue")
|
||||
stream_anime(anime)
|
||||
|
||||
stream_anime(anime)
|
||||
|
||||
31
fastanime/cli/commands/serve.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(
|
||||
help="Command that automates the starting of the builtin fastanime server",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# default
|
||||
fastanime serve
|
||||
|
||||
# specify host and port
|
||||
fastanime serve --host 127.0.0.1 --port 8080
|
||||
""",
|
||||
)
|
||||
@click.option("--host", "-H", help="Specify the host to run the server on")
|
||||
@click.option("--port", "-p", help="Specify the port to run the server on")
|
||||
def serve(host, port):
|
||||
import os
|
||||
import sys
|
||||
|
||||
from ...constants import APP_DIR
|
||||
|
||||
args = [sys.executable, "-m", "fastapi", "run"]
|
||||
if host:
|
||||
args.extend(["--host", host])
|
||||
|
||||
if port:
|
||||
args.extend(["--port", port])
|
||||
args.append(os.path.join(APP_DIR, "api"))
|
||||
os.execv(sys.executable, args)
|
||||
@@ -1,11 +1,24 @@
|
||||
import click
|
||||
|
||||
|
||||
@click.command(help="Helper command to update fastanime to latest")
|
||||
@click.command(
|
||||
help="Helper command to update fastanime to latest",
|
||||
epilog="""
|
||||
\b
|
||||
\b\bExamples:
|
||||
# update fastanime to latest
|
||||
fastanime update
|
||||
\b
|
||||
# check for latest release
|
||||
fastanime update --check
|
||||
|
||||
# Force an update regardless of the current version
|
||||
fastanime update --force
|
||||
""",
|
||||
)
|
||||
@click.option("--check", "-c", help="Check for the latest release", is_flag=True)
|
||||
def update(
|
||||
check,
|
||||
):
|
||||
@click.option("--force", "-c", help="Force update", is_flag=True)
|
||||
def update(check, force):
|
||||
from rich.console import Console
|
||||
from rich.markdown import Markdown
|
||||
|
||||
@@ -34,7 +47,7 @@ def update(
|
||||
print(f"You are running the latest version ({__version__}) of fastanime")
|
||||
_print_release(github_release_data)
|
||||
else:
|
||||
success, github_release_data = update_app()
|
||||
success, github_release_data = update_app(force)
|
||||
_print_release(github_release_data)
|
||||
if success:
|
||||
print("Successfully updated")
|
||||
|
||||
@@ -6,20 +6,20 @@ ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
|
||||
|
||||
anime_title_query = """
|
||||
query($query:String){
|
||||
Page(perPage:50){
|
||||
pageInfo{
|
||||
total
|
||||
}
|
||||
media(search:$query,type:ANIME){
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
query ($query: String) {
|
||||
Page(perPage: 50) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
media(search: $query, type: ANIME) {
|
||||
id
|
||||
idMal
|
||||
title {
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -59,7 +59,23 @@ def get_anime_titles(query: str, variables: dict = {}):
|
||||
else:
|
||||
return []
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
logger.error(f"Something unexpected occurred {e}")
|
||||
return []
|
||||
|
||||
|
||||
def downloaded_anime_titles(ctx, param, incomplete):
|
||||
import os
|
||||
|
||||
from ..constants import USER_VIDEOS_DIR
|
||||
|
||||
try:
|
||||
titles = [
|
||||
title
|
||||
for title in os.listdir(USER_VIDEOS_DIR)
|
||||
if title.lower().startswith(incomplete.lower()) or not incomplete
|
||||
]
|
||||
return titles
|
||||
except Exception:
|
||||
return []
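Note: this completer is meant to be attached to a click option via `shell_complete` (its `(ctx, param, incomplete)` signature is the click 8.x completion callback convention). A hypothetical wiring, with the option name chosen only for illustration:

```python
import click


@click.command()
@click.option(
    "--title",
    "-t",
    shell_complete=downloaded_anime_titles,  # completes from USER_VIDEOS_DIR
    help="Pre-select a downloaded anime playlist by title",
)
def downloads(title):
    ...
```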
|
||||
|
||||
|
||||
|
||||
@@ -1,12 +1,22 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from configparser import ConfigParser
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from rich import print
|
||||
|
||||
from ..constants import USER_CONFIG_PATH, USER_DATA_PATH, USER_VIDEOS_DIR
|
||||
from ..constants import (
|
||||
ASSETS_DIR,
|
||||
S_PLATFORM,
|
||||
USER_CONFIG_PATH,
|
||||
USER_DATA_PATH,
|
||||
USER_VIDEOS_DIR,
|
||||
USER_WATCH_HISTORY_PATH,
|
||||
)
|
||||
from ..libs.fzf import FZF_DEFAULT_OPTS, HEADER
|
||||
from ..libs.rofi import Rofi
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -15,155 +25,248 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
class Config(object):
|
||||
"""class that handles and manages configuration and user data throughout the clis lifespan
|
||||
|
||||
Attributes:
|
||||
anime_list: [TODO:attribute]
|
||||
watch_history: [TODO:attribute]
|
||||
fastanime_anilist_app_login_url: [TODO:attribute]
|
||||
anime_provider: [TODO:attribute]
|
||||
user_data: [TODO:attribute]
|
||||
configparser: [TODO:attribute]
|
||||
downloads_dir: [TODO:attribute]
|
||||
provider: [TODO:attribute]
|
||||
use_fzf: [TODO:attribute]
|
||||
use_rofi: [TODO:attribute]
|
||||
skip: [TODO:attribute]
|
||||
icons: [TODO:attribute]
|
||||
preview: [TODO:attribute]
|
||||
translation_type: [TODO:attribute]
|
||||
sort_by: [TODO:attribute]
|
||||
continue_from_history: [TODO:attribute]
|
||||
auto_next: [TODO:attribute]
|
||||
auto_select: [TODO:attribute]
|
||||
use_mpv_mod: [TODO:attribute]
|
||||
quality: [TODO:attribute]
|
||||
notification_duration: [TODO:attribute]
|
||||
error: [TODO:attribute]
|
||||
server: [TODO:attribute]
|
||||
format: [TODO:attribute]
|
||||
force_window: [TODO:attribute]
|
||||
preferred_language: [TODO:attribute]
|
||||
rofi_theme: [TODO:attribute]
|
||||
rofi_theme: [TODO:attribute]
|
||||
rofi_theme_input: [TODO:attribute]
|
||||
rofi_theme_input: [TODO:attribute]
|
||||
rofi_theme_confirm: [TODO:attribute]
|
||||
rofi_theme_confirm: [TODO:attribute]
|
||||
watch_history: [TODO:attribute]
|
||||
anime_list: [TODO:attribute]
|
||||
user: [TODO:attribute]
|
||||
"""
|
||||
|
||||
manga = False
|
||||
sync_play = False
|
||||
anime_list: list
|
||||
watch_history: dict
|
||||
watch_history: dict = {}
|
||||
fastanime_anilist_app_login_url = (
|
||||
"https://anilist.co/api/v2/oauth/authorize?client_id=20148&response_type=token"
|
||||
)
|
||||
anime_provider: "AnimeProvider"
|
||||
user_data = {"watch_history": {}, "animelist": [], "user": {}}
|
||||
user_data = {
|
||||
"recent_anime": [],
|
||||
"animelist": [],
|
||||
"user": {},
|
||||
"meta": {"last_updated": 0},
|
||||
}
|
||||
default_config = {
|
||||
"auto_next": "False",
|
||||
"menu_order": "",
|
||||
"manga_viewer": "feh",
|
||||
"auto_select": "True",
|
||||
"cache_requests": "true",
|
||||
"check_for_updates": "True",
|
||||
"continue_from_history": "True",
|
||||
"default_media_list_tracking": "None",
|
||||
"downloads_dir": USER_VIDEOS_DIR,
|
||||
"disable_mpv_popen": "True",
|
||||
"discord": "False",
|
||||
"episode_complete_at": "80",
|
||||
"use_experimental_fzf_anilist_search": "True",
|
||||
"ffmpegthumbnailer_seek_time": "-1",
|
||||
"force_forward_tracking": "true",
|
||||
"force_window": "immediate",
|
||||
"fzf_opts": FZF_DEFAULT_OPTS,
|
||||
"header_color": "95,135,175",
|
||||
"header_ascii_art": HEADER,
|
||||
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
|
||||
"icons": "false",
|
||||
"image_previews": "True" if S_PLATFORM != "win32" else "False",
|
||||
"image_renderer": "icat" if os.environ.get("KITTY_WINDOW_ID") else "chafa",
|
||||
"normalize_titles": "True",
|
||||
"notification_duration": "120",
|
||||
"max_cache_lifetime": "03:00:00",
|
||||
"mpv_args": "",
|
||||
"mpv_pre_args": "",
|
||||
"per_page": "15",
|
||||
"player": "mpv",
|
||||
"preferred_history": "local",
|
||||
"preferred_language": "english",
|
||||
"preview": "False",
|
||||
"preview_header_color": "215,0,95",
|
||||
"preview_separator_color": "208,208,208",
|
||||
"provider": "allanime",
|
||||
"quality": "1080",
|
||||
"recent": "50",
|
||||
"rofi_theme": os.path.join(ASSETS_DIR, "rofi_theme.rasi"),
|
||||
"rofi_theme_preview": os.path.join(ASSETS_DIR, "rofi_theme_preview.rasi"),
|
||||
"rofi_theme_confirm": os.path.join(ASSETS_DIR, "rofi_theme_confirm.rasi"),
|
||||
"rofi_theme_input": os.path.join(ASSETS_DIR, "rofi_theme_input.rasi"),
|
||||
"server": "top",
|
||||
"skip": "false",
|
||||
"sort_by": "search match",
|
||||
"sub_lang": "eng",
|
||||
"translation_type": "sub",
|
||||
"use_fzf": "False",
|
||||
"use_persistent_provider_store": "false",
|
||||
"use_python_mpv": "false",
|
||||
"use_rofi": "false",
|
||||
}
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.initialize_user_data()
|
||||
self.load_config()
|
||||
def __init__(self, no_config) -> None:
|
||||
self.initialize_user_data_and_watch_history_recent_anime()
|
||||
self.load_config(no_config)
|
||||
|
||||
def load_config(self):
|
||||
self.configparser = ConfigParser(
|
||||
{
|
||||
"quality": "1080",
|
||||
"auto_next": "False",
|
||||
"auto_select": "True",
|
||||
"sort_by": "search match",
|
||||
"downloads_dir": USER_VIDEOS_DIR,
|
||||
"translation_type": "sub",
|
||||
"server": "top",
|
||||
"continue_from_history": "True",
|
||||
"preferred_history": "local",
|
||||
"use_mpv_mod": "false",
|
||||
"force_window": "immediate",
|
||||
"preferred_language": "english",
|
||||
"use_fzf": "False",
|
||||
"preview": "False",
|
||||
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
|
||||
"provider": "allanime",
|
||||
"error": "3",
|
||||
"icons": "false",
|
||||
"notification_duration": "2",
|
||||
"skip": "false",
|
||||
"use_rofi": "false",
|
||||
"rofi_theme": "",
|
||||
"rofi_theme_input": "",
|
||||
"rofi_theme_confirm": "",
|
||||
"ffmpegthumnailer_seek_time": "-1",
|
||||
"sub_lang": "eng",
|
||||
}
|
||||
)
|
||||
def load_config(self, no_config=False):
|
||||
self.configparser = ConfigParser(self.default_config)
|
||||
self.configparser.add_section("stream")
|
||||
self.configparser.add_section("general")
|
||||
self.configparser.add_section("anilist")
|
||||
if not os.path.exists(USER_CONFIG_PATH):
|
||||
with open(USER_CONFIG_PATH, "w") as config:
|
||||
self.configparser.write(config)
|
||||
|
||||
self.configparser.read(USER_CONFIG_PATH)
|
||||
|
||||
# --- set config values from file or using defaults ---
|
||||
self.downloads_dir = self.get_downloads_dir()
|
||||
self.sub_lang = self.get_sub_lang()
|
||||
self.provider = self.get_provider()
|
||||
self.use_fzf = self.get_use_fzf()
|
||||
self.use_rofi = self.get_use_rofi()
|
||||
self.skip = self.get_skip()
|
||||
self.icons = self.get_icons()
|
||||
self.preview = self.get_preview()
|
||||
self.translation_type = self.get_translation_type()
|
||||
self.sort_by = self.get_sort_by()
|
||||
self.continue_from_history = self.get_continue_from_history()
|
||||
self.auto_next = self.get_auto_next()
|
||||
self.auto_select = self.get_auto_select()
|
||||
self.use_mpv_mod = self.get_use_mpv_mod()
|
||||
self.quality = self.get_quality()
|
||||
self.notification_duration = self.get_notification_duration()
|
||||
self.error = self.get_error()
|
||||
self.server = self.get_server()
|
||||
self.format = self.get_format()
|
||||
self.force_window = self.get_force_window()
|
||||
self.preferred_language = self.get_preferred_language()
|
||||
self.preferred_history = self.get_preferred_history()
|
||||
self.rofi_theme = self.get_rofi_theme()
|
||||
try:
|
||||
if os.path.exists(USER_CONFIG_PATH) and not no_config:
|
||||
self.configparser.read(USER_CONFIG_PATH, encoding="utf-8")
|
||||
except Exception as e:
|
||||
print(
|
||||
"[yellow]Warning[/]: Failed to read config file using default configuration",
|
||||
file=sys.stderr,
|
||||
)
|
||||
logger.error(f"Failed to read config file: {e}")
|
||||
time.sleep(5)
|
||||
|
||||
# get the configuration
|
||||
self.auto_next = self.configparser.getboolean("stream", "auto_next")
|
||||
self.auto_select = self.configparser.getboolean("stream", "auto_select")
|
||||
self.cache_requests = self.configparser.getboolean("general", "cache_requests")
|
||||
self.check_for_updates = self.configparser.getboolean(
|
||||
"general", "check_for_updates"
|
||||
)
|
||||
self.continue_from_history = self.configparser.getboolean(
|
||||
"stream", "continue_from_history"
|
||||
)
|
||||
self.default_media_list_tracking = self.configparser.get(
|
||||
"general", "default_media_list_tracking"
|
||||
)
|
||||
self.disable_mpv_popen = self.configparser.getboolean(
|
||||
"stream", "disable_mpv_popen"
|
||||
)
|
||||
self.discord = self.configparser.getboolean("general", "discord")
|
||||
self.downloads_dir = self.configparser.get("general", "downloads_dir")
|
||||
self.episode_complete_at = self.configparser.getint(
|
||||
"stream", "episode_complete_at"
|
||||
)
|
||||
self.use_experimental_fzf_anilist_search = self.configparser.getboolean(
|
||||
"general", "use_experimental_fzf_anilist_search"
|
||||
)
|
||||
self.ffmpegthumbnailer_seek_time = self.configparser.getint(
|
||||
"general", "ffmpegthumbnailer_seek_time"
|
||||
)
|
||||
self.force_forward_tracking = self.configparser.getboolean(
|
||||
"general", "force_forward_tracking"
|
||||
)
|
||||
self.force_window = self.configparser.get("stream", "force_window")
|
||||
self.format = self.configparser.get("stream", "format")
|
||||
self.fzf_opts = self.configparser.get("general", "fzf_opts")
|
||||
self.header_color = self.configparser.get("general", "header_color")
|
||||
self.header_ascii_art = self.configparser.get("general", "header_ascii_art")
|
||||
self.icons = self.configparser.getboolean("general", "icons")
|
||||
self.image_previews = self.configparser.getboolean("general", "image_previews")
|
||||
self.image_renderer = self.configparser.get("general", "image_renderer")
|
||||
self.normalize_titles = self.configparser.getboolean(
|
||||
"general", "normalize_titles"
|
||||
)
|
||||
self.notification_duration = self.configparser.getint(
|
||||
"general", "notification_duration"
|
||||
)
|
||||
self._max_cache_lifetime = self.configparser.get(
|
||||
"general", "max_cache_lifetime"
|
||||
)
|
||||
max_cache_lifetime = list(map(int, self._max_cache_lifetime.split(":")))
|
||||
self.max_cache_lifetime = (
|
||||
max_cache_lifetime[0] * 86400
|
||||
+ max_cache_lifetime[1] * 3600
|
||||
+ max_cache_lifetime[2] * 60
|
||||
)
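Note: for the default max_cache_lifetime of `03:00:00` (days:hours:minutes) the conversion above works out to three days:

```python
days, hours, minutes = map(int, "03:00:00".split(":"))
lifetime_seconds = days * 86400 + hours * 3600 + minutes * 60  # 259200 s == 3 days
```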
|
||||
self.mpv_args = self.configparser.get("general", "mpv_args")
|
||||
self.mpv_pre_args = self.configparser.get("general", "mpv_pre_args")
|
||||
self.per_page = self.configparser.get("anilist", "per_page")
|
||||
self.player = self.configparser.get("stream", "player")
|
||||
self.preferred_history = self.configparser.get("stream", "preferred_history")
|
||||
self.preferred_language = self.configparser.get("general", "preferred_language")
|
||||
self.preview = self.configparser.getboolean("general", "preview")
|
||||
self.preview_separator_color = self.configparser.get(
|
||||
"general", "preview_separator_color"
|
||||
)
|
||||
self.preview_header_color = self.configparser.get(
|
||||
"general", "preview_header_color"
|
||||
)
|
||||
self.provider = self.configparser.get("general", "provider")
|
||||
self.quality = self.configparser.get("stream", "quality")
|
||||
self.recent = self.configparser.getint("general", "recent")
|
||||
self.rofi_theme_confirm = self.configparser.get("general", "rofi_theme_confirm")
|
||||
self.rofi_theme_input = self.configparser.get("general", "rofi_theme_input")
|
||||
self.rofi_theme = self.configparser.get("general", "rofi_theme")
|
||||
self.rofi_theme_preview = self.configparser.get("general", "rofi_theme_preview")
|
||||
self.server = self.configparser.get("stream", "server")
|
||||
self.skip = self.configparser.getboolean("stream", "skip")
|
||||
self.sort_by = self.configparser.get("anilist", "sort_by")
|
||||
self.menu_order = self.configparser.get("general", "menu_order")
|
||||
self.manga_viewer = self.configparser.get("general", "manga_viewer")
|
||||
self.sub_lang = self.configparser.get("general", "sub_lang")
|
||||
self.translation_type = self.configparser.get("stream", "translation_type")
|
||||
self.use_fzf = self.configparser.getboolean("general", "use_fzf")
|
||||
self.use_python_mpv = self.configparser.getboolean("stream", "use_python_mpv")
|
||||
self.use_rofi = self.configparser.getboolean("general", "use_rofi")
|
||||
self.use_persistent_provider_store = self.configparser.getboolean(
|
||||
"general", "use_persistent_provider_store"
|
||||
)
|
||||
|
||||
Rofi.rofi_theme = self.rofi_theme
|
||||
self.rofi_theme_input = self.get_rofi_theme_input()
|
||||
Rofi.rofi_theme_input = self.rofi_theme_input
|
||||
self.rofi_theme_confirm = self.get_rofi_theme_confirm()
|
||||
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
|
||||
self.ffmpegthumbnailer_seek_time = self.get_ffmpegthumnailer_seek_time()
|
||||
Rofi.rofi_theme_preview = self.rofi_theme_preview
|
||||
|
||||
os.environ["FZF_DEFAULT_OPTS"] = self.fzf_opts
|
||||
|
||||
# ---- setup user data ------
|
||||
self.watch_history: dict = self.user_data.get("watch_history", {})
|
||||
self.anime_list: list = self.user_data.get("animelist", [])
|
||||
self.user: dict = self.user_data.get("user", {})
|
||||
|
||||
if not os.path.exists(USER_CONFIG_PATH):
|
||||
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as config:
|
||||
config.write(self.__repr__())
|
||||
|
||||
def set_fastanime_config_environs(self):
|
||||
current_config = []
|
||||
for key in self.default_config:
|
||||
if not os.environ.get(f"FASTANIME_{key.upper()}"):
|
||||
current_config.append(
|
||||
(f"FASTANIME_{key.upper()}", str(getattr(self, key)))
|
||||
)
|
||||
os.environ.update(current_config)
|
||||
|
||||
def update_user(self, user):
|
||||
self.user = user
|
||||
self.user_data["user"] = user
|
||||
self._update_user_data()
|
||||
|
||||
def update_watch_history(
|
||||
self, anime_id: int, episode: str, start_time="0", total_time="0"
|
||||
def update_recent(self, recent_anime: list):
|
||||
recent_anime_ids = []
|
||||
_recent_anime = []
|
||||
for anime in recent_anime:
|
||||
if (
|
||||
anime["id"] not in recent_anime_ids
|
||||
and len(recent_anime_ids) <= self.recent
|
||||
):
|
||||
_recent_anime.append(anime)
|
||||
recent_anime_ids.append(anime["id"])
|
||||
|
||||
self.user_data["recent_anime"] = _recent_anime
|
||||
self._update_user_data()
|
||||
|
||||
def media_list_track(
|
||||
self,
|
||||
anime_id: int,
|
||||
episode_no: str,
|
||||
episode_stopped_at="0",
|
||||
episode_total_length="0",
|
||||
progress_tracking="prompt",
|
||||
):
|
||||
self.watch_history.update(
|
||||
{
|
||||
str(anime_id): {
|
||||
"episode": episode,
|
||||
"start_time": start_time,
|
||||
"total_time": total_time,
|
||||
"episode_no": episode_no,
|
||||
"episode_stopped_at": episode_stopped_at,
|
||||
"episode_total_length": episode_total_length,
|
||||
"progress_tracking": progress_tracking,
|
||||
}
|
||||
}
|
||||
)
|
||||
self.user_data["watch_history"] = self.watch_history
|
||||
self._update_user_data()
|
||||
with open(USER_WATCH_HISTORY_PATH, "w") as f:
|
||||
json.dump(self.watch_history, f)
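Note: a call such as the hypothetical one below therefore leaves an entry of roughly this shape in the watch-history file (the AniList id and the timings are made-up values):

```python
config.media_list_track(21, "5", episode_stopped_at="634.2", episode_total_length="1440.0")
# USER_WATCH_HISTORY_PATH then contains something like:
# {
#   "21": {
#     "episode_no": "5",
#     "episode_stopped_at": "634.2",
#     "episode_total_length": "1440.0",
#     "progress_tracking": "prompt"
#   }
# }
```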
|
||||
|
||||
def initialize_user_data(self):
|
||||
def initialize_user_data_and_watch_history_recent_anime(self):
|
||||
try:
|
||||
if os.path.isfile(USER_DATA_PATH):
|
||||
with open(USER_DATA_PATH, "r") as f:
|
||||
@@ -171,203 +274,361 @@ class Config(object):
|
||||
self.user_data.update(user_data)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
try:
|
||||
if os.path.isfile(USER_WATCH_HISTORY_PATH):
|
||||
with open(USER_WATCH_HISTORY_PATH, "r") as f:
|
||||
watch_history = json.load(f)
|
||||
self.watch_history.update(watch_history)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
def _update_user_data(self):
|
||||
"""method that updates the actual user data file"""
|
||||
with open(USER_DATA_PATH, "w") as f:
|
||||
json.dump(self.user_data, f)
|
||||
|
||||
# getters for user configuration
|
||||
|
||||
# --- general section ---
|
||||
def get_provider(self):
|
||||
return self.configparser.get("general", "provider")
|
||||
|
||||
def get_ffmpegthumnailer_seek_time(self):
|
||||
return self.configparser.getint("general", "ffmpegthumnailer_seek_time")
|
||||
|
||||
def get_preferred_language(self):
|
||||
return self.configparser.get("general", "preferred_language")
|
||||
|
||||
def get_sub_lang(self):
|
||||
return self.configparser.get("general", "sub_lang")
|
||||
|
||||
def get_downloads_dir(self):
|
||||
return self.configparser.get("general", "downloads_dir")
|
||||
|
||||
def get_icons(self):
|
||||
return self.configparser.getboolean("general", "icons")
|
||||
|
||||
def get_preview(self):
|
||||
return self.configparser.getboolean("general", "preview")
|
||||
|
||||
def get_use_fzf(self):
|
||||
return self.configparser.getboolean("general", "use_fzf")
|
||||
|
||||
# rofi configuration
|
||||
def get_use_rofi(self):
|
||||
return self.configparser.getboolean("general", "use_rofi")
|
||||
|
||||
def get_rofi_theme(self):
|
||||
return self.configparser.get("general", "rofi_theme")
|
||||
|
||||
def get_rofi_theme_input(self):
|
||||
return self.configparser.get("general", "rofi_theme_input")
|
||||
|
||||
def get_rofi_theme_confirm(self):
|
||||
return self.configparser.get("general", "rofi_theme_confirm")
|
||||
|
||||
# --- stream section ---
|
||||
def get_skip(self):
|
||||
return self.configparser.getboolean("stream", "skip")
|
||||
|
||||
def get_auto_next(self):
|
||||
return self.configparser.getboolean("stream", "auto_next")
|
||||
|
||||
def get_auto_select(self):
|
||||
return self.configparser.getboolean("stream", "auto_select")
|
||||
|
||||
def get_continue_from_history(self):
|
||||
return self.configparser.getboolean("stream", "continue_from_history")
|
||||
|
||||
def get_use_mpv_mod(self):
|
||||
return self.configparser.getboolean("stream", "use_mpv_mod")
|
||||
|
||||
def get_notification_duration(self):
|
||||
return self.configparser.getint("general", "notification_duration")
|
||||
|
||||
def get_error(self):
|
||||
return self.configparser.getint("stream", "error")
|
||||
|
||||
def get_force_window(self):
|
||||
return self.configparser.get("stream", "force_window")
|
||||
|
||||
def get_translation_type(self):
|
||||
return self.configparser.get("stream", "translation_type")
|
||||
|
||||
def get_preferred_history(self):
|
||||
return self.configparser.get("stream", "preferred_history")
|
||||
|
||||
def get_quality(self):
|
||||
return self.configparser.get("stream", "quality")
|
||||
|
||||
def get_server(self):
|
||||
return self.configparser.get("stream", "server")
|
||||
|
||||
def get_format(self):
|
||||
return self.configparser.get("stream", "format")
|
||||
|
||||
def get_sort_by(self):
|
||||
return self.configparser.get("anilist", "sort_by")
|
||||
|
||||
def update_config(self, section: str, key: str, value: str):
|
||||
self.configparser.set(section, key, value)
|
||||
with open(USER_CONFIG_PATH, "w") as config:
|
||||
self.configparser.write(config)
|
||||
|
||||
def __repr__(self):
|
||||
current_config_state = f"""
|
||||
new_line = "\n"
|
||||
tab = "\t"
|
||||
current_config_state = f"""\
|
||||
#
|
||||
# ███████╗░█████╗░░██████╗████████╗░█████╗░███╗░░██╗██╗███╗░░░███╗███████╗ ░█████╗░░█████╗░███╗░░██╗███████╗██╗░██████╗░
|
||||
# ██╔════╝██╔══██╗██╔════╝╚══██╔══╝██╔══██╗████╗░██║██║████╗░████║██╔════╝ ██╔══██╗██╔══██╗████╗░██║██╔════╝██║██╔════╝░
|
||||
# █████╗░░███████║╚█████╗░░░░██║░░░███████║██╔██╗██║██║██╔████╔██║█████╗░░ ██║░░╚═╝██║░░██║██╔██╗██║█████╗░░██║██║░░██╗░
|
||||
# ██╔══╝░░██╔══██║░╚═══██╗░░░██║░░░██╔══██║██║╚████║██║██║╚██╔╝██║██╔══╝░░ ██║░░██╗██║░░██║██║╚████║██╔══╝░░██║██║░░╚██╗
|
||||
# ██║░░░░░██║░░██║██████╔╝░░░██║░░░██║░░██║██║░╚███║██║██║░╚═╝░██║███████╗ ╚█████╔╝╚█████╔╝██║░╚███║██║░░░░░██║╚██████╔╝
|
||||
# ╚═╝░░░░░╚═╝░░╚═╝╚═════╝░░░░╚═╝░░░╚═╝░░╚═╝╚═╝░░╚══╝╚═╝╚═╝░░░░░╚═╝╚══════╝ ░╚════╝░░╚════╝░╚═╝░░╚══╝╚═╝░░░░░╚═╝░╚═════╝░
|
||||
#
|
||||
[general]
|
||||
# Can you rice it?
|
||||
# For the preview pane
|
||||
preview_separator_color = {self.preview_separator_color}
|
||||
|
||||
preview_header_color = {self.preview_header_color}
|
||||
|
||||
# For the header
|
||||
# Be sure to indent
|
||||
header_ascii_art = {new_line.join([tab + line for line in self.header_ascii_art.split(new_line)])}
|
||||
|
||||
header_color = {self.header_color}
|
||||
|
||||
# the image renderer to use [icat/chafa]
|
||||
image_renderer = {self.image_renderer}
|
||||
|
||||
# To be passed to fzf
|
||||
# Be sure to indent
|
||||
fzf_opts = {new_line.join([tab + line for line in self.fzf_opts.split(new_line)])}
|
||||
|
||||
# Whether to show the icons in the TUI [True/False]
|
||||
# More like emojis
|
||||
# By the way, if you have any recommendations
|
||||
# for which should be used where, please
|
||||
# don't hesitate to share your opinion
|
||||
# because it's a lot of work
|
||||
# to look for the right one for each menu option
|
||||
# Be sure to also give the replacement emoji
|
||||
icons = {self.icons}
|
||||
|
||||
# Whether to normalize provider titles [True/False]
|
||||
# Basically takes the provider titles and finds the corresponding Anilist title, then changes the title to that
|
||||
# Useful for uniformity, especially when downloading from different providers
|
||||
# This also applies to episode titles
|
||||
normalize_titles = {self.normalize_titles}
|
||||
|
||||
# Whether to check for updates every time you run the script [True/False]
|
||||
# This is useful for keeping your script up to date
|
||||
# because there are always new features being added 😄
|
||||
check_for_updates = {self.check_for_updates}
|
||||
|
||||
# Can be [allanime, animepahe, hianime, nyaa, yugen]
|
||||
# Allanime is the most reliable
|
||||
# Animepahe provides different links to streams of different quality, so a quality can be selected reliably with the --quality option
|
||||
# Hianime usually provides subs in different languages, and its servers are generally faster
|
||||
# NOTE: Currently, they are encrypting the video links
|
||||
# though I’m working on it
|
||||
# However, you can still get the links to the subs
|
||||
# with ```fastanime grab``` command
|
||||
# Yugen meh
|
||||
# Nyaa for those who prefer torrents, though not reliable due to auto-selection of results
|
||||
# as most of the data in Nyaa is not structured
|
||||
# though it works relatively well for new anime
|
||||
# especially with SubsPlease and HorribleSubs
|
||||
# Oh, and you should have webtorrent CLI to use this
|
||||
provider = {self.provider}
|
||||
|
||||
# Display language [english, romaji]
|
||||
# This is passed to Anilist directly and is used to set the language for anime titles
|
||||
# when using the Anilist interface
|
||||
preferred_language = {self.preferred_language}
|
||||
|
||||
# Download directory
|
||||
# Where you will find your videos after downloading them with 'fastanime download' command
|
||||
downloads_dir = {self.downloads_dir}
|
||||
|
||||
# Whether to show a preview window when using fzf or rofi [True/False]
|
||||
# The preview requires you to have a command-line image viewer as documented in the README
|
||||
# This is only when using fzf or rofi
|
||||
# If you don't care about image and text previews, it doesn’t matter
|
||||
# though it’s awesome
|
||||
# Try it, and you will see
|
||||
preview = {self.preview}
|
||||
|
||||
# Whether to show images in the preview [True/False]
|
||||
# Windows users: just switch to Linux 😄
|
||||
# because even if you enable it
|
||||
# it won't look pretty
|
||||
# Just be satisfied with the text previews
|
||||
# So forget it exists 🤣
|
||||
image_previews = {self.image_previews}
|
||||
|
||||
# the time to seek when using ffmpegthumbnailer [-1 to 100]
|
||||
# -1 means random and is the default
|
||||
# ffmpegthumbnailer is used to generate previews,
|
||||
# allowing you to select the time in the video to extract an image.
|
||||
# Random makes things quite exciting because you never know at what time it will extract the image.
|
||||
# Used by the `fastanime downloads` command.
|
||||
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
|
||||
|
||||
# specify the order of menu items in a comma-separated list.
|
||||
# Only include the base names of menu options (e.g., "Trending", "Recent").
|
||||
# The default value is 'Trending,Recent,Watching,Paused,Dropped,Planned,Completed,Rewatching,Recently Updated Anime,Search,Watch History,Random Anime,Most Popular Anime,Most Favourite Anime,Most Scored Anime,Upcoming Anime,Edit Config,Exit'.
|
||||
# Leave blank to use the default menu order.
|
||||
# You can also omit some options by not including them in the list.
|
||||
menu_order = {self.menu_order}
|
||||
|
||||
# whether to use fzf as the interface for the anilist command and others. [True/False]
|
||||
use_fzf = {self.use_fzf}
|
||||
|
||||
# whether to use rofi for the UI [True/False]
|
||||
# It's more useful if you want to create a desktop entry,
|
||||
# which can be set up with 'fastanime config --desktop-entry'.
|
||||
# If you want it to be your sole interface even when fastanime is run directly from the terminal, enable this.
|
||||
use_rofi = {self.use_rofi}
|
||||
|
||||
# rofi themes to use <path>
|
||||
# The value of this option is the path to the rofi config files to use.
|
||||
# I chose to split it into 4 since it gives the best look and feel.
|
||||
# You can refer to the rofi demo on GitHub to see for yourself.
|
||||
# I need help designing the default rofi themes.
|
||||
# If you fancy yourself a rofi ricer, please contribute to improving
|
||||
# the default theme.
|
||||
rofi_theme = {self.rofi_theme}
|
||||
|
||||
rofi_theme_preview = {self.rofi_theme_preview}
|
||||
|
||||
rofi_theme_input = {self.rofi_theme_input}
|
||||
|
||||
rofi_theme_confirm = {self.rofi_theme_confirm}
|
||||
|
||||
# the duration in minutes a notification will stay on the screen.
|
||||
# Used by the notifier command.
|
||||
notification_duration = {self.notification_duration}
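Since the value is in minutes while desktop notification back-ends such as plyer (used later in this diff) take a timeout in seconds, a conversion along these lines has to happen somewhere; the function below is only a sketch:

```python
from plyer import notification  # plyer is the optional notification dependency mentioned in this diff


def notify(title: str, message: str, notification_duration: int) -> None:
    """Sketch: show a desktop notification that stays for `notification_duration` minutes."""
    notification.notify(
        title=title,
        message=message,
        timeout=notification_duration * 60,  # plyer expects seconds
    )
```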
|
||||
|
||||
# used when the provider offers subtitles in different languages.
|
||||
# Currently, this is the case for:
|
||||
# hianime.
|
||||
# The values for this option are the short names for languages.
|
||||
# Regex is used to determine what you selected.
|
||||
sub_lang = {self.sub_lang}
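A hedged sketch of the regex matching described above; the subtitle dictionaries and their 'language' key are assumptions for illustration:

```python
import re


def pick_subtitles(subtitles: list[dict], sub_lang: str) -> list[dict]:
    """Sketch: keep subtitle tracks whose language matches a short name such as 'eng'."""
    pattern = re.compile(sub_lang, re.IGNORECASE)
    return [track for track in subtitles if pattern.search(track.get("language", ""))]


# pick_subtitles([{"language": "English", "url": "..."}], "eng") keeps the English track
```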
|
||||
|
||||
# what is your default media list tracking [track/disabled/prompt]
|
||||
# This only affects your anilist anime list.
|
||||
# track - means your progress will always be reflected in your anilist anime list.
|
||||
# disabled - means progress tracking will no longer be reflected in your anime list.
|
||||
# prompt - means you will be prompted for each anime whether you want your progress to be tracked or not.
|
||||
default_media_list_tracking = {self.default_media_list_tracking}
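The three values reduce to a small decision, roughly like this sketch (the `ask` prompt callback is illustrative):

```python
from typing import Callable


def should_track(default_media_list_tracking: str, ask: Callable[[str], bool]) -> bool:
    """Sketch: decide whether progress is pushed to your AniList anime list."""
    if default_media_list_tracking == "track":
        return True
    if default_media_list_tracking == "disabled":
        return False
    # "prompt": ask per anime
    return ask("Track progress for this anime on AniList?")
```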
|
||||
|
||||
# whether media list tracking should only be updated when the next episode is greater than the previous.
|
||||
# This only affects your anilist anime list.
|
||||
force_forward_tracking = {self.force_forward_tracking}
|
||||
|
||||
# whether to cache requests [true/false]
|
||||
# This improves the experience by making it faster,
|
||||
# as data doesn't always need to be fetched from the web server
|
||||
# and can instead be retrieved locally from the cached_requests_db.
|
||||
cache_requests = {self.cache_requests}
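Conceptually, caching a request just means checking a local store before hitting the network; the in-memory dict below stands in for the on-disk cached_requests_db and is purely illustrative:

```python
import requests

_cache: dict[str, bytes] = {}  # stand-in for the on-disk cached_requests_db


def cached_get(url: str, cache_requests: bool = True) -> bytes:
    """Sketch: return a cached response body when available, else fetch and store it."""
    if cache_requests and url in _cache:
        return _cache[url]
    body = requests.get(url).content
    if cache_requests:
        _cache[url] = body
    return body
```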
|
||||
|
||||
# the max lifetime for a cached request <days:hours:minutes>
|
||||
# Defaults to 3 days = 03:00:00.
|
||||
# This is the time after which a cached request will be deleted (technically).
|
||||
max_cache_lifetime = {self._max_cache_lifetime}
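The <days:hours:minutes> string maps directly onto a timedelta; a minimal parsing sketch, assuming exactly three colon-separated fields as in the 03:00:00 default:

```python
from datetime import timedelta


def parse_cache_lifetime(value: str) -> timedelta:
    """Sketch: '03:00:00' -> timedelta(days=3); cached entries older than this are dropped."""
    days, hours, minutes = (int(part) for part in value.split(":"))
    return timedelta(days=days, hours=hours, minutes=minutes)


assert parse_cache_lifetime("03:00:00") == timedelta(days=3)
```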
|
||||
|
||||
# whether to use a persistent store (basically an SQLite DB) for storing some data the provider requires
|
||||
# to enable a seamless experience. [true/false]
|
||||
# This option exists primarily to optimize FastAnime as a library in a website project.
|
||||
# For now, it's not recommended to change it. Leave it as is.
|
||||
use_persistent_provider_store = {self.use_persistent_provider_store}
|
||||
|
||||
# number of recent anime to keep [0-50].
|
||||
# 0 will disable recent anime tracking.
|
||||
recent = {self.recent}
|
||||
|
||||
# enable or disable Discord activity updater.
|
||||
# If you want to enable it, please follow the link below to register the app with your Discord account:
|
||||
# https://discord.com/oauth2/authorize?client_id=1292070065583165512
|
||||
discord = {self.discord}
|
||||
|
||||
# comma separated list of args that will be passed to mpv
|
||||
# example: --vo=kitty,--fullscreen,--volume=50
|
||||
mpv_args = {self.mpv_args}
|
||||
|
||||
# command line options passed before the mpv command
|
||||
# example: kitty
|
||||
# useful in case you want to run something like: kitty mpv --vo=kitty <url>
|
||||
mpv_pre_args = {self.mpv_pre_args}
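Taken together, mpv_args and mpv_pre_args are just comma-separated string lists that end up around the mpv invocation; a hedged sketch of how the final command line could be assembled (the helper name is illustrative):

```python
import shutil


def build_player_command(url: str, mpv_args: str, mpv_pre_args: str) -> list[str]:
    """Sketch: mpv_pre_args='kitty', mpv_args='--vo=kitty,--fullscreen'
    -> ['kitty', 'mpv', '--vo=kitty', '--fullscreen', url]."""
    pre = [arg for arg in mpv_pre_args.split(",") if arg] if mpv_pre_args else []
    args = [arg for arg in mpv_args.split(",") if arg] if mpv_args else []
    mpv = shutil.which("mpv") or "mpv"
    return pre + [mpv, *args, url]
```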
|
||||
|
||||
# choose manga viewer [feh/icat]
|
||||
# feh is the default and requires feh to be installed
|
||||
# icat is for kitty terminal users only
|
||||
manga_viewer = {self.manga_viewer}
|
||||
|
||||
# a little something I introduced
|
||||
# remember how on a website, when you search for an anime, it dynamically reloads
|
||||
# after every keystroke
|
||||
# well, who says it can't be done in the terminal lol
|
||||
# though it's still experimental
|
||||
# use this option to disable it
|
||||
use_experimental_fzf_anilist_search = {self.use_experimental_fzf_anilist_search}
|
||||
|
||||
[stream]
|
||||
# Auto continue from watch history
|
||||
# the quality of the stream [1080,720,480,360]
|
||||
# this option is usually only reliable when:
|
||||
# provider=animepahe
|
||||
# since it provides links that actually point to streams of different qualities
|
||||
# while the rest just point to another link that can provide the anime from the same server
|
||||
quality = {self.quality}
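As a rough illustration of why this is only reliable for animepahe, picking a stream by quality amounts to matching against per-link quality metadata when it exists; the real helper in this codebase is filter_by_quality, but the sketch below shows the idea with an assumed 'quality' key:

```python
def pick_stream_by_quality(links: list[dict], quality: str) -> dict | None:
    """Sketch: prefer the link whose quality matches, else fall back to the first one."""
    for link in links:
        if str(link.get("quality")) == str(quality):
            return link
    return links[0] if links else None


# pick_stream_by_quality([{"quality": 720}, {"quality": 1080}], "1080") -> {"quality": 1080}
```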
|
||||
|
||||
# Auto continue from watch history [True/False]
|
||||
# this will make fastanime choose the episode that you last watched to completion
|
||||
# and increment it by one
|
||||
# and use that to auto select the episode you want to watch
|
||||
continue_from_history = {self.continue_from_history}
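In other words, the last episode you finished is bumped by one; a sketch with an assumed watch-history shape:

```python
def next_episode_from_history(watch_history: dict, anime_id: str) -> str:
    """Sketch: if episode '12' was last watched to completion, auto-select episode '13'."""
    last_watched = int(watch_history.get(anime_id, {}).get("episode", "0"))
    return str(last_watched + 1)
```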
|
||||
|
||||
# which hostory to use [local/remote]
|
||||
# which history to use [local/remote]
|
||||
# local history means it will just use the watch history stored locally in your device
|
||||
# the file that stores it is called watch_history.json
|
||||
# and is stored next to your config file
|
||||
# remote means it ignores the last episode stored locally
|
||||
# and instead uses the one in your anilist anime list
|
||||
# this config option is useful if you want to overwrite your local history
|
||||
# or import history from another device or platform
|
||||
# since remote history will take precedence over what's available locally
|
||||
preferred_history = {self.preferred_history}
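A sketch of the local/remote choice; the AniList progress value and field names are assumptions for illustration:

```python
def resolve_start_episode(preferred_history: str, local_episode: str, anilist_progress: int | None) -> str:
    """Sketch: 'remote' trusts your AniList progress, 'local' trusts watch_history.json."""
    if preferred_history == "remote" and anilist_progress is not None:
        return str(anilist_progress + 1)
    return local_episode
```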
|
||||
|
||||
|
||||
# Preferred language for anime (options: dub, sub)
|
||||
# Preferred language for anime [dub/sub]
|
||||
translation_type = {self.translation_type}
|
||||
|
||||
# Default server (options: dropbox, sharepoint, wetransfer.gogoanime, top, wixmp)
|
||||
# what server to use for a particular provider
|
||||
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
|
||||
# animepahe: [kwik]
|
||||
# hianime: [HD1, HD2, StreamSB, StreamTape] : only HD2 for now
|
||||
# yugen: [gogoanime]
|
||||
# 'top' can also be used as a value for this option
|
||||
# 'top' will cause fastanime to auto select the first server it sees
|
||||
# this saves on resources and is faster since not all servers are being fetched
|
||||
server = {self.server}
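'top' can be pictured as taking the first server a provider yields instead of fetching them all, which is why it is cheaper; a sketch assuming servers arrive as an iterable of dicts with a 'server' name:

```python
from typing import Iterable, Optional


def choose_server(servers: Iterable[dict], server: str) -> Optional[dict]:
    """Sketch: 'top' grabs the first server yielded; otherwise match by name."""
    for candidate in servers:
        if server == "top" or candidate.get("server") == server:
            return candidate
    return None
```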
|
||||
|
||||
# Auto-select next episode
|
||||
# Auto select next episode [True/False]
|
||||
# this makes fastanime increment the current episode number
|
||||
# and then use that value to fetch the next episode instead of prompting
|
||||
# this option is useful for binging
|
||||
auto_next = {self.auto_next}
|
||||
|
||||
# Auto select the anime provider results with fuzzy find.
|
||||
# Note this wont always be correct.But 99% of the time will be.
|
||||
# Auto select the anime provider results with fuzzy find. [True/False]
|
||||
# Note this won't always be correct
|
||||
# this is because the providers sometimes use non-standard names
|
||||
# that are their own preference rather than the official names
|
||||
# But 99% of the time it will be accurate
|
||||
# if this happens, just turn off auto_select in the menus or from the command line
|
||||
# and manually select the correct anime title
|
||||
# edit this file <https://github.com/Benexl/FastAnime/blob/master/fastanime/Utility/data.py>
|
||||
# and add to that provider's dictionary
|
||||
# the provider title (key) and its corresponding AniList name (value)
|
||||
# and then please open a PR
|
||||
# issues on the same will be ignored and then closed 😆
|
||||
auto_select = {self.auto_select}
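The fuzzy auto-selection can be approximated with difflib from the standard library; FastAnime's own matcher may differ, but the idea is to pick the provider result whose title is closest to the AniList title:

```python
from difflib import SequenceMatcher


def auto_select_result(anilist_title: str, provider_titles: list[str]) -> str:
    """Sketch: return the provider title most similar to the AniList title."""
    def score(title: str) -> float:
        return SequenceMatcher(None, anilist_title.lower(), title.lower()).ratio()

    return max(provider_titles, key=score)


# auto_select_result("Dan Da Dan", ["Dandadan", "Danmachi"]) -> "Dandadan"
```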
|
||||
|
||||
# whether to skip the opening and ending theme songs
|
||||
# whether to skip the opening and ending theme songs [True/False]
|
||||
# NOTE: requires ani-skip to be in path
|
||||
# for python-mpv users, I am planning to implement this functionality in Python without the use of an external script
|
||||
# so it's disabled for now
|
||||
# and anyways Dan Da Dan
|
||||
# taught us the importance of letting it flow 🙃
|
||||
skip = {self.skip}
|
||||
|
||||
# the maximum delta time in minutes after which the episode should be considered as completed
|
||||
# used by the continue-from-timestamp feature
|
||||
error = {self.error}
|
||||
# at what percentage progress should the episode be considered as completed [0-100]
|
||||
# this value is used to determine whether to increment the current episode number and save it to your local list
|
||||
# so you can continue immediately to the next episode without selecting it the next time you decide to watch the anime
|
||||
# it is also used to determine whether your anilist anime list should be updated or not
|
||||
episode_complete_at = {self.episode_complete_at}
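A sketch of the completion check: the player reports how far you got and the total runtime (much like stream_video does later in this diff), and the percentage is compared against episode_complete_at:

```python
def is_episode_complete(stop_time: str, total_time: str, episode_complete_at: int) -> bool:
    """Sketch: stop_time='21:00', total_time='24:00', episode_complete_at=80 -> True (87.5%)."""
    def to_seconds(timestamp: str) -> int:
        seconds = 0
        for part in timestamp.split(":"):
            seconds = seconds * 60 + int(part)
        return seconds

    total = to_seconds(total_time)
    if total == 0:
        return False
    return (to_seconds(stop_time) / total) * 100 >= episode_complete_at
```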
|
||||
|
||||
# whether to use python-mpv
|
||||
# whether to use python-mpv [True/False]
|
||||
# to enable superior control over the player
|
||||
# adding more options to it
|
||||
use_mpv_mod = {self.use_mpv_mod}
|
||||
# Enable this option and you will ask yourself
|
||||
# why you did not discover fastanime sooner 🙃
|
||||
# Since you basically don't have to close the player window
|
||||
# to go to the next or previous episode, switch servers,
|
||||
# change translation type or change to a given episode x
|
||||
# so try it if you haven't already
|
||||
# if you have any issues setting it up
|
||||
# don't be afraid to ask
|
||||
# especially on windows
|
||||
# honestly it can be a pain to set it up there
|
||||
# personally it took me quite some time to figure it out
|
||||
# this is because of how windows handles shared libraries
|
||||
# so just ask when you find yourself stuck
|
||||
# or just switch to nixos 😄
|
||||
use_python_mpv = {self.use_python_mpv}
|
||||
|
||||
|
||||
# whether to use popen to get the timestamps for continue_from_history
|
||||
# implemented because Popen does not work for some reason on NixOS and apparently on macOS as well
|
||||
# if you are on nixos or mac and you have a solution to this problem please share
|
||||
# i will be glad to hear it 😄
|
||||
# So for now ignore this option
|
||||
# and anyway, the new method of getting timestamps is better
|
||||
disable_mpv_popen = {self.disable_mpv_popen}
|
||||
|
||||
# force mpv window
|
||||
# the default 'immediate' just makes mpv open the window even if the video has not yet loaded
|
||||
# done for aesthetics
|
||||
# passed directly to mpv, so the values are the same
|
||||
force_window = immediate
|
||||
|
||||
# the format of downloaded anime and trailer
|
||||
# based on yt-dlp format and passed directly to it
|
||||
# learn more by looking it up on their site
|
||||
# only works for downloaded anime if server=gogoanime
|
||||
# since its the only one that offers different formats
|
||||
# the others tend not to
|
||||
# only works for downloaded anime if:
|
||||
# provider=allanime, server=gogoanime
|
||||
# provider=allanime, server=wixmp
|
||||
# provider=hianime
|
||||
# this is because they provide an m3u8 file that contains multiple quality streams
|
||||
format = {self.format}
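The value is a yt-dlp style format string; a hedged sketch of forwarding it to yt-dlp for a download (run_mpv later in this diff forwards the same value to mpv via --ytdl-format):

```python
def ytdlp_download(url: str, fmt: str, out_dir: str) -> None:
    """Sketch: download with yt-dlp using the configured format string, e.g. 'best[height<=720]'."""
    from yt_dlp import YoutubeDL  # yt-dlp is already imported elsewhere in this diff

    options = {"format": fmt or "best", "outtmpl": f"{out_dir}/%(title)s.%(ext)s"}
    with YoutubeDL(options) as ydl:
        ydl.download([url])
```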
|
||||
|
||||
[general]
|
||||
# set the player to use for streaming [mpv/vlc]
|
||||
# while this option exists, I still recommend that you use mpv
|
||||
# since you will miss out on some features if you use the others
|
||||
player = {self.player}
|
||||
|
||||
# can be [allanime,animepahe]
|
||||
provider = {self.provider}
|
||||
[anilist]
|
||||
per_page = {self.per_page}
|
||||
|
||||
# Display language (options: english, romaji)
|
||||
preferred_language = {self.preferred_language}
|
||||
|
||||
# Download directory
|
||||
downloads_dir = {self.downloads_dir}
|
||||
|
||||
# whether to show a preview window when using fzf or rofi
|
||||
preview = {self.preview}
|
||||
|
||||
# the time to seek when using ffmpegthumbnailer [-1 to 100]
|
||||
# -1 means random and is the default
|
||||
ffmpegthumbnailer_seek_time = {self.ffmpegthumbnailer_seek_time}
|
||||
|
||||
# whether to use fzf as the interface for the anilist command and others.
|
||||
use_fzf = {self.use_fzf}
|
||||
|
||||
# whether to use rofi for the ui
|
||||
use_rofi = {self.use_rofi}
|
||||
|
||||
# rofi theme to use
|
||||
rofi_theme = {self.rofi_theme}
|
||||
|
||||
rofi_theme_input = {self.rofi_theme_input}
|
||||
|
||||
rofi_theme_confirm = {self.rofi_theme_confirm}
|
||||
|
||||
|
||||
# whether to show the icons
|
||||
icons = {self.icons}
|
||||
|
||||
# the duration in minutes a notification will stay on the screen
|
||||
# used by the notifier command
|
||||
notification_duration = {self.notification_duration}
|
||||
"""
|
||||
#
|
||||
# HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
|
||||
# https://github.com/Benexl/FastAnime
|
||||
#
|
||||
# Also join the discord server
|
||||
# where the anime tech community lives :)
|
||||
# https://discord.gg/C4rhMA4mmK
|
||||
#
|
||||
"""
|
||||
return current_config_state
|
||||
|
||||
def __str__(self):
|
||||
return self.__repr__()
|
||||
|
||||
# WARNING: deprecated and will probably be removed
|
||||
def update_anime_list(self, anime_id: int, remove=False):
|
||||
if remove:
|
||||
try:
|
||||
self.anime_list.remove(anime_id)
|
||||
print("Succesfully removed :cry:")
|
||||
except Exception:
|
||||
print(anime_id, "Nothing to remove :confused:")
|
||||
else:
|
||||
self.anime_list.append(anime_id)
|
||||
self.user_data["animelist"] = list(set(self.anime_list))
|
||||
self._update_user_data()
|
||||
print("Succesfully added :smile:")
|
||||
input("Enter to continue...")
|
||||
|
||||
File diff suppressed because it is too large
@@ -5,15 +5,15 @@ import shutil
|
||||
import subprocess
|
||||
import textwrap
|
||||
from threading import Thread
|
||||
from hashlib import sha256
|
||||
|
||||
import requests
|
||||
from yt_dlp.utils import clean_html
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ...constants import APP_CACHE_DIR, S_PLATFORM
|
||||
from ...libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
from ...Utility import anilist_data_helper
|
||||
from ..utils.scripts import fzf_preview
|
||||
from ..utils.utils import get_true_fg
|
||||
from ..utils.scripts import bash_functions
|
||||
from ..utils.utils import get_true_fg, which_bashlike
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -46,7 +46,13 @@ def aniskip(mal_id: int, episode: str):
|
||||
|
||||
# NOTE: May change this to a temp dir but there were issues so later
|
||||
WORKING_DIR = APP_CACHE_DIR # tempfile.gettempdir()
|
||||
|
||||
_HEADER_COLOR = os.environ.get("FASTANIME_PREVIEW_HEADER_COLOR", "215,0,95").split(",")
|
||||
HEADER_COLOR = _HEADER_COLOR[0], _HEADER_COLOR[1], _HEADER_COLOR[2]
|
||||
_SEPARATOR_COLOR = os.environ.get(
|
||||
"FASTANIME_PREVIEW_SEPARATOR_COLOR", "208,208,208"
|
||||
).split(",")
|
||||
SEPARATOR_COLOR = _SEPARATOR_COLOR[0], _SEPARATOR_COLOR[1], _SEPARATOR_COLOR[2]
|
||||
SINGLE_QUOTE = "'"
|
||||
IMAGES_CACHE_DIR = os.path.join(WORKING_DIR, "images")
|
||||
if not os.path.exists(IMAGES_CACHE_DIR):
|
||||
os.mkdir(IMAGES_CACHE_DIR)
|
||||
@@ -63,7 +69,12 @@ def save_image_from_url(url: str, file_name: str):
|
||||
file_name: filename to use
|
||||
"""
|
||||
image = requests.get(url)
|
||||
with open(f"{IMAGES_CACHE_DIR}/{file_name}", "wb") as f:
|
||||
with open(
|
||||
os.path.join(
|
||||
IMAGES_CACHE_DIR, f"{sha256(file_name.encode('utf-8')).hexdigest()}.png"
|
||||
),
|
||||
"wb",
|
||||
) as f:
|
||||
f.write(image.content)
|
||||
|
||||
|
||||
@@ -74,7 +85,14 @@ def save_info_from_str(info: str, file_name: str):
|
||||
info: the information anilist has on the anime
|
||||
file_name: the filename to use
|
||||
"""
|
||||
with open(f"{ANIME_INFO_CACHE_DIR}/{file_name}", "w") as f:
|
||||
with open(
|
||||
os.path.join(
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
sha256(file_name.encode("utf-8")).hexdigest(),
|
||||
),
|
||||
"w",
|
||||
encoding="utf-8",
|
||||
) as f:
|
||||
f.write(info)
|
||||
|
||||
|
||||
@@ -83,48 +101,90 @@ def write_search_results(
|
||||
titles: list[str],
|
||||
workers: int | None = None,
|
||||
):
|
||||
"""A helper function used by and run in a background thread by get_fzf_preview function inorder to get the actual preview data to be displayed by fzf
|
||||
"""A helper function used by and run in a background thread by get_fzf_preview function in order to get the actual preview data to be displayed by fzf
|
||||
|
||||
Args:
|
||||
anilist_results: the anilist results from an anilist action
|
||||
titles: sanitized anime titles
|
||||
workers:number of threads to use defaults to as many as possible
|
||||
"""
|
||||
# NOTE: Will probably make this a configuraable option
|
||||
HEADER_COLOR = 215, 0, 95
|
||||
SEPARATOR_COLOR = 208, 208, 208
|
||||
SEPARATOR_WIDTH = 45
|
||||
# use concurrency to download and write as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
future_to_task = {}
|
||||
for anime, title in zip(anilist_results, titles):
|
||||
# actual image url
|
||||
image_url = anime["coverImage"]["large"]
|
||||
future_to_task[executor.submit(save_image_from_url, image_url, title)] = (
|
||||
image_url
|
||||
)
|
||||
image_url = ""
|
||||
if os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower() == "true":
|
||||
image_url = anime["coverImage"]["large"]
|
||||
|
||||
if not (
|
||||
os.path.exists(
|
||||
os.path.join(
|
||||
IMAGES_CACHE_DIR,
|
||||
f"{sha256(title.encode('utf-8')).hexdigest()}.png",
|
||||
)
|
||||
)
|
||||
):
|
||||
future_to_task[
|
||||
executor.submit(save_image_from_url, image_url, title)
|
||||
] = image_url
|
||||
|
||||
mediaListName = "Not in any of your lists"
|
||||
progress = "UNKNOWN"
|
||||
if anime_list := anime["mediaListEntry"]:
|
||||
mediaListName = anime_list["status"]
|
||||
progress = anime_list["progress"]
|
||||
# handle the text data
|
||||
template = f"""
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Title(jp):',*HEADER_COLOR)} {anime['title']['romaji']}
|
||||
{get_true_fg('Title(eng):',*HEADER_COLOR)} {anime['title']['english']}
|
||||
{get_true_fg('Popularity:',*HEADER_COLOR)} {anime['popularity']}
|
||||
{get_true_fg('Favourites:',*HEADER_COLOR)} {anime['favourites']}
|
||||
{get_true_fg('Status:',*HEADER_COLOR)} {anime['status']}
|
||||
{get_true_fg('Episodes:',*HEADER_COLOR)} {anime['episodes']}
|
||||
{get_true_fg('Genres:',*HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres'])}
|
||||
{get_true_fg('Next Episode:',*HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode'])}
|
||||
{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate'])}
|
||||
{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate'])}
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Description:',*HEADER_COLOR)}
|
||||
image_url={image_url}
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg("Title(jp):", *HEADER_COLOR)} {(anime["title"]["romaji"] or "").replace('"', SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg("Title(eng):", *HEADER_COLOR)} {(anime["title"]["english"] or "").replace('"', SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg("Popularity:", *HEADER_COLOR)} {anilist_data_helper.format_number_with_commas(anime["popularity"])}"
|
||||
echo "{get_true_fg("Favourites:", *HEADER_COLOR)} {anilist_data_helper.format_number_with_commas(anime["favourites"])}"
|
||||
echo "{get_true_fg("Status:", *HEADER_COLOR)} {str(anime["status"]).replace('"', SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg("Next Episode:", *HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime["nextAiringEpisode"]).replace('"', SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg("Genres:", *HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime["genres"]).replace('"', SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg("Episodes:", *HEADER_COLOR)} {(anime["episodes"]) or "UNKNOWN"}"
|
||||
echo "{get_true_fg("Start Date:", *HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime["startDate"]).replace('"', SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg("End Date:", *HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime["endDate"]).replace('"', SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg("Media List:", *HEADER_COLOR)} {mediaListName.replace('"', SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg("Progress:", *HEADER_COLOR)} {progress}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
# echo "{get_true_fg("Description:", *HEADER_COLOR).replace('"', SINGLE_QUOTE)}"
|
||||
"""
|
||||
template = textwrap.dedent(template)
|
||||
template = f"""
|
||||
{template}
|
||||
{textwrap.fill(clean_html(
|
||||
str(anime['description'])), width=45)}
|
||||
echo "{textwrap.fill(clean_html((anime["description"]) or "").replace('"', SINGLE_QUOTE), width=45)}"
|
||||
"""
|
||||
future_to_task[executor.submit(save_info_from_str, template, title)] = title
|
||||
|
||||
@@ -155,9 +215,18 @@ def get_rofi_icons(
|
||||
for anime, title in zip(anilist_results, titles):
|
||||
# actual link to download image from
|
||||
image_url = anime["coverImage"]["large"]
|
||||
future_to_url[executor.submit(save_image_from_url, image_url, title)] = (
|
||||
image_url
|
||||
)
|
||||
|
||||
if not (
|
||||
os.path.exists(
|
||||
os.path.join(
|
||||
IMAGES_CACHE_DIR,
|
||||
f"{sha256(title.encode('utf-8')).hexdigest()}.png",
|
||||
)
|
||||
)
|
||||
):
|
||||
future_to_url[
|
||||
executor.submit(save_image_from_url, image_url, title)
|
||||
] = image_url
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
@@ -168,7 +237,214 @@ def get_rofi_icons(
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
|
||||
def get_fzf_preview(
|
||||
# get rofi icons
|
||||
def get_fzf_manga_preview(manga_results, workers=None, wait=False):
|
||||
"""A helper function to make sure that the images are downloaded so they can be used as icons
|
||||
|
||||
Args:
|
||||
titles (list[str]): sanitized titles of the anime; NOTE: it's important that they are sanitized since they are used as the filenames of the images
|
||||
workers ([TODO:parameter]): Number of threads to use to download the images; defaults to as many as possible
|
||||
anilist_results: the anilist results from an anilist action
|
||||
"""
|
||||
|
||||
def _worker():
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
for manga in manga_results:
|
||||
image_url = manga["poster"]
|
||||
|
||||
if not (
|
||||
os.path.exists(
|
||||
os.path.join(
|
||||
IMAGES_CACHE_DIR,
|
||||
f"{sha256(manga['title'].encode('utf-8')).hexdigest()}.png",
|
||||
)
|
||||
)
|
||||
):
|
||||
future_to_url[
|
||||
executor.submit(
|
||||
save_image_from_url,
|
||||
image_url,
|
||||
manga["title"],
|
||||
)
|
||||
] = image_url
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
background_worker = Thread(
|
||||
target=_worker,
|
||||
)
|
||||
background_worker.daemon = True
|
||||
# ensure images and info exists
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process the preview script
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
title="$(echo -n {})"
|
||||
title="$(echo -n "$title" |generate_sha256)"
|
||||
if [ -s "%s/$title" ]; then fzf_preview "%s/title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
)
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
|
||||
|
||||
|
||||
# get rofi icons
|
||||
def get_fzf_episode_preview(
|
||||
anilist_result: AnilistBaseMediaDataSchema, episodes, workers=None, wait=False
|
||||
):
|
||||
"""A helper function to make sure that the images are downloaded so they can be used as icons
|
||||
|
||||
Args:
|
||||
titles (list[str]): sanitized titles of the anime; NOTE: it's important that they are sanitized since they are used as the filenames of the images
|
||||
workers ([TODO:parameter]): Number of threads to use to download the images; defaults to as many as possible
|
||||
anilist_results: the anilist results from an anilist action
|
||||
|
||||
Returns:
|
||||
The fzf preview script to use or None if the bash is not found
|
||||
"""
|
||||
|
||||
# HEADER_COLOR = 215, 0, 95
|
||||
import re
|
||||
|
||||
def _worker():
|
||||
# use concurrency to download the images as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
# load the jobs
|
||||
future_to_url = {}
|
||||
|
||||
for episode in episodes:
|
||||
episode_title = ""
|
||||
image_url = ""
|
||||
for episode_detail in anilist_result["streamingEpisodes"]:
|
||||
if re.match(f".*Episode {episode} .*", episode_detail["title"]):
|
||||
episode_title = episode_detail["title"]
|
||||
image_url = episode_detail["thumbnail"]
|
||||
|
||||
if episode_title and image_url:
|
||||
future_to_url[
|
||||
executor.submit(save_image_from_url, image_url, str(episode))
|
||||
] = image_url
|
||||
template = textwrap.dedent(
|
||||
f"""
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg("Anime Title(eng):", *HEADER_COLOR)} {("" or anilist_result["title"]["english"]).replace('"', SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg("Anime Title(jp):", *HEADER_COLOR)} {(anilist_result["title"]["romaji"] or "").replace('"', SINGLE_QUOTE)}"
|
||||
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{str(episode_title).replace('"', SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─", *SEPARATOR_COLOR, bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
"""
|
||||
)
|
||||
future_to_url[
|
||||
executor.submit(save_info_from_str, template, str(episode))
|
||||
] = str(episode)
|
||||
|
||||
# execute the jobs
|
||||
for future in concurrent.futures.as_completed(future_to_url):
|
||||
url = future_to_url[future]
|
||||
try:
|
||||
future.result()
|
||||
except Exception as e:
|
||||
logger.error("%r generated an exception: %s" % (url, e))
|
||||
|
||||
background_worker = Thread(
|
||||
target=_worker,
|
||||
)
|
||||
background_worker.daemon = True
|
||||
# ensure images and info exists
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process the preview script
|
||||
bash_path = which_bashlike()
|
||||
if not bash_path:
|
||||
return
|
||||
|
||||
os.environ["SHELL"] = bash_path
|
||||
if S_PLATFORM == "win32":
|
||||
preview = """
|
||||
%s
|
||||
title="$(echo -n {})"
|
||||
title="$(echo -n "$title" |generate_sha256)"
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [ "$FASTANIME_IMAGE_PREVIEWS" = "True" ];then
|
||||
if [ -s "%s\\\\\\${title}.png" ]; then
|
||||
if command -v "chafa">/dev/null;then
|
||||
chafa -s $dim "%s\\\\\\${title}.png"
|
||||
else
|
||||
echo please install chafa to enjoy image previews
|
||||
fi
|
||||
echo
|
||||
else
|
||||
echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -s "%s\\\\\\$title" ]; then source "%s\\\\\\$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
)
|
||||
else:
|
||||
preview = """
|
||||
%s
|
||||
title="$(echo -n {})"
|
||||
title="$(echo -n "$title" |generate_sha256)"
|
||||
if [ "$FASTANIME_IMAGE_PREVIEWS" = "True" ];then
|
||||
if [ -s %s/${title}.png ]; then fzf_preview %s/${title}.png
|
||||
else echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -f %s/${title} ]; then source %s/${title}
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
|
||||
|
||||
|
||||
def get_fzf_anime_preview(
|
||||
anilist_results: list[AnilistBaseMediaDataSchema], titles, wait=False
|
||||
):
|
||||
"""A helper function that constructs data to be used for the fzf preview
|
||||
@@ -179,9 +455,10 @@ def get_fzf_preview(
|
||||
anilist_results: the anilist results got from an anilist action
|
||||
|
||||
Returns:
|
||||
THe fzf preview script to use
|
||||
The fzf preview script to use or None if the bash is not found
|
||||
"""
|
||||
# ensure images and info exists
|
||||
|
||||
background_worker = Thread(
|
||||
target=write_search_results, args=(anilist_results, titles)
|
||||
)
|
||||
@@ -189,22 +466,60 @@ def get_fzf_preview(
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process the preview script
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
if [ -s %s/{} ]; then cat %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
bash_path = which_bashlike()
|
||||
if not bash_path:
|
||||
return
|
||||
|
||||
os.environ["SHELL"] = bash_path
|
||||
|
||||
if S_PLATFORM == "win32":
|
||||
preview = """
|
||||
%s
|
||||
title="$(echo -n {})"
|
||||
title="$(echo -n "$title" |generate_sha256)"
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [ "$FASTANIME_IMAGE_PREVIEWS" = "True" ];then
|
||||
if [ -s "%s\\\\\\${title}.png" ]; then
|
||||
if command -v "chafa">/dev/null;then
|
||||
chafa -s $dim "%s\\\\\\${title}.png"
|
||||
else
|
||||
echo please install chafa to enjoy image previews
|
||||
fi
|
||||
echo
|
||||
else
|
||||
echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -s "%s\\\\\\$title" ]; then source "%s\\\\\\$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
)
|
||||
else:
|
||||
preview = """
|
||||
%s
|
||||
title="$(echo -n {})"
|
||||
title="$(echo -n "$title" |generate_sha256)"
|
||||
if [ "$FASTANIME_IMAGE_PREVIEWS" = "True" ];then
|
||||
if [ -s "%s/${title}.png" ]; then fzf_preview "%s/${title}.png"
|
||||
else echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -s "%s/$title" ]; then source "%s/$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
bash_functions,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
|
||||
|
||||
12
fastanime/cli/utils/feh.py
Normal file
@@ -0,0 +1,12 @@
|
||||
import shutil
|
||||
import subprocess
|
||||
from sys import exit
|
||||
|
||||
|
||||
def feh_manga_viewer(image_links: list[str], window_title: str):
|
||||
FEH_EXECUTABLE = shutil.which("feh")
|
||||
if not FEH_EXECUTABLE:
|
||||
print("feh not found")
|
||||
exit(1)
|
||||
commands = [FEH_EXECUTABLE, *image_links, "--title", window_title]
|
||||
subprocess.run(commands)
|
||||
102
fastanime/cli/utils/icat.py
Normal file
@@ -0,0 +1,102 @@
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import termios
|
||||
import tty
|
||||
from sys import exit
|
||||
|
||||
from rich.console import Console
|
||||
from rich.panel import Panel
|
||||
from rich.align import Align
|
||||
from rich.text import Text
|
||||
|
||||
console = Console()
|
||||
|
||||
|
||||
def get_key():
|
||||
"""Read a single keypress (including arrows)."""
|
||||
fd = sys.stdin.fileno()
|
||||
old = termios.tcgetattr(fd)
|
||||
try:
|
||||
tty.setraw(fd)
|
||||
ch1 = sys.stdin.read(1)
|
||||
if ch1 == "\x1b":
|
||||
ch2 = sys.stdin.read(2)
|
||||
return ch1 + ch2
|
||||
return ch1
|
||||
finally:
|
||||
termios.tcsetattr(fd, termios.TCSADRAIN, old)
|
||||
|
||||
|
||||
def draw_banner_at(msg: str, row: int):
|
||||
"""Move cursor to `row`, then render a centered, cyan-bordered panel."""
|
||||
sys.stdout.write(f"\x1b[{row};1H")
|
||||
text = Text(msg, justify="center")
|
||||
panel = Panel(Align(text, align="center"), border_style="cyan", padding=(1, 2))
|
||||
console.print(panel)
|
||||
|
||||
|
||||
def icat_manga_viewer(image_links: list[str], window_title: str):
|
||||
ICAT = shutil.which("kitty")
|
||||
if not ICAT:
|
||||
console.print("[bold red]kitty (for icat) not found[/]")
|
||||
exit(1)
|
||||
|
||||
idx, total = 0, len(image_links)
|
||||
title = f"{window_title} ({total} images)"
|
||||
show_banner = True
|
||||
|
||||
try:
|
||||
while True:
|
||||
console.clear()
|
||||
term_width, term_height = shutil.get_terminal_size((80, 24))
|
||||
panel_height = 0
|
||||
|
||||
# Calculate space for image based on banner visibility
|
||||
if show_banner:
|
||||
msg_lines = 3 # Title + blank + controls
|
||||
panel_height = msg_lines + 4 # Padding and borders
|
||||
image_height = term_height - panel_height - 1
|
||||
else:
|
||||
image_height = term_height
|
||||
|
||||
subprocess.run(
|
||||
[
|
||||
ICAT,
|
||||
"+kitten",
|
||||
"icat",
|
||||
"--clear",
|
||||
"--scale-up",
|
||||
"--place",
|
||||
f"{term_width}x{image_height}@0x0",
|
||||
"--z-index",
|
||||
"-1",
|
||||
image_links[idx],
|
||||
]
|
||||
)
|
||||
|
||||
if show_banner:
|
||||
controls = (
|
||||
f"[{idx + 1}/{total}] Prev: [h/←] Next: [l/→] "
|
||||
f"Toggle Banner: [b] Quit: [q/Ctrl-C]"
|
||||
)
|
||||
msg = f"{title}\n\n{controls}"
|
||||
start_row = term_height - panel_height
|
||||
draw_banner_at(msg, start_row)
|
||||
|
||||
# key handling
|
||||
key = get_key()
|
||||
if key in ("l", "\x1b[C"):
|
||||
idx = (idx + 1) % total
|
||||
elif key in ("h", "\x1b[D"):
|
||||
idx = (idx - 1) % total
|
||||
elif key == "b":
|
||||
show_banner = not show_banner
|
||||
elif key in ("q", "\x03"):
|
||||
break
|
||||
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
finally:
|
||||
console.clear()
|
||||
console.print("Exited viewer.", style="bold")
|
||||
@@ -1,131 +1,222 @@
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from fastanime.constants import S_PLATFORM
|
||||
from ...constants import S_PLATFORM
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
mpv_av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")
|
||||
|
||||
|
||||
def stream_video(MPV, url, mpv_args, custom_args):
|
||||
process = subprocess.Popen(
|
||||
[MPV, url, *mpv_args, *custom_args],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
)
|
||||
|
||||
last_time = None
|
||||
av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")
|
||||
def stream_video(MPV, url, mpv_args, custom_args, pre_args=[]):
|
||||
last_time = "0"
|
||||
total_time = "0"
|
||||
if os.environ.get("FASTANIME_DISABLE_MPV_POPEN", "False") == "False":
|
||||
process = subprocess.Popen(
|
||||
pre_args
|
||||
+ [
|
||||
MPV,
|
||||
url,
|
||||
*mpv_args,
|
||||
*custom_args,
|
||||
"--no-terminal",
|
||||
],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
bufsize=1,
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
try:
|
||||
while True:
|
||||
if not process.stderr:
|
||||
continue
|
||||
output = process.stderr.readline()
|
||||
try:
|
||||
while True:
|
||||
if not process.stderr:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
output = process.stderr.readline()
|
||||
|
||||
if output:
|
||||
# Match the timestamp in the output
|
||||
match = av_time_pattern.search(output.strip())
|
||||
if output:
|
||||
# Match the timestamp in the output
|
||||
match = mpv_av_time_pattern.search(output.strip())
|
||||
if match:
|
||||
current_time = match.group(1)
|
||||
total_time = match.group(2)
|
||||
last_time = current_time
|
||||
|
||||
# Check if the process has terminated
|
||||
retcode = process.poll()
|
||||
if retcode is not None:
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
print(f"An error occurred: {e}")
|
||||
logger.error(f"An error occurred: {e}")
|
||||
finally:
|
||||
process.terminate()
|
||||
process.wait()
|
||||
else:
|
||||
proc = subprocess.run(
|
||||
pre_args + [MPV, url, *mpv_args, *custom_args],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
encoding="utf-8",
|
||||
)
|
||||
if proc.stdout:
|
||||
for line in reversed(proc.stdout.split("\n")):
|
||||
match = mpv_av_time_pattern.search(line.strip())
|
||||
if match:
|
||||
current_time = match.group(1)
|
||||
last_time = match.group(1)
|
||||
total_time = match.group(2)
|
||||
match.group(3)
|
||||
last_time = current_time
|
||||
# print(f"Current stream time: {current_time}, Total time: {total_time}, Progress: {percentage}%")
|
||||
|
||||
# Check if the process has terminated
|
||||
retcode = process.poll()
|
||||
if retcode is not None:
|
||||
print("Finshed at: ", last_time)
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
print(f"An error occurred: {e}")
|
||||
finally:
|
||||
process.terminate()
|
||||
|
||||
break
|
||||
return last_time, total_time
|
||||
|
||||
|
||||
def run_mpv(
|
||||
link: str,
|
||||
title: str | None = "",
|
||||
title: str = "",
|
||||
start_time: str = "0",
|
||||
ytdl_format="",
|
||||
custom_args=[],
|
||||
headers={},
|
||||
subtitles=[],
|
||||
player="",
|
||||
):
|
||||
# Determine if mpv is available
|
||||
MPV = shutil.which("mpv")
|
||||
|
||||
# If title is None, set a default value
|
||||
|
||||
# Regex to check if the link is a YouTube URL
|
||||
youtube_regex = r"(https?://)?(www\.)?(youtube|youtu|youtube-nocookie)\.(com|be)/.+"
|
||||
|
||||
if not MPV and not S_PLATFORM == "win32":
|
||||
# Determine if the link is a YouTube URL
|
||||
if re.match(youtube_regex, link):
|
||||
# Android specific commands to launch mpv with a YouTube URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"com.google.android.youtube/.UrlActivity",
|
||||
]
|
||||
if link.endswith(".torrent"):
|
||||
WEBTORRENT_CLI = shutil.which("webtorrent")
|
||||
if not WEBTORRENT_CLI:
|
||||
import time
|
||||
|
||||
print(
|
||||
"webtorrent cli is not installed which is required for downloading and streaming from nyaa\nplease install it or use another provider"
|
||||
)
|
||||
time.sleep(120)
|
||||
return "0", "0"
|
||||
cmd = [WEBTORRENT_CLI, link, f"--{player}"]
|
||||
subprocess.run(cmd, encoding="utf-8")
|
||||
return "0", "0"
|
||||
if player == "vlc":
|
||||
VLC = shutil.which("vlc")
|
||||
if not VLC and not S_PLATFORM == "win32":
|
||||
# Determine if the link is a YouTube URL
|
||||
if re.match(youtube_regex, link):
|
||||
# Android specific commands to launch mpv with a YouTube URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"com.google.android.youtube/.UrlActivity",
|
||||
]
|
||||
return "0", "0"
|
||||
else:
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"org.videolan.vlc/org.videolan.vlc.gui.video.VideoPlayerActivity",
|
||||
"-e",
|
||||
"title",
|
||||
title,
|
||||
]
|
||||
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
else:
|
||||
# Android specific commands to launch mpv with a regular URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"is.xyz.mpv/.MPVActivity",
|
||||
]
|
||||
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
args = ["vlc", link]
|
||||
for subtitle in subtitles:
|
||||
args.append("--sub-file")
|
||||
args.append(subtitle["url"])
|
||||
break
|
||||
if title:
|
||||
args.append("--video-title")
|
||||
args.append(title)
|
||||
subprocess.run(args, encoding="utf-8")
|
||||
return "0", "0"
|
||||
else:
|
||||
# General mpv command with custom arguments
|
||||
mpv_args = []
|
||||
if headers:
|
||||
mpv_headers = "--http-header-fields="
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_args.append(mpv_headers)
|
||||
for subtitle in subtitles:
|
||||
mpv_args.append(f"--sub-file={subtitle['url']}")
|
||||
if start_time != "0":
|
||||
mpv_args.append(f"--start={start_time}")
|
||||
if title:
|
||||
mpv_args.append(f"--title={title}")
|
||||
if ytdl_format:
|
||||
mpv_args.append(f"--ytdl-format={ytdl_format}")
|
||||
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
|
||||
return stop_time, total_time
|
||||
# Determine if mpv is available
|
||||
MPV = shutil.which("mpv")
|
||||
if not MPV and not S_PLATFORM == "win32":
|
||||
# Determine if the link is a YouTube URL
|
||||
if re.match(youtube_regex, link):
|
||||
# Android specific commands to launch mpv with a YouTube URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"com.google.android.youtube/.UrlActivity",
|
||||
]
|
||||
return "0", "0"
|
||||
else:
|
||||
# Android specific commands to launch mpv with a regular URL
|
||||
args = [
|
||||
"nohup",
|
||||
"am",
|
||||
"start",
|
||||
"--user",
|
||||
"0",
|
||||
"-a",
|
||||
"android.intent.action.VIEW",
|
||||
"-d",
|
||||
link,
|
||||
"-n",
|
||||
"is.xyz.mpv/.MPVActivity",
|
||||
]
|
||||
|
||||
subprocess.run(args)
|
||||
return "0", "0"
|
||||
else:
|
||||
# General mpv command with custom arguments
|
||||
mpv_args = []
|
||||
if headers:
|
||||
mpv_headers = "--http-header-fields="
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_args.append(mpv_headers)
|
||||
for subtitle in subtitles:
|
||||
mpv_args.append(f"--sub-file={subtitle['url']}")
|
||||
if start_time != "0":
|
||||
mpv_args.append(f"--start={start_time}")
|
||||
if title:
|
||||
mpv_args.append(f"--title={title}")
|
||||
if ytdl_format:
|
||||
mpv_args.append(f"--ytdl-format={ytdl_format}")
|
||||
|
||||
# Example usage
|
||||
if __name__ == "__main__":
|
||||
run_mpv(
|
||||
"https://www.youtube.com/watch?v=dQw4w9WgXcQ",
|
||||
"Example Video",
|
||||
"--fullscreen",
|
||||
"--volume=50",
|
||||
)
|
||||
if user_args := os.environ.get("FASTANIME_MPV_ARGS"):
|
||||
mpv_args.extend(user_args.split(","))
|
||||
|
||||
pre_args = []
|
||||
if user_args := os.environ.get("FASTANIME_MPV_PRE_ARGS"):
|
||||
pre_args = user_args.split(",")
|
||||
stop_time, total_time = stream_video(
|
||||
MPV, link, mpv_args, custom_args, pre_args
|
||||
)
|
||||
return stop_time, total_time
|
||||
|
||||
@@ -10,6 +10,7 @@ if TYPE_CHECKING:
|
||||
|
||||
from ...AnimeProvider import AnimeProvider
|
||||
from ..config import Config
|
||||
from .tools import FastAnimeRuntimeState
|
||||
|
||||
|
||||
def format_time(duration_in_secs: float):
|
||||
@@ -67,7 +68,11 @@ class MpvPlayer(object):
|
||||
current_episode_number = (
|
||||
fastanime_runtime_state.provider_current_episode_number
|
||||
)
|
||||
config.update_watch_history(anime_id_anilist, str(current_episode_number))
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=str(current_episode_number),
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
)
|
||||
elif type == "reload":
|
||||
if current_episode_number not in total_episodes:
|
||||
self.mpv_player.show_text("Episode not available")
|
||||
@@ -83,7 +88,11 @@ class MpvPlayer(object):
|
||||
|
||||
self.mpv_player.show_text(f"Fetching episode {ep_no}")
|
||||
current_episode_number = ep_no
|
||||
config.update_watch_history(anime_id_anilist, str(ep_no))
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=str(ep_no),
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
)
|
||||
fastanime_runtime_state.provider_current_episode_number = str(ep_no)
|
||||
else:
|
||||
self.mpv_player.show_text("Fetching previous episode...")
|
||||
@@ -96,7 +105,11 @@ class MpvPlayer(object):
|
||||
current_episode_number = (
|
||||
fastanime_runtime_state.provider_current_episode_number
|
||||
)
|
||||
config.update_watch_history(anime_id_anilist, str(current_episode_number))
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=str(current_episode_number),
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
)
|
||||
# update episode progress
|
||||
if config.user and current_episode_number:
|
||||
AniList.update_anime_list(
|
||||
@@ -107,10 +120,9 @@ class MpvPlayer(object):
|
||||
)
|
||||
# get them juicy streams
|
||||
episode_streams = anime_provider.get_episode_streams(
|
||||
provider_anime,
|
||||
provider_anime["id"],
|
||||
current_episode_number,
|
||||
translation_type,
|
||||
fastanime_runtime_state.selected_anime_anilist,
|
||||
)
|
||||
if not episode_streams:
|
||||
self.mpv_player.show_text("No streams were found")
|
||||
@@ -134,6 +146,18 @@ class MpvPlayer(object):
|
||||
)
|
||||
return
|
||||
self.current_media_title = selected_server["episode_title"]
|
||||
if config.normalize_titles:
|
||||
import re
|
||||
|
||||
for episode_detail in fastanime_runtime_state.selected_anime_anilist[
|
||||
"streamingEpisodes"
|
||||
]:
|
||||
if re.match(
|
||||
f"Episode {current_episode_number} ", episode_detail["title"]
|
||||
):
|
||||
self.current_media_title = episode_detail["title"]
|
||||
break
|
||||
|
||||
links = selected_server["links"]
|
||||
|
||||
stream_link_ = filter_by_quality(quality, links)
|
||||
@@ -152,7 +176,7 @@ class MpvPlayer(object):
|
||||
self,
|
||||
stream_link,
|
||||
anime_provider: "AnimeProvider",
|
||||
fastanime_runtime_state,
|
||||
fastanime_runtime_state: "FastAnimeRuntimeState",
|
||||
config: "Config",
|
||||
title,
|
||||
start_time,
|
||||
@@ -258,7 +282,6 @@ class MpvPlayer(object):
|
||||
mpv_player.show_text("Changing translation type...")
|
||||
anime = anime_provider.get_anime(
|
||||
fastanime_runtime_state.provider_anime_search_result["id"],
|
||||
fastanime_runtime_state.selected_anime_anilist,
|
||||
)
|
||||
if not anime:
|
||||
mpv_player.show_text("Failed to update translation type")
|
||||
|
||||
@@ -5,7 +5,7 @@ import requests
|
||||
|
||||
|
||||
def print_img(url: str):
|
||||
"""helper funtion to print an image given its url
|
||||
"""helper function to print an image given its url
|
||||
|
||||
Args:
|
||||
url: [TODO:description]
|
||||
@@ -24,4 +24,10 @@ def print_img(url: str):
|
||||
print("Error fetching image")
|
||||
return
|
||||
img_bytes = res.content
|
||||
"""
|
||||
Change made in call to chafa. Chafa dev dropped ability
|
||||
to pull from urls. Keeping old line here just in case.
|
||||
|
||||
subprocess.run([EXECUTABLE, url, "--size=15x15"], input=img_bytes)
|
||||
"""
|
||||
subprocess.run([EXECUTABLE, "--size=15x15"], input=img_bytes)
|
||||
|
||||
@@ -1,78 +1,68 @@
|
||||
# this script was written by the fzf devs as an example on how to preview images
|
||||
# it's only here for convenience
|
||||
fzf_preview = r"""
|
||||
#
|
||||
# The purpose of this script is to demonstrate how to preview a file or an
|
||||
# image in the preview window of fzf.
|
||||
#
|
||||
# Dependencies:
|
||||
# - https://github.com/sharkdp/bat
|
||||
# - https://github.com/hpjansson/chafa
|
||||
# - https://iterm2.com/utilities/imgcat
|
||||
fzf-preview(){
|
||||
if [[ $# -ne 1 ]]; then
|
||||
>&2 echo "usage: $0 FILENAME"
|
||||
exit 1
|
||||
fi
|
||||
bash_functions = r"""
|
||||
generate_sha256() {
|
||||
local input
|
||||
|
||||
file=${1/#\~\//$HOME/}
|
||||
type=$(file --dereference --mime -- "$file")
|
||||
# Check if input is passed as an argument or piped
|
||||
if [ -n "$1" ]; then
|
||||
input="$1"
|
||||
else
|
||||
input=$(cat)
|
||||
fi
|
||||
|
||||
if [[ ! $type =~ image/ ]]; then
|
||||
if [[ $type =~ =binary ]]; then
|
||||
file "$1"
|
||||
exit
|
||||
fi
|
||||
if command -v sha256sum &>/dev/null; then
|
||||
echo -n "$input" | sha256sum | awk '{print $1}'
|
||||
elif command -v shasum &>/dev/null; then
|
||||
echo -n "$input" | shasum -a 256 | awk '{print $1}'
|
||||
elif command -v sha256 &>/dev/null; then
|
||||
echo -n "$input" | sha256 | awk '{print $1}'
|
||||
elif command -v openssl &>/dev/null; then
|
||||
echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
|
||||
else
|
||||
echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
|
||||
fi
|
||||
}
|
||||
fzf_preview() {
|
||||
file=$1
|
||||
|
||||
# Sometimes bat is installed as batcat.
|
||||
if command -v batcat > /dev/null; then
|
||||
batname="batcat"
|
||||
elif command -v bat > /dev/null; then
|
||||
batname="bat"
|
||||
else
|
||||
cat "$1"
|
||||
exit
|
||||
fi
|
||||
|
||||
${batname} --style="${BAT_STYLE:-numbers}" --color=always --pager=never -- "$file"
|
||||
exit
|
||||
fi
|
||||
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [[ $dim = x ]]; then
|
||||
dim=$(stty size < /dev/tty | awk '{print $2 "x" $1}')
|
||||
elif ! [[ $KITTY_WINDOW_ID ]] && (( FZF_PREVIEW_TOP + FZF_PREVIEW_LINES == $(stty size < /dev/tty | awk '{print $1}') )); then
|
||||
# Avoid scrolling issue when the Sixel image touches the bottom of the screen
|
||||
# * https://github.com/junegunn/fzf/issues/2544
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [ "$dim" = x ]; then
|
||||
dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
|
||||
fi
|
||||
if ! [ "$FASTANIME_IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
|
||||
dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
|
||||
fi
|
||||
|
||||
if [ "$FASTANIME_IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
|
||||
if command -v kitten >/dev/null 2>&1; then
|
||||
kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
|
||||
elif command -v icat >/dev/null 2>&1; then
|
||||
icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
|
||||
else
|
||||
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
|
||||
fi
|
||||
|
||||
# 1. Use kitty icat on kitty terminal
|
||||
if [[ $KITTY_WINDOW_ID ]]; then
|
||||
# 1. 'memory' is the fastest option but if you want the image to be scrollable,
|
||||
# you have to use 'stream'.
|
||||
#
|
||||
# 2. The last line of the output is the ANSI reset code without newline.
|
||||
# This confuses fzf and makes it render scroll offset indicator.
|
||||
# So we remove the last line and append the reset code to its previous line.
|
||||
kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed '$d' | sed $'$s/$/\e[m/'
|
||||
|
||||
# 2. Use chafa with Sixel output
|
||||
elif command -v chafa > /dev/null; then
|
||||
chafa -f sixel -s "$dim" "$file"
|
||||
# Add a new line character so that fzf can display multiple images in the preview window
|
||||
elif [ -n "$GHOSTTY_BIN_DIR" ]; then
|
||||
if command -v kitten >/dev/null 2>&1; then
|
||||
kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
|
||||
elif command -v icat >/dev/null 2>&1; then
|
||||
icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
|
||||
else
|
||||
chafa -s "$dim" "$file"
|
||||
fi
|
||||
elif command -v chafa >/dev/null 2>&1; then
|
||||
case "$PLATFORM" in
|
||||
android) chafa -s "$dim" "$file" ;;
|
||||
windows) chafa -f sixel -s "$dim" "$file" ;;
|
||||
*) chafa -s "$dim" "$file" ;;
|
||||
esac
|
||||
echo
|
||||
|
||||
# 3. If chafa is not found but imgcat is available, use it on iTerm2
|
||||
elif command -v imgcat > /dev/null; then
|
||||
# NOTE: We should use https://iterm2.com/utilities/it2check to check if the
|
||||
# user is running iTerm2. But for the sake of simplicity, we just assume
|
||||
# that's the case here.
|
||||
elif command -v imgcat >/dev/null; then
|
||||
imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
|
||||
|
||||
# 4. Cannot find any suitable method to preview the image
|
||||
else
|
||||
file "$file"
|
||||
fi
|
||||
else
|
||||
echo please install a terminal image viewer
|
||||
echo either icat for kitty terminal and wezterm or imgcat or chafa
|
||||
fi
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -1,20 +1,36 @@
|
||||
# TODO: add typing
|
||||
class FastAnimeRuntimeState(dict):
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Any, Callable
|
||||
|
||||
from ...libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
from ...libs.anime_provider.types import Anime, EpisodeStream, SearchResult, Server
|
||||
|
||||
|
||||
class FastAnimeRuntimeState(object):
|
||||
"""A class that manages fastanime runtime during anilist command runtime"""
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
return self.__getitem__(attr)
|
||||
except KeyError:
|
||||
raise AttributeError(
|
||||
"%r object has no attribute %r" % (self.__class__.__name__, attr)
|
||||
)
|
||||
provider_current_episode_stream_link: str
|
||||
provider_current_server: "Server"
|
||||
provider_current_server_name: str
|
||||
provider_available_episodes: list[str]
|
||||
provider_current_episode_number: str
|
||||
provider_server_episode_streams: list["EpisodeStream"]
|
||||
provider_anime_title: str
|
||||
provider_anime: "Anime"
|
||||
provider_anime_search_result: "SearchResult"
|
||||
progress_tracking: str = ""
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
self.__setitem__(attr, value)
|
||||
selected_anime_anilist: "AnilistBaseMediaDataSchema"
|
||||
selected_anime_id_anilist: int
|
||||
selected_anime_title_anilist: str
|
||||
# current_anilist_data: "AnilistDataSchema | AnilistMediaList"
|
||||
anilist_results_data: "Any"
|
||||
current_page: int
|
||||
current_data_loader: "Callable"
|
||||
|
||||
|
||||
def exit_app(exit_code=0, *args):
|
||||
def exit_app(exit_code=0, *args, **kwargs):
|
||||
import sys
|
||||
|
||||
from rich.console import Console
|
||||
@@ -23,8 +39,13 @@ def exit_app(exit_code=0, *args):
|
||||
|
||||
console = Console()
|
||||
if not console.is_terminal:
|
||||
from plyer import notification
|
||||
|
||||
try:
|
||||
from plyer import notification
|
||||
except ImportError:
|
||||
print(
|
||||
"Plyer is not installed; install it for desktop notifications to be enabled"
|
||||
)
|
||||
exit(1)
|
||||
notification.notify(
|
||||
app_name=APP_NAME,
|
||||
app_icon=ICON_PATH,
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
import logging
|
||||
import shutil
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from InquirerPy import inquirer
|
||||
|
||||
from fastanime.constants import S_PLATFORM
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
if TYPE_CHECKING:
|
||||
from ...libs.anime_provider.types import EpisodeStream
|
||||
@@ -92,7 +95,7 @@ def filter_by_quality(quality: str, stream_links: "list[EpisodeStream]", default
|
||||
|
||||
|
||||
def format_bytes_to_human(num_of_bytes: float, suffix: str = "B"):
|
||||
"""Helper function usedd to format bytes to human
|
||||
"""Helper function used to format bytes to human
|
||||
|
||||
Args:
|
||||
num_of_bytes: the number of bytes to format
|
||||
@@ -108,7 +111,7 @@ def format_bytes_to_human(num_of_bytes: float, suffix: str = "B"):
|
||||
return f"{num_of_bytes:.1f}Yi{suffix}"
|
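The docstring and the final `return f"{num_of_bytes:.1f}Yi{suffix}"` visible in this hunk suggest the conventional binary-prefix loop; the body is not shown in the diff, so the following is a sketch under that assumption.

def format_bytes_to_human_sketch(num_of_bytes: float, suffix: str = "B") -> str:
    # Walk the binary prefixes, dividing by 1024, and fall through to "Yi"
    # as in the return statement shown above.
    for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
        if abs(num_of_bytes) < 1024.0:
            return f"{num_of_bytes:.1f}{unit}{suffix}"
        num_of_bytes /= 1024.0
    return f"{num_of_bytes:.1f}Yi{suffix}"

# e.g. format_bytes_to_human_sketch(1536) -> "1.5KiB"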
||||
|
||||
|
||||
def get_true_fg(string: str, r: int, g: int, b: int, bold: bool = True) -> str:
|
||||
def get_true_fg(string: str, r, g, b, bold: bool = True) -> str:
|
||||
"""Custom helper function that enables colored text in the terminal
|
||||
|
||||
Args:
|
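get_true_fg's body is not shown in this hunk; below is a hedged sketch of what a truecolor helper with this signature typically does, using the standard 24-bit ANSI foreground escape (ESC[38;2;R;G;Bm) plus an optional bold prefix and a trailing reset.

def get_true_fg_sketch(string: str, r: int, g: int, b: int, bold: bool = True) -> str:
    # Assumed shape of the helper, not copied from the codebase.
    prefix = "\033[1m" if bold else ""
    return f"{prefix}\033[38;2;{r};{g};{b}m{string}\033[0m"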
||||
@@ -155,3 +158,41 @@ def fuzzy_inquirer(choices: list, prompt: str, **kwargs):
|
||||
**kwargs,
|
||||
).execute()
|
||||
return action
|
||||
|
||||
|
||||
def which_win32_gitbash():
|
||||
"""Helper function that returns absolute path to the git bash executable
|
||||
(came with Git for Windows) on Windows
|
||||
|
||||
Returns:
|
||||
the path to the git bash executable or None if not found
|
||||
"""
|
||||
from os import path
|
||||
|
||||
gb_path = shutil.which("bash")
|
||||
|
||||
# Windows came with its own bash.exe but it's just an entry point for WSL not Git Bash
|
||||
if gb_path and not path.dirname(gb_path).lower().endswith("windows\\system32"):
|
||||
return gb_path
|
||||
|
||||
git_path = shutil.which("git")
|
||||
|
||||
if git_path:
|
||||
if path.dirname(git_path).endswith("cmd"):
|
||||
gb_path = path.abspath(
|
||||
path.join(path.dirname(git_path), "..", "bin", "bash.exe")
|
||||
)
|
||||
else:
|
||||
gb_path = path.join(path.dirname(git_path), "bash.exe")
|
||||
|
||||
if path.exists(gb_path):
|
||||
return gb_path
|
||||
|
||||
|
||||
def which_bashlike():
|
||||
"""Helper function that returns absolute path to the bash executable for the current platform
|
||||
|
||||
Returns:
|
||||
the path to the bash executable or None if not found
|
||||
"""
|
||||
return (shutil.which("bash") or "bash") if S_PLATFORM != "win32" else which_win32_gitbash()
|
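A small usage sketch for the two helpers above: resolve a bash-like shell for the current platform and run a one-liner with it. The subprocess invocation is illustrative and not taken from the diff; it assumes which_bashlike is in scope.

import subprocess

bash = which_bashlike()
if bash:
    subprocess.run([bash, "-c", "echo hello from a bash-like shell"])
else:
    print("no bash-like shell found (Git Bash missing on Windows)")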
||||
@@ -25,7 +25,7 @@ else:
|
||||
# ----- user configs and data -----
|
||||
|
||||
S_PLATFORM = sys.platform
|
||||
APP_DATA_DIR = click.get_app_dir(APP_NAME)
|
||||
APP_DATA_DIR = click.get_app_dir(APP_NAME, roaming=False)
|
||||
if S_PLATFORM == "win32":
|
||||
# app data
|
||||
# app_data_dir_base = os.getenv("LOCALAPPDATA")
|
||||
@@ -78,6 +78,7 @@ Path(USER_VIDEOS_DIR).mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# useful paths
|
||||
USER_DATA_PATH = os.path.join(APP_DATA_DIR, "user_data.json")
|
||||
USER_WATCH_HISTORY_PATH = os.path.join(APP_DATA_DIR, "watch_history.json")
|
||||
USER_CONFIG_PATH = os.path.join(APP_DATA_DIR, "config.ini")
|
||||
LOG_FILE_PATH = os.path.join(APP_DATA_DIR, "fastanime.log")
|
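The switch to `click.get_app_dir(APP_NAME, roaming=False)` only changes behaviour on Windows, where the non-roaming profile (%LOCALAPPDATA%) is used instead of the roaming %APPDATA% one; on Linux and macOS the result is unchanged. A quick illustration ("FastAnime" stands in for APP_NAME here):

import click

# e.g. C:\Users\<user>\AppData\Local\FastAnime on Windows,
#      ~/.config/fastanime on Linux.
print(click.get_app_dir("FastAnime", roaming=False))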
||||
|
||||
|
||||
14
fastanime/fastanime.py
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Add the application root directory to Python path
|
||||
if getattr(sys, "frozen", False):
|
||||
application_path = os.path.dirname(sys.executable)
|
||||
sys.path.insert(0, application_path)
|
||||
|
||||
# Import and run the main application
|
||||
from fastanime import FastAnime
|
||||
|
||||
if __name__ == "__main__":
|
||||
FastAnime()
|
||||
@@ -3,6 +3,7 @@ This is the core module availing all the abstractions of the anilist api
|
||||
"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import requests
|
||||
@@ -15,6 +16,7 @@ from .queries_graphql import (
|
||||
delete_list_entry_query,
|
||||
get_logged_in_user_query,
|
||||
get_medialist_item_query,
|
||||
get_user_info,
|
||||
media_list_mutation,
|
||||
media_list_query,
|
||||
most_favourite_query,
|
||||
@@ -34,8 +36,9 @@ if TYPE_CHECKING:
|
||||
AnilistMediaLists,
|
||||
AnilistMediaListStatus,
|
||||
AnilistNotifications,
|
||||
AnilistUser,
|
||||
AnilistUser_,
|
||||
AnilistUserData,
|
||||
AnilistViewerData,
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
@@ -61,7 +64,7 @@ class AniListApi:
|
||||
self.session = requests.session()
|
||||
|
||||
def login_user(self, token: str):
|
||||
"""methosd used to login a new user enabling authenticated requests
|
||||
"""method used to login a new user enabling authenticated requests
|
||||
|
||||
Args:
|
||||
token: anilist app token
|
||||
@@ -77,7 +80,7 @@ class AniListApi:
|
||||
return
|
||||
if not success or not user:
|
||||
return
|
||||
user_info: AnilistUser = user["data"]["Viewer"]
|
||||
user_info: "AnilistUser_" = user["data"]["Viewer"]
|
||||
self.user_id = user_info["id"]
|
||||
return user_info
|
||||
|
||||
@@ -91,7 +94,7 @@ class AniListApi:
|
||||
"""
|
||||
return self._make_authenticated_request(notification_query)
|
||||
|
||||
def update_login_info(self, user: "AnilistUser", token: str):
|
||||
def update_login_info(self, user: "AnilistUser_", token: str):
|
||||
"""method used to login a user enabling authenticated requests
|
||||
|
||||
Args:
|
||||
@@ -103,7 +106,18 @@ class AniListApi:
|
||||
self.session.headers.update(self.headers)
|
||||
self.user_id = user["id"]
|
||||
|
||||
def get_logged_in_user(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
|
||||
def get_user_info(self) -> tuple[bool, "AnilistUserData"] | tuple[bool, None]:
|
||||
"""get the details of the user who is currently logged in
|
||||
|
||||
Returns:
|
||||
an anilist user
|
||||
"""
|
||||
|
||||
return self._make_authenticated_request(get_user_info, {"userId": self.user_id})
|
||||
|
||||
def get_logged_in_user(
|
||||
self,
|
||||
) -> tuple[bool, "AnilistViewerData"] | tuple[bool, None]:
|
||||
"""get the details of the user who is currently logged in
|
||||
|
||||
Returns:
|
||||
@@ -129,6 +143,9 @@ class AniListApi:
|
||||
self,
|
||||
status: "AnilistMediaListStatus",
|
||||
type="ANIME",
|
||||
page=1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
**kwargs,
|
||||
) -> tuple[bool, "AnilistMediaLists"] | tuple[bool, None]:
|
||||
"""gets an anime list from your media list given the list status
|
||||
|
||||
@@ -138,7 +155,13 @@ class AniListApi:
|
||||
Returns:
|
||||
a media list
|
||||
"""
|
||||
variables = {"status": status, "userId": self.user_id, "type": type}
|
||||
variables = {
|
||||
"status": status,
|
||||
"userId": self.user_id,
|
||||
"type": type,
|
||||
"page": page,
|
||||
"perPage": int(perPage),
|
||||
}
|
||||
return self._make_authenticated_request(media_list_query, variables)
|
||||
|
||||
def get_medialist_entry(
|
||||
@@ -224,7 +247,7 @@ class AniListApi:
|
||||
return (False, None)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
logger.error(f"Something unexpected occurred {e}")
|
||||
return (False, None) # type: ignore
|
||||
|
||||
def get_data(
|
||||
@@ -288,11 +311,12 @@ class AniListApi:
|
||||
},
|
||||
) # type: ignore
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
logger.error(f"Something unexpected occurred {e}")
|
||||
return (False, {"Error": f"{e}"}) # type: ignore
|
||||
|
||||
def search(
|
||||
self,
|
||||
max_results=50,
|
||||
query: str | None = None,
|
||||
sort: str | None = None,
|
||||
genre_in: list[str] | None = None,
|
||||
@@ -316,6 +340,7 @@ class AniListApi:
|
||||
page: int | None = None,
|
||||
season: str | None = None,
|
||||
format_in: list[str] | None = None,
|
||||
on_list: bool | None = None,
|
||||
type="ANIME",
|
||||
**kwargs,
|
||||
):
|
||||
@@ -324,7 +349,7 @@ class AniListApi:
|
||||
"""
|
||||
variables = {}
|
||||
for key, val in list(locals().items())[1:]:
|
||||
if val and key not in ["variables"]:
|
||||
if (val or val is False) and key not in ["variables"]:
|
||||
variables[key] = val
|
||||
search_results = self.get_data(search_query, variables=variables)
|
||||
return search_results
|
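The `search()` change above keeps explicit `False` arguments (such as `on_list=False`) in the GraphQL variables, since `val or val is False` is true for them while `None` and empty values are still dropped. A self-contained illustration of that collection pattern (the function name is made up):

def collect_variables(query=None, on_list=None, page=None):
    # Mirror the locals()-based filtering used in AniListApi.search().
    variables = {}
    for key, val in list(locals().items()):
        if (val or val is False) and key not in ["variables"]:
            variables[key] = val
    return variables

# collect_variables(query="naruto", on_list=False)
# -> {"query": "naruto", "on_list": False}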
||||
@@ -336,69 +361,107 @@ class AniListApi:
|
||||
variables = {"id": id}
|
||||
return self.get_data(anime_query, variables)
|
||||
|
||||
def get_trending(self, type="ANIME", *_, **kwargs):
|
||||
def get_trending(
|
||||
self,
|
||||
type="ANIME",
|
||||
page=1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
*_,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Gets the currently trending anime
|
||||
"""
|
||||
variables = {"type": type}
|
||||
variables = {"type": type, "page": page, "perPage": int(perPage)}
|
||||
trending = self.get_data(trending_query, variables)
|
||||
return trending
|
||||
|
||||
def get_most_favourite(self, type="ANIME", *_, **kwargs):
|
||||
def get_most_favourite(
|
||||
self,
|
||||
type="ANIME",
|
||||
page=1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
*_,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Gets the most favoured anime on anilist
|
||||
"""
|
||||
variables = {"type": type}
|
||||
variables = {"type": type, "page": page, "perPage": int(perPage)}
|
||||
most_favourite = self.get_data(most_favourite_query, variables)
|
||||
return most_favourite
|
||||
|
||||
def get_most_scored(self, type="ANIME", *_, **kwargs):
|
||||
def get_most_scored(
|
||||
self,
|
||||
type="ANIME",
|
||||
page=1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
*_,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Gets most scored anime on anilist
|
||||
"""
|
||||
variables = {"type": type}
|
||||
variables = {"type": type, "page": page, "perPage": int(perPage)}
|
||||
most_scored = self.get_data(most_scored_query, variables)
|
||||
return most_scored
|
||||
|
||||
def get_most_recently_updated(self, type="ANIME", *_, **kwargs):
|
||||
def get_most_recently_updated(
|
||||
self,
|
||||
type="ANIME",
|
||||
page=1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
*_,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Gets most recently updated anime from anilist
|
||||
"""
|
||||
variables = {"type": type}
|
||||
variables = {"type": type, "page": page, "perPage": int(perPage)}
|
||||
most_recently_updated = self.get_data(most_recently_updated_query, variables)
|
||||
return most_recently_updated
|
||||
|
||||
def get_most_popular(
|
||||
self,
|
||||
type="ANIME",
|
||||
page=1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Gets most popular anime on anilist
|
||||
"""
|
||||
variables = {"type": type}
|
||||
variables = {"type": type, "page": page, "perPage": int(perPage)}
|
||||
most_popular = self.get_data(most_popular_query, variables)
|
||||
return most_popular
|
||||
|
||||
def get_upcoming_anime(self, type="ANIME", page: int = 1, *_, **kwargs):
|
||||
def get_upcoming_anime(
|
||||
self,
|
||||
type="ANIME",
|
||||
page: int = 1,
|
||||
perPage=os.environ.get("FASTANIME_PER_PAGE", 15),
|
||||
*_,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Gets upcoming anime from anilist
|
||||
"""
|
||||
variables = {"page": page, "type": type}
|
||||
variables = {"page": page, "type": type, "perPage": int(perPage)}
|
||||
upcoming_anime = self.get_data(upcoming_anime_query, variables)
|
||||
return upcoming_anime
|
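All of the list endpoints above now accept `page` and `perPage`, with the default `perPage` read from the `FASTANIME_PER_PAGE` environment variable. Because that default is evaluated when the function is defined, the variable is consulted at import time; passing `perPage` explicitly always works. A hedged usage sketch (the constructor arguments and the exact response shape are assumptions, not shown in this diff):

api = AniListApi()
# get_data returns a (bool, data) tuple, so the fetchers do too.
success, trending = api.get_trending(page=2, perPage=20)
if success:
    # Assumed AniList Page shape; adjust to the actual query in queries_graphql.
    print(trending["data"]["Page"]["pageInfo"])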
||||
|
||||
# NOTE: THe following methods will probably be scraped soon
|
||||
def get_recommended_anime_for(self, id: int, type="ANIME", *_, **kwargs):
|
||||
variables = {"type": type}
|
||||
def get_recommended_anime_for(self, mediaRecommendationId, page=1, *_, **kwargs):
|
||||
variables = {"mediaRecommendationId": mediaRecommendationId, "page": page}
|
||||
recommended_anime = self.get_data(recommended_query, variables)
|
||||
return recommended_anime
|
||||
|
||||
def get_charcters_of(self, id: int, type="ANIME", *_, **kwargs):
|
||||
def get_characters_of(self, id: int, type="ANIME", *_, **kwargs):
|
||||
variables = {"id": id}
|
||||
characters = self.get_data(anime_characters_query, variables)
|
||||
return characters
|
||||
|
||||
def get_related_anime_for(self, id: int, type="ANIME", *_, **kwargs):
|
||||
def get_related_anime_for(self, id: int, *_, **kwargs):
|
||||
variables = {"id": id}
|
||||
related_anime = self.get_data(anime_relations_query, variables)
|
||||
return related_anime
|
||||
|
||||
File diff suppressed because it is too large
@@ -19,7 +19,7 @@ class AnilistImage(TypedDict):
|
||||
large: str
|
||||
|
||||
|
||||
class AnilistUser(TypedDict):
|
||||
class AnilistUser_(TypedDict):
|
||||
id: int
|
||||
name: str
|
||||
bannerImage: str | None
|
||||
@@ -28,11 +28,26 @@ class AnilistUser(TypedDict):
|
||||
|
||||
|
||||
class AnilistViewer(TypedDict):
|
||||
Viewer: AnilistUser
|
||||
Viewer: AnilistUser_
|
||||
|
||||
|
||||
class AnilistViewerData(TypedDict):
|
||||
data: AnilistViewer
|
||||
|
||||
|
||||
class AnilistUser(TypedDict):
|
||||
name: str
|
||||
about: str | None
|
||||
avatar: AnilistImage
|
||||
bannerImage: str | None
|
||||
|
||||
|
||||
class AnilistUserInfo(TypedDict):
|
||||
User: AnilistUser
|
||||
|
||||
|
||||
class AnilistUserData(TypedDict):
|
||||
data: AnilistViewer
|
||||
data: AnilistUserInfo
|
||||
|
||||
|
||||
class AnilistMediaTrailer(TypedDict):
|
||||
@@ -69,7 +84,7 @@ class AnilistMediaNextAiringEpisode(TypedDict):
|
||||
|
||||
class AnilistReview(TypedDict):
|
||||
summary: str
|
||||
user: AnilistUser
|
||||
user: AnilistUser_
|
||||
|
||||
|
||||
class AnilistReviewNodes(TypedDict):
|
||||
@@ -114,16 +129,17 @@ class AnilistCharactersEdges(TypedDict):
|
||||
edges: list[AnilistCharactersEdge]
|
||||
|
||||
|
||||
class AnilistMediaList_(TypedDict):
|
||||
id: int
|
||||
progress: int
|
||||
|
||||
|
||||
AnilistMediaListStatus = Literal[
|
||||
"CURRENT", "PLANNING", "COMPLETED", "DROPPED", "PAUSED", "REPEATING"
|
||||
]
|
||||
|
||||
|
||||
class AnilistMediaList_(TypedDict):
|
||||
id: int
|
||||
progress: int
|
||||
status: AnilistMediaListStatus
|
||||
|
||||
|
||||
class AnilistMediaListProperties(TypedDict):
|
||||
status: AnilistMediaListStatus
|
||||
score: float
|
||||
@@ -136,6 +152,11 @@ class AnilistMediaListProperties(TypedDict):
|
||||
hiddenFromStatusLists: bool
|
||||
|
||||
|
||||
class StreamingEpisode(TypedDict):
|
||||
title: str
|
||||
thumbnail: str
|
||||
|
||||
|
||||
class AnilistBaseMediaDataSchema(TypedDict):
|
||||
"""
|
||||
This a convenience class is used to type the received Anilist data to enhance dev experience
|
||||
@@ -159,6 +180,8 @@ class AnilistBaseMediaDataSchema(TypedDict):
|
||||
status: str
|
||||
nextAiringEpisode: AnilistMediaNextAiringEpisode
|
||||
season: str
|
||||
streamingEpisodes: list[StreamingEpisode]
|
||||
chapters: int
|
||||
seasonYear: int
|
||||
duration: int
|
||||
synonyms: list[str]
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
from .allanime.constants import SERVERS_AVAILABLE as ALLANIME_SERVERS
|
||||
from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHESERVERS
|
||||
from .aniwatch.constants import SERVERS_AVAILABLE as ANIWATCHSERVERS
|
||||
from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHE_SERVERS
|
||||
from .hianime.constants import SERVERS_AVAILABLE as HIANIME_SERVERS
|
||||
|
||||
anime_sources = {
|
||||
"allanime": "api.AllAnimeAPI",
|
||||
"animepahe": "api.AnimePaheApi",
|
||||
"aniwatch": "api.AniWatchApi",
|
||||
"aniwave": "api.AniWaveApi",
|
||||
"allanime": "api.AllAnime",
|
||||
"animepahe": "api.AnimePahe",
|
||||
"hianime": "api.HiAnime",
|
||||
"nyaa": "api.Nyaa",
|
||||
"yugen": "api.Yugen",
|
||||
}
|
||||
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHESERVERS, *ANIWATCHSERVERS]
|
||||
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHE_SERVERS, *HIANIME_SERVERS]
|
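anime_sources maps a provider name to a "module.Class" string relative to that provider's package. One plausible way such an entry could be resolved at runtime with importlib, assuming the package layout implied by the relative imports above (the real loader in fastanime may differ):

import importlib

def load_provider(name: str, registry: dict):
    # "api.AllAnime" -> module "api", class "AllAnime" inside the provider package.
    module_path, _, class_name = registry[name].partition(".")
    module = importlib.import_module(
        f"fastanime.libs.anime_provider.{name}.{module_path}"
    )
    return getattr(module, class_name)

# load_provider("allanime", anime_sources) would return the AllAnime class,
# assuming the dotted path above matches the repository layout.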
||||
|
||||
@@ -1,323 +1,501 @@
|
||||
"""a module that handles the scraping of allanime
|
||||
|
||||
abstraction over allanime api
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from requests.exceptions import Timeout
|
||||
|
||||
from ...anime_provider.base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from ..utils import give_random_quality, one_digit_symmetric_xor
|
||||
from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
|
||||
from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL
|
||||
from .constants import (
|
||||
API_BASE_URL,
|
||||
API_ENDPOINT,
|
||||
API_REFERER,
|
||||
DEFAULT_COUNTRY_OF_ORIGIN,
|
||||
DEFAULT_NSFW,
|
||||
DEFAULT_PAGE,
|
||||
DEFAULT_PER_PAGE,
|
||||
DEFAULT_UNKNOWN,
|
||||
MP4_SERVER_JUICY_STREAM_REGEX,
|
||||
)
|
||||
from .gql_queries import EPISODES_GQL, SEARCH_GQL, SHOW_GQL
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterator
|
||||
|
||||
from ....libs.anime_provider.allanime.types import AllAnimeEpisode
|
||||
from ....libs.anime_provider.types import Anime, Server
|
||||
from .types import AllAnimeEpisode
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# TODO: create tests for the api
|
||||
#
|
||||
# ** Based on ani-cli **
|
||||
class AllAnimeAPI(AnimeProvider):
|
||||
class AllAnime(AnimeProvider):
|
||||
"""
|
||||
Provides a fast and effective interface to AllAnime site.
|
||||
AllAnime is a provider class for fetching anime data from the AllAnime API.
|
||||
Attributes:
|
||||
HEADERS (dict): Default headers for API requests.
|
||||
Methods:
|
||||
_execute_graphql_query(query: str, variables: dict) -> dict:
|
||||
Executes a GraphQL query and returns the response data.
|
||||
search_for_anime(
|
||||
**kwargs
|
||||
) -> dict:
|
||||
Searches for anime based on the provided keywords and other parameters.
|
||||
get_anime(show_id: str) -> dict:
|
||||
Retrieves detailed information about a specific anime by its ID.
|
||||
_get_anime_episode(
|
||||
show_id: str, episode, translation_type: str = "sub"
|
||||
Retrieves information about a specific episode of an anime.
|
||||
get_episode_streams(
|
||||
) -> generator:
|
||||
Retrieves streaming links for a specific episode of an anime.
|
||||
"""
|
||||
|
||||
api_endpoint = ALLANIME_API_ENDPOINT
|
||||
HEADERS = {
|
||||
"Referer": ALLANIME_REFERER,
|
||||
"Referer": API_REFERER,
|
||||
}
|
||||
|
||||
def _fetch_gql(self, query: str, variables: dict):
|
||||
"""main abstraction over all requests to the allanime api
|
||||
def _execute_graphql_query(self, query: str, variables: dict):
|
||||
"""
|
||||
Executes a GraphQL query using the provided query string and variables.
|
||||
|
||||
Args:
|
||||
query: [TODO:description]
|
||||
variables: [TODO:description]
|
||||
query (str): The GraphQL query string to be executed.
|
||||
variables (dict): A dictionary of variables to be used in the query.
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
try:
|
||||
response = self.session.get(
|
||||
self.api_endpoint,
|
||||
params={
|
||||
"variables": json.dumps(variables),
|
||||
"query": query,
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
return response.json()["data"]
|
||||
else:
|
||||
logger.error("allanime(ERROR): ", response.text)
|
||||
return {}
|
||||
except Timeout:
|
||||
logger.error(
|
||||
"allanime(Error):Timeout exceeded this could mean allanime is down or you have lost internet connection"
|
||||
)
|
||||
return {}
|
||||
except Exception as e:
|
||||
logger.error(f"allanime:Error: {e}")
|
||||
return {}
|
||||
dict: The JSON response data from the GraphQL API.
|
||||
|
||||
Raises:
|
||||
requests.exceptions.HTTPError: If the HTTP request returned an unsuccessful status code.
|
||||
"""
|
||||
|
||||
response = self.session.get(
|
||||
API_ENDPOINT,
|
||||
params={
|
||||
"variables": json.dumps(variables),
|
||||
"query": query,
|
||||
},
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
return response.json()["data"]
|
||||
|
||||
@debug_provider
|
||||
def search_for_anime(
|
||||
self,
|
||||
user_query: str,
|
||||
translation_type: str = "sub",
|
||||
nsfw=True,
|
||||
unknown=True,
|
||||
search_keywords: str,
|
||||
translation_type: str,
|
||||
*,
|
||||
nsfw=DEFAULT_NSFW,
|
||||
unknown=DEFAULT_UNKNOWN,
|
||||
limit=DEFAULT_PER_PAGE,
|
||||
page=DEFAULT_PAGE,
|
||||
country_of_origin=DEFAULT_COUNTRY_OF_ORIGIN,
|
||||
**kwargs,
|
||||
):
|
||||
"""search for an anime title using allanime provider
|
||||
|
||||
Args:
|
||||
nsfw ([TODO:parameter]): [TODO:description]
|
||||
unknown ([TODO:parameter]): [TODO:description]
|
||||
user_query: [TODO:description]
|
||||
translation_type: [TODO:description]
|
||||
**kwargs: [TODO:args]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
search = {"allowAdult": nsfw, "allowUnknown": unknown, "query": user_query}
|
||||
limit = 40
|
||||
translationtype = translation_type
|
||||
countryorigin = "all"
|
||||
page = 1
|
||||
variables = {
|
||||
"search": search,
|
||||
"limit": limit,
|
||||
"page": page,
|
||||
"translationtype": translationtype,
|
||||
"countryorigin": countryorigin,
|
||||
}
|
||||
try:
|
||||
search_results = self._fetch_gql(ALLANIME_SEARCH_GQL, variables)
|
||||
page_info = search_results["shows"]["pageInfo"]
|
||||
results = []
|
||||
for result in search_results["shows"]["edges"]:
|
||||
normalized_result = {
|
||||
Search for anime based on given keywords and filters.
|
||||
Args:
|
||||
search_keywords (str): The keywords to search for.
|
||||
translation_type (str, optional): The type of translation to search for (e.g., "sub" or "dub"). Defaults to "sub".
|
||||
limit (int, optional): The maximum number of results to return. Defaults to 40.
|
||||
page (int, optional): The page number to return. Defaults to 1.
|
||||
country_of_origin (str, optional): The country of origin filter. Defaults to "all".
|
||||
nsfw (bool, optional): Whether to include adult content in the search results. Defaults to True.
|
||||
unknown (bool, optional): Whether to include unknown content in the search results. Defaults to True.
|
||||
**kwargs: Additional keyword arguments.
|
||||
Returns:
|
||||
dict: A dictionary containing the page information and a list of search results. Each result includes:
|
||||
- id (str): The ID of the anime.
|
||||
- title (str): The title of the anime.
|
||||
- type (str): The type of the anime.
|
||||
- availableEpisodes (int): The number of available episodes.
|
||||
"""
|
||||
search_results = self._execute_graphql_query(
|
||||
SEARCH_GQL,
|
||||
variables={
|
||||
"search": {
|
||||
"allowAdult": nsfw,
|
||||
"allowUnknown": unknown,
|
||||
"query": search_keywords,
|
||||
},
|
||||
"limit": limit,
|
||||
"page": page,
|
||||
"translationtype": translation_type,
|
||||
"countryorigin": country_of_origin,
|
||||
},
|
||||
)
|
||||
return {
|
||||
"pageInfo": search_results["shows"]["pageInfo"],
|
||||
"results": [
|
||||
{
|
||||
"id": result["_id"],
|
||||
"title": result["name"],
|
||||
"type": result["__typename"],
|
||||
"availableEpisodes": result["availableEpisodes"],
|
||||
}
|
||||
results.append(normalized_result)
|
||||
|
||||
normalized_search_results = {
|
||||
"pageInfo": page_info,
|
||||
"results": results,
|
||||
}
|
||||
return normalized_search_results
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"FA(AllAnime): {e}")
|
||||
return {}
|
||||
|
||||
def get_anime(self, allanime_show_id: str):
|
||||
"""get an anime details given its id
|
||||
|
||||
Args:
|
||||
allanime_show_id: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
variables = {"showId": allanime_show_id}
|
||||
try:
|
||||
anime = self._fetch_gql(ALLANIME_SHOW_GQL, variables)
|
||||
id: str = anime["show"]["_id"]
|
||||
title: str = anime["show"]["name"]
|
||||
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
|
||||
type = anime.get("__typename")
|
||||
normalized_anime = {
|
||||
"id": id,
|
||||
"title": title,
|
||||
"availableEpisodesDetail": availableEpisodesDetail,
|
||||
"type": type,
|
||||
}
|
||||
return normalized_anime
|
||||
except Exception as e:
|
||||
logger.error(f"AllAnime(get_anime): {e}")
|
||||
return None
|
||||
|
||||
def _get_anime_episode(
|
||||
self, allanime_show_id: str, episode_string: str, translation_type: str = "sub"
|
||||
) -> "AllAnimeEpisode | dict":
|
||||
"""get the episode details and sources info
|
||||
|
||||
Args:
|
||||
allanime_show_id: [TODO:description]
|
||||
episode_string: [TODO:description]
|
||||
translation_type: [TODO:description]
|
||||
|
||||
Returns:
|
||||
[TODO:return]
|
||||
"""
|
||||
variables = {
|
||||
"showId": allanime_show_id,
|
||||
"translationType": translation_type,
|
||||
"episodeString": episode_string,
|
||||
for result in search_results["shows"]["edges"]
|
||||
],
|
||||
}
|
||||
try:
|
||||
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
|
||||
return episode["episode"]
|
||||
except Exception as e:
|
||||
logger.error(f"FA(AllAnime): {e}")
|
||||
return {}
|
||||
|
||||
def get_episode_streams(
|
||||
self, anime: "Anime", episode_number: str, translation_type="sub"
|
||||
) -> "Iterator[Server] | None":
|
||||
"""get the streams of an episode
|
||||
|
||||
Args:
|
||||
translation_type ([TODO:parameter]): [TODO:description]
|
||||
anime: [TODO:description]
|
||||
episode_number: [TODO:description]
|
||||
|
||||
Yields:
|
||||
[TODO:description]
|
||||
@debug_provider
|
||||
def get_anime(self, id: str, **kwargs):
|
||||
"""
|
||||
anime_id = anime["id"]
|
||||
Fetches anime details using the provided show ID.
|
||||
Args:
|
||||
id (str): The ID of the anime show to fetch details for.
|
||||
Returns:
|
||||
dict: A dictionary containing the anime details, including:
|
||||
- id (str): The unique identifier of the anime show.
|
||||
- title (str): The title of the anime show.
|
||||
- availableEpisodesDetail (list): A list of available episodes details.
|
||||
- type (str, optional): The type of the anime show.
|
||||
"""
|
||||
|
||||
anime = self._execute_graphql_query(SHOW_GQL, variables={"showId": id})
|
||||
self.store.set(id, "anime_info", {"title": anime["show"]["name"]})
|
||||
return {
|
||||
"id": anime["show"]["_id"],
|
||||
"title": anime["show"]["name"],
|
||||
"availableEpisodesDetail": anime["show"]["availableEpisodesDetail"],
|
||||
"type": anime.get("__typename"),
|
||||
}
|
||||
|
||||
@debug_provider
|
||||
def _get_anime_episode(
|
||||
self, anime_id: str, episode, translation_type: str = "sub"
|
||||
) -> "AllAnimeEpisode":
|
||||
"""
|
||||
Fetches a specific episode of an anime by its ID and episode number.
|
||||
Args:
|
||||
anime_id (str): The unique identifier of the anime.
|
||||
episode (str): The episode number or string identifier.
|
||||
translation_type (str, optional): The type of translation for the episode. Defaults to "sub".
|
||||
Returns:
|
||||
AllAnimeEpisode: The episode details retrieved from the GraphQL query.
|
||||
"""
|
||||
return self._execute_graphql_query(
|
||||
EPISODES_GQL,
|
||||
variables={
|
||||
"showId": anime_id,
|
||||
"translationType": translation_type,
|
||||
"episodeString": episode,
|
||||
},
|
||||
)["episode"]
|
||||
|
||||
@debug_provider
|
||||
def _get_server(
|
||||
self,
|
||||
embed,
|
||||
anime_title: str,
|
||||
allanime_episode: "AllAnimeEpisode",
|
||||
episode_number,
|
||||
):
|
||||
"""
|
||||
Retrieves the streaming server information for a given anime episode based on the provided embed data.
|
||||
Args:
|
||||
embed (dict): A dictionary containing the embed data, including the source URL and source name.
|
||||
anime_title (str): The title of the anime.
|
||||
allanime_episode (AllAnimeEpisode): An object representing the episode details.
|
||||
Returns:
|
||||
dict: A dictionary containing server information, headers, subtitles, episode title, and links to the stream.
|
||||
Returns None if no valid URL or stream is found.
|
||||
Raises:
|
||||
requests.exceptions.RequestException: If there is an issue with the HTTP request.
|
||||
"""
|
||||
|
||||
url = embed.get("sourceUrl")
|
||||
#
|
||||
if not url:
|
||||
return
|
||||
if url.startswith("--"):
|
||||
url = one_digit_symmetric_xor(56, url[2:])
|
||||
|
||||
# FIRST CASE
|
||||
match embed["sourceName"]:
|
||||
case "Yt-mp4":
|
||||
logger.debug("Found streams from Yt")
|
||||
return {
|
||||
"server": "Yt",
|
||||
"episode_title": f"{anime_title}; Episode {episode_number}",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"links": [
|
||||
{
|
||||
"link": url,
|
||||
"quality": "1080",
|
||||
}
|
||||
],
|
||||
}
|
||||
case "Mp4":
|
||||
logger.debug("Found streams from Mp4")
|
||||
response = self.session.get(
|
||||
url,
|
||||
fresh=1, # pyright: ignore
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
embed_html = response.text.replace(" ", "").replace("\n", "")
|
||||
vid = MP4_SERVER_JUICY_STREAM_REGEX.search(embed_html)
|
||||
if not vid:
|
||||
return
|
||||
return {
|
||||
"server": "mp4-upload",
|
||||
"headers": {"Referer": "https://www.mp4upload.com/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": [{"link": vid.group(1), "quality": "1080"}],
|
||||
}
|
||||
case "Fm-Hls":
|
||||
# TODO: requires decoding obsfucated js (filemoon)
|
||||
logger.debug("Found streams from Fm-Hls")
|
||||
response = self.session.get(
|
||||
url,
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
embed_html = response.text.replace(" ", "").replace("\n", "")
|
||||
vid = MP4_SERVER_JUICY_STREAM_REGEX.search(embed_html)
|
||||
if not vid:
|
||||
return
|
||||
return {
|
||||
"server": "filemoon",
|
||||
"headers": {"Referer": "https://www.mp4upload.com/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": [{"link": vid.group(1), "quality": "1080"}],
|
||||
}
|
||||
case "Ok":
|
||||
# TODO: requires decoding the obsfucated js (filemoon)
|
||||
response = self.session.get(
|
||||
url,
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
embed_html = response.text.replace(" ", "").replace("\n", "")
|
||||
vid = MP4_SERVER_JUICY_STREAM_REGEX.search(embed_html)
|
||||
logger.debug("Found streams from Ok")
|
||||
return {
|
||||
"server": "filemoon",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
case "Vid-mp4":
|
||||
# TODO: requires some serious work i think : )
|
||||
response = self.session.get(
|
||||
url,
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
embed_html = response.text.replace(" ", "").replace("\n", "")
|
||||
logger.debug("Found streams from vid-mp4")
|
||||
return {
|
||||
"server": "Vid-mp4",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
case "Ss-Hls":
|
||||
# TODO: requires some serious work i think : )
|
||||
response = self.session.get(
|
||||
url,
|
||||
timeout=10,
|
||||
)
|
||||
response.raise_for_status()
|
||||
embed_html = response.text.replace(" ", "").replace("\n", "")
|
||||
logger.debug("Found streams from Ss-Hls")
|
||||
return {
|
||||
"server": "StreamSb",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
|
||||
# get the stream url for an episode of the defined source names
|
||||
response = self.session.get(
|
||||
f"https://{API_BASE_URL}{url.replace('clock', 'clock.json')}",
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
|
||||
# SECOND CASE
|
||||
match embed["sourceName"]:
|
||||
case "Luf-mp4":
|
||||
logger.debug("Found streams from gogoanime")
|
||||
return {
|
||||
"server": "gogoanime",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
case "Kir":
|
||||
logger.debug("Found streams from wetransfer")
|
||||
return {
|
||||
"server": "weTransfer",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
case "S-mp4":
|
||||
logger.debug("Found streams from sharepoint")
|
||||
return {
|
||||
"server": "sharepoint",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
case "Sak":
|
||||
logger.debug("Found streams from dropbox")
|
||||
return {
|
||||
"server": "dropbox",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
case "Default":
|
||||
logger.debug("Found streams from wixmp")
|
||||
return {
|
||||
"server": "wixmp",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
||||
|
||||
case "Ak":
|
||||
# TODO: works but needs further probing
|
||||
logger.debug("Found streams from Ak")
|
||||
return {
|
||||
"server": "Ak",
|
||||
"headers": {"Referer": f"https://{API_BASE_URL}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (allanime_episode["notes"] or f"{anime_title}")
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(response.json()["links"]),
|
||||
}
|
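Two details in `_get_server` above are easy to miss. First, `sourceUrl` values prefixed with "--" are obfuscated and passed through `one_digit_symmetric_xor(56, ...)`; one plausible reading of that helper, assuming the payload is hex and each byte is XORed with the key, is sketched below (not copied from the codebase). Second, for the servers handled in the "second case" the decoded path is turned into a JSON endpoint by replacing "clock" with "clock.json" before the request is made.

def xor_decode_sketch(key: int, hex_payload: str) -> str:
    # Assumed behaviour of one_digit_symmetric_xor: XOR every byte of the
    # hex-encoded payload with a single integer key and decode as text.
    return bytes(b ^ key for b in bytearray.fromhex(hex_payload)).decode()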
||||
|
||||
@debug_provider
|
||||
def get_episode_streams(
|
||||
self, anime_id, episode_number: str, translation_type="sub", **kwargs
|
||||
):
|
||||
"""
|
||||
Retrieve streaming information for a specific episode of an anime.
|
||||
Args:
|
||||
anime_id (str): The unique identifier for the anime.
|
||||
episode_number (str): The episode number to retrieve streams for.
|
||||
translation_type (str, optional): The type of translation for the episode (e.g., "sub" for subtitles). Defaults to "sub".
|
||||
Yields:
|
||||
dict: A dictionary containing streaming information for the episode, including:
|
||||
- server (str): The name of the streaming server.
|
||||
- episode_title (str): The title of the episode.
|
||||
- headers (dict): HTTP headers required for accessing the stream.
|
||||
- subtitles (list): A list of subtitles available for the episode.
|
||||
- links (list): A list of dictionaries containing streaming links and their quality.
|
||||
"""
|
||||
anime_title = (self.store.get(anime_id, "anime_info", "") or {"title": ""})[
|
||||
"title"
|
||||
]
|
||||
allanime_episode = self._get_anime_episode(
|
||||
anime_id, episode_number, translation_type
|
||||
)
|
||||
if not allanime_episode:
|
||||
return []
|
||||
|
||||
embeds = allanime_episode["sourceUrls"]
|
||||
try:
|
||||
for embed in embeds:
|
||||
try:
|
||||
# filter the working streams no need to get all since the others are mostly hsl
|
||||
# TODO: should i just get all the servers and handle the hsl??
|
||||
if embed.get("sourceName", "") not in (
|
||||
# priorities based on death note
|
||||
"Sak", # 7
|
||||
"S-mp4", # 7.9
|
||||
"Luf-mp4", # 7.7
|
||||
"Default", # 8.5
|
||||
"Yt-mp4", # 7.9
|
||||
"Kir", # NA
|
||||
# "Vid-mp4" # 4
|
||||
# "Ok", # 3.5
|
||||
# "Ss-Hls", # 5.5
|
||||
# "Mp4", # 4
|
||||
):
|
||||
continue
|
||||
url = embed.get("sourceUrl")
|
||||
#
|
||||
if not url:
|
||||
continue
|
||||
if url.startswith("--"):
|
||||
url = url[2:]
|
||||
url = one_digit_symmetric_xor(56, url)
|
||||
for embed in allanime_episode["sourceUrls"]:
|
||||
if embed.get("sourceName", "") not in (
|
||||
# priorities based on death note
|
||||
"Sak", # 7
|
||||
"S-mp4", # 7.9
|
||||
"Luf-mp4", # 7.7
|
||||
"Default", # 8.5
|
||||
"Yt-mp4", # 7.9
|
||||
"Kir", # NA
|
||||
"Mp4", # 4
|
||||
# "Ak",#
|
||||
# "Vid-mp4", # 4
|
||||
# "Ok", # 3.5
|
||||
# "Ss-Hls", # 5.5
|
||||
# "Fm-Hls",#
|
||||
):
|
||||
logger.debug(f"Found {embed['sourceName']} but ignoring")
|
||||
continue
|
||||
if server := self._get_server(
|
||||
embed, anime_title, allanime_episode, episode_number
|
||||
):
|
||||
yield server
|
||||
|
||||
if "tools.fast4speed.rsvp" in url:
|
||||
yield {
|
||||
"server": "Yt",
|
||||
"episode_title": f'{anime["title"]}; Episode {episode_number}',
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"links": [
|
||||
{
|
||||
"link": url,
|
||||
"quality": "1080",
|
||||
}
|
||||
],
|
||||
} # pyright:ignore
|
||||
continue
|
||||
|
||||
# get the stream url for an episode of the defined source names
|
||||
embed_url = (
|
||||
f"https://{ALLANIME_BASE}{url.replace('clock', 'clock.json')}"
|
||||
)
|
||||
resp = self.session.get(
|
||||
embed_url,
|
||||
timeout=10,
|
||||
)
|
||||
if __name__ == "__main__":
|
||||
import subprocess
|
||||
|
||||
if resp.status_code == 200:
|
||||
match embed["sourceName"]:
|
||||
case "Luf-mp4":
|
||||
logger.debug("allanime:Found streams from gogoanime")
|
||||
yield {
|
||||
"server": "gogoanime",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
} # pyright:ignore
|
||||
case "Kir":
|
||||
logger.debug("allanime:Found streams from wetransfer")
|
||||
yield {
|
||||
"server": "wetransfer",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
} # pyright:ignore
|
||||
case "S-mp4":
|
||||
logger.debug("allanime:Found streams from sharepoint")
|
||||
yield {
|
||||
"server": "sharepoint",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
} # pyright:ignore
|
||||
case "Sak":
|
||||
logger.debug("allanime:Found streams from dropbox")
|
||||
yield {
|
||||
"server": "dropbox",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
} # pyright:ignore
|
||||
case "Default":
|
||||
logger.debug("allanime:Found streams from wixmp")
|
||||
yield {
|
||||
"server": "wixmp",
|
||||
"headers": {},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f'{anime["title"]}'
|
||||
)
|
||||
+ f"; Episode {episode_number}",
|
||||
"links": give_random_quality(resp.json()["links"]),
|
||||
} # pyright:ignore
|
||||
allanime = AllAnime(cache_requests="True", use_persistent_provider_store="False")
|
||||
search_term = input("Enter the search term for the anime: ")
|
||||
translation_type = input("Enter the translation type (sub/dub): ")
|
||||
|
||||
except Timeout:
|
||||
logger.error(
|
||||
"Timeout has been exceeded this could mean allanime is down or you have lost internet connection"
|
||||
)
|
||||
search_results = allanime.search_for_anime(
|
||||
search_keywords=search_term, translation_type=translation_type
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"FA(Allanime): {e}")
|
||||
if not search_results["results"]:
|
||||
print("No results found.")
|
||||
exit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"FA(Allanime): {e}")
|
||||
return []
|
||||
print("Search Results:")
|
||||
for idx, result in enumerate(search_results["results"], start=1):
|
||||
print(f"{idx}. {result['title']} (ID: {result['id']})")
|
||||
|
||||
anime_choice = int(input("Enter the number of the anime you want to watch: ")) - 1
|
||||
anime_id = search_results["results"][anime_choice]["id"]
|
||||
|
||||
anime_details = allanime.get_anime(anime_id)
|
||||
print(f"Selected Anime: {anime_details['title']}")
|
||||
|
||||
print("Available Episodes:")
|
||||
for idx, episode in enumerate(
|
||||
sorted(anime_details["availableEpisodesDetail"][translation_type], key=float),
|
||||
start=1,
|
||||
):
|
||||
print(f"{idx}. Episode {episode}")
|
||||
|
||||
episode_choice = (
|
||||
int(input("Enter the number of the episode you want to watch: ")) - 1
|
||||
)
|
||||
episode_number = anime_details["availableEpisodesDetail"][translation_type][
|
||||
episode_choice
|
||||
]
|
||||
|
||||
streams = list(
|
||||
allanime.get_episode_streams(anime_id, episode_number, translation_type)
|
||||
)
|
||||
if not streams:
|
||||
print("No streams available.")
|
||||
exit()
|
||||
|
||||
print("Available Streams:")
|
||||
for idx, stream in enumerate(streams, start=1):
|
||||
print(f"{idx}. Server: {stream['server']}")
|
||||
|
||||
server_choice = int(input("Enter the number of the server you want to use: ")) - 1
|
||||
selected_stream = streams[server_choice]
|
||||
|
||||
stream_link = selected_stream["links"][0]["link"]
|
||||
mpv_args = ["mpv", stream_link]
|
||||
headers = selected_stream["headers"]
|
||||
if headers:
|
||||
mpv_headers = "--http-header-fields="
|
||||
for header_name, header_value in headers.items():
|
||||
mpv_headers += f"{header_name}:{header_value},"
|
||||
mpv_args.append(mpv_headers)
|
||||
subprocess.run(mpv_args)
|
||||
|
||||
@@ -1,4 +1,27 @@
|
||||
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]
|
||||
ALLANIME_BASE = "allanime.day"
|
||||
ALLANIME_REFERER = "https://allanime.to/"
|
||||
ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE)
|
||||
import re
|
||||
|
||||
SERVERS_AVAILABLE = [
|
||||
"sharepoint",
|
||||
"dropbox",
|
||||
"gogoanime",
|
||||
"weTransfer",
|
||||
"wixmp",
|
||||
"Yt",
|
||||
"mp4-upload",
|
||||
]
|
||||
API_BASE_URL = "allanime.day"
|
||||
API_REFERER = "https://allanime.to/"
|
||||
API_ENDPOINT = f"https://api.{API_BASE_URL}/api/"
|
||||
|
||||
# search constants
|
||||
DEFAULT_COUNTRY_OF_ORIGIN = "all"
|
||||
DEFAULT_NSFW = True
|
||||
DEFAULT_UNKNOWN = True
|
||||
DEFAULT_PER_PAGE = 40
|
||||
DEFAULT_PAGE = 1
|
||||
|
||||
# regex stuff
|
||||
|
||||
MP4_SERVER_JUICY_STREAM_REGEX = re.compile(
|
||||
r"video/mp4\",src:\"(https?://.*/video\.mp4)\""
|
||||
)
|
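MP4_SERVER_JUICY_STREAM_REGEX pulls the direct .mp4 URL out of the mp4upload embed page after the provider strips whitespace from the HTML. A small demonstration against a made-up embed snippet:

import re

MP4_SERVER_JUICY_STREAM_REGEX = re.compile(
    r"video/mp4\",src:\"(https?://.*/video\.mp4)\""
)
sample = 'player.src({type:"video/mp4",src:"https://s1.mp4upload.com/files/abc/video.mp4"});'
match = MP4_SERVER_JUICY_STREAM_REGEX.search(sample)
print(match.group(1) if match else "no stream found")
# -> https://s1.mp4upload.com/files/abc/video.mp4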
||||
|
||||
@@ -1,56 +1,56 @@
|
||||
ALLANIME_SEARCH_GQL = """
|
||||
query(
|
||||
$search: SearchInput
|
||||
$limit: Int
|
||||
$page: Int
|
||||
$translationType: VaildTranslationTypeEnumType
|
||||
$countryOrigin: VaildCountryOriginEnumType
|
||||
) {
|
||||
shows(
|
||||
search: $search
|
||||
limit: $limit
|
||||
page: $page
|
||||
translationType: $translationType
|
||||
countryOrigin: $countryOrigin
|
||||
) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
edges {
|
||||
_id
|
||||
name
|
||||
availableEpisodes
|
||||
__typename
|
||||
}
|
||||
SEARCH_GQL = """
|
||||
query (
|
||||
$search: SearchInput
|
||||
$limit: Int
|
||||
$page: Int
|
||||
$translationType: VaildTranslationTypeEnumType
|
||||
$countryOrigin: VaildCountryOriginEnumType
|
||||
) {
|
||||
shows(
|
||||
search: $search
|
||||
limit: $limit
|
||||
page: $page
|
||||
translationType: $translationType
|
||||
countryOrigin: $countryOrigin
|
||||
) {
|
||||
pageInfo {
|
||||
total
|
||||
}
|
||||
edges {
|
||||
_id
|
||||
name
|
||||
availableEpisodes
|
||||
__typename
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
ALLANIME_EPISODES_GQL = """\
|
||||
query ($showId: String!, $translationType: VaildTranslationTypeEnumType!, $episodeString: String!) {
|
||||
episode(
|
||||
showId: $showId
|
||||
translationType: $translationType
|
||||
episodeString: $episodeString
|
||||
) {
|
||||
|
||||
episodeString
|
||||
sourceUrls
|
||||
notes
|
||||
}
|
||||
}"""
|
||||
EPISODES_GQL = """\
|
||||
query (
|
||||
$showId: String!
|
||||
$translationType: VaildTranslationTypeEnumType!
|
||||
$episodeString: String!
|
||||
) {
|
||||
episode(
|
||||
showId: $showId
|
||||
translationType: $translationType
|
||||
episodeString: $episodeString
|
||||
) {
|
||||
episodeString
|
||||
sourceUrls
|
||||
notes
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
ALLANIME_SHOW_GQL = """
|
||||
SHOW_GQL = """
|
||||
query ($showId: String!) {
|
||||
show(
|
||||
_id: $showId
|
||||
) {
|
||||
|
||||
_id
|
||||
name
|
||||
availableEpisodesDetail
|
||||
|
||||
}
|
||||
show(_id: $showId) {
|
||||
_id
|
||||
name
|
||||
availableEpisodesDetail
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import logging
|
||||
import random
|
||||
import re
|
||||
import time
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
@@ -11,119 +10,142 @@ from yt_dlp.utils import (
|
||||
)
|
||||
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from .constants import (
|
||||
ANIMEPAHE_BASE,
|
||||
ANIMEPAHE_ENDPOINT,
|
||||
JUICY_STREAM_REGEX,
|
||||
REQUEST_HEADERS,
|
||||
SERVER_HEADERS,
|
||||
)
|
||||
from .utils import process_animepahe_embed_page
|
||||
from .extractors import process_animepahe_embed_page
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..types import Anime
|
||||
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimeSearchResult
|
||||
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
|
||||
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimePaheSearchResult
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
KWIK_RE = re.compile(r"Player\|(.+?)'")
|
||||
|
||||
|
||||
# TODO: hack this to completion
|
||||
class AnimePaheApi(AnimeProvider):
|
||||
class AnimePahe(AnimeProvider):
|
||||
search_page: "AnimePaheSearchPage"
|
||||
anime: "AnimePaheAnimePage"
|
||||
HEADERS = REQUEST_HEADERS
|
||||
|
||||
def search_for_anime(self, user_query: str, *args):
|
||||
try:
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
|
||||
response = self.session.get(
|
||||
url,
|
||||
@debug_provider
|
||||
def search_for_anime(self, search_keywords: str, translation_type, **kwargs):
|
||||
response = self.session.get(
|
||||
ANIMEPAHE_ENDPOINT, params={"m": "search", "q": search_keywords}
|
||||
)
|
||||
response.raise_for_status()
|
||||
data: "AnimePaheSearchPage" = response.json()
|
||||
results = []
|
||||
for result in data["data"]:
|
||||
results.append(
|
||||
{
|
||||
"availableEpisodes": list(range(result["episodes"])),
|
||||
"id": result["session"],
|
||||
"title": result["title"],
|
||||
"type": result["type"],
|
||||
"year": result["year"],
|
||||
"score": result["score"],
|
||||
"status": result["status"],
|
||||
"season": result["season"],
|
||||
"poster": result["poster"],
|
||||
}
|
||||
)
|
||||
self.store.set(
|
||||
str(result["session"]),
|
||||
"search_result",
|
||||
result,
|
||||
)
|
||||
if not response.status_code == 200:
|
||||
return
|
||||
data: "AnimePaheSearchPage" = response.json()
|
||||
self.search_page = data
|
||||
|
||||
return {
|
||||
"pageInfo": {
|
||||
"total": data["total"],
|
||||
"perPage": data["per_page"],
|
||||
"currentPage": data["current_page"],
|
||||
return {
|
||||
"pageInfo": {
|
||||
"total": data["total"],
|
||||
"perPage": data["per_page"],
|
||||
"currentPage": data["current_page"],
|
||||
},
|
||||
"results": results,
|
||||
}
|
||||
|
||||
@debug_provider
|
||||
def _pages_loader(
|
||||
self,
|
||||
data,
|
||||
session_id,
|
||||
params,
|
||||
page,
|
||||
standardized_episode_number,
|
||||
):
|
||||
response = self.session.get(ANIMEPAHE_ENDPOINT, params=params)
|
||||
response.raise_for_status()
|
||||
if not data:
|
||||
data.update(response.json())
|
||||
else:
|
||||
if ep_data := response.json().get("data"):
|
||||
data["data"].extend(ep_data)
|
||||
if response.json()["next_page_url"]:
|
||||
# TODO: Refine this
|
||||
time.sleep(
|
||||
random.choice(
|
||||
[
|
||||
0.25,
|
||||
0.1,
|
||||
0.5,
|
||||
0.75,
|
||||
1,
|
||||
]
|
||||
)
|
||||
)
|
||||
page += 1
|
||||
self._pages_loader(
|
||||
data,
|
||||
session_id,
|
||||
params={
|
||||
"m": "release",
|
||||
"page": page,
|
||||
"id": session_id,
|
||||
"sort": "episode_asc",
|
||||
},
|
||||
"results": [
|
||||
{
|
||||
"availableEpisodes": list(range(result["episodes"])),
|
||||
"id": result["session"],
|
||||
"title": result["title"],
|
||||
"type": result["type"],
|
||||
"year": result["year"],
|
||||
"score": result["score"],
|
||||
"status": result["status"],
|
||||
"season": result["season"],
|
||||
"poster": result["poster"],
|
||||
}
|
||||
for result in data["data"]
|
||||
],
|
||||
}
|
||||
page=page,
|
||||
standardized_episode_number=standardized_episode_number,
|
||||
)
|
||||
else:
|
||||
for episode in data.get("data", []):
|
||||
if episode["episode"] % 1 == 0:
|
||||
standardized_episode_number += 1
|
||||
episode.update({"episode": standardized_episode_number})
|
||||
else:
|
||||
standardized_episode_number += episode["episode"] % 1
|
||||
episode.update({"episode": standardized_episode_number})
|
||||
standardized_episode_number = int(standardized_episode_number)
|
||||
return data
|
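The tail of `_pages_loader` rebuilds a continuous episode numbering because AnimePahe can report fractional or offset episode numbers. A worked illustration of the arithmetic with made-up values (the exact placement of the final `int()` cast is not visible in this diff, so treat this as a sketch):

reported = [1, 2, 3.5, 4]        # hypothetical numbers as returned by the API
standardized, renumbered = 0, []
for ep in reported:
    if ep % 1 == 0:
        standardized += 1        # whole episodes advance the counter by one
    else:
        standardized += ep % 1   # fractional episodes advance by the fraction
    renumbered.append(standardized)
    standardized = int(standardized)
# renumbered == [1, 2, 2.5, 3]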
||||
|
||||
except Exception as e:
|
||||
logger.error(f"AnimePahe(search): {e}")
|
||||
return {}
|
||||
|
||||
def get_anime(self, session_id: str, *args):
|
||||
@debug_provider
|
||||
def get_anime(self, session_id: str, **kwargs):
|
||||
page = 1
|
||||
try:
|
||||
anime_result: "AnimeSearchResult" = [
|
||||
anime
|
||||
for anime in self.search_page["data"]
|
||||
if anime["session"] == session_id
|
||||
][0]
|
||||
standardized_episode_number = 0
|
||||
if d := self.store.get(str(session_id), "search_result"):
|
||||
anime_result: "AnimePaheSearchResult" = d
|
||||
data: "AnimePaheAnimePage" = {} # pyright:ignore
|
||||
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
|
||||
def _pages_loader(
|
||||
url,
|
||||
page,
|
||||
):
|
||||
response = self.session.get(
|
||||
url,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
if not data:
|
||||
data.update(response.json())
|
||||
else:
|
||||
if ep_data := response.json().get("data"):
|
||||
data["data"].extend(ep_data)
|
||||
if response.json()["next_page_url"]:
|
||||
# TODO: Refine this
|
||||
time.sleep(
|
||||
random.choice(
|
||||
[
|
||||
0.25,
|
||||
0.1,
|
||||
0.5,
|
||||
0.75,
|
||||
1,
|
||||
]
|
||||
)
|
||||
)
|
||||
page += 1
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
data = self._pages_loader(
|
||||
data,
|
||||
session_id,
|
||||
params={
|
||||
"m": "release",
|
||||
"id": session_id,
|
||||
"sort": "episode_asc",
|
||||
"page": page,
|
||||
},
|
||||
page=page,
|
||||
standardized_episode_number=standardized_episode_number,
|
||||
)
|
||||
|
||||
if not data:
|
||||
return {}
|
||||
self.anime = data # pyright:ignore
|
||||
data["title"] = anime_result["title"] # pyright:ignore
|
||||
self.store.set(str(session_id), "anime_info", data)
|
||||
episodes = list(map(str, [episode["episode"] for episode in data["data"]]))
|
||||
title = ""
|
||||
return {
|
||||
@@ -150,91 +172,167 @@ class AnimePaheApi(AnimeProvider):
|
||||
for episode in data["data"]
|
||||
],
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"AnimePahe(anime): {e}")
|
||||
return {}
|
||||
|
||||
def get_episode_streams(
|
||||
self, anime: "Anime", episode_number: str, translation_type, *args
|
||||
):
|
||||
try:
|
||||
# extract episode details from memory
|
||||
episode = [
|
||||
episode
|
||||
for episode in self.anime["data"]
|
||||
if float(episode["episode"]) == float(episode_number)
|
||||
]
|
||||
@debug_provider
|
||||
    def _get_server(self, episode, res_dicts, anime_title, translation_type):
        # get all links
        streams = {
            "server": "kwik",
            "links": [],
            "episode_title": f"{episode['title'] or anime_title}; Episode {episode['episode']}",
            "subtitles": [],
            "headers": {},
        }
        for res_dict in res_dicts:
            # get embed url
            embed_url = res_dict["data-src"]
            data_audio = "dub" if res_dict["data-audio"] == "eng" else "sub"
            # filter streams by translation_type
            if data_audio != translation_type:
                continue

            if not embed_url:
                logger.warning(
                    "[ANIMEPAHE-WARN]: embed url not found please report to the developers"
                )
                return []

            # get embed page
            embed_response = self.session.get(
                embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
            )
            embed_response.raise_for_status()
            embed_page = embed_response.text

            decoded_js = process_animepahe_embed_page(embed_page)
            if not decoded_js:
                logger.error("[ANIMEPAHE-ERROR]: failed to decode embed page")
                continue
            juicy_stream = JUICY_STREAM_REGEX.search(decoded_js)
            if not juicy_stream:
                logger.error("[ANIMEPAHE-ERROR]: failed to find juicy stream")
                continue
            juicy_stream = juicy_stream.group(1)
            # add the link
            streams["links"].append(
                {
                    "quality": res_dict["data-resolution"],
                    "translation_type": data_audio,
                    "link": juicy_stream,
                }
            )
        return streams

    @debug_provider
    def get_episode_streams(
        self, anime_id, episode_number: str, translation_type, **kwargs
    ):
        anime_title = ""
        # extract episode details from memory
        anime_info = self.store.get(str(anime_id), "anime_info")
        if not anime_info:
            logger.error(
                f"[ANIMEPAHE-ERROR]: Anime with ID {anime_id} not found in store"
            )
            return

        anime_title = anime_info["title"]
        episode = next(
            (
                ep
                for ep in anime_info["data"]
                if float(ep["episode"]) == float(episode_number)
            ),
            None,
        )

        if not episode:
            logger.error(
                f"[ANIMEPAHE-ERROR]: Episode {episode_number} doesn't exist for anime {anime_title}"
            )
            return

        # fetch the episode page
        url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
        response = self.session.get(url)

        response.raise_for_status()
        # get the element containing links to juicy streams
        c = get_element_by_id("resolutionMenu", response.text)
        resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
        # convert the elements containing embed links to a neat dict containing:
        # data-src
        # data-audio
        # data-resolution
        res_dicts = [extract_attributes(item) for item in resolutionMenuItems]
        if _server := self._get_server(
            episode, res_dicts, anime_title, translation_type
        ):
            yield _server


if __name__ == "__main__":
    import subprocess

    animepahe = AnimePahe(cache_requests="True", use_persistent_provider_store="False")
    search_term = input("Enter the search term for the anime: ")
    translation_type = input("Enter the translation type (sub/dub): ")

    search_results = animepahe.search_for_anime(
        search_keywords=search_term, translation_type=translation_type
    )

    if not search_results or not search_results["results"]:
        print("No results found.")
        exit()

    print("Search Results:")
    for idx, result in enumerate(search_results["results"], start=1):
        print(f"{idx}. {result['title']} (ID: {result['id']})")

    anime_choice = int(input("Enter the number of the anime you want to watch: ")) - 1
    anime_id = search_results["results"][anime_choice]["id"]

    anime_details = animepahe.get_anime(anime_id)

    if anime_details is None:
        print("Failed to get anime details.")
        exit()
    print(f"Selected Anime: {anime_details['title']}")

    print("Available Episodes:")
    for idx, episode in enumerate(
        sorted(anime_details["availableEpisodesDetail"][translation_type], key=float),
        start=1,
    ):
        print(f"{idx}. Episode {episode}")

    episode_choice = (
        int(input("Enter the number of the episode you want to watch: ")) - 1
    )
    episode_number = anime_details["availableEpisodesDetail"][translation_type][
        episode_choice
    ]

    streams = list(
        animepahe.get_episode_streams(anime_id, episode_number, translation_type)
    )
    if not streams:
        print("No streams available.")
        exit()

    print("Available Streams:")
    for idx, stream in enumerate(streams, start=1):
        print(f"{idx}. Server: {stream['server']}")

    server_choice = int(input("Enter the number of the server you want to use: ")) - 1
    selected_stream = streams[server_choice]

    stream_link = selected_stream["links"][0]["link"]
    mpv_args = ["mpv", stream_link]
    headers = selected_stream["headers"]
    if headers:
        mpv_headers = "--http-header-fields="
        for header_name, header_value in headers.items():
            mpv_headers += f"{header_name}:{header_value},"
        mpv_args.append(mpv_headers)
    subprocess.run(mpv_args)

@@ -1,6 +1,8 @@
+import re
+
 ANIMEPAHE = "animepahe.ru"
 ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
-ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
+ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api"
 
 SERVERS_AVAILABLE = ["kwik"]
 REQUEST_HEADERS = {
@@ -31,3 +33,5 @@ SERVER_HEADERS = {
     "Priority": "u=4",
     "TE": "trailers",
 }
+JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
+KWIK_RE = re.compile(r"Player\|(.+?)'")
75
fastanime/libs/anime_provider/animepahe/extractors.py
Normal file
@@ -0,0 +1,75 @@
# from ..utils import int2base
import re

from yt_dlp.utils import encode_base_n, get_element_text_and_html_by_tag


def animepahe_key_creator(c: int, a: int):
    if c < a:
        val_a = ""
    else:
        val_a = animepahe_key_creator(int(c / a), a)
    c = c % a
    if c > 35:
        val_b = chr(c + 29)
    else:
        val_b = encode_base_n(c, 36)
    return val_a + val_b


def animepahe_embed_decoder(
    encoded_js_p: str,
    base_a: int,
    no_of_keys_c: int,
    values_to_replace_with_k: list,
):
    decode_mapper_d: dict = {}
    for i in range(no_of_keys_c):
        key = animepahe_key_creator(i, base_a)
        val = values_to_replace_with_k[i] or key
        decode_mapper_d[key] = val
    return re.sub(
        r"\b\w+\b", lambda match: decode_mapper_d[match.group(0)], encoded_js_p
    )


PARAMETERS_REGEX = re.compile(r"eval\(function\(p,a,c,k,e,d\)\{.*\}\((.*?)\)\)$")
ENCODE_JS_REGEX = re.compile(r"'(.*?);',(\d+),(\d+),'(.*)'\.split")


def process_animepahe_embed_page(embed_page: str):
    encoded_js_string = ""
    embed_page_content = embed_page
    for _ in range(8):
        text, html = get_element_text_and_html_by_tag("script", embed_page_content)
        if not text:
            embed_page_content = re.sub(html, "", embed_page_content)
            continue
        encoded_js_string = text.strip()
        break
    if not encoded_js_string:
        return
    obfuscated_js_parameter_match = PARAMETERS_REGEX.search(encoded_js_string)
    if not obfuscated_js_parameter_match:
        return
    parameter_string = obfuscated_js_parameter_match.group(1)
    encoded_js_parameter_string = ENCODE_JS_REGEX.search(parameter_string)
    if not encoded_js_parameter_string:
        return
    p: str = encoded_js_parameter_string.group(1)
    a: int = int(encoded_js_parameter_string.group(2))
    c: int = int(encoded_js_parameter_string.group(3))
    k: list = encoded_js_parameter_string.group(4).split("|")
    return animepahe_embed_decoder(p, a, c, k).replace("\\", "")

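# Editor's aside (not part of the original file): a minimal sketch of how these
# helpers are meant to be driven, assuming `embed_url` is a kwik embed page URL
# taken from AnimePahe's resolutionMenu and that SERVER_HEADERS / JUICY_STREAM_REGEX
# come from this provider's constants module; names outside this file are assumptions.
#
#   import requests
#   from .constants import JUICY_STREAM_REGEX, SERVER_HEADERS
#
#   page = requests.get(embed_url, headers=SERVER_HEADERS).text
#   decoded_js = process_animepahe_embed_page(page)  # unpacks eval(function(p,a,c,k,e,d){...})
#   if decoded_js and (m := JUICY_STREAM_REGEX.search(decoded_js)):
#       m3u8_link = m.group(1)  # the "juicy" HLS stream url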
if __name__ == "__main__":
|
||||
# Testing time
|
||||
filepath = input("Enter file name: ")
|
||||
if filepath:
|
||||
with open(filepath, "r") as file:
|
||||
data = file.read()
|
||||
else:
|
||||
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))</script>"""
|
||||
|
||||
print(process_animepahe_embed_page(data))
|
||||
@@ -1,7 +1,7 @@
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
|
||||
class AnimeSearchResult(TypedDict):
|
||||
class AnimePaheSearchResult(TypedDict):
|
||||
id: int
|
||||
title: str
|
||||
type: str
|
||||
@@ -21,7 +21,7 @@ class AnimePaheSearchPage(TypedDict):
|
||||
last_page: int
|
||||
_from: int
|
||||
to: int
|
||||
data: list[AnimeSearchResult]
|
||||
data: list[AnimePaheSearchResult]
|
||||
|
||||
|
||||
class Episode(TypedDict):
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
# from ..utils import int2base
|
||||
import re
|
||||
|
||||
from yt_dlp.utils import encode_base_n, get_element_text_and_html_by_tag
|
||||
|
||||
|
||||
def animepahe_key_creator(c: int, a: int):
|
||||
if c < a:
|
||||
val_a = ""
|
||||
else:
|
||||
val_a = animepahe_key_creator(int(c / a), a)
|
||||
c = c % a
|
||||
if c > 35:
|
||||
val_b = chr(c + 29)
|
||||
else:
|
||||
val_b = encode_base_n(c, 36)
|
||||
return val_a + val_b
|
||||
|
||||
|
||||
def animepahe_embed_decoder(
|
||||
encoded_js_p: str,
|
||||
base_a: int,
|
||||
no_of_keys_c: int,
|
||||
key_values_k: list,
|
||||
decode_mapper_d: dict = {},
|
||||
):
|
||||
for i in range(no_of_keys_c):
|
||||
key = animepahe_key_creator(i, base_a)
|
||||
val = key_values_k[i] or key
|
||||
decode_mapper_d[key] = val
|
||||
return re.sub(
|
||||
r"\b\w+\b", lambda match: decode_mapper_d[match.group(0)], encoded_js_p
|
||||
)
|
||||
|
||||
|
||||
PARAMETERS_REGEX = re.compile(r"eval\(function\(p,a,c,k,e,d\)\{.*\}\((.*?)\)\)$")
|
||||
ENCODE_JS_REGEX = re.compile(r"'(.*?);',(\d+),(\d+),'(.*)'\.split")
|
||||
|
||||
|
||||
def process_animepahe_embed_page(embed_page: str):
|
||||
encoded_js_string = ""
|
||||
embed_page_content = embed_page
|
||||
for _ in range(8):
|
||||
text, html = get_element_text_and_html_by_tag("script", embed_page_content)
|
||||
if not text:
|
||||
embed_page_content = re.sub(html, "", embed_page_content)
|
||||
continue
|
||||
encoded_js_string = text.strip()
|
||||
break
|
||||
if not encoded_js_string:
|
||||
return
|
||||
obsfucated_js_parameter_match = PARAMETERS_REGEX.search(encoded_js_string)
|
||||
if not obsfucated_js_parameter_match:
|
||||
return
|
||||
parameter_string = obsfucated_js_parameter_match.group(1)
|
||||
encoded_js_parameter_string = ENCODE_JS_REGEX.search(parameter_string)
|
||||
if not encoded_js_parameter_string:
|
||||
return
|
||||
p: str = encoded_js_parameter_string.group(1)
|
||||
a: int = int(encoded_js_parameter_string.group(2))
|
||||
c: int = int(encoded_js_parameter_string.group(3))
|
||||
k: list = encoded_js_parameter_string.group(4).split("|")
|
||||
return animepahe_embed_decoder(p, a, c, k).replace("\\", "")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
data = """<script>eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('f $7={H:a(2){4 B(9.7.h(y z("(?:(?:^|.*;)\\\\s*"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=\\\\s*([^;]*).*$)|^.*$"),"$1"))||G},E:a(2,q,3,6,5,t){k(!2||/^(?:8|r\\-v|o|m|p)$/i.D(2)){4 w}f b="";k(3){F(3.J){j K:b=3===P?"; 8=O, I N Q M:u:u A":"; r-v="+3;n;j L:b="; 8="+3;n;j S:b="; 8="+3.Z();n}}9.7=d(2)+"="+d(q)+b+(5?"; m="+5:"")+(6?"; o="+6:"")+(t?"; p":"");4 x},Y:a(2,6,5){k(!2||!11.C(2)){4 w}9.7=d(2)+"=; 8=12, R 10 W l:l:l A"+(5?"; m="+5:"")+(6?"; o="+6:"");4 x},C:a(2){4(y z("(?:^|;\\\\s*)"+d(2).h(/[\\-\\.\\+\\*]/g,"\\\\$&")+"\\\\s*\\\\=")).D(9.7)},X:a(){f c=9.7.h(/((?:^|\\s*;)[^\\=]+)(?=;|$)|^\\s*|\\s*(?:\\=[^;]*)?(?:\\1|$)/g,"").T(/\\s*(?:\\=[^;]*)?;\\s*/);U(f e=0;e<c.V;e++){c[e]=B(c[e])}4 c}};',62,65,'||sKey|vEnd|return|sDomain|sPath|cookie|expires|document|function|sExpires|aKeys|encodeURIComponent|nIdx|var||replace||case|if|00|domain|break|path|secure|sValue|max||bSecure|59|age|false|true|new|RegExp|GMT|decodeURIComponent|hasItem|test|setItem|switch|null|getItem|31|constructor|Number|String|23|Dec|Fri|Infinity|9999|01|Date|split|for|length|1970|keys|removeItem|toUTCString|Jan|this|Thu'.split('|'),0,{}));eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('h o=\'1D://1C-E.1B.1A.1z/1y/E/1x/1w/1v.1u\';h d=s.r(\'d\');h 0=B 1t(d,{\'1s\':{\'1r\':i},\'1q\':\'16:9\',\'D\':1,\'1p\':5,\'1o\':{\'1n\':\'1m\'},1l:[\'7-1k\',\'7\',\'1j\',\'1i-1h\',\'1g\',\'1f-1e\',\'1d\',\'D\',\'1c\',\'1b\',\'1a\',\'19\',\'C\',\'18\'],\'C\':{\'17\':i}});8(!A.15()){d.14=o}x{j z={13:12,11:10,Z:Y,X:i,W:i};h c=B A(z);c.V(o);c.U(d);g.c=c}0.3("T",6=>{g.S.R.Q("P")});0.O=1;k v(b,n,m){8(b.y){b.y(n,m,N)}x 8(b.w){b.w(\'3\'+n,m)}}j 4=k(l){g.M.L(l,\'*\')};v(g,\'l\',k(e){j a=e.a;8(a===\'7\')0.7();8(a===\'f\')0.f();8(a===\'u\')0.u()});0.3(\'t\',6=>{4(\'t\')});0.3(\'7\',6=>{4(\'7\')});0.3(\'f\',6=>{4(\'f\')});0.3(\'K\',6=>{4(0.q);s.r(\'.J-I\').H=G(0.q.F(2))});0.3(\'p\',6=>{4(\'p\')});',62,102,'player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|01|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength|180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|b92a392054c041a3f9c6eecabeb0e127183f44e547828447b10bca8d77523e6f|03|stream|org|nextcdn|files|eu|https'.split('|'),0,{}))"""
|
||||
a = 62
|
||||
c = 102
|
||||
k = "player|||on|sendMessage||event|play|if||data|element|hls|video||pause|window|const|true|var|function|message|eventHandler|eventName|source|ended|currentTime|querySelector|document|ready|stop|bindEvent|attachEvent|else|addEventListener|config|Hls|new|fullscreen|volume|toFixed|String|innerHTML|timestamp|ss|timeupdate|postMessage|parent|false|speed|landscape|lock|orientation|screen|enterfullscreen|attachMedia|loadSource|lowLatencyMode|enableWorker|Infinity|backBufferLength|600|maxMaxBufferLength||180|maxBufferLength|src|isSupported||iosNative|capture|airplay|pip|settings|captions|mute|time|current|progress|forward|fast|rewind|large|controls|kwik|key|storage|seekTime|ratio|global|keyboard|Plyr|m3u8|uwu|cda74eaebce25a12f5e548f7c220bb5dc245700b0280bdb45ff98b2fe4803d2b|06|stream|org|nextcdn|files|eu|https".split(
|
||||
"|"
|
||||
)
|
||||
|
||||
p = "h o='1D://1C-11.1B.1A.1z/1y/11/1x/1w/1v.1u';h d=s.r('d');h 0=B 1t(d,{'1s':{'1r':i},'1q':'16:9','D':1,'1p':5,'1o':{'1n':'1m'},1l:['7-1k','7','1j','1i-1h','1g','1f-1e','1d','D','1c','1b','1a','19','C','18'],'C':{'17':i}});8(!A.15()){d.14=o}x{j z={13:12,10:Z,Y:X,W:i,V:i};h c=B A(z);c.U(o);c.T(d);g.c=c}0.3(\"S\",6=>{g.R.Q.P(\"O\")});0.N=1;k v(b,n,m){8(b.y){b.y(n,m,M)}x 8(b.w){b.w('3'+n,m)}}j 4=k(l){g.L.K(l,'*')};v(g,'l',k(e){j a=e.a;8(a==='7')0.7();8(a==='f')0.f();8(a==='u')0.u()});0.3('t',6=>{4('t')});0.3('7',6=>{4('7')});0.3('f',6=>{4('f')});0.3('J',6=>{4(0.q);s.r('.I-H').G=F(0.q.E(2))});0.3('p',6=>{4('p')});"
|
||||
result = animepahe_embed_decoder(
|
||||
p,
|
||||
a,
|
||||
c,
|
||||
k,
|
||||
)
|
||||
print(result) # Output: j player = B A();
|
||||
@@ -1,236 +0,0 @@
|
||||
import logging
|
||||
import re
|
||||
from html.parser import HTMLParser
|
||||
from itertools import cycle
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from yt_dlp.utils import (
|
||||
clean_html,
|
||||
extract_attributes,
|
||||
get_element_by_class,
|
||||
get_element_html_by_class,
|
||||
get_elements_by_class,
|
||||
get_elements_html_by_class,
|
||||
)
|
||||
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..utils import give_random_quality
|
||||
from .constants import SERVERS_AVAILABLE
|
||||
from .types import AniWatchStream
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
|
||||
IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
|
||||
|
||||
|
||||
class ParseAnchorAndImgTag(HTMLParser):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.img_tag = None
|
||||
self.a_tag = None
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
if tag == "img":
|
||||
self.img_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
if tag == "a":
|
||||
self.a_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
|
||||
|
||||
class AniWatchApi(AnimeProvider):
|
||||
# HEADERS = {"Referer": "https://hianime.to/home"}
|
||||
|
||||
def search_for_anime(self, anime_title: str, *args):
|
||||
try:
|
||||
query = quote_plus(anime_title)
|
||||
url = f"https://hianime.to/search?keyword={query}"
|
||||
response = self.session.get(url)
|
||||
if response.status_code != 200:
|
||||
return
|
||||
search_page = response.text
|
||||
search_results_html_items = get_elements_by_class("flw-item", search_page)
|
||||
results = []
|
||||
for search_results_html_item in search_results_html_items:
|
||||
film_poster_html = get_element_by_class(
|
||||
"film-poster", search_results_html_item
|
||||
)
|
||||
|
||||
if not film_poster_html:
|
||||
continue
|
||||
# get availableEpisodes
|
||||
episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
|
||||
episodes = clean_html(episodes_html) or 12
|
||||
|
||||
# get anime id and poster image url
|
||||
parser = ParseAnchorAndImgTag()
|
||||
parser.feed(film_poster_html)
|
||||
image_data = parser.img_tag
|
||||
anime_link_data = parser.a_tag
|
||||
if not image_data or not anime_link_data:
|
||||
continue
|
||||
|
||||
episodes = int(episodes)
|
||||
|
||||
# finally!!
|
||||
image_link = image_data["data-src"]
|
||||
anime_id = anime_link_data["data-id"]
|
||||
title = anime_link_data["title"]
|
||||
|
||||
results.append(
|
||||
{
|
||||
"availableEpisodes": list(range(1, episodes)),
|
||||
"id": anime_id,
|
||||
"title": title,
|
||||
"poster": image_link,
|
||||
}
|
||||
)
|
||||
self.search_results = results
|
||||
return {"pageInfo": {}, "results": results}
|
||||
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
def get_anime(self, aniwatch_id, *args):
|
||||
try:
|
||||
anime_result = {}
|
||||
for anime in self.search_results:
|
||||
if anime["id"] == aniwatch_id:
|
||||
anime_result = anime
|
||||
break
|
||||
anime_url = f"https://hianime.to/ajax/v2/episode/list/{aniwatch_id}"
|
||||
response = self.session.get(anime_url, timeout=10)
|
||||
if response.status_code == 200:
|
||||
response_json = response.json()
|
||||
aniwatch_anime_page = response_json["html"]
|
||||
episodes_info_container_html = get_element_html_by_class(
|
||||
"ss-list", aniwatch_anime_page
|
||||
)
|
||||
episodes_info_html_list = get_elements_html_by_class(
|
||||
"ep-item", episodes_info_container_html
|
||||
)
|
||||
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
|
||||
episodes_info_dicts = [
|
||||
extract_attributes(episode_dict)
|
||||
for episode_dict in episodes_info_html_list
|
||||
]
|
||||
episodes = [episode["data-number"] for episode in episodes_info_dicts]
|
||||
self.episodes_info = [
|
||||
{
|
||||
"id": episode["data-id"],
|
||||
"title": (
|
||||
(episode["title"] or "").replace(
|
||||
f"Episode {episode['data-number']}", ""
|
||||
)
|
||||
or anime_result["title"]
|
||||
)
|
||||
+ f"; Episode {episode['data-number']}",
|
||||
"episode": episode["data-number"],
|
||||
}
|
||||
for episode in episodes_info_dicts
|
||||
]
|
||||
return {
|
||||
"id": aniwatch_id,
|
||||
"availableEpisodesDetail": {
|
||||
"dub": episodes,
|
||||
"sub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"poster": anime_result["poster"],
|
||||
"title": anime_result["title"],
|
||||
"episodes_info": self.episodes_info,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
|
||||
def get_episode_streams(self, anime, episode, translation_type, *args):
|
||||
try:
|
||||
episode_details = [
|
||||
episode_details
|
||||
for episode_details in self.episodes_info
|
||||
if episode_details["episode"] == episode
|
||||
]
|
||||
if not episode_details:
|
||||
return
|
||||
episode_details = episode_details[0]
|
||||
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
|
||||
response = self.session.get(episode_url)
|
||||
if response.status_code == 200:
|
||||
response_json = response.json()
|
||||
episode_page_html = response_json["html"]
|
||||
servers_containers_html = get_elements_html_by_class(
|
||||
"ps__-list", episode_page_html
|
||||
)
|
||||
if not servers_containers_html:
|
||||
return
|
||||
# sub servers
|
||||
try:
|
||||
servers_html_sub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[0]
|
||||
)
|
||||
except Exception:
|
||||
logger.warn("AniWatch: sub not found")
|
||||
servers_html_sub = None
|
||||
|
||||
# dub servers
|
||||
try:
|
||||
servers_html_dub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[1]
|
||||
)
|
||||
except Exception:
|
||||
logger.warn("AniWatch: dub not found")
|
||||
servers_html_dub = None
|
||||
|
||||
if translation_type == "dub":
|
||||
servers_html = servers_html_dub
|
||||
else:
|
||||
servers_html = servers_html_sub
|
||||
if not servers_html:
|
||||
return
|
||||
for server_name, server_html in zip(
|
||||
cycle(SERVERS_AVAILABLE), servers_html
|
||||
):
|
||||
try:
|
||||
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
|
||||
servers_info = extract_attributes(server_html)
|
||||
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
|
||||
embed_response = self.session.get(embed_url)
|
||||
if embed_response.status_code == 200:
|
||||
embed_json = embed_response.json()
|
||||
raw_link_to_streams = embed_json["link"]
|
||||
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
|
||||
if not match:
|
||||
continue
|
||||
provider_domain = match.group(1)
|
||||
embed_type = match.group(2)
|
||||
episode_number = match.group(3)
|
||||
source_id = match.group(4)
|
||||
|
||||
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
|
||||
link_to_streams_response = self.session.get(link_to_streams)
|
||||
if link_to_streams_response.status_code == 200:
|
||||
juicy_streams_json: "AniWatchStream" = (
|
||||
link_to_streams_response.json()
|
||||
)
|
||||
yield {
|
||||
"headers": {},
|
||||
"subtitles": [
|
||||
{
|
||||
"url": track["file"],
|
||||
"language": track["label"],
|
||||
}
|
||||
for track in juicy_streams_json["tracks"]
|
||||
if track["kind"] == "captions"
|
||||
],
|
||||
"server": server_name,
|
||||
"episode_title": episode_details["title"],
|
||||
"links": give_random_quality(
|
||||
[
|
||||
{"link": link["file"], "type": link["type"]}
|
||||
for link in juicy_streams_json["sources"]
|
||||
]
|
||||
),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
@@ -1 +0,0 @@
|
||||
SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]
|
||||
@@ -1,26 +0,0 @@
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
|
||||
class AniWatchSkipTime(TypedDict):
|
||||
start: int
|
||||
end: int
|
||||
|
||||
|
||||
class AniWatchSource(TypedDict):
|
||||
file: str
|
||||
type: str
|
||||
|
||||
|
||||
class AniWatchTrack(TypedDict):
|
||||
file: str
|
||||
label: str
|
||||
kind: Literal["captions", "thumbnails", "audio"]
|
||||
|
||||
|
||||
class AniWatchStream(TypedDict):
|
||||
sources: list[AniWatchSource]
|
||||
tracks: list[AniWatchTrack]
|
||||
encrypted: bool
|
||||
intro: AniWatchSkipTime
|
||||
outro: AniWatchSkipTime
|
||||
server: int
|
||||
@@ -1,65 +0,0 @@
|
||||
from html.parser import HTMLParser
|
||||
|
||||
from yt_dlp.utils import clean_html, get_element_by_class, get_elements_by_class
|
||||
|
||||
from ..base_provider import AnimeProvider
|
||||
from .constants import ANIWAVE_BASE, SEARCH_HEADERS
|
||||
|
||||
|
||||
class ParseAnchorAndImgTag(HTMLParser):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.img_tag = None
|
||||
self.a_tag = None
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
if tag == "img":
|
||||
self.img_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
if tag == "a":
|
||||
self.a_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
|
||||
|
||||
class AniWaveApi(AnimeProvider):
|
||||
def search_for_anime(self, anime_title, *args):
|
||||
self.session.headers.update(SEARCH_HEADERS)
|
||||
search_url = f"{ANIWAVE_BASE}/filter"
|
||||
params = {"keyword": anime_title}
|
||||
res = self.session.get(search_url, params=params)
|
||||
search_page = res.text
|
||||
search_results_html_list = get_elements_by_class("item", search_page)
|
||||
results = []
|
||||
for result_html in search_results_html_list:
|
||||
aniposter_html = get_element_by_class("poster", result_html)
|
||||
episode_html = get_element_by_class("sub", aniposter_html)
|
||||
episodes = clean_html(episode_html) or 12
|
||||
if not aniposter_html:
|
||||
return
|
||||
parser = ParseAnchorAndImgTag()
|
||||
parser.feed(aniposter_html)
|
||||
image_data = parser.img_tag
|
||||
anime_link_data = parser.a_tag
|
||||
if not image_data or not anime_link_data:
|
||||
continue
|
||||
|
||||
episodes = int(episodes)
|
||||
|
||||
# finally!!
|
||||
image_link = image_data["src"]
|
||||
title = image_data["alt"]
|
||||
anime_id = anime_link_data["href"]
|
||||
|
||||
results.append(
|
||||
{
|
||||
"availableEpisodes": list(range(1, episodes)),
|
||||
"id": anime_id,
|
||||
"title": title,
|
||||
"poster": image_link,
|
||||
}
|
||||
)
|
||||
self.search_results = results
|
||||
return {"pageInfo": {}, "results": results}
|
||||
|
||||
def get_anime(self, anime_id, *args):
|
||||
anime_page_url = f"{ANIWAVE_BASE}{anime_id}"
|
||||
self.session.get(anime_page_url)
|
||||
# TODO: to be continued; mostly js so very difficult
|
||||
@@ -1,20 +0,0 @@
|
||||
ANIWAVE_BASE = "https://aniwave.to"
|
||||
|
||||
SEARCH_HEADERS = {
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
|
||||
"Accept-Language": "en-US,en;q=0.5",
|
||||
# 'Accept-Encoding': 'Utf-8',
|
||||
"Referer": "https://aniwave.to/filter",
|
||||
"DNT": "1",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"Sec-Fetch-Dest": "document",
|
||||
"Sec-Fetch-Mode": "navigate",
|
||||
"Sec-Fetch-Site": "same-origin",
|
||||
"Sec-Fetch-User": "?1",
|
||||
"Connection": "keep-alive",
|
||||
"Alt-Used": "aniwave.to",
|
||||
# 'Cookie': '__pf=1; usertype=guest; session=BElk9DJdO3sFdDmLiGxuNiM9eGYO1TjktGsmdwjV',
|
||||
"Priority": "u=0, i",
|
||||
# Requests doesn't support trailers
|
||||
# 'TE': 'trailers',
|
||||
}
|
||||
@@ -1,6 +1,11 @@
import os

import requests
from yt_dlp.utils.networking import random_user_agent

from ...constants import APP_CACHE_DIR
from .providers_store import ProviderStore


class AnimeProvider:
    session: requests.Session
@@ -8,6 +13,24 @@
    USER_AGENT = random_user_agent()
    HEADERS = {}

    def __init__(self) -> None:
        self.session = requests.session()
    def __init__(self, cache_requests, use_persistent_provider_store) -> None:
        if cache_requests.lower() == "true":
            from ..common.requests_cacher import CachedRequestsSession

            self.session = CachedRequestsSession(
                os.path.join(APP_CACHE_DIR, "cached_requests.db"),
                max_lifetime=int(
                    os.environ.get("FASTANIME_MAX_CACHE_LIFETIME", 259200)
                ),
            )
        else:
            self.session = requests.session()
        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
        if use_persistent_provider_store.lower() == "true":
            self.store = ProviderStore(
                "persistent",
                self.__class__.__name__,
                os.path.join(APP_CACHE_DIR, "anime_providers_store.db"),
            )
        else:
            self.store = ProviderStore("memory")

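# Editor's aside (not part of the original diff): the reworked constructor takes both
# flags as strings ("true"/"false"), matching how the AnimePahe demo above calls it.
# A minimal instantiation sketch, assuming the provider lives at animepahe/api.py as
# the other providers do; the import path is an assumption.
#
#   from fastanime.libs.anime_provider.animepahe.api import AnimePahe
#
#   provider = AnimePahe(cache_requests="true", use_persistent_provider_store="false")
#   results = provider.search_for_anime(
#       search_keywords="one piece", translation_type="sub"
#   )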
37
fastanime/libs/anime_provider/decorators.py
Normal file
@@ -0,0 +1,37 @@
import functools
import logging
import os

logger = logging.getLogger(__name__)


def debug_provider(provider_function):
    @functools.wraps(provider_function)
    def _provider_function_wrapper(self, *args, **kwargs):
        provider_name = self.__class__.__name__.upper()
        if not os.environ.get("FASTANIME_DEBUG"):
            try:
                return provider_function(self, *args, **kwargs)
            except Exception as e:
                logger.error(f"[{provider_name}@{provider_function.__name__}]: {e}")
        else:
            return provider_function(self, *args, **kwargs)

    return _provider_function_wrapper


def ensure_internet_connection(provider_function):
    @functools.wraps(provider_function)
    def _wrapper(*args, **kwargs):
        import requests

        try:
            requests.get("https://google.com", timeout=5)
        except requests.ConnectionError:
            from sys import exit

            print("You are not connected to the internet; Aborting...")
            exit(1)
        return provider_function(*args, **kwargs)

    return _wrapper
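# Editor's aside (not part of the original file): how a provider method is expected
# to opt in to the decorator above. `MyProvider` and `get_anime` are illustrative
# names; only the decorator itself comes from this module, and the absolute import
# paths are assumptions based on the repository layout shown in this diff.
#
#   from fastanime.libs.anime_provider.base_provider import AnimeProvider
#   from fastanime.libs.anime_provider.decorators import debug_provider
#
#   class MyProvider(AnimeProvider):
#       @debug_provider
#       def get_anime(self, anime_id, **kwargs):
#           raise RuntimeError("boom")
#
#   # With FASTANIME_DEBUG unset, the error is logged as
#   # "[MYPROVIDER@get_anime]: boom" and the call returns None instead of raising;
#   # with FASTANIME_DEBUG set, the exception propagates for easier debugging.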
274
fastanime/libs/anime_provider/hianime/api.py
Normal file
@@ -0,0 +1,274 @@
|
||||
import logging
|
||||
import re
|
||||
from html.parser import HTMLParser
|
||||
from itertools import cycle
|
||||
from urllib.parse import quote_plus
|
||||
|
||||
from yt_dlp.utils import (
|
||||
clean_html,
|
||||
extract_attributes,
|
||||
get_element_by_class,
|
||||
get_element_html_by_class,
|
||||
get_elements_by_class,
|
||||
get_elements_html_by_class,
|
||||
)
|
||||
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from ..utils import give_random_quality
|
||||
from .constants import SERVERS_AVAILABLE
|
||||
from .extractors import MegaCloud
|
||||
from .types import HiAnimeStream
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
|
||||
IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
|
||||
|
||||
|
||||
class ParseAnchorAndImgTag(HTMLParser):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.img_tag = None
|
||||
self.a_tag = None
|
||||
|
||||
def handle_starttag(self, tag, attrs):
|
||||
if tag == "img":
|
||||
self.img_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
if tag == "a":
|
||||
self.a_tag = {attr[0]: attr[1] for attr in attrs}
|
||||
|
||||
|
||||
class HiAnime(AnimeProvider):
|
||||
# HEADERS = {"Referer": "https://hianime.to/home"}
|
||||
|
||||
@debug_provider
|
||||
def search_for_anime(self, anime_title: str, translation_type, **kwargs):
|
||||
query = quote_plus(anime_title)
|
||||
url = f"https://hianime.to/search?keyword={query}"
|
||||
response = self.session.get(url)
|
||||
if not response.ok:
|
||||
return
|
||||
search_page = response.text
|
||||
search_results_html_items = get_elements_by_class("flw-item", search_page)
|
||||
results = []
|
||||
for search_results_html_item in search_results_html_items:
|
||||
film_poster_html = get_element_by_class(
|
||||
"film-poster", search_results_html_item
|
||||
)
|
||||
|
||||
if not film_poster_html:
|
||||
continue
|
||||
# get availableEpisodes
|
||||
episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
|
||||
episodes = clean_html(episodes_html) or 12
|
||||
|
||||
# get anime id and poster image url
|
||||
parser = ParseAnchorAndImgTag()
|
||||
parser.feed(film_poster_html)
|
||||
image_data = parser.img_tag
|
||||
anime_link_data = parser.a_tag
|
||||
if not image_data or not anime_link_data:
|
||||
continue
|
||||
|
||||
episodes = int(episodes)
|
||||
|
||||
# finally!!
|
||||
image_link = image_data["data-src"]
|
||||
anime_id = anime_link_data["data-id"]
|
||||
title = anime_link_data["title"]
|
||||
|
||||
result = {
|
||||
"availableEpisodes": list(range(1, episodes)),
|
||||
"id": anime_id,
|
||||
"title": title,
|
||||
"poster": image_link,
|
||||
}
|
||||
|
||||
results.append(result)
|
||||
|
||||
self.store.set(result["id"], "search_result", result)
|
||||
return {"pageInfo": {}, "results": results}
|
||||
|
||||
@debug_provider
|
||||
def get_anime(self, hianime_id, **kwargs):
|
||||
anime_result = {}
|
||||
if d := self.store.get(str(hianime_id), "search_result"):
|
||||
anime_result = d
|
||||
anime_url = f"https://hianime.to/ajax/v2/episode/list/{hianime_id}"
|
||||
response = self.session.get(anime_url, timeout=10)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
hianime_anime_page = response_json["html"]
|
||||
episodes_info_container_html = get_element_html_by_class(
|
||||
"ss-list", hianime_anime_page
|
||||
)
|
||||
episodes_info_html_list = get_elements_html_by_class(
|
||||
"ep-item", episodes_info_container_html
|
||||
)
|
||||
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
|
||||
episodes_info_dicts = [
|
||||
extract_attributes(episode_dict)
|
||||
for episode_dict in episodes_info_html_list
|
||||
]
|
||||
episodes = [episode["data-number"] for episode in episodes_info_dicts]
|
||||
episodes_info = [
|
||||
{
|
||||
"id": episode["data-id"],
|
||||
"title": (
|
||||
(episode["title"] or "").replace(
|
||||
f"Episode {episode['data-number']}", ""
|
||||
)
|
||||
or anime_result["title"]
|
||||
)
|
||||
+ f"; Episode {episode['data-number']}",
|
||||
"episode": episode["data-number"],
|
||||
}
|
||||
for episode in episodes_info_dicts
|
||||
]
|
||||
self.store.set(
|
||||
str(hianime_id),
|
||||
"anime_info",
|
||||
episodes_info,
|
||||
)
|
||||
return {
|
||||
"id": hianime_id,
|
||||
"availableEpisodesDetail": {
|
||||
"dub": episodes,
|
||||
"sub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"poster": anime_result["poster"],
|
||||
"title": anime_result["title"],
|
||||
"episodes_info": episodes_info,
|
||||
}
|
||||
|
||||
@debug_provider
|
||||
def get_episode_streams(self, anime_id, episode, translation_type, **kwargs):
|
||||
if d := self.store.get(str(anime_id), "anime_info"):
|
||||
episodes_info = d
|
||||
episode_details = [
|
||||
episode_details
|
||||
for episode_details in episodes_info
|
||||
if episode_details["episode"] == episode
|
||||
]
|
||||
if not episode_details:
|
||||
return
|
||||
episode_details = episode_details[0]
|
||||
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
|
||||
response = self.session.get(episode_url)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
episode_page_html = response_json["html"]
|
||||
servers_containers_html = get_elements_html_by_class(
|
||||
"ps__-list", episode_page_html
|
||||
)
|
||||
if not servers_containers_html:
|
||||
return
|
||||
# sub servers
|
||||
try:
|
||||
servers_html_sub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[0]
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: sub not found")
|
||||
servers_html_sub = None
|
||||
|
||||
# dub servers
|
||||
try:
|
||||
servers_html_dub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[1]
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: dub not found")
|
||||
servers_html_dub = None
|
||||
|
||||
if translation_type == "dub":
|
||||
servers_html = servers_html_dub
|
||||
else:
|
||||
servers_html = servers_html_sub
|
||||
if not servers_html:
|
||||
return
|
||||
|
||||
@debug_provider
|
||||
def _get_server(server_name, server_html):
|
||||
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
|
||||
servers_info = extract_attributes(server_html)
|
||||
server_id = servers_info["data-id"]
|
||||
embed_url = (
|
||||
f"https://hianime.to/ajax/v2/episode/sources?id={server_id}"
|
||||
)
|
||||
embed_response = self.session.get(embed_url)
|
||||
if embed_response.ok:
|
||||
embed_json = embed_response.json()
|
||||
raw_link_to_streams = embed_json["link"]
|
||||
match server_name:
|
||||
# TODO: Finish the other servers
|
||||
case "HD2":
|
||||
data = MegaCloud(self.session).extract(
|
||||
raw_link_to_streams
|
||||
)
|
||||
return {
|
||||
"headers": {},
|
||||
"subtitles": [
|
||||
{
|
||||
"url": track["file"],
|
||||
"language": track["label"],
|
||||
}
|
||||
for track in data["tracks"]
|
||||
if track["kind"] == "captions"
|
||||
],
|
||||
"server": server_name,
|
||||
"episode_title": episode_details["title"],
|
||||
"links": give_random_quality(
|
||||
[
|
||||
{"link": link["url"]}
|
||||
for link in data["sources"]
|
||||
]
|
||||
),
|
||||
}
|
||||
case _:
|
||||
# NOTE: THIS METHOD DOESN'T WORK; will get the other servers later
|
||||
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
|
||||
if not match:
|
||||
return
|
||||
provider_domain = match.group(1)
|
||||
embed_type = match.group(2)
|
||||
episode_number = match.group(3)
|
||||
source_id = match.group(4)
|
||||
|
||||
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
|
||||
link_to_streams_response = self.session.get(
|
||||
link_to_streams
|
||||
)
|
||||
if link_to_streams_response.ok:
|
||||
juicy_streams_json: "HiAnimeStream" = (
|
||||
link_to_streams_response.json()
|
||||
)
|
||||
|
||||
return {
|
||||
"headers": {},
|
||||
"subtitles": [
|
||||
{
|
||||
"url": track["file"],
|
||||
"language": track["label"],
|
||||
}
|
||||
for track in juicy_streams_json["tracks"]
|
||||
if track["kind"] == "captions"
|
||||
],
|
||||
"server": server_name,
|
||||
"episode_title": episode_details["title"],
|
||||
"links": give_random_quality(
|
||||
[
|
||||
{"link": link["file"]}
|
||||
for link in juicy_streams_json["sources"]
|
||||
]
|
||||
),
|
||||
}
|
||||
|
||||
for server_name, server_html in zip(
|
||||
cycle(SERVERS_AVAILABLE), servers_html
|
||||
):
|
||||
if server_name == "HD2":
|
||||
if server := _get_server(server_name, server_html):
|
||||
yield server
|
||||
26
fastanime/libs/anime_provider/hianime/constants.py
Normal file
@@ -0,0 +1,26 @@
|
||||
SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]
|
||||
""""
|
||||
| "hd-1"
|
||||
| "hd-2"
|
||||
| "megacloud"
|
||||
| "streamsb"
|
||||
| "streamtape";
|
||||
|
||||
"""
|
||||
|
||||
|
||||
"""
|
||||
VidStreaming = "hd-1",
|
||||
MegaCloud = "megacloud",
|
||||
StreamSB = "streamsb",
|
||||
StreamTape = "streamtape",
|
||||
VidCloud = "hd-2",
|
||||
AsianLoad = "asianload",
|
||||
GogoCDN = "gogocdn",
|
||||
MixDrop = "mixdrop",
|
||||
UpCloud = "upcloud",
|
||||
VizCloud = "vizcloud",
|
||||
MyCloud = "mycloud",
|
||||
Filemoon = "filemoon",
|
||||
|
||||
"""
|
||||
191
fastanime/libs/anime_provider/hianime/extractors.py
Normal file
@@ -0,0 +1,191 @@
|
||||
import hashlib
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
from base64 import b64decode
|
||||
from typing import TYPE_CHECKING, Dict, List
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ...common.requests_cacher import CachedRequestsSession
|
||||
|
||||
|
||||
# Constants
|
||||
megacloud = {
|
||||
"script": "https://megacloud.tv/js/player/a/prod/e1-player.min.js?v=",
|
||||
"sources": "https://megacloud.tv/embed-2/ajax/e-1/getSources?id=",
|
||||
}
|
||||
|
||||
|
||||
class HiAnimeError(Exception):
|
||||
def __init__(self, message, context, status_code):
|
||||
super().__init__(f"{context}: {message} (Status: {status_code})")
|
||||
self.context = context
|
||||
self.status_code = status_code
|
||||
|
||||
|
||||
# Adapted from https://github.com/ghoshRitesh12/aniwatch
|
||||
class MegaCloud:
|
||||
def __init__(self, session):
|
||||
self.session: "CachedRequestsSession" = session
|
||||
|
||||
def extract(self, video_url: str) -> Dict:
|
||||
try:
|
||||
extracted_data = {
|
||||
"tracks": [],
|
||||
"intro": {"start": 0, "end": 0},
|
||||
"outro": {"start": 0, "end": 0},
|
||||
"sources": [],
|
||||
}
|
||||
|
||||
video_id = video_url.split("/")[-1].split("?")[0]
|
||||
response = self.session.get(
|
||||
megacloud["sources"] + video_id,
|
||||
headers={
|
||||
"Accept": "*/*",
|
||||
"X-Requested-With": "XMLHttpRequest",
|
||||
"Referer": video_url,
|
||||
},
|
||||
fresh=1, # pyright: ignore
|
||||
)
|
||||
srcs_data = response.json()
|
||||
|
||||
if not srcs_data:
|
||||
raise HiAnimeError(
|
||||
"Url may have an invalid video id", "getAnimeEpisodeSources", 400
|
||||
)
|
||||
|
||||
encrypted_string = srcs_data["sources"]
|
||||
if not srcs_data["encrypted"] and isinstance(encrypted_string, list):
|
||||
extracted_data.update(
|
||||
{
|
||||
"intro": srcs_data["intro"],
|
||||
"outro": srcs_data["outro"],
|
||||
"tracks": srcs_data["tracks"],
|
||||
"sources": [
|
||||
{"url": s["file"], "type": s["type"]}
|
||||
for s in encrypted_string
|
||||
],
|
||||
}
|
||||
)
|
||||
return extracted_data
|
||||
|
||||
# Fetch decryption script
|
||||
script_response = self.session.get(
|
||||
megacloud["script"] + str(int(time.time() * 1000)),
|
||||
fresh=1, # pyright: ignore
|
||||
)
|
||||
script_text = script_response.text
|
||||
if not script_text:
|
||||
raise HiAnimeError(
|
||||
"Couldn't fetch script to decrypt resource",
|
||||
"getAnimeEpisodeSources",
|
||||
500,
|
||||
)
|
||||
|
||||
vars_ = self.extract_variables(script_text)
|
||||
if not vars_:
|
||||
raise Exception(
|
||||
"Can't find variables. Perhaps the extractor is outdated."
|
||||
)
|
||||
|
||||
secret, encrypted_source = self.get_secret(encrypted_string, vars_)
|
||||
decrypted = self.decrypt(encrypted_source, secret)
|
||||
|
||||
try:
|
||||
sources = json.loads(decrypted)
|
||||
extracted_data.update(
|
||||
{
|
||||
"intro": srcs_data["intro"],
|
||||
"outro": srcs_data["outro"],
|
||||
"tracks": srcs_data["tracks"],
|
||||
"sources": [
|
||||
{"url": s["file"], "type": s["type"]} for s in sources
|
||||
],
|
||||
}
|
||||
)
|
||||
return extracted_data
|
||||
except Exception:
|
||||
raise HiAnimeError(
|
||||
"Failed to decrypt resource", "getAnimeEpisodeSources", 500
|
||||
)
|
||||
except Exception as err:
|
||||
raise err
|
||||
|
||||
def extract_variables(self, text: str) -> List[List[int]]:
|
||||
regex = r"case\s*0x[0-9a-f]+:(?![^;]*=partKey)\s*\w+\s*=\s*(\w+)\s*,\s*\w+\s*=\s*(\w+);"
|
||||
matches = re.finditer(regex, text)
|
||||
vars_ = []
|
||||
for match in matches:
|
||||
key1 = self.matching_key(match[1], text)
|
||||
key2 = self.matching_key(match[2], text)
|
||||
try:
|
||||
vars_.append([int(key1, 16), int(key2, 16)])
|
||||
except ValueError:
|
||||
continue
|
||||
return vars_
|
||||
|
||||
def get_secret(
|
||||
self, encrypted_string: str, values: List[List[int]]
|
||||
) -> tuple[str, str]:
|
||||
secret = []
|
||||
encrypted_source_array = list(encrypted_string)
|
||||
current_index = 0
|
||||
|
||||
for start, length in values:
|
||||
start += current_index
|
||||
end = start + length
|
||||
secret.extend(encrypted_string[start:end])
|
||||
encrypted_source_array[start:end] = [""] * length
|
||||
current_index += length
|
||||
|
||||
encrypted_source = "".join(encrypted_source_array) # .replace("\x00", "")
|
||||
return ("".join(secret), encrypted_source)
|
||||
|
||||
def decrypt(self, encrypted: str, key_or_secret: str, maybe_iv: str = "") -> str:
|
||||
if maybe_iv:
|
||||
key = key_or_secret.encode()
|
||||
iv = maybe_iv.encode()
|
||||
contents = encrypted
|
||||
else:
|
||||
# Decode the Base64 string
|
||||
cypher = b64decode(encrypted)
|
||||
|
||||
# Extract the salt from the cypher text
|
||||
salt = cypher[8:16]
|
||||
|
||||
# Combine the key_or_secret with the salt
|
||||
password = key_or_secret.encode() + salt
|
||||
|
||||
# Generate MD5 hashes
|
||||
md5_hashes = []
|
||||
digest = password
|
||||
for _ in range(3):
|
||||
md5 = hashlib.md5()
|
||||
md5.update(digest)
|
||||
md5_hashes.append(md5.digest())
|
||||
digest = md5_hashes[-1] + password
|
||||
|
||||
# Derive the key and IV
|
||||
key = md5_hashes[0] + md5_hashes[1]
|
||||
iv = md5_hashes[2]
|
||||
|
||||
# Extract the encrypted contents
|
||||
contents = cypher[16:]
|
||||
|
||||
# Initialize the AES decipher
|
||||
decipher = AES.new(key, AES.MODE_CBC, iv)
|
||||
|
||||
# Decrypt and decode
|
||||
decrypted = decipher.decrypt(contents).decode("utf-8") # pyright: ignore
|
||||
|
||||
# Remove any padding (PKCS#7)
|
||||
pad = ord(decrypted[-1])
|
||||
return decrypted[:-pad]
|
||||
|
||||
def matching_key(self, value: str, script: str) -> str:
|
||||
match = re.search(rf",{value}=((?:0x)?[0-9a-fA-F]+)", script)
|
||||
if match:
|
||||
return match.group(1).replace("0x", "")
|
||||
raise Exception("Failed to match the key")
|
||||
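# Editor's aside (not part of the original file): MegaCloud.decrypt() above follows
# OpenSSL's "Salted__" container format: the base64 payload carries an 8-byte salt at
# bytes 8..16, and the AES-256-CBC key/IV are derived by chaining MD5 over
# (secret + salt), i.e. EVP_BytesToKey with MD5. A standalone sketch of just that
# derivation, assuming pycryptodome as imported in this file:
#
#   import hashlib
#   from base64 import b64decode
#   from Crypto.Cipher import AES
#
#   def derive_key_iv(secret: bytes, salt: bytes) -> tuple[bytes, bytes]:
#       d1 = hashlib.md5(secret + salt).digest()
#       d2 = hashlib.md5(d1 + secret + salt).digest()
#       d3 = hashlib.md5(d2 + secret + salt).digest()
#       return d1 + d2, d3  # 32-byte key, 16-byte IV
#
#   def decrypt_salted(b64_payload: str, secret: str) -> bytes:
#       raw = b64decode(b64_payload)
#       key, iv = derive_key_iv(secret.encode(), raw[8:16])
#       plain = AES.new(key, AES.MODE_CBC, iv).decrypt(raw[16:])
#       return plain[: -plain[-1]]  # strip PKCS#7 padding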
26
fastanime/libs/anime_provider/hianime/types.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
|
||||
class HiAnimeSkipTime(TypedDict):
|
||||
start: int
|
||||
end: int
|
||||
|
||||
|
||||
class HiAnimeSource(TypedDict):
|
||||
file: str
|
||||
type: str
|
||||
|
||||
|
||||
class HiAnimeTrack(TypedDict):
|
||||
file: str
|
||||
label: str
|
||||
kind: Literal["captions", "thumbnails", "audio"]
|
||||
|
||||
|
||||
class HiAnimeStream(TypedDict):
|
||||
sources: list[HiAnimeSource]
|
||||
tracks: list[HiAnimeTrack]
|
||||
encrypted: bool
|
||||
intro: HiAnimeSkipTime
|
||||
outro: HiAnimeSkipTime
|
||||
server: int
|
||||
@@ -1,153 +0,0 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from requests import post
|
||||
from thefuzz import fuzz
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..anilist.types import AnilistDataSchema
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ANILIST_ENDPOINT = "https://graphql.anilist.co"
|
||||
"""
|
||||
query($query:String){
|
||||
Page(perPage:50){
|
||||
pageInfo{
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(search:$query,type:ANIME){
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
romaji
|
||||
english
|
||||
}
|
||||
episodes
|
||||
status
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
def search_for_anime_with_anilist(anime_title: str):
|
||||
query = """
|
||||
query($query:String){
|
||||
Page(perPage:50){
|
||||
pageInfo{
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(search:$query,type:ANIME){
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
romaji
|
||||
english
|
||||
}
|
||||
episodes
|
||||
status
|
||||
nextAiringEpisode {
|
||||
timeUntilAiring
|
||||
airingAt
|
||||
episode
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": {"query": anime_title}},
|
||||
timeout=10,
|
||||
)
|
||||
if response.status_code == 200:
|
||||
anilist_data: "AnilistDataSchema" = response.json()
|
||||
return {
|
||||
"pageInfo": anilist_data["data"]["Page"]["pageInfo"],
|
||||
"results": [
|
||||
{
|
||||
"id": anime_result["id"],
|
||||
"title": anime_result["title"]["romaji"]
|
||||
or anime_result["title"]["english"],
|
||||
"type": "anime",
|
||||
"availableEpisodes": list(
|
||||
range(
|
||||
1,
|
||||
(
|
||||
anime_result["episodes"]
|
||||
if not anime_result["status"] == "RELEASING"
|
||||
and anime_result["episodes"]
|
||||
else (
|
||||
anime_result["nextAiringEpisode"]["episode"] - 1
|
||||
if anime_result["nextAiringEpisode"]
|
||||
else 0
|
||||
)
|
||||
),
|
||||
)
|
||||
),
|
||||
}
|
||||
for anime_result in anilist_data["data"]["Page"]["media"]
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
def get_mal_id_and_anilist_id(anime_title: str) -> "dict[str,int] | None":
|
||||
"""the abstraction over all none authenticated requests and that returns data of a similar type
|
||||
|
||||
Args:
|
||||
query: the anilist query
|
||||
variables: the anilist api variables
|
||||
|
||||
Returns:
|
||||
a boolean indicating success and none or an anilist object depending on success
|
||||
"""
|
||||
query = """
|
||||
query($query:String){
|
||||
Page(perPage:50){
|
||||
pageInfo{
|
||||
total
|
||||
currentPage
|
||||
hasNextPage
|
||||
}
|
||||
media(search:$query,type:ANIME){
|
||||
id
|
||||
idMal
|
||||
title{
|
||||
romaji
|
||||
english
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
try:
|
||||
variables = {"query": anime_title}
|
||||
response = post(
|
||||
ANILIST_ENDPOINT,
|
||||
json={"query": query, "variables": variables},
|
||||
timeout=10,
|
||||
)
|
||||
anilist_data: "AnilistDataSchema" = response.json()
|
||||
if response.status_code == 200:
|
||||
anime = max(
|
||||
anilist_data["data"]["Page"]["media"],
|
||||
key=lambda anime: max(
|
||||
(
|
||||
fuzz.ratio(anime, str(anime["title"]["romaji"])),
|
||||
fuzz.ratio(anime_title, str(anime["title"]["english"])),
|
||||
)
|
||||
),
|
||||
)
|
||||
return {"id_anilist": anime["id"], "id_mal": anime["idMal"]}
|
||||
except Exception as e:
|
||||
logger.error(f"Something unexpected occured {e}")
|
||||
0
fastanime/libs/anime_provider/nyaa/__init__.py
Normal file
344
fastanime/libs/anime_provider/nyaa/api.py
Normal file
@@ -0,0 +1,344 @@
|
||||
import os
|
||||
import re
|
||||
from logging import getLogger
|
||||
|
||||
from yt_dlp.utils import (
|
||||
extract_attributes,
|
||||
get_element_html_by_attribute,
|
||||
get_element_html_by_class,
|
||||
get_element_text_and_html_by_tag,
|
||||
get_elements_html_by_class,
|
||||
)
|
||||
|
||||
from ...common.mini_anilist import search_for_anime_with_anilist
|
||||
from ..base_provider import AnimeProvider
|
||||
from ..decorators import debug_provider
|
||||
from ..types import SearchResults
|
||||
from .constants import NYAA_ENDPOINT
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
EXTRACT_USEFUL_INFO_PATTERN_1 = re.compile(
|
||||
r"\[(\w+)\] (.+) - (\d+) [\[\(](\d+)p[\]\)].*"
|
||||
)
|
||||
|
||||
EXTRACT_USEFUL_INFO_PATTERN_2 = re.compile(
|
||||
r"\[(\w+)\] (.+)E(\d+) [\[\(]?(\d+)p.*[\]\)]?.*"
|
||||
)
|
||||
|
||||
|
||||
class Nyaa(AnimeProvider):
|
||||
search_results: SearchResults
|
||||
|
||||
@debug_provider
|
||||
def search_for_anime(self, user_query: str, *args, **_):
|
||||
self.search_results = search_for_anime_with_anilist(
|
||||
user_query, True
|
||||
) # pyright: ignore
|
||||
self.user_query = user_query
|
||||
return self.search_results
|
||||
|
||||
@debug_provider
|
||||
def get_anime(self, anilist_id: str, *_):
|
||||
for anime in self.search_results["results"]:
|
||||
if anime["id"] == anilist_id:
|
||||
self.titles = [anime["title"], *anime["otherTitles"], self.user_query]
|
||||
return {
|
||||
"id": anime["id"],
|
||||
"title": anime["title"],
|
||||
"poster": anime["poster"],
|
||||
"availableEpisodesDetail": {
|
||||
"dub": anime["availableEpisodes"],
|
||||
"sub": anime["availableEpisodes"],
|
||||
"raw": anime["availableEpisodes"],
|
||||
},
|
||||
}
|
||||
|
||||
@debug_provider
|
||||
def get_episode_streams(
|
||||
self,
|
||||
anime_id: str,
|
||||
episode_number: str,
|
||||
translation_type: str,
|
||||
trusted_only=bool(int(os.environ.get("FA_NYAA_TRUSTED_ONLY", "0"))),
|
||||
allow_dangerous=bool(int(os.environ.get("FA_NYAA_ALLOW_DANGEROUS", "0"))),
|
||||
sort_by="seeders",
|
||||
*args,
|
||||
):
|
||||
anime_title = self.titles[0]
|
||||
logger.debug(f"Searching nyaa for query: '{anime_title} {episode_number}'")
|
||||
servers = {}
|
||||
|
||||
torrents_table = ""
|
||||
for title in self.titles:
|
||||
try:
|
||||
url_arguments: dict[str, str] = {
|
||||
"c": "1_2", # Language (English)
|
||||
"q": f"{title} {'0' if len(episode_number)==1 else ''}{episode_number}", # Search Query
|
||||
}
|
||||
# url_arguments["q"] = anime_title
|
||||
|
||||
# if trusted_only:
|
||||
# url_arguments["f"] = "2" # Trusted uploaders only
|
||||
|
||||
# What to sort torrents by
|
||||
if sort_by == "seeders":
|
||||
url_arguments["s"] = "seeders"
|
||||
elif sort_by == "date":
|
||||
url_arguments["s"] = "id"
|
||||
elif sort_by == "size":
|
||||
url_arguments["s"] = "size"
|
||||
elif sort_by == "comments":
|
||||
url_arguments["s"] = "comments"
|
||||
|
||||
logger.debug(f"URL Arguments: {url_arguments}")
|
||||
|
||||
response = self.session.get(NYAA_ENDPOINT, params=url_arguments)
|
||||
if not response.ok:
|
||||
logger.error(f"[NYAA]: {response.text}")
|
||||
return
|
||||
|
||||
try:
|
||||
torrents_table = get_element_text_and_html_by_tag(
|
||||
"table", response.text
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"[NYAA]: {e}")
|
||||
continue
|
||||
|
||||
if not torrents_table:
|
||||
continue
|
||||
|
||||
                # Rows with the "success" class are uploads from trusted uploaders.
                for anime_torrent in get_elements_html_by_class(
                    "success", torrents_table[1]
                ):
                    td_title = get_element_html_by_attribute(
                        "colspan", "2", anime_torrent
                    )
                    if not td_title:
                        continue
                    title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)

                    if not title_anchor_tag:
                        continue
                    title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
                    if not title_anchor_tag_attrs:
                        continue
                    if "class" in title_anchor_tag_attrs:
                        # The first anchor is the comments link; drop it and
                        # re-extract to reach the actual title anchor.
                        td_title = td_title.replace(title_anchor_tag[1], "")
                        title_anchor_tag = get_element_text_and_html_by_tag(
                            "a", td_title
                        )

                        if not title_anchor_tag:
                            continue
                        title_anchor_tag_attrs = extract_attributes(
                            title_anchor_tag[1]
                        )
                        if not title_anchor_tag_attrs:
                            continue
                    anime_title_info = title_anchor_tag_attrs.get("title")
                    if not anime_title_info:
                        continue
                    match = EXTRACT_USEFUL_INFO_PATTERN_1.search(
                        anime_title_info.strip()
                    )
                    if not match:
                        continue
                    server = match[1]
                    # match[2] is the release title; it is not needed here.
                    _episode_number = match[3]
                    quality = match[4]
                    if float(episode_number) != float(_episode_number):
                        continue

                    links_td = get_element_html_by_class("text-center", anime_torrent)
                    if not links_td:
                        continue
                    torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
                    if not torrent_anchor_tag:
                        continue
                    torrent_anchor_tag_attrs = extract_attributes(torrent_anchor_tag[1])
                    if not torrent_anchor_tag_attrs:
                        continue
                    torrent_file_url = (
                        f'{NYAA_ENDPOINT}{torrent_anchor_tag_attrs["href"]}'
                    )
                    if server in servers:
                        link = {
                            "translation_type": "sub",
                            "link": torrent_file_url,
                            "quality": quality,
                        }
                        if link not in servers[server]["links"]:
                            servers[server]["links"].append(link)
                    else:
                        servers[server] = {
                            "server": server,
                            "headers": {},
                            "episode_title": f"{anime_title}; Episode {episode_number}",
                            "subtitles": [],
                            "links": [
                                {
                                    "translation_type": "sub",
                                    "link": torrent_file_url,
                                    "quality": quality,
                                }
                            ],
                        }
                # Rows with the "default" class are regular (unflagged) uploads.
                for anime_torrent in get_elements_html_by_class(
                    "default", torrents_table[1]
                ):
                    td_title = get_element_html_by_attribute(
                        "colspan", "2", anime_torrent
                    )
                    if not td_title:
                        continue
                    title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)

                    if not title_anchor_tag:
                        continue
                    title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
                    if not title_anchor_tag_attrs:
                        continue
                    if "class" in title_anchor_tag_attrs:
                        # The first anchor is the comments link; drop it and
                        # re-extract to reach the actual title anchor.
                        td_title = td_title.replace(title_anchor_tag[1], "")
                        title_anchor_tag = get_element_text_and_html_by_tag(
                            "a", td_title
                        )

                        if not title_anchor_tag:
                            continue
                        title_anchor_tag_attrs = extract_attributes(
                            title_anchor_tag[1]
                        )
                        if not title_anchor_tag_attrs:
                            continue
                    anime_title_info = title_anchor_tag_attrs.get("title")
                    if not anime_title_info:
                        continue
                    match = EXTRACT_USEFUL_INFO_PATTERN_2.search(
                        anime_title_info.strip()
                    )
                    if not match:
                        continue
                    server = match[1]
                    # match[2] is the release title; it is not needed here.
                    _episode_number = match[3]
                    quality = match[4]
                    if float(episode_number) != float(_episode_number):
                        continue

                    links_td = get_element_html_by_class("text-center", anime_torrent)
                    if not links_td:
                        continue
                    torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
                    if not torrent_anchor_tag:
                        continue
                    torrent_anchor_tag_attrs = extract_attributes(torrent_anchor_tag[1])
                    if not torrent_anchor_tag_attrs:
                        continue
                    torrent_file_url = (
                        f'{NYAA_ENDPOINT}{torrent_anchor_tag_attrs["href"]}'
                    )
                    if server in servers:
                        link = {
                            "translation_type": "sub",
                            "link": torrent_file_url,
                            "quality": quality,
                        }
                        if link not in servers[server]["links"]:
                            servers[server]["links"].append(link)
                    else:
                        servers[server] = {
                            "server": server,
                            "headers": {},
                            "episode_title": f"{anime_title}; Episode {episode_number}",
                            "subtitles": [],
                            "links": [
                                {
                                    "translation_type": "sub",
                                    "link": torrent_file_url,
                                    "quality": quality,
                                }
                            ],
                        }
                # Flagged ("danger") rows are only parsed when explicitly allowed;
                # otherwise stop searching the remaining alternative titles.
                if not allow_dangerous:
                    break
                # Rows with the "danger" class are remarked/flagged uploads.
                for anime_torrent in get_elements_html_by_class(
                    "danger", torrents_table[1]
                ):
                    td_title = get_element_html_by_attribute(
                        "colspan", "2", anime_torrent
                    )
                    if not td_title:
                        continue
                    title_anchor_tag = get_element_text_and_html_by_tag("a", td_title)

                    if not title_anchor_tag:
                        continue
                    title_anchor_tag_attrs = extract_attributes(title_anchor_tag[1])
                    if not title_anchor_tag_attrs:
                        continue
                    if "class" in title_anchor_tag_attrs:
                        # The first anchor is the comments link; drop it and
                        # re-extract to reach the actual title anchor.
                        td_title = td_title.replace(title_anchor_tag[1], "")
                        title_anchor_tag = get_element_text_and_html_by_tag(
                            "a", td_title
                        )

                        if not title_anchor_tag:
                            continue
                        title_anchor_tag_attrs = extract_attributes(
                            title_anchor_tag[1]
                        )
                        if not title_anchor_tag_attrs:
                            continue
                    anime_title_info = title_anchor_tag_attrs.get("title")
                    if not anime_title_info:
                        continue
                    match = EXTRACT_USEFUL_INFO_PATTERN_2.search(
                        anime_title_info.strip()
                    )
                    if not match:
                        continue
                    server = match[1]
                    # match[2] is the release title; it is not needed here.
                    _episode_number = match[3]
                    quality = match[4]
                    if float(episode_number) != float(_episode_number):
                        continue

                    links_td = get_element_html_by_class("text-center", anime_torrent)
                    if not links_td:
                        continue
                    torrent_anchor_tag = get_element_text_and_html_by_tag("a", links_td)
                    if not torrent_anchor_tag:
                        continue
                    torrent_anchor_tag_attrs = extract_attributes(torrent_anchor_tag[1])
                    if not torrent_anchor_tag_attrs:
                        continue
                    torrent_file_url = (
                        f'{NYAA_ENDPOINT}{torrent_anchor_tag_attrs["href"]}'
                    )
                    if server in servers:
                        link = {
                            "translation_type": "sub",
                            "link": torrent_file_url,
                            "quality": quality,
                        }
                        if link not in servers[server]["links"]:
                            servers[server]["links"].append(link)
                    else:
                        servers[server] = {
                            "server": server,
                            "headers": {},
                            "episode_title": f"{anime_title}; Episode {episode_number}",
                            "subtitles": [],
                            "links": [
                                {
                                    "translation_type": "sub",
                                    "link": torrent_file_url,
                                    "quality": quality,
                                }
                            ],
                        }
            except Exception as e:
                logger.error(f"[NYAA]: {e}")
                continue

        for server in servers:
            yield servers[server]
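For reference, a minimal sketch of what the two title patterns above pull out of typical release names; the sample filenames and group names below are purely illustrative, not taken from actual search results:

```python
import re

# Same expressions as EXTRACT_USEFUL_INFO_PATTERN_1 / _2 above.
PATTERN_1 = re.compile(r"\[(\w+)\] (.+) - (\d+) [\[\(](\d+)p[\]\)].*")
PATTERN_2 = re.compile(r"\[(\w+)\] (.+)E(\d+) [\[\(]?(\d+)p.*[\]\)]?.*")

# Hypothetical release names, only for illustration.
m1 = PATTERN_1.search("[SubsPlease] Some Anime - 05 (1080p) [ABCD1234].mkv")
m2 = PATTERN_2.search("[Judas] Some Anime S01E05 [1080p][HEVC].mkv")

for m in (m1, m2):
    if m:
        uploader, _title, episode, quality = m.groups()
        print(uploader, episode, quality)  # e.g. "SubsPlease 05 1080"
```

The uploader name is what the provider uses as the `server` key, while the episode number and quality feed the filtering and the `links` entries built above.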
1 fastanime/libs/anime_provider/nyaa/constants.py Normal file
@@ -0,0 +1 @@
NYAA_ENDPOINT = "https://nyaa.si"
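A minimal sketch of the search request the provider builds against this endpoint, using `requests` as a stand-in for the provider's own `self.session`; the helper name `search_nyaa` and the timeout are illustrative assumptions:

```python
import requests  # stand-in for the provider's self.session

NYAA_ENDPOINT = "https://nyaa.si"


def search_nyaa(title: str, episode_number: str, sort_by: str = "seeders") -> str:
    # "c=1_2" is Nyaa's "Anime - English-translated" category; single-digit
    # episodes are zero-padded to match common release naming ("05", not "5").
    params = {
        "c": "1_2",
        "q": f"{title} {episode_number.zfill(2)}",
        "s": "id" if sort_by == "date" else sort_by,
    }
    response = requests.get(NYAA_ENDPOINT, params=params, timeout=30)
    response.raise_for_status()
    return response.text  # raw HTML, parsed with yt-dlp's element helpers above


# html = search_nyaa("Some Anime", "5")
```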
126 fastanime/libs/anime_provider/nyaa/utils.py Normal file
@@ -0,0 +1,126 @@
import logging
import os
import sys
import time

import libtorrent  # pyright: ignore
from rich import print
from rich.progress import (
    BarColumn,
    DownloadColumn,
    Progress,
    TextColumn,
    TimeRemainingColumn,
    TransferSpeedColumn,
)

logger = logging.getLogger("nyaa")


def download_torrent(
    filename: str,
    result_filename: str | None = None,
    show_progress: bool = True,
    base_path: str = "Anime",
) -> str:
    session = libtorrent.session({"listen_interfaces": "0.0.0.0:6881"})
    logger.debug("Started libtorrent session")

    base_path = os.path.expanduser(base_path)
    logger.debug(f"Downloading output to: '{base_path}'")

    info = libtorrent.torrent_info(filename)

    logger.debug("Started downloading torrent")
    handle: libtorrent.torrent_handle = session.add_torrent(
        {"ti": info, "save_path": base_path}
    )

    status: libtorrent.torrent_status = handle.status()

    progress_bar = Progress(
        "[progress.description]{task.description}",
        BarColumn(bar_width=None),
        "[progress.percentage]{task.percentage:>3.1f}%",
        "•",
        DownloadColumn(),
        "•",
        TransferSpeedColumn(),
        "•",
        TimeRemainingColumn(),
        "•",
        TextColumn("[green]Peers: {task.fields[peers]}[/green]"),
    )

    # NOTE: when show_progress is False the wait loops below are skipped and the
    # function returns without waiting for the download to finish.
    if show_progress:
        with progress_bar:
            download_task = progress_bar.add_task(
                "downloading",
                filename=status.name,
                total=status.total_wanted,
                peers=0,
                start=False,
            )

            # Wait for libtorrent to finish checking any existing files.
            while not status.total_done:
                status = handle.status()
                description = "[bold yellow]Checking files[/bold yellow]"
                progress_bar.update(
                    download_task,
                    completed=status.total_done,
                    peers=status.num_peers,
                    description=description,
                )

            # Start the actual download.
            progress_bar.start_task(download_task)
            description = f"[bold blue]Downloading[/bold blue] [bold yellow]{result_filename}[/bold yellow]"

            while not status.is_seeding:
                status = handle.status()

                progress_bar.update(
                    download_task,
                    completed=status.total_done,
                    peers=status.num_peers,
                    description=description,
                )

                alerts = session.pop_alerts()

                alert: libtorrent.alert
                for alert in alerts:
                    if (
                        alert.category()
                        & libtorrent.alert.category_t.error_notification
                    ):
                        logger.debug(f"[Alert] {alert}")

                time.sleep(1)

            progress_bar.update(
                download_task,
                description=f"[bold blue]Finished Downloading[/bold blue] [bold green]{result_filename}[/bold green]",
                completed=status.total_wanted,
            )

    if result_filename:
        old_name = f"{base_path}/{status.name}"
        new_name = f"{base_path}/{result_filename}"

        os.rename(old_name, new_name)

        logger.debug(f"Finished torrent download, renamed '{old_name}' to '{new_name}'")

        return new_name

    return ""


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("You need to pass in the .torrent file path.")
        sys.exit(1)

    download_torrent(sys.argv[1])
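And a minimal sketch of wiring a torrent link yielded by the provider into `download_torrent` above. The URL, file names, and the use of `requests` are illustrative assumptions; the import path simply mirrors the file location shown in this diff:

```python
import tempfile

import requests  # assumed HTTP client for fetching the .torrent file

from fastanime.libs.anime_provider.nyaa.utils import download_torrent

# Placeholder for a "link" value yielded by Nyaa.get_episode_streams().
torrent_link = "https://nyaa.si/download/0000000.torrent"

response = requests.get(torrent_link, timeout=30)
response.raise_for_status()

# libtorrent.torrent_info() expects a path to a .torrent file on disk.
with tempfile.NamedTemporaryFile(suffix=".torrent", delete=False) as tmp:
    tmp.write(response.content)
    torrent_path = tmp.name

# Downloads into the relative "Anime" directory by default and renames the
# result when result_filename is given.
video_path = download_torrent(torrent_path, result_filename="Some Anime; Episode 05.mkv")
print(video_path)
```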
Some files were not shown because too many files have changed in this diff.