feat: improve providers

Benex254
2024-08-21 15:58:01 +03:00
parent 4e401dca40
commit 9badde62fb
8 changed files with 34 additions and 118 deletions

View File

@@ -1,6 +1,6 @@
from .allanime import SERVERS_AVAILABLE as ALLANIME_SERVERS
from .animepahe import SERVERS_AVAILABLE as ANIMEPAHESERVERS
from .aniwatch import SERVERS_AVAILABLE as ANIWATCHSERVERS
from .allanime.constants import SERVERS_AVAILABLE as ALLANIME_SERVERS
from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHESERVERS
from .aniwatch.constants import SERVERS_AVAILABLE as ANIWATCHSERVERS
anime_sources = {
"allanime": "api.AllAnimeAPI",

View File

@@ -1 +0,0 @@
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]

View File

@@ -11,12 +11,7 @@ from requests.exceptions import Timeout
from ...anime_provider.base_provider import AnimeProvider
from ..utils import give_random_quality, one_digit_symmetric_xor
from .constants import (
ALLANIME_API_ENDPOINT,
ALLANIME_BASE,
ALLANIME_REFERER,
USER_AGENT,
)
from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL
if TYPE_CHECKING:
@@ -36,6 +31,9 @@ class AllAnimeAPI(AnimeProvider):
"""
api_endpoint = ALLANIME_API_ENDPOINT
HEADERS = {
"Referer": ALLANIME_REFERER,
}
def _fetch_gql(self, query: str, variables: dict):
"""main abstraction over all requests to the allanime api
@@ -54,7 +52,6 @@ class AllAnimeAPI(AnimeProvider):
"variables": json.dumps(variables),
"query": query,
},
headers={"Referer": ALLANIME_REFERER, "User-Agent": USER_AGENT},
timeout=10,
)
if response.status_code == 200:
@@ -247,10 +244,6 @@ class AllAnimeAPI(AnimeProvider):
)
resp = self.session.get(
embed_url,
headers={
"Referer": ALLANIME_REFERER,
"User-Agent": USER_AGENT,
},
timeout=10,
)
@@ -328,85 +321,3 @@ class AllAnimeAPI(AnimeProvider):
except Exception as e:
logger.error(f"FA(Allanime): {e}")
return []
if __name__ == "__main__":
anime_provider = AllAnimeAPI()
# lets see if it works :)
import subprocess
import sys
from InquirerPy import inquirer, validator # pyright:ignore
anime = input("Enter the anime name: ")
translation = input("Enter the translation type: ")
search_results = anime_provider.search_for_anime(
anime, translation_type=translation.strip()
)
if not search_results:
raise Exception("No results found")
search_results = search_results["results"]
options = {show["title"]: show for show in search_results}
anime = inquirer.fuzzy(
"Enter the anime title",
list(options.keys()),
validate=validator.EmptyInputValidator(),
).execute()
if anime is None:
print("No anime was selected")
sys.exit(1)
anime_result = options[anime]
anime_data = anime_provider.get_anime(anime_result["id"])
if not anime_data:
raise Exception("Anime not found")
availableEpisodesDetail = anime_data["availableEpisodesDetail"]
if not availableEpisodesDetail.get(translation.strip()):
raise Exception("No episodes found")
stream_link = True
while stream_link != "quit":
print("select episode")
episode = inquirer.fuzzy(
"Choose an episode",
availableEpisodesDetail[translation.strip()],
validate=validator.EmptyInputValidator(),
).execute()
if episode is None:
print("No episode was selected")
sys.exit(1)
if not anime_data:
print("Sth went wrong")
break
episode_streams_ = anime_provider.get_episode_streams(
anime_data, # pyright: ignore
episode,
translation.strip(),
)
if episode_streams_ is None:
raise Exception("Episode not found")
episode_streams = list(episode_streams_)
stream_links = []
for server in episode_streams:
stream_links.extend([link["link"] for link in server["links"]])
stream_links.append("back")
stream_link = inquirer.fuzzy(
"Choose a link to stream",
stream_links,
validate=validator.EmptyInputValidator(),
).execute()
if stream_link == "quit":
print("Have a nice day")
sys.exit()
if not stream_link:
raise Exception("No stream was selected")
title = episode_streams[0].get(
"episode_title", "%s: Episode %s" % (anime_data["title"], episode)
)
subprocess.run(["mpv", f"--title={title}", stream_link])
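
In this file the Referer header moves from per-request keyword arguments into a class-level HEADERS dict (consumed by the base provider shown in the last file of this diff), the USER_AGENT import is dropped, and the if __name__ == "__main__": smoke test is deleted. A minimal sketch of the resulting request flow, assuming only that requests.Session sends its own headers with every call; the user-agent string is a placeholder for yt_dlp's random_user_agent():

import requests

ALLANIME_REFERER = "https://allanime.to/"
USER_AGENT = "Mozilla/5.0"  # placeholder; the project generates this with random_user_agent()

# headers are stamped onto the session once by the base provider ...
session = requests.Session()
session.headers.update({"User-Agent": USER_AGENT, "Referer": ALLANIME_REFERER})

# ... so the API methods can drop the headers= argument the removed lines carried, e.g.
# response = session.get(embed_url, timeout=10)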

View File

@@ -1,6 +1,4 @@
from yt_dlp.utils.networking import random_user_agent
SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]
ALLANIME_BASE = "allanime.day"
ALLANIME_REFERER = "https://allanime.to/"
ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE)
USER_AGENT = random_user_agent()

View File

@@ -1 +0,0 @@
SERVERS_AVAILABLE = ["kwik"]

View File

@@ -32,12 +32,14 @@ KWIK_RE = re.compile(r"Player\|(.+?)'")
class AnimePaheApi(AnimeProvider):
search_page: "AnimePaheSearchPage"
anime: "AnimePaheAnimePage"
HEADERS = REQUEST_HEADERS
def search_for_anime(self, user_query: str, *args):
try:
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
headers = {**REQUEST_HEADERS}
response = self.session.get(url, headers=headers)
response = self.session.get(
url,
)
if not response.status_code == 200:
return
data: "AnimePaheSearchPage" = response.json()
@@ -85,7 +87,9 @@ class AnimePaheApi(AnimeProvider):
url,
page,
):
response = self.session.get(url, headers=REQUEST_HEADERS)
response = self.session.get(
url,
)
if response.status_code == 200:
if not data:
data.update(response.json())
@@ -171,7 +175,7 @@ class AnimePaheApi(AnimeProvider):
anime_id = anime["id"]
# fetch the episode page
url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
response = self.session.get(url, headers=REQUEST_HEADERS)
response = self.session.get(url)
# get the element containing links to juicy streams
c = get_element_by_id("resolutionMenu", response.text)
resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)
@@ -207,7 +211,13 @@ class AnimePaheApi(AnimeProvider):
)
return []
# get embed page
embed_response = self.session.get(embed_url, headers=SERVER_HEADERS)
print(self.session.headers)
input()
embed_response = self.session.get(
embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
)
if not response.status_code == 200:
continue
embed_page = embed_response.text
decoded_js = process_animepahe_embed_page(embed_page)
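
The AnimePahe provider now declares HEADERS = REQUEST_HEADERS so the base class applies them to the session once, and the ordinary self.session.get() calls drop their headers= argument; only the kwik embed request still passes SERVER_HEADERS, now merged with the instance User-Agent. A small sketch of how requests layers the two, using placeholder values rather than the project's real animepahe/kwik headers:

import requests

# per-request headers are merged over session.headers and win on key conflicts
session = requests.Session()
session.headers.update({"User-Agent": "Mozilla/5.0", "Referer": "https://animepahe.ru"})

SERVER_HEADERS = {"Host": "kwik.si", "Referer": "https://example.org/"}  # placeholders

request = requests.Request(
    "GET",
    "https://kwik.si/e/example",  # placeholder embed URL
    headers={"User-Agent": session.headers["User-Agent"], **SERVER_HEADERS},
)
prepared = session.prepare_request(request)  # merges session and request headers
print(prepared.headers["Referer"])  # the per-request Referer, not the session one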

View File

@@ -1,18 +1,14 @@
from yt_dlp.utils.networking import random_user_agent
USER_AGENT = random_user_agent()
ANIMEPAHE = "animepahe.ru"
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
SERVERS_AVAILABLE = ["kwik"]
REQUEST_HEADERS = {
"Cookie": "__ddgid_=VvX0ebHrH2DsFZo4; __ddgmark_=3savRpSVFhvZcn5x; __ddg2_=buBJ3c4pNBYKFZNp; __ddg1_=rbVADKr9URtt55zoIGFa; SERVERID=janna; XSRF-TOKEN=eyJpdiI6IjV5bFNtd0phUHgvWGJxc25wL0VJSUE9PSIsInZhbHVlIjoicEJTZktlR2hxR2JZTWhnL0JzazlvZU5TQTR2bjBWZ2dDb0RwUXVUUWNSclhQWUhLRStYSmJmWmUxWkpiYkFRYU12RjFWejlSWHorME1wZG5qQ1U0TnFlNnBFR2laQjN1MjdyNjc5TjVPdXdJb2o5VkU1bEduRW9pRHNDTHh6Sy8iLCJtYWMiOiI0OTc0ZmNjY2UwMGJkOWY2MWNkM2NlMjk2ZGMyZGJmMWE0NTdjZTdkNGI2Y2IwNTIzZmFiZWU5ZTE2OTk0YmU4IiwidGFnIjoiIn0%3D; laravel_session=eyJpdiI6ImxvdlpqREFnTjdaeFJubUlXQWlJVWc9PSIsInZhbHVlIjoiQnE4R3VHdjZ4M1NDdEVWM1ZqMUxtNnVERnJCcmtCUHZKNzRPR2RFbzNFcStTL29xdnVTbWhsNVRBUXEybVZWNU1UYVlTazFqYlN5UjJva1k4czNGaXBTbkJJK01oTUd3VHRYVHBoc3dGUWxHYnFlS2NJVVNFbTFqMVBWdFpuVUgiLCJtYWMiOiI1NDdjZTVkYmNhNjUwZTMxZmRlZmVmMmRlMGNiYjAwYjlmYjFjY2U0MDc1YTQzZThiMTIxMjJlYTg1NTA4YjBmIiwidGFnIjoiIn0%3D; latest=5592 ",
"Host": ANIMEPAHE,
"User-Agent": USER_AGENT,
"Accept": "application , text/javascript, */*; q=0.01",
"Accept-Encoding": "gzip, deflate, br, zstd",
"Accept-Encoding": "Utf-8",
"Referer": ANIMEPAHE_BASE,
"X-Requested-With": "XMLHttpRequest",
"DNT": "1",
"Connection": "keep-alive",
"Sec-Fetch-Dest": "empty",
@@ -21,19 +17,17 @@ REQUEST_HEADERS = {
"TE": "trailers",
}
SERVER_HEADERS = {
"User-Agent": USER_AGENT,
"Host": "kwik.si",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate, br, zstd",
"Accept-Encoding": "Utf-8",
"DNT": "1",
"Alt-Used": "kwik.si",
"Connection": "keep-alive",
"Referer": ANIMEPAHE_BASE,
"Cookie": "kwik_session=eyJpdiI6IlZ5UDd0c0lKTDB1NXlhTHZPeWxFc2c9PSIsInZhbHVlIjoieDJZbGhZUG1QZDNaeWtqR3lwWFNnREdhaHBxNVZRMWNDOHVucGpiMHRJOVdhVmpBc3lpTko1VExRMTFWcE1yUVJtVitoTWdOOU5ObTQ0Q0dHU0MzZU0yRUVvNmtWcUdmY3R4UWx4YklJTmpUL0ZodjhtVEpjWU96cEZoUUhUbVYiLCJtYWMiOiI2OGY2YThkOGU0MTgwOThmYzcyZThmNzFlZjlhMzQzMDgwNjlmMTc4NTIzMzc2YjE3YjNmMWQyNTk4NzczMmZiIiwidGFnIjoiIn0%3D; srv=s0; cf_clearance=QMoZtUpZrX0Mh4XJiFmFSSmoWndISPne5FcsGmKKvTQ-1723297585-1.0.1.1-6tVUnP.aef9XeNj0CnN.19D1el_r53t.lhqddX.J88gohH9UnsPWKeJ4yT0pTbcaGRbPuXTLOS.U72.wdy.gMg",
"Referer": "https://animepahe.ru/",
"Upgrade-Insecure-Requests": "1",
"Sec-Fetch-Dest": "iframe",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "cross-site",
"Sec-Fetch-User": "?1",
"Priority": "u=4",
"TE": "trailers",
}

View File

@@ -1,8 +1,13 @@
import requests
from yt_dlp.utils.networking import random_user_agent
class AnimeProvider:
session: requests.Session
USER_AGENT = random_user_agent()
HEADERS = {}
def __init__(self) -> None:
self.session = requests.session()
self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
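
This last file is what the other hunks rely on: the base class now owns a single requests.Session and stamps it with a random User-Agent plus whatever HEADERS a subclass declares, so the providers above can stop repeating headers on every call. A self-contained sketch of the pattern; the ExampleProvider subclass and the fixed User-Agent string are illustrative stand-ins (the real code takes the value from yt_dlp's random_user_agent()):

import requests


class AnimeProvider:
    session: requests.Session
    USER_AGENT = "Mozilla/5.0"  # stand-in for random_user_agent()
    HEADERS: dict = {}

    def __init__(self) -> None:
        # one shared session; subclass HEADERS are layered over the User-Agent once
        self.session = requests.session()
        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})


class ExampleProvider(AnimeProvider):
    # hypothetical subclass: every request made through self.session now carries
    # this Referer without a headers= argument at each call site
    HEADERS = {"Referer": "https://example.org/"}


provider = ExampleProvider()
print(provider.session.headers["Referer"])  # -> https://example.org/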