feat(animepahe): init

This commit is contained in:
Benex254
2024-08-10 23:02:26 +03:00
parent 0e5cb56970
commit fa42d0e403
2 changed files with 93 additions and 16 deletions

View File

@@ -1,18 +1,36 @@
import requests
import re
import shutil
import subprocess
from .constants import ANIMEPAHE_BASE, ANIMEPAHE_ENDPOINT, REQUEST_HEADERS
from yt_dlp.utils import (
extract_attributes,
get_element_by_id,
get_element_text_and_html_by_tag,
get_elements_html_by_class,
)
from ..base_provider import AnimeProvider
from .constants import (
ANIMEPAHE_BASE,
ANIMEPAHE_ENDPOINT,
REQUEST_HEADERS,
SERVER_HEADERS,
)
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
# TODO: hack this to completion
class AnimePaheApi:
class AnimePaheApi(AnimeProvider):
def search_for_anime(self, user_query, *args):
try:
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
headers = {**REQUEST_HEADERS}
response = requests.get(url, headers=headers)
response = self.session.get(url, headers=headers)
if not response.status_code == 200:
return
data = response.json()
return {
"pageInfo": {"total": data["total"]},
"results": [
@@ -31,9 +49,8 @@ class AnimePaheApi:
input()
def get_anime(self, session_id: str, *args):
url = "https://animepahe.ru/api?m=release&id=&sort=episode_asc&page=1"
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page=1"
response = requests.get(url, headers=REQUEST_HEADERS)
response = self.session.get(url, headers=REQUEST_HEADERS)
if not response.status_code == 200:
return
data = response.json()
@@ -52,12 +69,55 @@ class AnimePaheApi:
def get_episode_streams(self, anime, episode, *args):
    """Yield stream dicts for one episode of an animepahe anime.

    Parameters:
        anime: mapping with at least an "id" key (animepahe session id of
            the show) — as returned by this provider's search/get_anime.
        episode: episode number/index (string or int); used to index into
            self.current["data"], so get_anime must have been called first.

    Yields a single dict: {"server": "kwik", "links": [...], "episode_title": ...}
    where each link carries "quality", "audio_language" and the direct "link".
    Returns early (yielding nothing) on any failure: bad embed URL, no
    obfuscated script found, node unavailable, or no stream in the output.
    """
    episode_id = self.current["data"][int(episode)]["session"]
    anime_id = anime["id"]
    url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode_id}"
    response = self.session.get(url, headers=REQUEST_HEADERS)
    # The play page exposes a resolution dropdown; each item's data-*
    # attributes hold the kwik embed URL plus quality/audio metadata.
    resolution_menu = get_element_by_id("resolutionMenu", response.text)
    menu_items = get_elements_html_by_class("dropdown-item", resolution_menu)
    res_dicts = [extract_attributes(item) for item in menu_items]
    # node is required to evaluate kwik's packed JS; resolve it once,
    # outside the per-resolution loop (loop-invariant).
    NODE = shutil.which("node")
    if not NODE:
        return
    streams = {"server": "kwik", "links": [], "episode_title": f"{episode}"}
    for res_dict in res_dicts:
        # get embed url
        embed_url = res_dict["data-src"]
        if not embed_url:
            return
        embed_response = self.session.get(embed_url, headers=SERVER_HEADERS)
        embed = embed_response.text
        # Search for the obfuscated/packed <script>; the page carries
        # several empty script tags first, so strip and retry a few times.
        encoded_js = None
        for _ in range(7):
            content, html = get_element_text_and_html_by_tag("script", embed)
            if not content:
                embed = embed.replace(html, "")
                continue
            encoded_js = content
            break
        if not encoded_js:
            return
        # Execute the packed JS with node; the deobfuscated output is read
        # from stderr — NOTE(review): presumably the evaluated script
        # errors/writes there; confirm against the kwik payload.
        result = subprocess.run(
            [NODE, "-e", encoded_js],
            text=True,
            capture_output=True,
        )
        evaluated_js = result.stderr
        if not evaluated_js:
            return
        # get that juicy stream
        match = JUICY_STREAM_REGEX.search(evaluated_js)
        if not match:
            return
        juicy_stream = match.group(1)
        streams["links"].append(
            {
                "quality": res_dict["data-resolution"],
                "audio_language": res_dict["data-audio"],
                "link": juicy_stream,
            }
        )
    yield streams

View File

@@ -2,7 +2,7 @@ from yt_dlp.utils.networking import random_user_agent
# Fresh random UA per run (yt_dlp helper) so requests don't all share one
# static fingerprint.
USER_AGENT = random_user_agent()

ANIMEPAHE = "animepahe.ru"
# No trailing slash: every consumer joins paths as f"{ANIMEPAHE_BASE}/...",
# so a trailing slash here would produce double slashes in URLs.
ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"
REQUEST_HEADERS = {
@@ -20,3 +20,20 @@ REQUEST_HEADERS = {
"Sec-Fetch-Mode": "cors",
"TE": "trailers",
}
# Headers for requests to the kwik stream host (embed pages), as opposed to
# REQUEST_HEADERS which targets the animepahe site/API itself.
SERVER_HEADERS = {
    "User-Agent": USER_AGENT,
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
    "Accept-Language": "en-US,en;q=0.5",
    "Accept-Encoding": "gzip, deflate, br, zstd",
    "DNT": "1",
    "Alt-Used": "kwik.si",
    "Connection": "keep-alive",
    # Kwik checks the Referer; requests without it are typically rejected.
    "Referer": ANIMEPAHE_BASE,
    # NOTE(review): hardcoded session cookie (kwik_session + Cloudflare
    # cf_clearance captured from one browser session). These values expire
    # and are tied to the capturing IP/UA — this WILL break and should be
    # obtained dynamically instead of baked into source. Flagged, not
    # removed, since the scraper may currently depend on it.
    "Cookie": "kwik_session=eyJpdiI6IlZ5UDd0c0lKTDB1NXlhTHZPeWxFc2c9PSIsInZhbHVlIjoieDJZbGhZUG1QZDNaeWtqR3lwWFNnREdhaHBxNVZRMWNDOHVucGpiMHRJOVdhVmpBc3lpTko1VExRMTFWcE1yUVJtVitoTWdOOU5ObTQ0Q0dHU0MzZU0yRUVvNmtWcUdmY3R4UWx4YklJTmpUL0ZodjhtVEpjWU96cEZoUUhUbVYiLCJtYWMiOiI2OGY2YThkOGU0MTgwOThmYzcyZThmNzFlZjlhMzQzMDgwNjlmMTc4NTIzMzc2YjE3YjNmMWQyNTk4NzczMmZiIiwidGFnIjoiIn0%3D; srv=s0; cf_clearance=QMoZtUpZrX0Mh4XJiFmFSSmoWndISPne5FcsGmKKvTQ-1723297585-1.0.1.1-6tVUnP.aef9XeNj0CnN.19D1el_r53t.lhqddX.J88gohH9UnsPWKeJ4yT0pTbcaGRbPuXTLOS.U72.wdy.gMg",
    "Upgrade-Insecure-Requests": "1",
    # Fetch-metadata mimicking a cross-site iframe navigation — the context
    # in which a browser would normally load the kwik embed.
    "Sec-Fetch-Dest": "iframe",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "cross-site",
    "Sec-Fetch-User": "?1",
    "Priority": "u=4",
}