mirror of https://github.com/Benexl/FastAnime.git (synced 2026-01-25 02:34:43 -08:00)

Compare commits: 12 commits
Commits in this comparison:

- 77ffa27ed8
- 15f79b65c9
- 33c3af0241
- 9badde62fb
- 4e401dca40
- 25422b1b7d
- e8463f13b4
- 556f42e41f
- b99a4f7efc
- f6f45cf322
- ae6db1847a
- 20d04ea07b

README.md (+11 lines)
@@ -362,6 +362,17 @@ fastanime download -t <anime-title> -r ':<episodes-end>'

 # remember python indexing starts at 0
 fastanime download -t <anime-title> -r '<episode-1>:<episode>'
+
+# merge subtitles with ffmpeg to mkv format; aniwatch tends to give subs as separate files
+# and dont prompt for anything
+# eg existing file in destination instead remove
+# and clean
+# ie remove original files (sub file and vid file)
+# only keep merged files
+fastanime download -t <anime-title> --merge --clean --no-prompt
+
+
+
 ```

 #### search subcommand

downloader (YtDLPDownloader):

@@ -1,8 +1,14 @@
 import logging
+import os
+import shutil
+import subprocess
+import tempfile
 from queue import Queue
 from threading import Thread

 import yt_dlp
+from rich import print
+from rich.prompt import Confirm
 from yt_dlp.utils import sanitize_filename

 logger = logging.getLogger(__name__)

@@ -39,6 +45,9 @@ class YtDLPDownloader:
         verbose=False,
         headers={},
         sub="",
+        merge=False,
+        clean=False,
+        prompt=True,
     ):
         """Helper function that downloads anime given url and path details

@@ -64,8 +73,82 @@ class YtDLPDownloader:
         urls = [url]
         if sub:
             urls.append(sub)
-        with yt_dlp.YoutubeDL(ydl_opts) as ydl:
-            ydl.download(urls)
+        vid_path = ""
+        sub_path = ""
+        for i, url in enumerate(urls):
+            with yt_dlp.YoutubeDL(ydl_opts) as ydl:
+                info = ydl.extract_info(url, download=True)
+                if not info:
+                    continue
+                if i == 0:
+                    vid_path = info["requested_downloads"][0]["filepath"]
+                else:
+                    sub_path = info["requested_downloads"][0]["filepath"]
+        if sub_path and vid_path and merge:
+            self.merge_subtitles(vid_path, sub_path, clean, prompt)
+
+    def merge_subtitles(self, video_path, sub_path, clean, prompt):
+        # Extract the directory and filename
+        video_dir = os.path.dirname(video_path)
+        video_name = os.path.basename(video_path)
+        video_name, _ = os.path.splitext(video_name)
+        video_name += ".mkv"
+
+        FFMPEG_EXECUTABLE = shutil.which("ffmpeg")
+        if not FFMPEG_EXECUTABLE:
+            print("[yellow bold]WARNING: [/]FFmpeg not found")
+            return
+        # Create a temporary directory
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Temporary output path in the temporary directory
+            temp_output_path = os.path.join(temp_dir, video_name)
+            # FFmpeg command to merge subtitles
+            command = [
+                FFMPEG_EXECUTABLE,
+                "-hide_banner",
+                "-i",
+                video_path,
+                "-i",
+                sub_path,
+                "-c",
+                "copy",
+                "-map",
+                "0",
+                "-map",
+                "1",
+                temp_output_path,
+            ]
+
+            # Run the command
+            try:
+                subprocess.run(command, check=True)
+
+                # Move the file back to the original directory with the original name
+                final_output_path = os.path.join(video_dir, video_name)
+
+                if os.path.exists(final_output_path):
+                    if not prompt or Confirm.ask(
+                        f"File exists({final_output_path}) would you like to overwrite it",
+                        default=True,
+                    ):
+                        # move file to dest
+                        os.remove(final_output_path)
+                        shutil.move(temp_output_path, final_output_path)
+                else:
+                    shutil.move(temp_output_path, final_output_path)
+                # clean up
+                if clean:
+                    print("[cyan]Cleaning original files...[/]")
+                    os.remove(video_path)
+                    os.remove(sub_path)
+
+                print(
+                    f"[green bold]Subtitles merged successfully.[/] Output file: {final_output_path}"
+                )
+            except subprocess.CalledProcessError as e:
+                print(f"[red bold]Error[/] during merging subtitles: {e}")
+            except Exception as e:
+                print(f"[red bold]An error[/] occurred: {e}")
+
     # WARN: May remove this legacy functionality
     def download_file(self, url: str, title, silent=True):
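
At its core the new merge step is a stream-copy remux: ffmpeg copies the video and subtitle streams into an MKV container without re-encoding, so the operation is fast and lossless. A minimal standalone sketch of the same idea (file names here are hypothetical, not from the patch):

```python
import os
import shutil
import subprocess
import tempfile


def mux_subtitle_into_mkv(video_path: str, sub_path: str) -> str | None:
    """Stream-copy a video and a subtitle file into a .mkv next to the video."""
    ffmpeg = shutil.which("ffmpeg")
    if not ffmpeg:
        return None  # ffmpeg is not installed; nothing to do
    out_name = os.path.splitext(os.path.basename(video_path))[0] + ".mkv"
    final_path = os.path.join(os.path.dirname(video_path), out_name)
    with tempfile.TemporaryDirectory() as tmp_dir:
        tmp_out = os.path.join(tmp_dir, out_name)
        # -c copy avoids re-encoding; -map 0 -map 1 keeps every stream of both inputs
        subprocess.run(
            [ffmpeg, "-hide_banner", "-i", video_path, "-i", sub_path,
             "-c", "copy", "-map", "0", "-map", "1", tmp_out],
            check=True,
        )
        shutil.move(tmp_out, final_path)
    return final_path


# hypothetical usage:
# mux_subtitle_into_mkv("Episode 01.mp4", "Episode 01.vtt")
```

Writing the merged file to a temporary directory first, as the patch does, means a failed ffmpeg run never clobbers an existing file at the destination.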

package version constant:

@@ -6,7 +6,7 @@ if sys.version_info < (3, 10):
     )  # noqa: F541


-__version__ = "v2.3.4"
+__version__ = "v2.3.6"

 APP_NAME = "FastAnime"
 AUTHOR = "Benex254"

CLI entry point (run_cli):

@@ -192,7 +192,7 @@ def run_cli(
     FORMAT = "%(message)s"

     logging.basicConfig(
-        level="debug", format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]
+        level=logging.DEBUG, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()]
     )
     logger = logging.getLogger(__name__)
     logger.info("logging has been initialized")
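
The `level="debug"` to `level=logging.DEBUG` change fixes a real crash, not just style: the stdlib resolves string levels through a case-sensitive name table, so only upper-case names (or the numeric constants) are accepted. A quick illustration:

```python
import logging

root = logging.getLogger()
root.setLevel(logging.DEBUG)  # fine: numeric constant
root.setLevel("DEBUG")        # also fine: upper-case level name
try:
    root.setLevel("debug")    # level names are case-sensitive
except ValueError as err:
    print(err)  # Unknown level: 'debug'
```

`logging.basicConfig(level=...)` resolves its `level` argument the same way, which is why the lower-case string blew up here.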

download subcommand (CLI):

@@ -1,4 +1,3 @@
-import time
 from typing import TYPE_CHECKING

 import click

@@ -41,6 +40,27 @@ if TYPE_CHECKING:
     default=True,
 )
 @click.option("--verbose", "-v", is_flag=True, help="Download verbosely (everywhere)")
+@click.option(
+    "--merge", "-m", is_flag=True, help="Merge the subfile with video using ffmpeg"
+)
+@click.option(
+    "--clean",
+    "-c",
+    is_flag=True,
+    help="After merging delete the original files",
+)
+@click.option(
+    "--wait-time",
+    "-w",
+    type=int,
+    help="The amount of time to wait after downloading is complete before the screen is completely cleared",
+    default=10,
+)
+@click.option(
+    "--prompt/--no-prompt",
+    help="Dont prompt for anything instead just do the best thing",
+    default=True,
+)
 @click.pass_obj
 def download(
     config: "Config",

@@ -49,7 +69,13 @@ def download(
     force_unknown_ext,
     silent,
     verbose,
+    merge,
+    clean,
+    wait_time,
+    prompt,
 ):
+    import time
+
     from rich import print
     from rich.progress import Progress
     from thefuzz import fuzz

@@ -83,7 +109,16 @@ def download(
         print("Search results failed")
         input("Enter to retry")
         download(
-            config, anime_title, episode_range, force_unknown_ext, silent, verbose
+            config,
+            anime_title,
+            episode_range,
+            force_unknown_ext,
+            silent,
+            verbose,
+            merge,
+            clean,
+            wait_time,
+            prompt,
         )
         return
     search_results = search_results["results"]

@@ -119,7 +154,16 @@ def download(
         print("Sth went wring anime no found")
         input("Enter to continue...")
         download(
-            config, anime_title, episode_range, force_unknown_ext, silent, verbose
+            config,
+            anime_title,
+            episode_range,
+            force_unknown_ext,
+            silent,
+            verbose,
+            merge,
+            clean,
+            wait_time,
+            prompt,
         )
         return

@@ -223,7 +267,7 @@ def download(
             )
             downloader._download_file(
                 link,
-                anime["title"],
+                search_result,
                 episode_title,
                 download_dir,
                 silent,

@@ -232,10 +276,14 @@ def download(
                 verbose,
                 headers=provider_headers,
                 sub=subtitles[0]["url"] if subtitles else "",
+                merge=merge,
+                clean=clean,
+                prompt=prompt,
             )
         except Exception as e:
             print(e)
             time.sleep(1)
             print("Continuing...")
     print("Done Downloading")
+    time.sleep(wait_time)
     exit_app()
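
The `--prompt/--no-prompt` option uses Click's paired boolean flag form: a single declaration yields one boolean `prompt` parameter the user can switch either way, which is what lets `--no-prompt` suppress the overwrite confirmation in the downloader. A self-contained sketch of the pattern (the command below is a toy, not FastAnime's):

```python
import click


@click.command()
@click.option(
    "--prompt/--no-prompt",
    default=True,
    help="Ask before overwriting files (default: ask)",
)
def demo(prompt: bool):
    # Click passes True for --prompt (or when neither flag is given), False for --no-prompt
    click.echo(f"prompting enabled: {prompt}")


if __name__ == "__main__":
    demo()
```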

utils (new quality-selection helper):

@@ -19,6 +19,27 @@ BG_GREEN = "\033[48;2;120;233;12;m"
 GREEN = "\033[38;2;45;24;45;m"


+def get_requested_quality_or_default_to_first(url, quality):
+    import yt_dlp
+
+    with yt_dlp.YoutubeDL({"quiet": True, "silent": True, "no_warnings": True}) as ydl:
+        m3u8_info = ydl.extract_info(url, False)
+        if not m3u8_info:
+            return
+
+        m3u8_formats = m3u8_info["formats"]
+        quality = int(quality)
+        quality_u = quality - 80
+        quality_l = quality + 80
+        for m3u8_format in m3u8_formats:
+            if m3u8_format["height"] == quality or (
+                m3u8_format["height"] < quality_u and m3u8_format["height"] > quality_l
+            ):
+                return m3u8_format["url"]
+        else:
+            return m3u8_formats[0]["url"]
+
+
 def move_preferred_subtitle_lang_to_top(sub_list, lang_str):
     """Moves the dictionary with the given ID to the front of the list.
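
One thing worth flagging in the new helper: `quality_u` is set below the target and `quality_l` above it, so the tolerance test `height < quality_u and height > quality_l` can never hold, and only exact height matches (or the first-format fallback) are ever returned. A corrected sketch of what the bounds presumably intend, choosing the first format within ±80 pixels of the requested height (names and tolerance mirror the patch; the function itself is mine):

```python
def pick_format_near_quality(m3u8_formats: list, quality: int, tolerance: int = 80) -> str:
    """Return the URL of the first format whose height is within
    +/- tolerance of the requested quality, else the first format."""
    lower, upper = quality - tolerance, quality + tolerance
    for m3u8_format in m3u8_formats:
        if lower <= m3u8_format["height"] <= upper:
            return m3u8_format["url"]
    return m3u8_formats[0]["url"]


# hypothetical usage:
# pick_format_near_quality([{"height": 720, "url": "https://..."}], 700)  # -> "https://..."
```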

anime_provider package __init__ (provider registry):

@@ -1,10 +1,11 @@
-from .allanime import SERVERS_AVAILABLE as ALLANIME_SERVERS
-from .animepahe import SERVERS_AVAILABLE as ANIMEPAHESERVERS
-from .aniwatch import SERVERS_AVAILABLE as ANIWATCHSERVERS
+from .allanime.constants import SERVERS_AVAILABLE as ALLANIME_SERVERS
+from .animepahe.constants import SERVERS_AVAILABLE as ANIMEPAHESERVERS
+from .aniwatch.constants import SERVERS_AVAILABLE as ANIWATCHSERVERS

 anime_sources = {
     "allanime": "api.AllAnimeAPI",
     "animepahe": "api.AnimePaheApi",
     "aniwatch": "api.AniWatchApi",
+    "aniwave": "api.AniWaveApi",
 }
 SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHESERVERS, *ANIWATCHSERVERS]

allanime package __init__ (server list moved to constants.py):

@@ -1 +0,0 @@
-SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]

allanime provider (AllAnimeAPI):

@@ -11,12 +11,7 @@ from requests.exceptions import Timeout

 from ...anime_provider.base_provider import AnimeProvider
 from ..utils import give_random_quality, one_digit_symmetric_xor
-from .constants import (
-    ALLANIME_API_ENDPOINT,
-    ALLANIME_BASE,
-    ALLANIME_REFERER,
-    USER_AGENT,
-)
+from .constants import ALLANIME_API_ENDPOINT, ALLANIME_BASE, ALLANIME_REFERER
 from .gql_queries import ALLANIME_EPISODES_GQL, ALLANIME_SEARCH_GQL, ALLANIME_SHOW_GQL

 if TYPE_CHECKING:

@@ -36,6 +31,9 @@ class AllAnimeAPI(AnimeProvider):
     """

     api_endpoint = ALLANIME_API_ENDPOINT
+    HEADERS = {
+        "Referer": ALLANIME_REFERER,
+    }

     def _fetch_gql(self, query: str, variables: dict):
         """main abstraction over all requests to the allanime api

@@ -54,7 +52,6 @@ class AllAnimeAPI(AnimeProvider):
                 "variables": json.dumps(variables),
                 "query": query,
             },
-            headers={"Referer": ALLANIME_REFERER, "User-Agent": USER_AGENT},
             timeout=10,
         )
         if response.status_code == 200:

@@ -247,10 +244,6 @@ class AllAnimeAPI(AnimeProvider):
         )
         resp = self.session.get(
             embed_url,
-            headers={
-                "Referer": ALLANIME_REFERER,
-                "User-Agent": USER_AGENT,
-            },
             timeout=10,
         )

@@ -328,85 +321,3 @@ class AllAnimeAPI(AnimeProvider):
         except Exception as e:
             logger.error(f"FA(Allanime): {e}")
             return []
-
-
-if __name__ == "__main__":
-    anime_provider = AllAnimeAPI()
-    # lets see if it works :)
-    import subprocess
-    import sys
-
-    from InquirerPy import inquirer, validator  # pyright:ignore
-
-    anime = input("Enter the anime name: ")
-    translation = input("Enter the translation type: ")
-
-    search_results = anime_provider.search_for_anime(
-        anime, translation_type=translation.strip()
-    )
-
-    if not search_results:
-        raise Exception("No results found")
-
-    search_results = search_results["results"]
-    options = {show["title"]: show for show in search_results}
-    anime = inquirer.fuzzy(
-        "Enter the anime title",
-        list(options.keys()),
-        validate=validator.EmptyInputValidator(),
-    ).execute()
-    if anime is None:
-        print("No anime was selected")
-        sys.exit(1)
-
-    anime_result = options[anime]
-    anime_data = anime_provider.get_anime(anime_result["id"])
-    if not anime_data:
-        raise Exception("Anime not found")
-    availableEpisodesDetail = anime_data["availableEpisodesDetail"]
-    if not availableEpisodesDetail.get(translation.strip()):
-        raise Exception("No episodes found")
-
-    stream_link = True
-    while stream_link != "quit":
-        print("select episode")
-        episode = inquirer.fuzzy(
-            "Choose an episode",
-            availableEpisodesDetail[translation.strip()],
-            validate=validator.EmptyInputValidator(),
-        ).execute()
-        if episode is None:
-            print("No episode was selected")
-            sys.exit(1)
-
-        if not anime_data:
-            print("Sth went wrong")
-            break
-        episode_streams_ = anime_provider.get_episode_streams(
-            anime_data,  # pyright: ignore
-            episode,
-            translation.strip(),
-        )
-        if episode_streams_ is None:
-            raise Exception("Episode not found")
-
-        episode_streams = list(episode_streams_)
-        stream_links = []
-        for server in episode_streams:
-            stream_links.extend([link["link"] for link in server["links"]])
-        stream_links.append("back")
-        stream_link = inquirer.fuzzy(
-            "Choose a link to stream",
-            stream_links,
-            validate=validator.EmptyInputValidator(),
-        ).execute()
-        if stream_link == "quit":
-            print("Have a nice day")
-            sys.exit()
-        if not stream_link:
-            raise Exception("No stream was selected")
-
-        title = episode_streams[0].get(
-            "episode_title", "%s: Episode %s" % (anime_data["title"], episode)
-        )
-        subprocess.run(["mpv", f"--title={title}", stream_link])

allanime constants:

@@ -1,6 +1,4 @@
-from yt_dlp.utils.networking import random_user_agent
+SERVERS_AVAILABLE = ["sharepoint", "dropbox", "gogoanime", "weTransfer", "wixmp", "Yt"]

 ALLANIME_BASE = "allanime.day"
 ALLANIME_REFERER = "https://allanime.to/"
 ALLANIME_API_ENDPOINT = "https://api.{}/api/".format(ALLANIME_BASE)
-USER_AGENT = random_user_agent()

animepahe package __init__ (server list moved to constants.py):

@@ -1 +0,0 @@
-SERVERS_AVAILABLE = ["kwik"]

animepahe provider (AnimePaheApi):

@@ -32,12 +32,14 @@ KWIK_RE = re.compile(r"Player\|(.+?)'")
 class AnimePaheApi(AnimeProvider):
     search_page: "AnimePaheSearchPage"
     anime: "AnimePaheAnimePage"
+    HEADERS = REQUEST_HEADERS

     def search_for_anime(self, user_query: str, *args):
         try:
             url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
-            headers = {**REQUEST_HEADERS}
-            response = self.session.get(url, headers=headers)
+            response = self.session.get(
+                url,
+            )
             if not response.status_code == 200:
                 return
             data: "AnimePaheSearchPage" = response.json()

@@ -85,7 +87,9 @@ class AnimePaheApi(AnimeProvider):
         url,
         page,
     ):
-        response = self.session.get(url, headers=REQUEST_HEADERS)
+        response = self.session.get(
+            url,
+        )
         if response.status_code == 200:
             if not data:
                 data.update(response.json())

@@ -171,7 +175,7 @@ class AnimePaheApi(AnimeProvider):
             anime_id = anime["id"]
             # fetch the episode page
             url = f"{ANIMEPAHE_BASE}/play/{anime_id}/{episode['session']}"
-            response = self.session.get(url, headers=REQUEST_HEADERS)
+            response = self.session.get(url)
             # get the element containing links to juicy streams
             c = get_element_by_id("resolutionMenu", response.text)
             resolutionMenuItems = get_elements_html_by_class("dropdown-item", c)

@@ -207,7 +211,11 @@ class AnimePaheApi(AnimeProvider):
                 )
                 return []
             # get embed page
-            embed_response = self.session.get(embed_url, headers=SERVER_HEADERS)
+            embed_response = self.session.get(
+                embed_url, headers={"User-Agent": self.USER_AGENT, **SERVER_HEADERS}
+            )
+            if not response.status_code == 200:
+                continue
             embed_page = embed_response.text

             decoded_js = process_animepahe_embed_page(embed_page)
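
A small wrinkle in the new embed-page fetch: the added status guard reads `response.status_code`, i.e. the episode-page response from earlier in the method, not the `embed_response` it has just made, so a failed embed request would slip through unchecked. If that is unintended, the guard presumably wants to be:

```python
# presumably intended guard (the patch tests `response` instead):
if not embed_response.status_code == 200:
    continue
embed_page = embed_response.text
```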

animepahe constants:

@@ -1,18 +1,14 @@
-from yt_dlp.utils.networking import random_user_agent
-
-USER_AGENT = random_user_agent()
 ANIMEPAHE = "animepahe.ru"
 ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
 ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api?"

+SERVERS_AVAILABLE = ["kwik"]
 REQUEST_HEADERS = {
     "Cookie": "__ddgid_=VvX0ebHrH2DsFZo4; __ddgmark_=3savRpSVFhvZcn5x; __ddg2_=buBJ3c4pNBYKFZNp; __ddg1_=rbVADKr9URtt55zoIGFa; SERVERID=janna; XSRF-TOKEN=eyJpdiI6IjV5bFNtd0phUHgvWGJxc25wL0VJSUE9PSIsInZhbHVlIjoicEJTZktlR2hxR2JZTWhnL0JzazlvZU5TQTR2bjBWZ2dDb0RwUXVUUWNSclhQWUhLRStYSmJmWmUxWkpiYkFRYU12RjFWejlSWHorME1wZG5qQ1U0TnFlNnBFR2laQjN1MjdyNjc5TjVPdXdJb2o5VkU1bEduRW9pRHNDTHh6Sy8iLCJtYWMiOiI0OTc0ZmNjY2UwMGJkOWY2MWNkM2NlMjk2ZGMyZGJmMWE0NTdjZTdkNGI2Y2IwNTIzZmFiZWU5ZTE2OTk0YmU4IiwidGFnIjoiIn0%3D; laravel_session=eyJpdiI6ImxvdlpqREFnTjdaeFJubUlXQWlJVWc9PSIsInZhbHVlIjoiQnE4R3VHdjZ4M1NDdEVWM1ZqMUxtNnVERnJCcmtCUHZKNzRPR2RFbzNFcStTL29xdnVTbWhsNVRBUXEybVZWNU1UYVlTazFqYlN5UjJva1k4czNGaXBTbkJJK01oTUd3VHRYVHBoc3dGUWxHYnFlS2NJVVNFbTFqMVBWdFpuVUgiLCJtYWMiOiI1NDdjZTVkYmNhNjUwZTMxZmRlZmVmMmRlMGNiYjAwYjlmYjFjY2U0MDc1YTQzZThiMTIxMjJlYTg1NTA4YjBmIiwidGFnIjoiIn0%3D; latest=5592 ",
     "Host": ANIMEPAHE,
-    "User-Agent": USER_AGENT,
     "Accept": "application , text/javascript, */*; q=0.01",
-    "Accept-Encoding": "gzip, deflate, br, zstd",
+    "Accept-Encoding": "Utf-8",
     "Referer": ANIMEPAHE_BASE,
-    "X-Requested-With": "XMLHttpRequest",
     "DNT": "1",
     "Connection": "keep-alive",
     "Sec-Fetch-Dest": "empty",

@@ -21,19 +17,17 @@ REQUEST_HEADERS = {
     "TE": "trailers",
 }
 SERVER_HEADERS = {
-    "User-Agent": USER_AGENT,
+    "Host": "kwik.si",
     "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
     "Accept-Language": "en-US,en;q=0.5",
-    "Accept-Encoding": "gzip, deflate, br, zstd",
+    "Accept-Encoding": "Utf-8",
     "DNT": "1",
-    "Alt-Used": "kwik.si",
     "Connection": "keep-alive",
-    "Referer": ANIMEPAHE_BASE,
-    "Cookie": "kwik_session=eyJpdiI6IlZ5UDd0c0lKTDB1NXlhTHZPeWxFc2c9PSIsInZhbHVlIjoieDJZbGhZUG1QZDNaeWtqR3lwWFNnREdhaHBxNVZRMWNDOHVucGpiMHRJOVdhVmpBc3lpTko1VExRMTFWcE1yUVJtVitoTWdOOU5ObTQ0Q0dHU0MzZU0yRUVvNmtWcUdmY3R4UWx4YklJTmpUL0ZodjhtVEpjWU96cEZoUUhUbVYiLCJtYWMiOiI2OGY2YThkOGU0MTgwOThmYzcyZThmNzFlZjlhMzQzMDgwNjlmMTc4NTIzMzc2YjE3YjNmMWQyNTk4NzczMmZiIiwidGFnIjoiIn0%3D; srv=s0; cf_clearance=QMoZtUpZrX0Mh4XJiFmFSSmoWndISPne5FcsGmKKvTQ-1723297585-1.0.1.1-6tVUnP.aef9XeNj0CnN.19D1el_r53t.lhqddX.J88gohH9UnsPWKeJ4yT0pTbcaGRbPuXTLOS.U72.wdy.gMg",
+    "Referer": "https://animepahe.ru/",
     "Upgrade-Insecure-Requests": "1",
     "Sec-Fetch-Dest": "iframe",
     "Sec-Fetch-Mode": "navigate",
     "Sec-Fetch-Site": "cross-site",
-    "Sec-Fetch-User": "?1",
     "Priority": "u=4",
+    "TE": "trailers",
 }

aniwatch package __init__ (server list moved to constants.py):

@@ -1 +0,0 @@
-SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]

aniwatch provider (AniWatchApi):

@@ -1,39 +1,102 @@
 import logging
 import re
+from html.parser import HTMLParser
 from itertools import cycle
+from urllib.parse import quote_plus

 from yt_dlp.utils import (
+    clean_html,
     extract_attributes,
+    get_element_by_class,
     get_element_html_by_class,
+    get_elements_by_class,
     get_elements_html_by_class,
 )

 from ..base_provider import AnimeProvider
-from ..common import fetch_anime_info_from_bal
-from ..mini_anilist import search_for_anime_with_anilist
 from ..utils import give_random_quality
-from . import SERVERS_AVAILABLE
+from .constants import SERVERS_AVAILABLE
 from .types import AniWatchStream

 logger = logging.getLogger(__name__)

 LINK_TO_STREAMS_REGEX = re.compile(r".*://(.*)/embed-(2|4|6)/e-([0-9])/(.*)\?.*")
+IMAGE_HTML_ELEMENT_REGEX = re.compile(r"<img.*?>")
+
+
+class ParseAnchorAndImgTag(HTMLParser):
+    def __init__(self):
+        super().__init__()
+        self.img_tag = None
+        self.a_tag = None
+
+    def handle_starttag(self, tag, attrs):
+        if tag == "img":
+            self.img_tag = {attr[0]: attr[1] for attr in attrs}
+        if tag == "a":
+            self.a_tag = {attr[0]: attr[1] for attr in attrs}
+
+
 class AniWatchApi(AnimeProvider):
+    # HEADERS = {"Referer": "https://hianime.to/home"}
+
     def search_for_anime(self, anime_title: str, *args):
         try:
-            return search_for_anime_with_anilist(anime_title)
+            query = quote_plus(anime_title)
+            url = f"https://hianime.to/search?keyword={query}"
+            response = self.session.get(url)
+            if response.status_code != 200:
+                return
+            search_page = response.text
+            search_results_html_items = get_elements_by_class("flw-item", search_page)
+            results = []
+            for search_results_html_item in search_results_html_items:
+                film_poster_html = get_element_by_class(
+                    "film-poster", search_results_html_item
+                )
+
+                if not film_poster_html:
+                    continue
+                # get availableEpisodes
+                episodes_html = get_element_html_by_class("tick-sub", film_poster_html)
+                episodes = clean_html(episodes_html) or 12
+
+                # get anime id and poster image url
+                parser = ParseAnchorAndImgTag()
+                parser.feed(film_poster_html)
+                image_data = parser.img_tag
+                anime_link_data = parser.a_tag
+                if not image_data or not anime_link_data:
+                    continue
+
+                episodes = int(episodes)
+
+                # finally!!
+                image_link = image_data["data-src"]
+                anime_id = anime_link_data["data-id"]
+                title = anime_link_data["title"]
+
+                results.append(
+                    {
+                        "availableEpisodes": list(range(1, episodes)),
+                        "id": anime_id,
+                        "title": title,
+                        "poster": image_link,
+                    }
+                )
+            self.search_results = results
+            return {"pageInfo": {}, "results": results}
+
         except Exception as e:
             logger.error(e)

-    def get_anime(self, anilist_id, *args):
+    def get_anime(self, aniwatch_id, *args):
         try:
-            bal_results = fetch_anime_info_from_bal(anilist_id)
-            if not bal_results:
-                return
-            ZORO = bal_results["Sites"]["Zoro"]
-            aniwatch_id = list(ZORO.keys())[0]
+            anime_result = {}
+            for anime in self.search_results:
+                if anime["id"] == aniwatch_id:
+                    anime_result = anime
+                    break
             anime_url = f"https://hianime.to/ajax/v2/episode/list/{aniwatch_id}"
             response = self.session.get(anime_url, timeout=10)
             if response.status_code == 200:

@@ -58,7 +121,7 @@ class AniWatchApi(AnimeProvider):
                         (episode["title"] or "").replace(
                             f"Episode {episode['data-number']}", ""
                         )
-                        or ZORO[aniwatch_id]["title"]
+                        or anime_result["title"]
                     )
                     + f"; Episode {episode['data-number']}",
                     "episode": episode["data-number"],

@@ -72,8 +135,8 @@ class AniWatchApi(AnimeProvider):
                 "sub": episodes,
                 "raw": episodes,
             },
-            "poster": ZORO[aniwatch_id]["image"],
+            "poster": anime_result["poster"],
-            "title": ZORO[aniwatch_id]["title"],
+            "title": anime_result["title"],
             "episodes_info": self.episodes_info,
         }
     except Exception as e:
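
`ParseAnchorAndImgTag` is a minimal `html.parser.HTMLParser` subclass that just records the attributes of the last `<img>` and `<a>` start tags it sees, which is all the scraper needs from each search-result card. A self-contained usage sketch with a made-up fragment of such a card:

```python
from html.parser import HTMLParser


class ParseAnchorAndImgTag(HTMLParser):
    def __init__(self):
        super().__init__()
        self.img_tag = None
        self.a_tag = None

    def handle_starttag(self, tag, attrs):
        if tag == "img":
            self.img_tag = dict(attrs)  # e.g. {"data-src": ..., "alt": ...}
        if tag == "a":
            self.a_tag = dict(attrs)


# hypothetical search-card fragment:
html = '<a data-id="100" title="Some Show"><img data-src="/poster.jpg" alt="Some Show"></a>'
parser = ParseAnchorAndImgTag()
parser.feed(html)
print(parser.a_tag["data-id"], parser.img_tag["data-src"])  # 100 /poster.jpg
```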

aniwatch constants (new file; presumably fastanime/libs/anime_provider/aniwatch/constants.py, given the `from .constants import SERVERS_AVAILABLE` above):

@@ -0,0 +1 @@
+SERVERS_AVAILABLE = ["HD1", "HD2", "StreamSB", "StreamTape"]

fastanime/libs/anime_provider/aniwave/api.py (new file, +65 lines)

@@ -0,0 +1,65 @@
+from html.parser import HTMLParser
+
+from yt_dlp.utils import clean_html, get_element_by_class, get_elements_by_class
+
+from ..base_provider import AnimeProvider
+from .constants import ANIWAVE_BASE, SEARCH_HEADERS
+
+
+class ParseAnchorAndImgTag(HTMLParser):
+    def __init__(self):
+        super().__init__()
+        self.img_tag = None
+        self.a_tag = None
+
+    def handle_starttag(self, tag, attrs):
+        if tag == "img":
+            self.img_tag = {attr[0]: attr[1] for attr in attrs}
+        if tag == "a":
+            self.a_tag = {attr[0]: attr[1] for attr in attrs}
+
+
+class AniWaveApi(AnimeProvider):
+    def search_for_anime(self, anime_title, *args):
+        self.session.headers.update(SEARCH_HEADERS)
+        search_url = f"{ANIWAVE_BASE}/filter"
+        params = {"keyword": anime_title}
+        res = self.session.get(search_url, params=params)
+        search_page = res.text
+        search_results_html_list = get_elements_by_class("item", search_page)
+        results = []
+        for result_html in search_results_html_list:
+            aniposter_html = get_element_by_class("poster", result_html)
+            episode_html = get_element_by_class("sub", aniposter_html)
+            episodes = clean_html(episode_html) or 12
+            if not aniposter_html:
+                return
+            parser = ParseAnchorAndImgTag()
+            parser.feed(aniposter_html)
+            image_data = parser.img_tag
+            anime_link_data = parser.a_tag
+            if not image_data or not anime_link_data:
+                continue
+
+            episodes = int(episodes)
+
+            # finally!!
+            image_link = image_data["src"]
+            title = image_data["alt"]
+            anime_id = anime_link_data["href"]
+
+            results.append(
+                {
+                    "availableEpisodes": list(range(1, episodes)),
+                    "id": anime_id,
+                    "title": title,
+                    "poster": image_link,
+                }
+            )
+        self.search_results = results
+        return {"pageInfo": {}, "results": results}
+
+    def get_anime(self, anime_id, *args):
+        anime_page_url = f"{ANIWAVE_BASE}{anime_id}"
+        self.session.get(anime_page_url)
+        # TODO: to be continued; mostly js so very difficult
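
Two details of the new AniWave scraper worth noting: `aniposter_html` is passed to `get_element_by_class("sub", ...)` one line before the `if not aniposter_html` guard runs, so a card with no poster element would likely raise instead of being skipped, and the guard then `return`s, abandoning every result gathered so far rather than moving on. A sketch of the presumably intended ordering inside the loop (same calls, reordered):

```python
# presumably intended order inside the result loop:
aniposter_html = get_element_by_class("poster", result_html)
if not aniposter_html:
    continue  # skip this card instead of aborting the whole search
episode_html = get_element_by_class("sub", aniposter_html)
episodes = clean_html(episode_html) or 12
```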

fastanime/libs/anime_provider/aniwave/constants.py (new file, +20 lines)

@@ -0,0 +1,20 @@
+ANIWAVE_BASE = "https://aniwave.to"
+
+SEARCH_HEADERS = {
+    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/png,image/svg+xml,*/*;q=0.8",
+    "Accept-Language": "en-US,en;q=0.5",
+    # 'Accept-Encoding': 'Utf-8',
+    "Referer": "https://aniwave.to/filter",
+    "DNT": "1",
+    "Upgrade-Insecure-Requests": "1",
+    "Sec-Fetch-Dest": "document",
+    "Sec-Fetch-Mode": "navigate",
+    "Sec-Fetch-Site": "same-origin",
+    "Sec-Fetch-User": "?1",
+    "Connection": "keep-alive",
+    "Alt-Used": "aniwave.to",
+    # 'Cookie': '__pf=1; usertype=guest; session=BElk9DJdO3sFdDmLiGxuNiM9eGYO1TjktGsmdwjV',
+    "Priority": "u=0, i",
+    # Requests doesn't support trailers
+    # 'TE': 'trailers',
+}

base provider (AnimeProvider):

@@ -1,8 +1,13 @@
 import requests
+from yt_dlp.utils.networking import random_user_agent


 class AnimeProvider:
     session: requests.Session

+    USER_AGENT = random_user_agent()
+    HEADERS = {}
+
     def __init__(self) -> None:
         self.session = requests.session()
+        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
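
With the reworked base class, every provider session is constructed with a random User-Agent plus whatever class-level `HEADERS` the subclass declares, which is why per-request `headers=` arguments disappear across this change set. A minimal sketch of the pattern (the subclass below is illustrative, not one of FastAnime's):

```python
import requests
from yt_dlp.utils.networking import random_user_agent


class AnimeProvider:
    USER_AGENT = random_user_agent()
    HEADERS: dict = {}

    def __init__(self) -> None:
        self.session = requests.session()
        # the defaults ride along on every request made through self.session
        self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})


class ExampleProvider(AnimeProvider):  # illustrative subclass
    HEADERS = {"Referer": "https://example.com/"}


provider = ExampleProvider()
# provider.session.get(...) now sends the Referer and User-Agent automatically
```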

pyproject.toml:

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "fastanime"
-version = "2.3.4"
+version = "2.3.6"
 description = "A browser anime site experience from the terminal"
 authors = ["Benextempest <benextempest@gmail.com>"]
 license = "UNLICENSE"

pyrightconfig.json:

@@ -1,4 +1,5 @@
 {
-    "typeCheckingMode": "standard",
-    "reportPrivateImportUsage": false
+    "venvPath": ".",
+    "venv": ".venv",
+    "pythonVersion": "3.10"
 }