Mirror of https://github.com/Benexl/FastAnime.git (synced 2025-12-13 00:00:01 -08:00)

Compare commits (130 commits)
Commit SHA1s:

dc58fc8536, 1d5c3016fc, 8737aea746, bd03866f5e, 81690a8015, 933112a52b, eb513dfe0e, 3928b77506,
95cb2bd78c, 4fa1c45eb2, b9051bc792, a590024f1c, 2f51936679, 327c50d290, 031dfbb9b5, 050365302a,
0f248b1119, 871d5cf758, 320376d2e8, 02e7fdff6f, 2c5c28f295, 2d3509ccc1, 30babf2d69, cfbbabf898,
5ac6c45fdf, a14645b563, 90dbc26c46, 54cc830c35, 4928ff5b74, bb481fe21a, 0d27b8f652, bdd3aae399,
af94cd7eb5, 54044f9527, 1e5c039ece, 15555759dc, 0ed51e05cc, 634ef6febf, bda4b2dbe1, f015305e7c,
d32b7e917f, 3b35e80199, c65a1a2815, 0b3615c9f5, 3ac4e1ac71, d62f580d7a, 02e35b66cb, 7b11e0a301,
aa8b91aed3, fe0fa97576, 92059cd5ed, ed3064e3b1, 441d1e5e6c, 653b2cf4eb, 8d4b71e0c8, 29cc6cad09,
8119eef263, 912c8674cf, 6b3ca236dd, f1c352d4ff, 714533d845, 56dd25df8d, 8248dc53df, 1a8a187de6,
bc86be8c93, 75026d4fc5, f8a5ccb8d2, 719d1bd187, 0dd83463c6, 1ee50e8a55, ae95c5ea3d, d64ad5e11d,
d1a47c6d44, 51a834a62f, 3a030bf6f7, eb6a6fc82c, 437ccd94e4, d65868cc30, 8678aa6544, 00e5141152,
90e757dfe1, 8b471b08e8, 158bc5710f, a0b946a13d, b547b75f03, 58c7427a47, 6220b9c55d, 6b9b5c131c,
212f2af39c, f7b2b4e0c9, a747529279, 1dfdcc27ce, 3c03289453, 06fd446a72, 172d912d8b, 2396018607,
a9be9779c5, 2f76b26a99, 2fe5edf810, d67ee6a779, e06ec5dbd4, c1b24ba2aa, 59e9cf9fd0, 58761f5b96,
ac959da229, bacc8c48ec, 905a159428, 20f734cab2, 7c2c644aef, 0efc92081a, fafeee2367, e03063cd76,
93b38b055f, 045635fb55, de7f773e9e, ef6a465bd2, 0c623af8a4, 0589f83998, e17608afd5, b915654685,
2ce9bf6c47, 3c22232432, 3474e9520c, e9bacf4f9c, ef422ed6fd, d0f5366908, 3557205feb, ba4c41d888,
1427a3193c, b5cee20e56
.github/workflows/build.yml (vendored, 29 lines changed)

```
@@ -8,31 +8,24 @@ jobs:
debug_build:
if: ${{ github.event.workflow_run.conclusion == 'success' }}
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- name: Install Python

- name: "Set up Python"
uses: actions/setup-python@v5
- name: Install poetry
uses: abatilo/actions-poetry@v2
- name: Setup a local virtual environment (if no poetry.toml file)
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v3
name: Define a cache for the virtual environment based on the dependencies lock file

- name: Install uv
uses: astral-sh/setup-uv@v3
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install --all-extras
- name: build app
run: poetry build
enable-cache: true

- name: Build fastanime
run: uv build

- name: Archive production artifacts
uses: actions/upload-artifact@v4
with:
name: fastanime_debug_build
path: |
dist
!dist/*.whl
# - name: Run the automated tests (for example)
# run: poetry run pytest -v
```
.github/workflows/publish.yml (vendored, 12 lines changed)

```
@@ -27,11 +27,13 @@ jobs:
with:
python-version: "3.10"

- name: Build release distributions
run: |
# NOTE: put your own distribution build steps here.
python -m pip install build
python -m build
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
enable-cache: true

- name: Build fastanime
run: uv build

- name: Upload distributions
uses: actions/upload-artifact@v4
```
.github/workflows/test.yml (vendored, 40 lines changed)

```
@@ -6,37 +6,35 @@ on:
pull_request:
branches:
- master

jobs:
test:
runs-on: ubuntu-latest

strategy:
matrix:
python-version: ["3.10", "3.11"] # List the Python versions you want to test

steps:
- uses: actions/checkout@v4
- name: Install Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install poetry
uses: abatilo/actions-poetry@v2
- name: Setup a local virtual environment (if no poetry.toml file)
run: |
poetry config virtualenvs.create true --local
poetry config virtualenvs.in-project true --local
- uses: actions/cache@v3
name: Define a cache for the virtual environment based on the dependencies lock file

- name: Install uv
uses: astral-sh/setup-uv@v3
with:
path: ./.venv
key: venv-${{ hashFiles('poetry.lock') }}
- name: Install the project dependencies
run: poetry install --all-extras
- name: run linter, formatters and sort imports
run: |
poetry run black .
poetry run ruff check --output-format=github . --fix
poetry run isort . --profile black
- name: run type checking
run: poetry run pyright
- name: run tests
run: poetry run pytest
enable-cache: true

- name: Install the project
run: uv sync --all-extras --dev

- name: Run linter and formater
run: uv run ruff check --output-format=github

- name: Run type checking
run: uv run pyright

- name: Run tests
run: uv run pytest tests
```
.gitignore (vendored, 1 line changed)

```
@@ -176,3 +176,4 @@ app/View/SearchScreen/.search_screen.py.un~
app/View/SearchScreen/search_screen.py~
app/user_data.json
.buildozer
result
```
DISCLAIMER.md (new file, 40 lines)

```
@@ -0,0 +1,40 @@
<h1 align="center">Disclaimer</h1>

<div align="center">

<h2>This project: fastanime</h2>

<br>

The core aim of this project is to co-relate automation and efficiency to extract what is provided to a user on the internet. All content available through the project is hosted by external non-affiliated sources.

<br>

<b>All content served through this project is publicly accessible. If your site is listed in this project, the code is pretty much public. Take necessary measures to counter the exploits used to extract content in your site.</b>

Think of this project as your normal browser, but a bit more straight-forward and specific. While an average browser makes hundreds of requests to get everything from a site, this project goes on to only make requests associated with getting the content served by the sites.

<b>

This project is to be used at the user's own risk, based on their government and laws.

This project has no control on the content it is serving, using copyrighted content from the providers is not going to be accounted for by the developer. It is the user's own risk.

</b>

<br>

<h2>DMCA and Copyright Infrigements</h3>

<br>

<b>

A browser is a tool, and the maliciousness of the tool is directly based on the user.
</b>

This project uses client-side content access mechanisms. Hence, the copyright infrigements or DMCA in this project's regards are to be forwarded to the associated site by the associated notifier of any such claims. This is one of the main reasons the sites are listed in this project.

<b>Do not harass the developer. Any personal information about the developer is intentionally not made public. Exploiting such information without consent in regards to this topic will lead to legal actions by the developer themselves.</b>
```
Dockerfile (11 lines changed)

```
@@ -1,10 +1,7 @@
FROM ubuntu
RUN apt-get update
RUN apt-get -y install python3
RUN apt-get update
RUN apt-get -y install pipx
RUN pipx ensurepath
FROM python:3.12-slim-bookworm
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/
COPY . /fastanime
ENV PATH=/root/.local/bin:$PATH
WORKDIR /fastanime
RUN pipx install .
RUN uv tool install .
CMD ["bash"]
```
fa (5 lines changed)

```
@@ -1,4 +1,3 @@
#!/usr/bin/env sh
# exec "${PYTHON:-python3}" -Werror -Xdev -m "$(dirname "$(realpath "$0")")/fastanime" "$@"
cd "$(dirname "$(realpath "$0")")" || exit 1
exec python -m fastanime "$@"
CLI_DIR="$(dirname "$(realpath "$0")")"
exec uv run --directory "$CLI_DIR/../" fastanime "$@"
```
```
@@ -1,10 +1,8 @@
"""An abstraction over all providers offering added features with a simple and well typed api

[TODO:description]
"""
"""An abstraction over all providers offering added features with a simple and well typed api"""

import importlib
import logging
import os
from typing import TYPE_CHECKING

from .libs.anime_provider import anime_sources
@@ -32,19 +30,36 @@ class AnimeProvider:
PROVIDERS = list(anime_sources.keys())
provider = PROVIDERS[0]

def __init__(self, provider, dynamic=False, retries=0) -> None:
def __init__(
self,
provider,
cache_requests=os.environ.get("FASTANIME_CACHE_REQUESTS", "false"),
use_persistent_provider_store=os.environ.get(
"FASTANIME_USE_PERSISTENT_PROVIDER_STORE", "false"
),
dynamic=False,
retries=0,
) -> None:
self.provider = provider
self.dynamic = dynamic
self.retries = retries
self.cache_requests = cache_requests
self.use_persistent_provider_store = use_persistent_provider_store
self.lazyload_provider(self.provider)

def lazyload_provider(self, provider):
"""updates the current provider being used"""
try:
self.anime_provider.session.kill_connection_to_db()
except Exception:
pass
_, anime_provider_cls_name = anime_sources[provider].split(".", 1)
package = f"fastanime.libs.anime_provider.{provider}"
provider_api = importlib.import_module(".api", package)
anime_provider = getattr(provider_api, anime_provider_cls_name)
self.anime_provider = anime_provider()
self.anime_provider = anime_provider(
self.cache_requests, self.use_persistent_provider_store
)

def search_for_anime(
self,
@@ -93,7 +108,6 @@ class AnimeProvider:
def get_episode_streams(
self,
anime_id,
anime_title,
episode: str,
translation_type: str,
) -> "Iterator[Server] | None":
@@ -110,6 +124,6 @@ class AnimeProvider:
"""
anime_provider = self.anime_provider
results = anime_provider.get_episode_streams(
anime_id, anime_title, episode, translation_type
anime_id, episode, translation_type
)
return results
```
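A minimal usage sketch of the reworked `AnimeProvider` constructor, based on how it is instantiated elsewhere in this changeset (`fastanime/api/__init__.py` passes the provider name plus the two new string flags for request caching and the persistent provider store). The title and episode below are placeholders, and the import path assumes the package layout shown in these hunks.

```python
from fastanime.AnimeProvider import AnimeProvider

# provider name, cache_requests, use_persistent_provider_store
provider = AnimeProvider("allanime", "true", "true")

results = provider.search_for_anime("one piece", "sub")
if results and results["results"]:
    anime = results["results"][0]
    provider.get_anime(anime["id"])
    # note the new call shape: anime_title was dropped from get_episode_streams
    streams = provider.get_episode_streams(anime["id"], "1", "sub")
```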
```
@@ -1,13 +1,16 @@
import re
from datetime import datetime
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from ..libs.anilist.types import AnilistDateObject, AnilistMediaNextAiringEpisode

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")


# TODO: Add formating options for the final date
def format_anilist_date_object(anilist_date_object: "AnilistDateObject"):
if anilist_date_object:
if anilist_date_object and anilist_date_object["day"]:
return f"{anilist_date_object['day']}/{anilist_date_object['month']}/{anilist_date_object['year']}"
else:
return "Unknown"
@@ -27,6 +30,12 @@ def format_list_data_with_comma(data: list | None):
return "None"


def format_number_with_commas(number: int | None):
if not number:
return "0"
return COMMA_REGEX.sub(lambda match: f"{match.group(1)},", str(number)[::-1])[::-1]


def extract_next_airing_episode(airing_episode: "AnilistMediaNextAiringEpisode"):
if airing_episode:
return f"{airing_episode['episode']} on {format_anilist_timestamp(airing_episode['airingAt'])}"
```
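To make the `COMMA_REGEX` trick above concrete, here is a self-contained sketch of the same approach with a worked example: the digits are reversed, a comma is inserted after every group of three digits that is still followed by another digit, and the string is reversed back.

```python
import re

COMMA_REGEX = re.compile(r"([0-9]{3})(?=\d)")


def format_number_with_commas(number: int | None) -> str:
    if not number:
        return "0"
    # "1234567" -> reversed "7654321" -> "765,432,1" -> reversed back "1,234,567"
    return COMMA_REGEX.sub(lambda m: f"{m.group(1)},", str(number)[::-1])[::-1]


print(format_number_with_commas(1234567))  # -> 1,234,567
print(format_number_with_commas(None))     # -> 0
```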
```
@@ -9,10 +9,12 @@ anime_normalizer_raw = {
"Magia Record: Mahou Shoujo Madoka☆Magica Gaiden (TV)": "Mahou Shoujo Madoka☆Magica",
"Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
'Hazurewaku no "Joutai Ijou Skill" de Saikyou ni Natta Ore ga Subete wo Juurin suru made': "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
"Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
},
"hianime": {"My Star": "Oshi no Ko"},
"animepahe": {"Azumanga Daiou The Animation": "Azumanga Daioh"},
"nyaa": {},
"yugen": {},
}


@@ -20,7 +22,7 @@ def get_anime_normalizer():
"""Used because there are different providers"""
import os

current_provider = os.environ["CURRENT_FASTANIME_PROVIDER"]
current_provider = os.environ.get("FASTANIME_PROVIDER", "allanime")
return anime_normalizer_raw[current_provider]
```
```
@@ -16,6 +16,7 @@ logger = logging.getLogger(__name__)

class YtDLPDownloader:
downloads_queue = Queue()
_thread = None

def _worker(self):
while True:
@@ -26,11 +27,6 @@ class YtDLPDownloader:
logger.error(f"Something went wrong {e}")
self.downloads_queue.task_done()

def __init__(self):
self._thread = Thread(target=self._worker)
self._thread.daemon = True
self._thread.start()

def _download_file(
self,
url: str,
@@ -38,6 +34,7 @@ class YtDLPDownloader:
episode_title: str,
download_dir: str,
silent: bool,
progress_hooks=[],
vid_format: str = "best",
force_unknown_ext=False,
verbose=False,
@@ -86,6 +83,7 @@ class YtDLPDownloader:
"verbose": verbose,
"format": vid_format,
"compat_opts": ("allow-unsafe-ext",) if force_unknown_ext else tuple(),
"progress_hooks": progress_hooks,
}
urls = [url]
if sub:
@@ -174,8 +172,15 @@ class YtDLPDownloader:
except Exception as e:
print(f"[red bold]An error[/] occurred: {e}")

# WARN: May remove this legacy functionality
def download_file(self, url: str, title, silent=True):
def download_file(
self,
url: str,
anime_title: str,
episode_title: str,
download_dir: str,
silent: bool = True,
**kwargs,
):
"""A helper that just does things in the background

Args:
@@ -183,7 +188,17 @@ class YtDLPDownloader:
silent ([TODO:parameter]): [TODO:description]
url: [TODO:description]
"""
self.downloads_queue.put((self._download_file, (url, title, silent)))
if not self._thread:
self._thread = Thread(target=self._worker)
self._thread.daemon = True
self._thread.start()

self.downloads_queue.put(
(
self._download_file,
(url, anime_title, episode_title, download_dir, silent),
)
)


downloader = YtDLPDownloader()
```
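The hunk above moves worker-thread startup out of `__init__` and into `download_file`, which now lazily starts a daemon thread and enqueues `(function, args)` tuples. Below is a minimal, self-contained sketch of that queue-plus-daemon-thread pattern; the names and the fake download function are illustrative, not the project's real downloader.

```python
from queue import Queue
from threading import Thread

jobs: Queue = Queue()


def _worker() -> None:
    # consume (function, args) tuples forever, marking each job done
    while True:
        func, args = jobs.get()
        try:
            func(*args)
        finally:
            jobs.task_done()


worker = Thread(target=_worker, daemon=True)
worker.start()


def fake_download(url: str, anime_title: str, episode_title: str, download_dir: str, silent: bool) -> None:
    print(f"downloading {anime_title} - {episode_title} from {url} into {download_dir}")


# enqueue a job and wait for the queue to drain, mirroring downloads_queue.put(...) above
jobs.put((fake_download, ("https://example.com/ep1.m3u8", "Some Anime", "Episode 1", "/tmp", True)))
jobs.join()
```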
```
@@ -37,6 +37,10 @@ def anime_title_percentage_match(
title_a = str(anime["title"]["romaji"])
title_b = str(anime["title"]["english"])
percentage_ratio = max(
*[
fuzz.ratio(title.lower(), possible_user_requested_anime_title.lower())
for title in anime["synonyms"]
],
fuzz.ratio(title_a.lower(), possible_user_requested_anime_title.lower()),
fuzz.ratio(title_b.lower(), possible_user_requested_anime_title.lower()),
)
```
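A small sketch of the title scoring shown above, using `thefuzz` (which this changeset imports elsewhere): the best ratio across the romaji title, the English title, and every synonym is taken. The anime dictionary and the requested title are made up for illustration.

```python
from thefuzz import fuzz

anime = {
    "title": {"romaji": "Shingeki no Kyojin", "english": "Attack on Titan"},
    "synonyms": ["AoT", "SnK"],
}
requested = "attack on titan"

score = max(
    *[fuzz.ratio(t.lower(), requested) for t in anime["synonyms"]],
    fuzz.ratio(anime["title"]["romaji"].lower(), requested),
    fuzz.ratio(anime["title"]["english"].lower(), requested),
)
print(score)  # 100: the English title is an exact case-insensitive match
```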
```
@@ -2,11 +2,11 @@ import sys

if sys.version_info < (3, 10):
raise ImportError(
"You are using an unsupported version of Python. Only Python versions 3.8 and above are supported by yt-dlp"
"You are using an unsupported version of Python. Only Python versions 3.10 and above are supported by FastAnime"
) # noqa: F541


__version__ = "v2.5.6"
__version__ = "v2.8.0"

APP_NAME = "FastAnime"
AUTHOR = "Benex254"
```
fastanime/api/__init__.py (new file, 93 lines)

```
@@ -0,0 +1,93 @@
from typing import Literal

from fastapi import FastAPI
from requests import post
from thefuzz import fuzz

from ..AnimeProvider import AnimeProvider
from ..Utility.data import anime_normalizer

app = FastAPI()
anime_provider = AnimeProvider("allanime", "true", "true")
ANILIST_ENDPOINT = "https://graphql.anilist.co"


@app.get("/search")
def search_for_anime(title: str, translation_type: Literal["dub", "sub"] = "sub"):
    return anime_provider.search_for_anime(title, translation_type)


@app.get("/anime/{anime_id}")
def get_anime(anime_id: str):
    return anime_provider.get_anime(anime_id)


@app.get("/anime/{anime_id}/watch")
def get_episode_streams(
    anime_id: str, episode: str, translation_type: Literal["sub", "dub"]
):
    return anime_provider.get_episode_streams(anime_id, episode, translation_type)


def get_anime_by_anilist_id(anilist_id: int):
    query = f"""
    query {{
        Media(id: {anilist_id}) {{
            id
            title {{
                romaji
                english
                native
            }}
            synonyms
            episodes
            duration
        }}
    }}
    """
    response = post(ANILIST_ENDPOINT, json={"query": query}).json()
    return response["data"]["Media"]


@app.get("/watch/{anilist_id}")
def get_episode_streams_by_anilist_id(
    anilist_id: int, episode: str, translation_type: Literal["sub", "dub"]
):
    anime = get_anime_by_anilist_id(anilist_id)
    if not anime:
        return
    if search_results := anime_provider.search_for_anime(
        str(anime["title"]["romaji"] or anime["title"]["english"]), translation_type
    ):
        if not search_results["results"]:
            return

        def match_title(possible_user_requested_anime_title):
            possible_user_requested_anime_title = anime_normalizer.get(
                possible_user_requested_anime_title, possible_user_requested_anime_title
            )
            title_a = str(anime["title"]["romaji"])
            title_b = str(anime["title"]["english"])
            percentage_ratio = max(
                *[
                    fuzz.ratio(
                        title.lower(), possible_user_requested_anime_title.lower()
                    )
                    for title in anime["synonyms"]
                ],
                fuzz.ratio(
                    title_a.lower(), possible_user_requested_anime_title.lower()
                ),
                fuzz.ratio(
                    title_b.lower(), possible_user_requested_anime_title.lower()
                ),
            )
            return percentage_ratio

        provider_anime = max(
            search_results["results"], key=lambda x: match_title(x["title"])
        )
        anime_provider.get_anime(provider_anime["id"])
        return anime_provider.get_episode_streams(
            provider_anime["id"], episode, translation_type
        )
```
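A usage sketch for the new API module: FastAPI's `TestClient` can exercise these routes without starting a server. The search title is just an example, and importing the module will construct the `AnimeProvider` instance shown above.

```python
from fastapi.testclient import TestClient

from fastanime.api import app  # the FastAPI instance defined above

client = TestClient(app)

# /search maps to search_for_anime(title, translation_type="sub")
response = client.get(
    "/search", params={"title": "one piece", "translation_type": "sub"}
)
print(response.status_code, response.json())
```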
fastanime/assets/rofi_theme.rasi (new file, 84 lines)

```
@@ -0,0 +1,84 @@
// https://github.com/Wraient/curd/blob/main/rofi/selectanime.rasi
// Go give there project a star!
// Was too lazy to make my own preview, so I just used theirs


configuration {
    font: "Sans 12";
    line-margin: 10;
    display-drun: "";
}

* {
    background: #000000; /* Black background for everything */
    background-alt: #000000; /* Ensures no alternation */
    foreground: #CCCCCC;
    selected: #3584E4;
    active: #2E7D32;
    urgent: #C62828;
}

window {
    fullscreen: false;
    background-color: rgba(0, 0, 0, 1); /* Solid black background */
}

mainbox {
    padding: 50px 100px;
    background-color: rgba(0, 0, 0, 1); /* Ensures black background fills entire main area */
    children: [inputbar, listview];
    spacing: 20px;
}

inputbar {
    background-color: #333333; /* Dark gray background for input bar */
    padding: 8px;
    border-radius: 8px;
    children: [prompt, entry];
}

prompt {
    enabled: true;
    padding: 8px;
    background-color: @selected;
    text-color: #000000;
    border-radius: 4px;
}

entry {
    padding: 8px;
    background-color: #444444; /* Slightly lighter gray for visibility */
    text-color: #FFFFFF; /* White text to make typing visible */
    placeholder: "Search...";
    placeholder-color: rgba(255, 255, 255, 0.5);
    border-radius: 6px;
}

listview {
    layout: vertical;
    spacing: 8px;
    lines: 10;
    background-color: @background; /* Consistent black background for list items */
}

element {
    padding: 12px;
    border-radius: 4px;
    background-color: @background; /* Uniform color for each list item */
    text-color: @foreground;
}

element normal.normal {
    background-color: @background; /* Ensures no alternating color */
}

element selected.normal {
    background-color: @selected;
    text-color: #FFFFFF;
}

element-text {
    background-color: transparent;
    text-color: inherit;
    vertical-align: 0.5;
}
```
fastanime/assets/rofi_theme_confirm.rasi (new file, 55 lines)

```
@@ -0,0 +1,55 @@
// https://github.com/Wraient/curd/blob/main/rofi/userinput.rasi
// Go give there project a star!
// Was too lazy to make my own preview, so I just used theirs

configuration {
    font: "Sans 12";
}

* {
    background-color: rgba(0, 0, 0, 0.7);
    text-color: #FFFFFF;
}

window {
    fullscreen: true;
    transparency: "real";
    background-color: @background-color;
}

mainbox {
    children: [ message, listview, inputbar ];
    padding: 40% 30%;
}

message {
    border: 0;
    padding: 10px;
    margin: 0 0 20px 0;
    font: "Sans Bold 24"; /* Increased font size and made it bold */
}

inputbar {
    children: [ prompt, entry ];
    background-color: rgba(255, 255, 255, 0.1);
    padding: 8px;
    border-radius: 4px;
}

prompt {
    padding: 8px;
}

entry {
    padding: 8px;
}

listview {
    lines: 0;
}

/* Style for the message text specifically */
textbox {
    horizontal-align: 0.5; /* Center the text */
    font: "Sans Bold 24"; /* Match message font */
}
```
fastanime/assets/rofi_theme_input.rasi (new file, 55 lines)

```
@@ -0,0 +1,55 @@
// https://github.com/Wraient/curd/blob/main/rofi/userinput.rasi
// Go give there project a star!
// Was too lazy to make my own preview, so I just used theirs

configuration {
    font: "Sans 12";
}

* {
    background-color: rgba(0, 0, 0, 0.7);
    text-color: #FFFFFF;
}

window {
    fullscreen: true;
    transparency: "real";
    background-color: @background-color;
}

mainbox {
    children: [ message, listview, inputbar ];
    padding: 40% 30%;
}

message {
    border: 0;
    padding: 10px;
    margin: 0 0 20px 0;
    font: "Sans Bold 24"; /* Increased font size and made it bold */
}

inputbar {
    children: [ prompt, entry ];
    background-color: rgba(255, 255, 255, 0.1);
    padding: 8px;
    border-radius: 4px;
}

prompt {
    padding: 8px;
}

entry {
    padding: 8px;
}

listview {
    lines: 0;
}

/* Style for the message text specifically */
textbox {
    horizontal-align: 0.5; /* Center the text */
    font: "Sans Bold 24"; /* Match message font */
}
```
fastanime/assets/rofi_theme_preview.rasi (new file, 122 lines)

```
@@ -0,0 +1,122 @@
// Based on https://github.com/Wraient/curd/blob/main/rofi/selectanimepreview.rasi
// Go give there project a star!
// Was too lazy to make my own preview, so I just used theirs

// Colours
* {
    background-color: transparent;
    background: #1D2330;
    background-transparent: #1D2330A0;
    text-color: #BBBBBB;
    text-color-selected: #FFFFFF;
    primary: #BB77BB;
    important: #BF616A;
}

configuration {
    font: "Roboto 17";
    show-icons: true;
}

window {
    fullscreen: true;
    height: 100%;
    width: 100%;
    transparency: "real";
    background-color: @background-transparent;
    border: 0px;
    border-color: @primary;
}

mainbox {
    children: [prompt, inputbar-box, listview];
    padding: 0px;
}

prompt {
    width: 100%;
    margin: 10px 0px 0px 30px;
    text-color: @important;
    font: "Roboto Bold 27";
}

listview {
    layout: vertical;
    padding: 60px;
    dynamic: true;
    columns: 7;
    spacing: 20px;
    horizontal-align: center; /* Center the list items */
}

inputbar-box {
    children: [dummy, inputbar, dummy];
    orientation: horizontal;
    expand: false;
}

inputbar {
    children: [textbox-prompt, entry];
    margin: 0px;
    background-color: @primary;
    border: 4px;
    border-color: @primary;
    border-radius: 8px;
}

textbox-prompt {
    text-color: @background;
    horizontal-align: 0.5;
    vertical-align: 0.5;
    expand: false;
}

entry {
    expand: false;
    padding: 8px;
    margin: -6px;
    horizontal-align: 0;
    width: 300;
    background-color: @background;
    border: 6px;
    border-color: @primary;
    border-radius: 8px;
    cursor: text;
}

element {
    children: [dummy, element-box, dummy];
    padding: 5px;
    orientation: vertical;
    border: 0px;
    border-radius: 16px;
    background-color: transparent; /* Default background */
}

element selected {
    background-color: @primary; /* Solid color for selected item */
}

element-box {
    children: [element-icon, element-text];
    orientation: vertical;
    expand: false;
    cursor: pointer;
}

element-icon {
    padding: 10px;
    cursor: inherit;
    size: 33%;
    margin: 10px;
}

element-text {
    horizontal-align: 0.5;
    cursor: inherit;
    text-color: @text-color;
}

element-text selected {
    text-color: @text-color-selected;
}
```
```
@@ -16,6 +16,7 @@ commands = {
"completions": "completions.completions",
"update": "update.update",
"grab": "grab.grab",
"serve": "serve.serve",
}
```
```
@@ -157,6 +158,9 @@ signal.signal(signal.SIGINT, handle_exit)
@click.option("--sub", help="Set the translation type to sub", is_flag=True)
@click.option("--rofi", help="Use rofi for the ui", is_flag=True)
@click.option("--rofi-theme", help="Rofi theme to use", type=click.Path())
@click.option(
"--rofi-theme-preview", help="Rofi theme to use for previews", type=click.Path()
)
@click.option(
"--rofi-theme-confirm",
help="Rofi theme to use for the confirm prompt",
@@ -177,6 +181,9 @@ signal.signal(signal.SIGINT, handle_exit)
help="the player to use when streaming",
type=click.Choice(["mpv", "vlc"]),
)
@click.option(
"--fresh-requests", is_flag=True, help="Force the requests cache to be updated"
)
@click.pass_context
def run_cli(
ctx: click.Context,
@@ -206,15 +213,53 @@ def run_cli(
sub,
rofi,
rofi_theme,
rofi_theme_preview,
rofi_theme_confirm,
rofi_theme_input,
use_python_mpv,
sync_play,
player,
fresh_requests,
):
import os

from .config import Config

ctx.obj = Config()
if ctx.obj.check_for_updates:
from .app_updater import check_for_updates

print("Checking for updates...")
print("So you can enjoy the latest features and bug fixes")
print(
"You can disable this by setting check_for_updates to False in the config"
)
is_latest, github_release_data = check_for_updates()
if not is_latest:
from rich.console import Console
from rich.markdown import Markdown
from .app_updater import update_app
from rich.prompt import Confirm

def _print_release(release_data):
console = Console()
body = Markdown(release_data["body"])
tag = github_release_data["tag_name"]
tag_title = release_data["name"]
github_page_url = release_data["html_url"]
console.print(f"Release Page: {github_page_url}")
console.print(f"Tag: {tag}")
console.print(f"Title: {tag_title}")
console.print(body)

if Confirm.ask(
"A new version of fastanime is available, would you like to update?"
):
_, release_json = update_app()
print("Successfully updated")
_print_release(release_json)
exit(0)

ctx.obj.manga = manga
if log:
import logging
@@ -250,13 +295,12 @@ def run_cli(

install()

if fresh_requests:
os.environ["FASTANIME_FRESH_REQUESTS"] = "1"
if sync_play:
ctx.obj.sync_play = sync_play
if provider:
import os

ctx.obj.provider = provider
os.environ["CURRENT_FASTANIME_PROVIDER"] = provider
if server:
ctx.obj.server = server
if format:
@@ -319,6 +363,10 @@ def run_cli(
if rofi:
from ..libs.rofi import Rofi

if rofi_theme_preview:
ctx.obj.rofi_theme_preview = rofi_theme_preview
Rofi.rofi_theme_preview = rofi_theme_preview

if rofi_theme:
ctx.obj.rofi_theme = rofi_theme
Rofi.rofi_theme = rofi_theme
@@ -330,3 +378,4 @@ def run_cli(
if rofi_theme_confirm:
ctx.obj.rofi_theme_confirm = rofi_theme_confirm
Rofi.rofi_theme_confirm = rofi_theme_confirm
ctx.obj.set_fastanime_config_environs()
```
```
@@ -45,8 +45,9 @@ def check_for_updates():

return (is_latest, release_json)
else:
print("Failed to check for updates")
print(request.text)
return (False, {})
return (True, {})


def is_git_repo(author, repository):
@@ -75,9 +76,9 @@ def is_git_repo(author, repository):
return bool(match) and match.group(1) == f"{author}/{repository}"


def update_app():
def update_app(force=False):
is_latest, release_json = check_for_updates()
if is_latest:
if is_latest and not force:
print("[green]App is up to date[/]")
return False, release_json
tag_name = release_json["tag_name"]
@@ -101,8 +102,10 @@ def update_app():
)

else:
if PIPX_EXECUTABLE := shutil.which("pipx"):
process = subprocess.run([PIPX_EXECUTABLE, "upgrade", APP_NAME])
if UV := shutil.which("uv"):
process = subprocess.run([UV, "tool", "upgrade", APP_NAME])
elif PIPX := shutil.which("pipx"):
process = subprocess.run([PIPX, "upgrade", APP_NAME])
else:
PYTHON_EXECUTABLE = sys.executable
```
```
@@ -284,7 +284,7 @@ def download(
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams(
anime["id"], anime["title"], episode, config.translation_type
anime["id"], episode, config.translation_type
)
if not streams:
print("No streams skipping")
@@ -361,9 +361,9 @@ def download(
episode_title,
download_dir,
silent,
config.format,
force_unknown_ext,
verbose,
vid_format=config.format,
force_unknown_ext=force_unknown_ext,
verbose=verbose,
headers=provider_headers,
sub=subtitles[0]["url"] if subtitles else "",
merge=merge,

@@ -217,7 +217,7 @@ def grab(
if episode not in episodes:
continue
streams = anime_provider.get_episode_streams(
anime["id"], anime["title"], episode, config.translation_type
anime["id"], episode, config.translation_type
)
if not streams:
continue

@@ -283,7 +283,7 @@ def search(config: "Config", anime_titles: str, episode_range: str):
with Progress() as progress:
progress.add_task("Fetching Episode Streams...", total=None)
streams = anime_provider.get_episode_streams(
anime["id"], anime["title"], episode, config.translation_type
anime["id"], episode, config.translation_type
)
if not streams:
print("Failed to get streams")
```
fastanime/cli/commands/serve.py (new file, 31 lines)

```
@@ -0,0 +1,31 @@
import click


@click.command(
    help="Command that automates the starting of the builtin fastanime server",
    epilog="""
\b
\b\bExamples:
# default
fastanime serve

# specify host and port
fastanime serve --host 127.0.0.1 --port 8080
""",
)
@click.option("--host", "-H", help="Specify the host to run the server on")
@click.option("--port", "-p", help="Specify the port to run the server on")
def serve(host, port):
    import os
    import sys

    from ...constants import APP_DIR

    args = [sys.executable, "-m", "fastapi", "run"]
    if host:
        args.extend(["--host", host])

    if port:
        args.extend(["--port", port])
    args.append(os.path.join(APP_DIR, "api"))
    os.execv(sys.executable, args)
```
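Assuming `fastanime serve` leaves `fastapi run` on its default `http://127.0.0.1:8000`, the routes from the new api module can then be queried over plain HTTP. The title below is only an example query.

```python
import requests

BASE = "http://127.0.0.1:8000"

# /search maps to search_for_anime(title, translation_type) in fastanime/api/__init__.py
results = requests.get(
    f"{BASE}/search", params={"title": "one piece", "translation_type": "sub"}
).json()
print(results)
```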
```
@@ -11,12 +11,14 @@ import click
\b
# check for latest release
fastanime update --check

# Force an update regardless of the current version
fastanime update --force
""",
)
@click.option("--check", "-c", help="Check for the latest release", is_flag=True)
def update(
check,
):
@click.option("--force", "-c", help="Force update", is_flag=True)
def update(check, force):
from rich.console import Console
from rich.markdown import Markdown

@@ -45,7 +47,7 @@ def update(
print(f"You are running the latest version ({__version__}) of fastanime")
_print_release(github_release_data)
else:
success, github_release_data = update_app()
success, github_release_data = update_app(force)
_print_release(github_release_data)
if success:
print("Successfully updated")
```
```
@@ -8,7 +8,9 @@ from ..constants import (
USER_CONFIG_PATH,
USER_DATA_PATH,
USER_VIDEOS_DIR,
ASSETS_DIR,
USER_WATCH_HISTORY_PATH,
S_PLATFORM,
)
from ..libs.rofi import Rofi

@@ -26,46 +28,53 @@ class Config(object):
"https://anilist.co/api/v2/oauth/authorize?client_id=20148&response_type=token"
)
anime_provider: "AnimeProvider"
user_data = {"watch_history": {}, "animelist": [], "user": {}}
default_options = {
"quality": "1080",
user_data = {"recent_anime": [], "animelist": [], "user": {}}
default_config = {
"auto_next": "False",
"auto_select": "True",
"sort_by": "search match",
"downloads_dir": USER_VIDEOS_DIR,
"translation_type": "sub",
"server": "top",
"cache_requests": "true",
"check_for_updates": "True",
"continue_from_history": "True",
"preferred_history": "local",
"use_python_mpv": "false",
"force_window": "immediate",
"preferred_language": "english",
"use_fzf": "False",
"preview": "False",
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"provider": "allanime",
"icons": "false",
"notification_duration": "2",
"skip": "false",
"use_rofi": "false",
"rofi_theme": "",
"rofi_theme_input": "",
"rofi_theme_confirm": "",
"ffmpegthumnailer_seek_time": "-1",
"sub_lang": "eng",
"normalize_titles": "true",
"player": "mpv",
"episode_complete_at": "80",
"force_forward_tracking": "true",
"default_media_list_tracking": "None",
"downloads_dir": USER_VIDEOS_DIR,
"disable_mpv_popen": "True",
"episode_complete_at": "80",
"ffmpegthumbnailer_seek_time": "-1",
"force_forward_tracking": "true",
"force_window": "immediate",
"format": "best[height<=1080]/bestvideo[height<=1080]+bestaudio/best",
"icons": "false",
"image_previews": "True" if S_PLATFORM != "win32" else "False",
"normalize_titles": "True",
"notification_duration": "2",
"player": "mpv",
"preferred_history": "local",
"preferred_language": "english",
"preview": "False",
"provider": "allanime",
"quality": "1080",
"recent": "50",
"rofi_theme": os.path.join(ASSETS_DIR, "rofi_theme.rasi"),
"rofi_theme_preview": os.path.join(ASSETS_DIR, "rofi_theme_preview.rasi"),
"rofi_theme_confirm": os.path.join(ASSETS_DIR, "rofi_theme_confirm.rasi"),
"rofi_theme_input": os.path.join(ASSETS_DIR, "rofi_theme_input.rasi"),
"server": "top",
"skip": "false",
"sort_by": "search match",
"sub_lang": "eng",
"translation_type": "sub",
"use_fzf": "False",
"use_persistent_provider_store": "false",
"use_python_mpv": "false",
"use_rofi": "false",
}

def __init__(self) -> None:
self.initialize_user_data_and_watch_history()
self.initialize_user_data_and_watch_history_recent_anime()
self.load_config()

def load_config(self):
self.configparser = ConfigParser(self.default_options)
self.configparser = ConfigParser(self.default_config)
self.configparser.add_section("stream")
self.configparser.add_section("general")
self.configparser.add_section("anilist")
@@ -74,53 +83,103 @@ class Config(object):
if os.path.exists(USER_CONFIG_PATH):
self.configparser.read(USER_CONFIG_PATH, encoding="utf-8")

self.downloads_dir = self.get_downloads_dir()
self.sub_lang = self.get_sub_lang()
self.provider = self.get_provider()
self.use_fzf = self.get_use_fzf()
self.use_rofi = self.get_use_rofi()
self.skip = self.get_skip()
self.icons = self.get_icons()
self.preview = self.get_preview()
self.translation_type = self.get_translation_type()
self.sort_by = self.get_sort_by()
self.continue_from_history = self.get_continue_from_history()
self.auto_next = self.get_auto_next()
self.normalize_titles = self.get_normalize_titles()
self.auto_select = self.get_auto_select()
self.use_python_mpv = self.get_use_mpv_mod()
self.quality = self.get_quality()
self.notification_duration = self.get_notification_duration()
self.episode_complete_at = self.get_episode_complete_at()
self.default_media_list_tracking = self.get_default_media_list_tracking()
self.force_forward_tracking = self.get_force_forward_tracking()
self.server = self.get_server()
self.format = self.get_format()
self.player = self.get_player()
self.force_window = self.get_force_window()
self.preferred_language = self.get_preferred_language()
self.preferred_history = self.get_preferred_history()
self.rofi_theme = self.get_rofi_theme()
# get the configuration
self.auto_next = self.configparser.getboolean("stream", "auto_next")
self.auto_select = self.configparser.getboolean("stream", "auto_select")
self.cache_requests = self.configparser.getboolean("general", "cache_requests")
self.check_for_updates = self.configparser.getboolean(
"general", "check_for_updates"
)
self.continue_from_history = self.configparser.getboolean(
"stream", "continue_from_history"
)
self.default_media_list_tracking = self.configparser.get(
"general", "default_media_list_tracking"
)
self.disable_mpv_popen = self.configparser.getboolean(
"stream", "disable_mpv_popen"
)
self.downloads_dir = self.configparser.get("general", "downloads_dir")
self.episode_complete_at = self.configparser.getint(
"stream", "episode_complete_at"
)
self.ffmpegthumbnailer_seek_time = self.configparser.getint(
"general", "ffmpegthumbnailer_seek_time"
)
self.force_forward_tracking = self.configparser.getboolean(
"general", "force_forward_tracking"
)
self.force_window = self.configparser.get("stream", "force_window")
self.format = self.configparser.get("stream", "format")
self.icons = self.configparser.getboolean("general", "icons")
self.image_previews = self.configparser.getboolean("general", "image_previews")
self.normalize_titles = self.configparser.getboolean(
"general", "normalize_titles"
)
self.notification_duration = self.configparser.getint(
"general", "notification_duration"
)
self.player = self.configparser.get("stream", "player")
self.preferred_history = self.configparser.get("stream", "preferred_history")
self.preferred_language = self.configparser.get("general", "preferred_language")
self.preview = self.configparser.getboolean("general", "preview")
self.provider = self.configparser.get("general", "provider")
self.quality = self.configparser.get("stream", "quality")
self.recent = self.configparser.getint("general", "recent")
self.rofi_theme_confirm = self.configparser.get("general", "rofi_theme_confirm")
self.rofi_theme_input = self.configparser.get("general", "rofi_theme_input")
self.rofi_theme = self.configparser.get("general", "rofi_theme")
self.rofi_theme_preview = self.configparser.get("general", "rofi_theme_preview")
self.server = self.configparser.get("stream", "server")
self.skip = self.configparser.getboolean("stream", "skip")
self.sort_by = self.configparser.get("anilist", "sort_by")
self.sub_lang = self.configparser.get("general", "sub_lang")
self.translation_type = self.configparser.get("stream", "translation_type")
self.use_fzf = self.configparser.getboolean("general", "use_fzf")
self.use_python_mpv = self.configparser.getboolean("stream", "use_python_mpv")
self.use_rofi = self.configparser.getboolean("general", "use_rofi")
self.use_persistent_provider_store = self.configparser.getboolean(
"general", "use_persistent_provider_store"
)

Rofi.rofi_theme = self.rofi_theme
self.rofi_theme_input = self.get_rofi_theme_input()
Rofi.rofi_theme_input = self.rofi_theme_input
self.rofi_theme_confirm = self.get_rofi_theme_confirm()
Rofi.rofi_theme_confirm = self.rofi_theme_confirm
self.ffmpegthumbnailer_seek_time = self.get_ffmpegthumnailer_seek_time()
Rofi.rofi_theme_preview = self.rofi_theme_preview

# ---- setup user data ------
self.anime_list: list = self.user_data.get("animelist", [])
self.user: dict = self.user_data.get("user", {})

os.environ["CURRENT_FASTANIME_PROVIDER"] = self.provider
if not os.path.exists(USER_CONFIG_PATH):
with open(USER_CONFIG_PATH, "w", encoding="utf-8") as config:
config.write(self.__repr__())

def set_fastanime_config_environs(self):
current_config = []
for key in self.default_config:
current_config.append((f"FASTANIME_{key.upper()}", str(getattr(self, key))))
os.environ.update(current_config)

def update_user(self, user):
self.user = user
self.user_data["user"] = user
self._update_user_data()

def update_recent(self, recent_anime: list):
recent_anime_ids = []
_recent_anime = []
for anime in recent_anime:
if (
anime["id"] not in recent_anime_ids
and len(recent_anime_ids) <= self.recent
):
_recent_anime.append(anime)
recent_anime_ids.append(anime["id"])

self.user_data["recent_anime"] = _recent_anime
self._update_user_data()

def media_list_track(
self,
anime_id: int,
@@ -142,7 +201,7 @@ class Config(object):
with open(USER_WATCH_HISTORY_PATH, "w") as f:
json.dump(self.watch_history, f)

def initialize_user_data_and_watch_history(self):
def initialize_user_data_and_watch_history_recent_anime(self):
try:
if os.path.isfile(USER_DATA_PATH):
with open(USER_DATA_PATH, "r") as f:
```
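A rough sketch of what the `set_fastanime_config_environs` helper added in the hunk above does: each key of `default_config` is exported as an upper-cased `FASTANIME_*` environment variable carrying the resolved setting, so child processes and provider code can read the active configuration. The dictionary values here are illustrative, not the full default set.

```python
import os

# illustrative subset of the defaults shown above
default_config = {"provider": "allanime", "quality": "1080", "cache_requests": "true"}

os.environ.update(
    {f"FASTANIME_{key.upper()}": str(value) for key, value in default_config.items()}
)
print(os.environ["FASTANIME_PROVIDER"])  # -> allanime
```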
@@ -163,101 +222,6 @@ class Config(object):
|
||||
with open(USER_DATA_PATH, "w") as f:
|
||||
json.dump(self.user_data, f)
|
||||
|
||||
# getters for user configuration
|
||||
|
||||
# --- general section ---
|
||||
def get_provider(self):
|
||||
return self.configparser.get("general", "provider")
|
||||
|
||||
def get_ffmpegthumnailer_seek_time(self):
|
||||
return self.configparser.getint("general", "ffmpegthumnailer_seek_time")
|
||||
|
||||
def get_preferred_language(self):
|
||||
return self.configparser.get("general", "preferred_language")
|
||||
|
||||
def get_sub_lang(self):
|
||||
return self.configparser.get("general", "sub_lang")
|
||||
|
||||
def get_downloads_dir(self):
|
||||
return self.configparser.get("general", "downloads_dir")
|
||||
|
||||
def get_icons(self):
|
||||
return self.configparser.getboolean("general", "icons")
|
||||
|
||||
def get_preview(self):
|
||||
return self.configparser.getboolean("general", "preview")
|
||||
|
||||
def get_use_fzf(self):
|
||||
return self.configparser.getboolean("general", "use_fzf")
|
||||
|
||||
# rofi conifiguration
|
||||
def get_use_rofi(self):
|
||||
return self.configparser.getboolean("general", "use_rofi")
|
||||
|
||||
def get_rofi_theme(self):
|
||||
return self.configparser.get("general", "rofi_theme")
|
||||
|
||||
def get_rofi_theme_input(self):
|
||||
return self.configparser.get("general", "rofi_theme_input")
|
||||
|
||||
def get_rofi_theme_confirm(self):
|
||||
return self.configparser.get("general", "rofi_theme_confirm")
|
||||
|
||||
def get_force_forward_tracking(self):
|
||||
return self.configparser.getboolean("general", "force_forward_tracking")
|
||||
|
||||
def get_default_media_list_tracking(self):
|
||||
return self.configparser.get("general", "default_media_list_tracking")
|
||||
|
||||
def get_normalize_titles(self):
|
||||
return self.configparser.getboolean("general", "normalize_titles")
|
||||
|
||||
# --- stream section ---
|
||||
def get_skip(self):
|
||||
return self.configparser.getboolean("stream", "skip")
|
||||
|
||||
def get_auto_next(self):
|
||||
return self.configparser.getboolean("stream", "auto_next")
|
||||
|
||||
def get_auto_select(self):
|
||||
return self.configparser.getboolean("stream", "auto_select")
|
||||
|
||||
def get_continue_from_history(self):
|
||||
return self.configparser.getboolean("stream", "continue_from_history")
|
||||
|
||||
def get_use_mpv_mod(self):
|
||||
return self.configparser.getboolean("stream", "use_python_mpv")
|
||||
|
||||
def get_notification_duration(self):
|
||||
return self.configparser.getint("general", "notification_duration")
|
||||
|
||||
def get_episode_complete_at(self):
|
||||
return self.configparser.getint("stream", "episode_complete_at")
|
||||
|
||||
def get_force_window(self):
|
||||
return self.configparser.get("stream", "force_window")
|
||||
|
||||
def get_translation_type(self):
|
||||
return self.configparser.get("stream", "translation_type")
|
||||
|
||||
def get_preferred_history(self):
|
||||
return self.configparser.get("stream", "preferred_history")
|
||||
|
||||
def get_quality(self):
|
||||
return self.configparser.get("stream", "quality")
|
||||
|
||||
def get_server(self):
|
||||
return self.configparser.get("stream", "server")
|
||||
|
||||
def get_format(self):
|
||||
return self.configparser.get("stream", "format")
|
||||
|
||||
def get_player(self):
|
||||
return self.configparser.get("stream", "player")
|
||||
|
||||
def get_sort_by(self):
|
||||
return self.configparser.get("anilist", "sort_by")
|
||||
|
||||
def update_config(self, section: str, key: str, value: str):
|
||||
self.configparser.set(section, key, value)
|
||||
with open(USER_CONFIG_PATH, "w") as config:
|
||||
@@ -276,29 +240,39 @@ class Config(object):
|
||||
[general]
|
||||
# whether to show the icons in the tui [True/False]
|
||||
# more like emojis
|
||||
# by the way if you have any recommendations to which should be used where please
|
||||
# by the way if you have any recommendations
|
||||
# to which should be used where please
|
||||
# don't hesitate to share your opinion
|
||||
# cause it's a lot of work to look for the right one for each menu option
|
||||
# cause it's a lot of work
|
||||
# to look for the right one for each menu option
|
||||
# be sure to also give the replacement emoji
|
||||
icons = {self.icons}
|
||||
|
||||
# the quality of the stream [1080,720,480,360]
|
||||
# this option is usually only reliable when:
|
||||
# provider=animepahe
|
||||
# since it provides links that actually point to streams of different qualities
|
||||
# while the rest just point to another link that can provide the anime from the same server
|
||||
quality = {self.quality}
|
||||
|
||||
# whether to normalize provider titles [True/False]
|
||||
# basically takes the provider titles and finds the corresponding anilist title then changes the title to that
|
||||
# useful for uniformity especially when downloading from different providers
|
||||
# this also applies to episode titles
|
||||
normalize_titles = {self.normalize_titles}
|
||||
|
||||
# can be [allanime, animepahe, hianime]
|
||||
# whether to check for updates every time you run the script [True/False]
|
||||
# this is useful for keeping your script up to date
|
||||
# cause there are always new features being added 😄
|
||||
check_for_updates = {self.check_for_updates}
|
||||
|
||||
# can be [allanime, animepahe, hianime, nyaa, yugen]
|
||||
# allanime is the most realible
|
||||
# animepahe provides different links to streams of different quality so a quality can be selected reliably with --quality option
|
||||
# hianime which is now hianime usually provides subs in different languuages and its servers are generally faster
|
||||
# hianime usually provides subs in different languuages and its servers are generally faster
|
||||
# NOTE: currently they are encrypting the video links
|
||||
# though am working on it
|
||||
# however, you can still get the links to the subs
|
||||
# with ```fastanime grab``` command
|
||||
# yugen meh
|
||||
# nyaa those who prefer torrents, though not reliable due to auto selection of results
|
||||
# as most of the data in nyaa is not structured
|
||||
# though works relatively well for new anime
|
||||
# esp with subsplease and horriblesubs
|
||||
# oh and you should have webtorrent cli to use this
|
||||
provider = {self.provider}
|
||||
|
||||
# Display language [english, romaji]
|
||||
@@ -318,6 +292,13 @@ downloads_dir = {self.downloads_dir}
|
||||
# try it and you will see
|
||||
preview = {self.preview}
|
||||
|
||||
# whether to show images in the preview [true/false]
|
||||
# windows users just swtich to linux 😄
|
||||
# cause even if you enable it
|
||||
# it won't look pretty
|
||||
# so forget it exists 🤣
|
||||
image_previews = {self.image_previews}
|
||||
|
||||
# the time to seek when using ffmpegthumbnailer [-1 to 100]
|
||||
# -1 means random and is the default
|
||||
# ffmpegthumbnailer is used to generate previews and you can select at what time in the video to extract an image
|
||||
@@ -333,13 +314,17 @@ use_fzf = {self.use_fzf}
|
||||
# though if you want it to be your sole interface even when fastanime is run directly from the terminal
|
||||
use_rofi = {self.use_rofi}
|
||||
|
||||
# rofi themes to use
|
||||
# rofi themes to use <path>
|
||||
# the values of this option is the path to the rofi config files to use
|
||||
# i choose to split it into three since it gives the best look and feel
|
||||
# i choose to split it into 4 since it gives the best look and feel
|
||||
# you can refer to the rofi demo on github to see for your self
|
||||
# by the way i recommend getting the rofi themes from this project;
|
||||
# i need help designing the default rofi themes
|
||||
# if you fancy yourself a rofi ricer please contribute to making
|
||||
# the default theme better
|
||||
rofi_theme = {self.rofi_theme}
|
||||
|
||||
rofi_theme_preview = {self.rofi_theme_preview}
|
||||
|
||||
rofi_theme_input = {self.rofi_theme_input}
|
||||
|
||||
rofi_theme_confirm = {self.rofi_theme_confirm}
|
||||
@@ -351,7 +336,7 @@ notification_duration = {self.notification_duration}
|
||||
# used when the provider gives subs of different languages
|
||||
# currently its the case for:
|
||||
# hianime
|
||||
# the values for this option are the short names for countries
|
||||
# the values for this option are the short names for languages
|
||||
# regex is used to determine what you selected
|
||||
sub_lang = {self.sub_lang}
|
||||
|
||||
@@ -366,8 +351,34 @@ default_media_list_tracking = {self.default_media_list_tracking}
|
||||
# this affects only your anilist anime list
|
||||
force_forward_tracking = {self.force_forward_tracking}
|
||||
|
||||
# whether to cache requests [true/false]
|
||||
# this makes the experience better and more faster
|
||||
# as data need not always be fetched from web server
|
||||
# and instead can be gotten locally
|
||||
# from the cached_requests_db
|
||||
cache_requests = {self.cache_requests}
|
||||
|
||||
# whether to use a persistent store (basically a sqlitedb) for storing some data the provider requires
|
||||
# to enable a seamless experience [true/false]
|
||||
# this option exists primarily because i think it may help in the optimization
|
||||
# of fastanime as a library in a website project
|
||||
# for now i don't recommend changing it
|
||||
# leave it as is
|
||||
use_persistent_provider_store = {self.use_persistent_provider_store}
|
||||
|
||||
# no of recent anime to keep [0-50]
|
||||
# 0 will disable recent anime tracking
|
||||
recent = {self.recent}
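Later in this diff the selected anime is simply prepended to user_data["recent_anime"] via config.update_recent; a sketch of how the recent cap could be honoured when doing that (the helper is hypothetical, not FastAnime's code):

```python
def trim_recent(recent_anime: list[dict], new_entry: dict, max_recent: int) -> list[dict]:
    """Prepend the newly watched anime, drop duplicates by id, and cap the list size."""
    seen = set()
    unique = []
    for anime in [new_entry, *recent_anime]:
        if anime["id"] not in seen:
            seen.add(anime["id"])
            unique.append(anime)
    return unique[:max_recent] if max_recent > 0 else []
```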
|
||||
|
||||
|
||||
[stream]
|
||||
# the quality of the stream [1080,720,480,360]
|
||||
# this option is usually only reliable when:
|
||||
# provider=animepahe
|
||||
# since it provides links that actually point to streams of different qualities
|
||||
# while the rest just point to another link that can provide the anime from the same server
|
||||
quality = {self.quality}
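Providers in this diff attach a "quality" key to every entry in a server's "links" list (give_random_quality hints that the key is present even when the value is synthetic), so quality selection can be sketched as below; the helper is illustrative and assumes that link shape:

```python
def select_stream_by_quality(links: list[dict], preferred_quality: str) -> dict:
    """Prefer the link whose quality matches the config value, else fall back to the first."""
    for link in links:
        if str(link.get("quality")) == str(preferred_quality):
            return link
    return links[0]

# e.g. select_stream_by_quality([{"link": "...", "quality": "720"}, {"link": "...", "quality": "1080"}], "1080")
```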
|
||||
|
||||
# Auto continue from watch history [True/False]
|
||||
# this will make fastanime choose the episode that you last watched to completion
|
||||
# and increment it by one
|
||||
@@ -389,6 +400,7 @@ translation_type = {self.translation_type}
|
||||
# allanime: [dropbox, sharepoint, wetransfer, gogoanime, wixmp]
|
||||
# animepahe: [kwik]
|
||||
# hianime: [HD1, HD2, StreamSB, StreamTape]
|
||||
# yugen: [gogoanime]
|
||||
# 'top' can also be used as a value for this option
|
||||
# 'top' will cause fastanime to auto select the first server it sees
|
||||
# this saves on resources and is faster since not all servers are being fetched
|
||||
@@ -406,14 +418,23 @@ auto_next = {self.auto_next}
|
||||
# that are their own preference rather than the official names
|
||||
# But 99% of the time it will be accurate
|
||||
# if this happens just turn off auto_select in the menus or from the commandline and manually select the correct anime title
|
||||
# and then please open an issue at <> highlighting the normalized title and the title given by the provider for the anime you wished to watch
|
||||
# or even better edit this file <> and open a pull request
|
||||
# and then please open an issue
|
||||
# highlighting the normalized title
|
||||
# and the title given by the provider for the anime you wished to watch
|
||||
# or even better edit this file <https://github.com/Benex254/FastAnime/blob/master/fastanime/Utility/data.py>
|
||||
# and open a pull request
|
||||
# preferably, so you can give me a small break
|
||||
# of doing everything 😄
|
||||
# and its always nice to see people contributing
|
||||
# to projects they love and use
|
||||
auto_select = {self.auto_select}
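A rough idea of what auto-selecting a provider result against a normalized AniList title can look like, using difflib as a stand-in; the real logic also consults the manual mapping in fastanime/Utility/data.py referenced above, so this is only a sketch:

```python
from difflib import get_close_matches

def auto_select_title(anilist_title: str, provider_titles: list[str]):
    """Return the provider title closest to the AniList title, or None if nothing is close."""
    matches = get_close_matches(anilist_title, provider_titles, n=1, cutoff=0.6)
    return matches[0] if matches else None
```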
|
||||
|
||||
# whether to skip the opening and ending theme songs [True/False]
|
||||
# NOTE: requires ani-skip to be in path
|
||||
# for python-mpv users am planning to create this functionality in python without the use of an external script
|
||||
# so its disabled for now
|
||||
# and anyways Dan Da Dan
|
||||
# taught us the importance of letting it flow 🙃
|
||||
skip = {self.skip}
|
||||
|
||||
# at what percentage progress should the episode be considered as completed [0-100]
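The menus further down call calculate_percentage_completion(stop_time, total_time) on the "HH:MM:SS" strings scraped from mpv; a sketch of what such a helper could look like (the parsing details here are assumptions, not the project's implementation):

```python
def calculate_percentage_completion(stop_time: str, total_time: str) -> float:
    """Turn two 'HH:MM:SS' timestamps into a 0-100 completion percentage."""

    def to_seconds(timestamp: str) -> int:
        seconds = 0
        for part in timestamp.split(":"):
            seconds = seconds * 60 + int(part)
        return seconds

    total = to_seconds(total_time)
    return (to_seconds(stop_time) / total) * 100 if total else 0.0
```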
|
||||
@@ -425,8 +446,11 @@ episode_complete_at = {self.episode_complete_at}
|
||||
# whether to use python-mpv [True/False]
|
||||
# to enable superior control over the player
|
||||
# adding more options to it
|
||||
# Enable this one and you will wonder why you did not discover fastanime sooner
|
||||
# Since you basically don't have to close the player window to go to the next or previous episode, switch servers, change translation type or change to a given episode x
|
||||
# Enable this one and you will wonder
|
||||
# why you did not discover fastanime sooner 🙃
|
||||
# Since you basically don't have to close the player window
|
||||
# to go to the next or previous episode, switch servers,
|
||||
# change translation type or change to a given episode x
|
||||
# so try it if you haven't already
|
||||
# if you have any issues setting it up
|
||||
# don't be afraid to ask
|
||||
@@ -438,6 +462,15 @@ episode_complete_at = {self.episode_complete_at}
|
||||
# or just switch to arch linux
|
||||
use_python_mpv = {self.use_python_mpv}
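A minimal python-mpv sketch of why in-player control is possible without restarting the window: keys can be bound from Python and the playing URL swapped on the fly. The option names are standard python-mpv usage; the episode URLs and the binding itself are illustrative only:

```python
import mpv  # python-mpv, requires libmpv

player = mpv.MPV(input_default_bindings=True, input_vo_keyboard=True, osc=True)

@player.on_key_press("n")
def _next_episode():
    # a real client would first resolve the next episode's stream from the provider
    player.play("https://example.com/episode-2.m3u8")

player.play("https://example.com/episode-1.m3u8")
player.wait_for_playback()
```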
|
||||
|
||||
|
||||
# whether to use popen to get the timestamps for continue_from_history
|
||||
# implemented because popen does not work for some reason in nixos
|
||||
# if you are on nixos and you have a solution to this problem please share
|
||||
# i will be glad to hear it 😄
|
||||
# So for now ignore this option
|
||||
# and anyways the new method of getting timestamps is better
|
||||
disable_mpv_popen = {self.disable_mpv_popen}
|
||||
|
||||
# force mpv window
|
||||
# the default 'immediate' just makes mpv open the window even if the video has not yet loaded
|
||||
# done for aesthetics
|
||||
@@ -468,6 +501,7 @@ player = {self.player}
|
||||
# since we may not always have the time to immediately implement the changes
|
||||
#
|
||||
# HOPE YOU ENJOY FASTANIME AND BE SURE TO STAR THE PROJECT ON GITHUB
|
||||
# https://github.com/Benex254/FastAnime
|
||||
#
|
||||
"""
|
||||
return current_config_state
|
||||
|
||||
@@ -388,7 +388,6 @@ def provider_anime_episode_servers_menu(
|
||||
progress.add_task("Fetching Episode Streams...", total=None)
|
||||
episode_streams_generator = anime_provider.get_episode_streams(
|
||||
provider_anime["id"],
|
||||
provider_anime["title"],
|
||||
current_episode_number,
|
||||
translation_type,
|
||||
)
|
||||
@@ -540,6 +539,14 @@ def provider_anime_episode_servers_menu(
|
||||
episode_title = episode_detail["title"]
|
||||
break
|
||||
|
||||
if config.recent:
|
||||
config.update_recent(
|
||||
[
|
||||
fastanime_runtime_state.selected_anime_anilist,
|
||||
*config.user_data["recent_anime"],
|
||||
]
|
||||
)
|
||||
print("Updating recent anime...")
|
||||
if config.sync_play:
|
||||
from ..utils.syncplay import SyncPlayer
|
||||
|
||||
@@ -586,22 +593,18 @@ def provider_anime_episode_servers_menu(
|
||||
# this will try to update the episode to be the next episode if delta has reached a specific threshold
|
||||
# this update will only apply locally
|
||||
# the remote(anilist) is only updated when its certain you are going to open the player
|
||||
available_episodes: list[str] = sorted(
|
||||
fastanime_runtime_state.provider_available_episodes, key=float
|
||||
)
|
||||
if stop_time == "0" or total_time == "0":
|
||||
# increment the episodes
|
||||
next_episode = available_episodes.index(current_episode_number) + 1
|
||||
if next_episode >= len(available_episodes):
|
||||
next_episode = len(available_episodes) - 1
|
||||
episode = available_episodes[next_episode]
|
||||
# next_episode = available_episodes.index(current_episode_number) + 1
|
||||
# if next_episode >= len(available_episodes):
|
||||
# next_episode = len(available_episodes) - 1
|
||||
# episode = available_episodes[next_episode]
|
||||
pass
|
||||
else:
|
||||
percentage_completion_of_episode = calculate_percentage_completion(
|
||||
stop_time, total_time
|
||||
)
|
||||
if percentage_completion_of_episode < config.episode_complete_at:
|
||||
episode = current_episode_number
|
||||
else:
|
||||
if percentage_completion_of_episode > config.episode_complete_at:
|
||||
# -- update anilist progress if user --
|
||||
remote_progress = (
|
||||
fastanime_runtime_state.selected_anime_anilist["mediaListEntry"] or {}
|
||||
@@ -627,16 +630,16 @@ def provider_anime_episode_servers_menu(
|
||||
)
|
||||
|
||||
# increment the episodes
|
||||
next_episode = available_episodes.index(current_episode_number) + 1
|
||||
if next_episode >= len(available_episodes):
|
||||
next_episode = len(available_episodes) - 1
|
||||
episode = available_episodes[next_episode]
|
||||
stop_time = "0"
|
||||
total_time = "0"
|
||||
# next_episode = available_episodes.index(current_episode_number) + 1
|
||||
# if next_episode >= len(available_episodes):
|
||||
# next_episode = len(available_episodes) - 1
|
||||
# episode = available_episodes[next_episode]
|
||||
# stop_time = "0"
|
||||
# total_time = "0"
|
||||
|
||||
config.media_list_track(
|
||||
anime_id_anilist,
|
||||
episode_no=episode,
|
||||
episode_no=current_episode_number,
|
||||
episode_stopped_at=stop_time,
|
||||
episode_total_length=total_time,
|
||||
progress_tracking=fastanime_runtime_state.progress_tracking,
|
||||
@@ -671,7 +674,7 @@ def provider_anime_episodes_menu(
|
||||
)
|
||||
|
||||
# prompt for episode number
|
||||
total_episodes = sorted(
|
||||
available_episodes = sorted(
|
||||
provider_anime["availableEpisodesDetail"][translation_type], key=float
|
||||
)
|
||||
current_episode_number = ""
|
||||
@@ -682,7 +685,7 @@ def provider_anime_episodes_menu(
|
||||
# will be preferred over remote
|
||||
if (
|
||||
user_watch_history.get(str(anime_id_anilist), {}).get("episode_no")
|
||||
in total_episodes
|
||||
in available_episodes
|
||||
):
|
||||
if (
|
||||
config.preferred_history == "local"
|
||||
@@ -691,6 +694,29 @@ def provider_anime_episodes_menu(
|
||||
current_episode_number = user_watch_history[str(anime_id_anilist)][
|
||||
"episode_no"
|
||||
]
|
||||
|
||||
stop_time = user_watch_history.get(str(anime_id_anilist), {}).get(
|
||||
"episode_stopped_at", "0"
|
||||
)
|
||||
total_time = user_watch_history.get(str(anime_id_anilist), {}).get(
|
||||
"episode_total_length", "0"
|
||||
)
|
||||
if stop_time != "0" or total_time != "0":
|
||||
percentage_completion_of_episode = calculate_percentage_completion(
|
||||
stop_time, total_time
|
||||
)
|
||||
if percentage_completion_of_episode > config.episode_complete_at:
|
||||
# increment the episodes
|
||||
next_episode = (
|
||||
available_episodes.index(current_episode_number) + 1
|
||||
)
|
||||
if next_episode >= len(available_episodes):
|
||||
next_episode = len(available_episodes) - 1
|
||||
episode = available_episodes[next_episode]
|
||||
stop_time = "0"
|
||||
total_time = "0"
|
||||
current_episode_number = episode
|
||||
|
||||
else:
|
||||
current_episode_number = str(
|
||||
(selected_anime_anilist["mediaListEntry"] or {"progress": 0}).get(
|
||||
@@ -708,7 +734,7 @@ def provider_anime_episodes_menu(
|
||||
"progress"
|
||||
)
|
||||
)
|
||||
if current_episode_number not in total_episodes:
|
||||
if current_episode_number not in available_episodes:
|
||||
current_episode_number = ""
|
||||
print(
|
||||
f"[bold cyan]Continuing from Episode:[/] [bold]{current_episode_number}[/]"
|
||||
@@ -718,8 +744,8 @@ def provider_anime_episodes_menu(
|
||||
current_episode_number = ""
|
||||
|
||||
# prompt for episode number if not set
|
||||
if not current_episode_number or current_episode_number not in total_episodes:
|
||||
choices = [*total_episodes, "Back"]
|
||||
if not current_episode_number or current_episode_number not in available_episodes:
|
||||
choices = [*available_episodes, "Back"]
|
||||
preview = None
|
||||
if config.preview:
|
||||
from .utils import get_fzf_episode_preview
|
||||
@@ -728,7 +754,7 @@ def provider_anime_episodes_menu(
|
||||
if e:
|
||||
eps = range(0, e + 1)
|
||||
else:
|
||||
eps = total_episodes
|
||||
eps = available_episodes
|
||||
preview = get_fzf_episode_preview(
|
||||
fastanime_runtime_state.selected_anime_anilist, eps
|
||||
)
|
||||
@@ -757,7 +783,7 @@ def provider_anime_episodes_menu(
|
||||
# )
|
||||
|
||||
# update runtime data
|
||||
fastanime_runtime_state.provider_available_episodes = total_episodes
|
||||
fastanime_runtime_state.provider_available_episodes = available_episodes
|
||||
fastanime_runtime_state.provider_current_episode_number = current_episode_number
|
||||
|
||||
# next interface
|
||||
@@ -1421,7 +1447,7 @@ def anilist_results_menu(
|
||||
choices = []
|
||||
for title in anime_data.keys():
|
||||
icon_path = os.path.join(IMAGES_CACHE_DIR, title)
|
||||
choices.append(f"{title}\0icon\x1f{icon_path}")
|
||||
choices.append(f"{title}\0icon\x1f{icon_path}.png")
|
||||
choices.append("Back")
|
||||
selected_anime_title = Rofi.run_with_icons(choices, "Select Anime")
|
||||
else:
|
||||
@@ -1563,6 +1589,12 @@ def fastanime_main_menu(
|
||||
watch_history = list(map(int, config.watch_history.keys()))
|
||||
return AniList.search(id_in=watch_history, sort="TRENDING_DESC")
|
||||
|
||||
def _recent():
|
||||
return (
|
||||
True,
|
||||
{"data": {"Page": {"media": config.user_data["recent_anime"]}}},
|
||||
)
|
||||
|
||||
# WARNING: Will probably be deprecated
|
||||
def _anime_list():
|
||||
anime_list = config.anime_list
|
||||
@@ -1581,6 +1613,8 @@ def fastanime_main_menu(
|
||||
else:
|
||||
config.load_config()
|
||||
|
||||
config.set_fastanime_config_environs()
|
||||
|
||||
config.anime_provider.provider = config.provider
|
||||
config.anime_provider.lazyload_provider(config.provider)
|
||||
|
||||
@@ -1590,6 +1624,7 @@ def fastanime_main_menu(
|
||||
# each option maps to anilist data that is described by the option name
|
||||
options = {
|
||||
f"{'🔥 ' if icons else ''}Trending": AniList.get_trending,
|
||||
f"{'🎞️ ' if icons else ''}Recent": _recent,
|
||||
f"{'📺 ' if icons else ''}Watching": lambda media_list_type="Watching": handle_animelist(
|
||||
config, fastanime_runtime_state, media_list_type
|
||||
),
|
||||
|
||||
@@ -9,7 +9,7 @@ from threading import Thread
|
||||
import requests
|
||||
from yt_dlp.utils import clean_html, sanitize_filename
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from ...constants import APP_CACHE_DIR, S_PLATFORM
|
||||
from ...libs.anilist.types import AnilistBaseMediaDataSchema
|
||||
from ...Utility import anilist_data_helper
|
||||
from ..utils.scripts import fzf_preview
|
||||
@@ -46,7 +46,9 @@ def aniskip(mal_id: int, episode: str):
|
||||
|
||||
# NOTE: May change this to a temp dir but there were issues, so leaving it for later
|
||||
WORKING_DIR = APP_CACHE_DIR # tempfile.gettempdir()
|
||||
|
||||
HEADER_COLOR = 215, 0, 95
|
||||
SEPARATOR_COLOR = 208, 208, 208
|
||||
SINGLE_QUOTE = "'"
|
||||
IMAGES_CACHE_DIR = os.path.join(WORKING_DIR, "images")
|
||||
if not os.path.exists(IMAGES_CACHE_DIR):
|
||||
os.mkdir(IMAGES_CACHE_DIR)
|
||||
@@ -63,7 +65,7 @@ def save_image_from_url(url: str, file_name: str):
|
||||
file_name: filename to use
|
||||
"""
|
||||
image = requests.get(url)
|
||||
with open(f"{IMAGES_CACHE_DIR}/{file_name}", "wb") as f:
|
||||
with open(os.path.join(IMAGES_CACHE_DIR,f"{file_name}.png"), "wb") as f:
|
||||
f.write(image.content)
|
||||
|
||||
|
||||
@@ -74,7 +76,7 @@ def save_info_from_str(info: str, file_name: str):
|
||||
info: the information anilist has on the anime
|
||||
file_name: the filename to use
|
||||
"""
|
||||
with open(f"{ANIME_INFO_CACHE_DIR}/{file_name}", "w") as f:
|
||||
with open(os.path.join(ANIME_INFO_CACHE_DIR,file_name,), "w",encoding="utf-8") as f:
|
||||
f.write(info)
|
||||
|
||||
|
||||
@@ -91,18 +93,16 @@ def write_search_results(
|
||||
workers:number of threads to use defaults to as many as possible
|
||||
"""
|
||||
# NOTE: Will probably make this a configurable option
|
||||
HEADER_COLOR = 215, 0, 95
|
||||
SEPARATOR_COLOR = 208, 208, 208
|
||||
SEPARATOR_WIDTH = 30
|
||||
# use concurrency to download and write as fast as possible
|
||||
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
future_to_task = {}
|
||||
for anime, title in zip(anilist_results, titles):
|
||||
# actual image url
|
||||
image_url = anime["coverImage"]["large"]
|
||||
future_to_task[executor.submit(save_image_from_url, image_url, title)] = (
|
||||
image_url
|
||||
)
|
||||
if os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower() == "true":
|
||||
image_url = anime["coverImage"]["large"]
|
||||
future_to_task[
|
||||
executor.submit(save_image_from_url, image_url, title)
|
||||
] = image_url
|
||||
|
||||
mediaListName = "Not in any of your lists"
|
||||
progress = "UNKNOWN"
|
||||
@@ -111,28 +111,57 @@ def write_search_results(
|
||||
progress = anime_list["progress"]
|
||||
# handle the text data
|
||||
template = f"""
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Title(jp):',*HEADER_COLOR)} {anime['title']['romaji']}
|
||||
{get_true_fg('Title(eng):',*HEADER_COLOR)} {anime['title']['english']}
|
||||
{get_true_fg('Popularity:',*HEADER_COLOR)} {anime['popularity']}
|
||||
{get_true_fg('Favourites:',*HEADER_COLOR)} {anime['favourites']}
|
||||
{get_true_fg('Status:',*HEADER_COLOR)} {anime['status']}
|
||||
{get_true_fg('Episodes:',*HEADER_COLOR)} {anime['episodes']}
|
||||
{get_true_fg('Genres:',*HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres'])}
|
||||
{get_true_fg('Next Episode:',*HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode'])}
|
||||
{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate'])}
|
||||
{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate'])}
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Media List:',*HEADER_COLOR)} {mediaListName}
|
||||
{get_true_fg('Progress:',*HEADER_COLOR)} {progress}
|
||||
{get_true_fg("-"*SEPARATOR_WIDTH,*SEPARATOR_COLOR,bold=False)}
|
||||
{get_true_fg('Description:',*HEADER_COLOR)}
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─",*SEPARATOR_COLOR,bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg('Title(jp):',*HEADER_COLOR)} {(anime['title']['romaji'] or "").replace('"',SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg('Title(eng):',*HEADER_COLOR)} {(anime['title']['english'] or "").replace('"',SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─",*SEPARATOR_COLOR,bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg('Popularity:',*HEADER_COLOR)} {anilist_data_helper.format_number_with_commas(anime['popularity'])}"
|
||||
echo "{get_true_fg('Favourites:',*HEADER_COLOR)} {anilist_data_helper.format_number_with_commas(anime['favourites'])}"
|
||||
echo "{get_true_fg('Status:',*HEADER_COLOR)} {str(anime['status']).replace('"',SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg('Next Episode:',*HEADER_COLOR)} {anilist_data_helper.extract_next_airing_episode(anime['nextAiringEpisode']).replace('"',SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg('Genres:',*HEADER_COLOR)} {anilist_data_helper.format_list_data_with_comma(anime['genres']).replace('"',SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─",*SEPARATOR_COLOR,bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg('Episodes:',*HEADER_COLOR)} {(anime['episodes']) or 'UNKNOWN'}"
|
||||
echo "{get_true_fg('Start Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['startDate']).replace('"',SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg('End Date:',*HEADER_COLOR)} {anilist_data_helper.format_anilist_date_object(anime['endDate']).replace('"',SINGLE_QUOTE)}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─",*SEPARATOR_COLOR,bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
echo "{get_true_fg('Media List:',*HEADER_COLOR)} {mediaListName.replace('"',SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg('Progress:',*HEADER_COLOR)} {progress}"
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─",*SEPARATOR_COLOR,bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo
|
||||
# echo "{get_true_fg('Description:',*HEADER_COLOR).replace('"',SINGLE_QUOTE)}"
|
||||
"""
|
||||
template = textwrap.dedent(template)
|
||||
template = f"""
|
||||
{template}
|
||||
echo "
|
||||
{textwrap.fill(clean_html(
|
||||
str(anime['description'])), width=45)}
|
||||
(anime['description']) or "").replace('"',SINGLE_QUOTE), width=45)}
|
||||
"
|
||||
"""
|
||||
future_to_task[executor.submit(save_info_from_str, template, title)] = title
|
||||
|
||||
@@ -212,8 +241,8 @@ def get_fzf_manga_preview(manga_results, workers=None, wait=False):
|
||||
background_worker = Thread(
|
||||
target=_worker,
|
||||
)
|
||||
# ensure images and info exists
|
||||
background_worker.daemon = True
|
||||
# ensure images and info exists
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process the preview script
|
||||
@@ -270,8 +299,13 @@ def get_fzf_episode_preview(
|
||||
] = image_url
|
||||
template = textwrap.dedent(
|
||||
f"""
|
||||
{get_true_fg('Anime Title:',*HEADER_COLOR)} {anilist_result['title']['romaji'] or anilist_result['title']['english']}
|
||||
{get_true_fg('Episode Title:',*HEADER_COLOR)} {episode_title}
|
||||
ll=2
|
||||
while [ $ll -le $FZF_PREVIEW_COLUMNS ];do
|
||||
echo -n -e "{get_true_fg("─",*SEPARATOR_COLOR,bold=False)}"
|
||||
((ll++))
|
||||
done
|
||||
echo "{get_true_fg('Anime Title:',*HEADER_COLOR)} {(anilist_result['title']['romaji'] or anilist_result['title']['english']).replace('"',SINGLE_QUOTE)}"
|
||||
echo "{get_true_fg('Episode Title:',*HEADER_COLOR)} {str(episode_title).replace('"',SINGLE_QUOTE)}"
|
||||
"""
|
||||
)
|
||||
future_to_url[
|
||||
@@ -289,27 +323,61 @@ def get_fzf_episode_preview(
|
||||
background_worker = Thread(
|
||||
target=_worker,
|
||||
)
|
||||
# ensure images and info exists
|
||||
background_worker.daemon = True
|
||||
# ensure images and info exists
|
||||
background_worker.start()
|
||||
|
||||
# the preview script is in bash, so make sure fzf doesn't use any other shell to process the preview script
|
||||
os.environ["SHELL"] = shutil.which("bash") or "bash"
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
if [ -s %s/{} ]; then cat %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
if S_PLATFORM == "win32":
|
||||
preview = """
|
||||
%s
|
||||
title={}
|
||||
show_image_previews="%s"
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [ $show_image_previews = "true" ];then
|
||||
if [ -s "%s\\\\\\${title}.png" ]; then
|
||||
if command -v "chafa">/dev/null;then
|
||||
chafa -s $dim "%s\\\\\\${title}.png"
|
||||
else
|
||||
echo please install chafa to enjoy image previews
|
||||
fi
|
||||
echo
|
||||
else
|
||||
echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -s "%s\\\\\\$title" ]; then source "%s\\\\\\$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
)
|
||||
else:
|
||||
preview = """
|
||||
%s
|
||||
show_image_previews="%s"
|
||||
if [ $show_image_previews = "true" ];then
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -s %s/{} ]; then source %s/{}
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
)
|
||||
if wait:
|
||||
background_worker.join()
|
||||
return preview
|
||||
@@ -329,7 +397,7 @@ def get_fzf_anime_preview(
|
||||
The fzf preview script to use
|
||||
"""
|
||||
# ensure images and info exists
|
||||
from ...constants import S_PLATFORM
|
||||
|
||||
background_worker = Thread(
|
||||
target=write_search_results, args=(anilist_results, titles)
|
||||
)
|
||||
@@ -342,34 +410,47 @@ def get_fzf_anime_preview(
|
||||
preview = """
|
||||
%s
|
||||
title={}
|
||||
show_image_previews="%s"
|
||||
dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
|
||||
if [ -s "%s\\\\\\$title" ]; then
|
||||
if command -v chafa >/dev/null;then
|
||||
chafa -f kitty -s $dim "%s\\\\\\$title"
|
||||
if [ $show_image_previews = "true" ];then
|
||||
if [ -s "%s\\\\\\${title}.png" ]; then
|
||||
if command -v "chafa">/dev/null;then
|
||||
chafa -s $dim "%s\\\\\\${title}.png"
|
||||
else
|
||||
echo please install chafa to enjoy image previews
|
||||
fi
|
||||
echo
|
||||
else
|
||||
echo Loading...
|
||||
fi
|
||||
fi
|
||||
else echo Loading...
|
||||
fi
|
||||
if [ -s "%s\\\\\\$title" ]; then cat "%s\\\\\\$title"
|
||||
else echo Loading...
|
||||
if [ -s "%s\\\\\\$title" ]; then source "%s\\\\\\$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
IMAGES_CACHE_DIR.replace("\\","\\\\\\"),
|
||||
IMAGES_CACHE_DIR.replace("\\","\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\","\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\","\\\\\\"),
|
||||
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
IMAGES_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
ANIME_INFO_CACHE_DIR.replace("\\", "\\\\\\"),
|
||||
)
|
||||
else:
|
||||
preview = """
|
||||
%s
|
||||
if [ -s %s/{} ]; then fzf-preview %s/{}
|
||||
else echo Loading...
|
||||
title={}
|
||||
show_image_previews="%s"
|
||||
if [ $show_image_previews = "true" ];then
|
||||
if [ -s "%s/${title}.png" ]; then fzf-preview "%s/${title}.png"
|
||||
else echo Loading...
|
||||
fi
|
||||
fi
|
||||
if [ -s %s/{} ]; then cat %s/{}
|
||||
if [ -s "%s/$title" ]; then source "%s/$title"
|
||||
else echo Loading...
|
||||
fi
|
||||
""" % (
|
||||
fzf_preview,
|
||||
os.environ.get("FASTANIME_IMAGE_PREVIEWS", "true").lower(),
|
||||
IMAGES_CACHE_DIR,
|
||||
IMAGES_CACHE_DIR,
|
||||
ANIME_INFO_CACHE_DIR,
|
||||
|
||||
@@ -1,50 +1,76 @@
|
||||
import re
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import logging
|
||||
import time
|
||||
|
||||
from fastanime.constants import S_PLATFORM
|
||||
from ...constants import S_PLATFORM
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
mpv_av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")
|
||||
|
||||
|
||||
def stream_video(MPV, url, mpv_args, custom_args):
|
||||
process = subprocess.Popen(
|
||||
[MPV, url, *mpv_args, *custom_args],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
)
|
||||
|
||||
last_time = None
|
||||
av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")
|
||||
last_time = "0"
|
||||
total_time = "0"
|
||||
if os.environ.get("FASTANIME_DISABLE_MPV_POPEN", "False") == "False":
|
||||
process = subprocess.Popen(
|
||||
[
|
||||
MPV,
|
||||
url,
|
||||
*mpv_args,
|
||||
*custom_args,
|
||||
"--no-terminal",
|
||||
],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
text=True,
|
||||
bufsize=1,
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
try:
|
||||
while True:
|
||||
if not process.stderr:
|
||||
continue
|
||||
output = process.stderr.readline()
|
||||
try:
|
||||
while True:
|
||||
if not process.stderr:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
output = process.stderr.readline()
|
||||
|
||||
if output:
|
||||
# Match the timestamp in the output
|
||||
match = av_time_pattern.search(output.strip())
|
||||
if output:
|
||||
# Match the timestamp in the output
|
||||
match = mpv_av_time_pattern.search(output.strip())
|
||||
if match:
|
||||
current_time = match.group(1)
|
||||
total_time = match.group(2)
|
||||
last_time = current_time
|
||||
|
||||
# Check if the process has terminated
|
||||
retcode = process.poll()
|
||||
if retcode is not None:
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
print(f"An error occurred: {e}")
|
||||
logger.error(f"An error occurred: {e}")
|
||||
finally:
|
||||
process.terminate()
|
||||
process.wait()
|
||||
else:
|
||||
proc = subprocess.run(
|
||||
[MPV, url, *mpv_args, *custom_args],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
encoding="utf-8",
|
||||
)
|
||||
if proc.stdout:
|
||||
for line in reversed(proc.stdout.split("\n")):
|
||||
match = mpv_av_time_pattern.search(line.strip())
|
||||
if match:
|
||||
current_time = match.group(1)
|
||||
last_time = match.group(1)
|
||||
total_time = match.group(2)
|
||||
match.group(3)
|
||||
last_time = current_time
|
||||
# print(f"Current stream time: {current_time}, Total time: {total_time}, Progress: {percentage}%")
|
||||
|
||||
# Check if the process has terminated
|
||||
retcode = process.poll()
|
||||
if retcode is not None:
|
||||
print("Finshed at: ", last_time)
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
print(f"An error occurred: {e}")
|
||||
finally:
|
||||
process.terminate()
|
||||
|
||||
break
|
||||
return last_time, total_time
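For clarity, a quick illustration of what mpv_av_time_pattern pulls out of an mpv status line; the sample line is made up but follows mpv's usual terminal output:

```python
import re

mpv_av_time_pattern = re.compile(r"AV: ([0-9:]*) / ([0-9:]*) \(([0-9]*)%\)")

sample = "AV: 00:12:34 / 00:23:45 (53%) A-V:  0.000 Cache: 10s/2MB"
match = mpv_av_time_pattern.search(sample)
if match:
    stop_time, total_time, percent = match.groups()
    print(stop_time, total_time, percent)  # 00:12:34 00:23:45 53
```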
|
||||
|
||||
|
||||
@@ -74,7 +100,7 @@ def run_mpv(
|
||||
time.sleep(120)
|
||||
return "0", "0"
|
||||
cmd = [WEBTORRENT_CLI, link, f"--{player}"]
|
||||
subprocess.run(cmd)
|
||||
subprocess.run(cmd, encoding="utf-8")
|
||||
return "0", "0"
|
||||
if player == "vlc":
|
||||
VLC = shutil.which("vlc")
|
||||
@@ -125,7 +151,7 @@ def run_mpv(
|
||||
if title:
|
||||
args.append("--video-title")
|
||||
args.append(title)
|
||||
subprocess.run(args)
|
||||
subprocess.run(args, encoding="utf-8")
|
||||
return "0", "0"
|
||||
else:
|
||||
# Determine if mpv is available
|
||||
@@ -184,13 +210,3 @@ def run_mpv(
|
||||
mpv_args.append(f"--ytdl-format={ytdl_format}")
|
||||
stop_time, total_time = stream_video(MPV, link, mpv_args, custom_args)
|
||||
return stop_time, total_time
|
||||
|
||||
|
||||
# Example usage
|
||||
if __name__ == "__main__":
|
||||
run_mpv(
|
||||
"https://www.youtube.com/watch?v=dQw4w9WgXcQ",
|
||||
"Example Video",
|
||||
"--fullscreen",
|
||||
"--volume=50",
|
||||
)
|
||||
|
||||
@@ -121,7 +121,6 @@ class MpvPlayer(object):
|
||||
# get them juicy streams
|
||||
episode_streams = anime_provider.get_episode_streams(
|
||||
provider_anime["id"],
|
||||
provider_anime["title"],
|
||||
current_episode_number,
|
||||
translation_type,
|
||||
)
|
||||
|
||||
14
fastanime/fastanime.py
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Add the application root directory to Python path
|
||||
if getattr(sys, "frozen", False):
|
||||
application_path = os.path.dirname(sys.executable)
|
||||
sys.path.insert(0, application_path)
|
||||
|
||||
# Import and run the main application
|
||||
from fastanime import FastAnime
|
||||
|
||||
if __name__ == "__main__":
|
||||
FastAnime()
|
||||
@@ -225,6 +225,7 @@ query ($userId: Int, $status: MediaListStatus, $type: MediaType) {
|
||||
averageScore
|
||||
episodes
|
||||
genres
|
||||
synonyms
|
||||
studios {
|
||||
nodes {
|
||||
name
|
||||
@@ -369,6 +370,7 @@ query($query:String,%s){
|
||||
averageScore
|
||||
episodes
|
||||
genres
|
||||
synonyms
|
||||
studios {
|
||||
nodes {
|
||||
name
|
||||
@@ -428,6 +430,7 @@ query ($type: MediaType) {
|
||||
favourites
|
||||
averageScore
|
||||
genres
|
||||
synonyms
|
||||
episodes
|
||||
description
|
||||
studios {
|
||||
@@ -503,6 +506,7 @@ query ($type: MediaType) {
|
||||
episodes
|
||||
description
|
||||
genres
|
||||
synonyms
|
||||
studios {
|
||||
nodes {
|
||||
name
|
||||
@@ -566,6 +570,7 @@ query ($type: MediaType) {
|
||||
averageScore
|
||||
description
|
||||
genres
|
||||
synonyms
|
||||
studios {
|
||||
nodes {
|
||||
name
|
||||
@@ -624,6 +629,7 @@ query ($type: MediaType) {
|
||||
description
|
||||
episodes
|
||||
genres
|
||||
synonyms
|
||||
mediaListEntry {
|
||||
status
|
||||
id
|
||||
@@ -698,6 +704,7 @@ query ($type: MediaType) {
|
||||
averageScore
|
||||
description
|
||||
genres
|
||||
synonyms
|
||||
episodes
|
||||
studios {
|
||||
nodes {
|
||||
@@ -759,6 +766,7 @@ query ($type: MediaType) {
|
||||
id
|
||||
}
|
||||
genres
|
||||
synonyms
|
||||
averageScore
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
@@ -862,6 +870,7 @@ query ($id: Int, $type: MediaType) {
|
||||
id
|
||||
}
|
||||
genres
|
||||
synonyms
|
||||
averageScore
|
||||
popularity
|
||||
streamingEpisodes {
|
||||
@@ -954,6 +963,7 @@ query ($page: Int, $type: MediaType) {
|
||||
favourites
|
||||
averageScore
|
||||
genres
|
||||
synonyms
|
||||
episodes
|
||||
description
|
||||
studios {
|
||||
|
||||
@@ -7,5 +7,6 @@ anime_sources = {
|
||||
"animepahe": "api.AnimePaheApi",
|
||||
"hianime": "api.HiAnimeApi",
|
||||
"nyaa": "api.NyaaApi",
|
||||
"yugen": "api.YugenApi"
|
||||
}
|
||||
SERVERS_AVAILABLE = [*ALLANIME_SERVERS, *ANIMEPAHE_SERVERS, *HIANIME_SERVERS]
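Each value in anime_sources is a "module.ClassName" string, which is what makes the lazyload_provider call seen earlier possible. A hedged sketch of how that string could be resolved with importlib; the exact package layout assumed here (fastanime.libs.anime_provider.<provider>.<module>) is inferred from the file paths in this diff, not confirmed:

```python
import importlib

def lazyload_provider(provider_name: str, anime_sources: dict[str, str]):
    """Import a provider class only when it is first requested."""
    module_path, _, class_name = anime_sources[provider_name].rpartition(".")
    # "animepahe" + "api.AnimePaheApi" -> fastanime.libs.anime_provider.animepahe.api (assumed layout)
    module = importlib.import_module(
        f".{provider_name}.{module_path}", package="fastanime.libs.anime_provider"
    )
    return getattr(module, class_name)
```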
|
||||
|
||||
@@ -26,6 +26,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
Provides a fast and effective interface to AllAnime site.
|
||||
"""
|
||||
|
||||
PROVIDER = "allanime"
|
||||
api_endpoint = ALLANIME_API_ENDPOINT
|
||||
HEADERS = {
|
||||
"Referer": ALLANIME_REFERER,
|
||||
@@ -55,7 +56,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
logger.error("[ALLANIME-ERROR]: ", response.text)
|
||||
return {}
|
||||
|
||||
@debug_provider("ALLANIME")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(
|
||||
self,
|
||||
user_query: str,
|
||||
@@ -106,7 +107,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
}
|
||||
return normalized_search_results
|
||||
|
||||
@debug_provider("ALLANIME")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, allanime_show_id: str):
|
||||
"""get an anime details given its id
|
||||
|
||||
@@ -121,6 +122,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
id: str = anime["show"]["_id"]
|
||||
title: str = anime["show"]["name"]
|
||||
availableEpisodesDetail = anime["show"]["availableEpisodesDetail"]
|
||||
self.store.set(allanime_show_id, "anime_info", {"title": title})
|
||||
type = anime.get("__typename")
|
||||
normalized_anime = {
|
||||
"id": id,
|
||||
@@ -130,9 +132,9 @@ class AllAnimeAPI(AnimeProvider):
|
||||
}
|
||||
return normalized_anime
|
||||
|
||||
@debug_provider("ALLANIME")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def _get_anime_episode(
|
||||
self, allanime_show_id: str, episode_string: str, translation_type: str = "sub"
|
||||
self, allanime_show_id: str, episode, translation_type: str = "sub"
|
||||
) -> "AllAnimeEpisode | dict":
|
||||
"""get the episode details and sources info
|
||||
|
||||
@@ -147,14 +149,14 @@ class AllAnimeAPI(AnimeProvider):
|
||||
variables = {
|
||||
"showId": allanime_show_id,
|
||||
"translationType": translation_type,
|
||||
"episodeString": episode_string,
|
||||
"episodeString": episode,
|
||||
}
|
||||
episode = self._fetch_gql(ALLANIME_EPISODES_GQL, variables)
|
||||
return episode["episode"]
|
||||
|
||||
@debug_provider("ALLANIME")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(
|
||||
self, anime_id, anime_title, episode_number: str, translation_type="sub"
|
||||
self, anime_id, episode_number: str, translation_type="sub"
|
||||
):
|
||||
"""get the streams of an episode
|
||||
|
||||
@@ -166,6 +168,10 @@ class AllAnimeAPI(AnimeProvider):
|
||||
Yields:
|
||||
[TODO:description]
|
||||
"""
|
||||
|
||||
anime_title = (self.store.get(anime_id, "anime_info", "") or {"title": ""})[
|
||||
"title"
|
||||
]
|
||||
allanime_episode = self._get_anime_episode(
|
||||
anime_id, episode_number, translation_type
|
||||
)
|
||||
@@ -174,7 +180,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
|
||||
embeds = allanime_episode["sourceUrls"]
|
||||
|
||||
@debug_provider("ALLANIME")
|
||||
@debug_provider(self.PROVIDER.upper())
|
||||
def _get_server(embed):
|
||||
# filter the working streams no need to get all since the others are mostly hsl
|
||||
# TODO: should i just get all the servers and handle the hsl??
|
||||
@@ -227,7 +233,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
logger.debug("allanime:Found streams from gogoanime")
|
||||
return {
|
||||
"server": "gogoanime",
|
||||
"headers": {},
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
@@ -239,7 +245,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
logger.debug("allanime:Found streams from wetransfer")
|
||||
return {
|
||||
"server": "wetransfer",
|
||||
"headers": {},
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
@@ -251,7 +257,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
logger.debug("allanime:Found streams from sharepoint")
|
||||
return {
|
||||
"server": "sharepoint",
|
||||
"headers": {},
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
@@ -263,7 +269,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
logger.debug("allanime:Found streams from dropbox")
|
||||
return {
|
||||
"server": "dropbox",
|
||||
"headers": {},
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
@@ -275,7 +281,7 @@ class AllAnimeAPI(AnimeProvider):
|
||||
logger.debug("allanime:Found streams from wixmp")
|
||||
return {
|
||||
"server": "wixmp",
|
||||
"headers": {},
|
||||
"headers": {"Referer": f"https://{ALLANIME_BASE}/"},
|
||||
"subtitles": [],
|
||||
"episode_title": (
|
||||
allanime_episode["notes"] or f"{anime_title}"
|
||||
|
||||
@@ -21,7 +21,7 @@ from .constants import (
|
||||
from .utils import process_animepahe_embed_page
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimeSearchResult
|
||||
from .types import AnimePaheAnimePage, AnimePaheSearchPage, AnimePaheSearchResult
|
||||
JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -32,8 +32,9 @@ class AnimePaheApi(AnimeProvider):
|
||||
search_page: "AnimePaheSearchPage"
|
||||
anime: "AnimePaheAnimePage"
|
||||
HEADERS = REQUEST_HEADERS
|
||||
PROVIDER = "animepahe"
|
||||
|
||||
@debug_provider("ANIMEPAHE")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(self, user_query: str, *args):
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=search&q={user_query}"
|
||||
response = self.session.get(
|
||||
@@ -43,6 +44,12 @@ class AnimePaheApi(AnimeProvider):
|
||||
return
|
||||
data: "AnimePaheSearchPage" = response.json()
|
||||
self.search_page = data
|
||||
for animepahe_search_result in data["data"]:
|
||||
self.store.set(
|
||||
str(animepahe_search_result["session"]),
|
||||
"search_result",
|
||||
animepahe_search_result,
|
||||
)
|
||||
|
||||
return {
|
||||
"pageInfo": {
|
||||
@@ -66,96 +73,98 @@ class AnimePaheApi(AnimeProvider):
|
||||
],
|
||||
}
|
||||
|
||||
@debug_provider("ANIMEPAHE")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, session_id: str, *args):
|
||||
page = 1
|
||||
anime_result: "AnimeSearchResult" = [
|
||||
anime
|
||||
for anime in self.search_page["data"]
|
||||
if anime["session"] == session_id
|
||||
][0]
|
||||
data: "AnimePaheAnimePage" = {} # pyright:ignore
|
||||
if d := self.store.get(str(session_id), "search_result"):
|
||||
anime_result: "AnimePaheSearchResult" = d
|
||||
data: "AnimePaheAnimePage" = {} # pyright:ignore
|
||||
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
|
||||
def _pages_loader(
|
||||
url,
|
||||
page,
|
||||
):
|
||||
response = self.session.get(
|
||||
def _pages_loader(
|
||||
url,
|
||||
)
|
||||
if response.ok:
|
||||
if not data:
|
||||
data.update(response.json())
|
||||
else:
|
||||
if ep_data := response.json().get("data"):
|
||||
data["data"].extend(ep_data)
|
||||
if response.json()["next_page_url"]:
|
||||
# TODO: Refine this
|
||||
time.sleep(
|
||||
random.choice(
|
||||
[
|
||||
0.25,
|
||||
0.1,
|
||||
0.5,
|
||||
0.75,
|
||||
1,
|
||||
]
|
||||
page,
|
||||
):
|
||||
response = self.session.get(
|
||||
url,
|
||||
)
|
||||
if response.ok:
|
||||
if not data:
|
||||
data.update(response.json())
|
||||
else:
|
||||
if ep_data := response.json().get("data"):
|
||||
data["data"].extend(ep_data)
|
||||
if response.json()["next_page_url"]:
|
||||
# TODO: Refine this
|
||||
time.sleep(
|
||||
random.choice(
|
||||
[
|
||||
0.25,
|
||||
0.1,
|
||||
0.5,
|
||||
0.75,
|
||||
1,
|
||||
]
|
||||
)
|
||||
)
|
||||
page += 1
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
)
|
||||
page += 1
|
||||
url = f"{ANIMEPAHE_ENDPOINT}m=release&id={session_id}&sort=episode_asc&page={page}"
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
_pages_loader(
|
||||
url,
|
||||
page,
|
||||
)
|
||||
|
||||
if not data:
|
||||
return {}
|
||||
self.anime = data # pyright:ignore
|
||||
episodes = list(map(str, [episode["episode"] for episode in data["data"]]))
|
||||
title = ""
|
||||
return {
|
||||
"id": session_id,
|
||||
"title": anime_result["title"],
|
||||
"year": anime_result["year"],
|
||||
"season": anime_result["season"],
|
||||
"poster": anime_result["poster"],
|
||||
"score": anime_result["score"],
|
||||
"availableEpisodesDetail": {
|
||||
"sub": episodes,
|
||||
"dub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"episodesInfo": [
|
||||
{
|
||||
"title": f"{episode['title'] or title};{episode['episode']}",
|
||||
"episode": episode["episode"],
|
||||
"id": episode["session"],
|
||||
"translation_type": episode["audio"],
|
||||
"duration": episode["duration"],
|
||||
"poster": episode["snapshot"],
|
||||
}
|
||||
for episode in data["data"]
|
||||
],
|
||||
}
|
||||
if not data:
|
||||
return {}
|
||||
data["title"] = anime_result["title"] # pyright:ignore
|
||||
self.store.set(str(session_id), "anime_info", data)
|
||||
episodes = list(map(str, [episode["episode"] for episode in data["data"]]))
|
||||
title = ""
|
||||
return {
|
||||
"id": session_id,
|
||||
"title": anime_result["title"],
|
||||
"year": anime_result["year"],
|
||||
"season": anime_result["season"],
|
||||
"poster": anime_result["poster"],
|
||||
"score": anime_result["score"],
|
||||
"availableEpisodesDetail": {
|
||||
"sub": episodes,
|
||||
"dub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"episodesInfo": [
|
||||
{
|
||||
"title": f"{episode['title'] or title};{episode['episode']}",
|
||||
"episode": episode["episode"],
|
||||
"id": episode["session"],
|
||||
"translation_type": episode["audio"],
|
||||
"duration": episode["duration"],
|
||||
"poster": episode["snapshot"],
|
||||
}
|
||||
for episode in data["data"]
|
||||
],
|
||||
}
|
||||
|
||||
@debug_provider("ANIMEPAHE")
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(
|
||||
self, anime_id, anime_title, episode_number: str, translation_type, *args
|
||||
self, anime_id, episode_number: str, translation_type, *args
|
||||
):
|
||||
anime_title = ""
|
||||
episode = None
|
||||
# extract episode details from memory
|
||||
episode = [
|
||||
episode
|
||||
for episode in self.anime["data"]
|
||||
if float(episode["episode"]) == float(episode_number)
|
||||
]
|
||||
if d := self.store.get(str(anime_id), "anime_info"):
|
||||
anime_title = d["title"]
|
||||
episode = [
|
||||
episode
|
||||
for episode in d["data"]
|
||||
if float(episode["episode"]) == float(episode_number)
|
||||
]
|
||||
|
||||
if not episode:
|
||||
logger.error(f"[ANIMEPAHE-ERROR]: episode {episode_number} doesn't exist")
|
||||
@@ -195,7 +204,7 @@ class AnimePaheApi(AnimeProvider):
|
||||
continue
|
||||
|
||||
if not embed_url:
|
||||
logger.warn(
|
||||
logger.warning(
|
||||
"[ANIMEPAHE-WARN]: embed url not found please report to the developers"
|
||||
)
|
||||
return []
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from typing import Literal, TypedDict
|
||||
|
||||
|
||||
class AnimeSearchResult(TypedDict):
|
||||
class AnimePaheSearchResult(TypedDict):
|
||||
id: int
|
||||
title: str
|
||||
type: str
|
||||
@@ -21,7 +21,7 @@ class AnimePaheSearchPage(TypedDict):
|
||||
last_page: int
|
||||
_from: int
|
||||
to: int
|
||||
data: list[AnimeSearchResult]
|
||||
data: list[AnimePaheSearchResult]
|
||||
|
||||
|
||||
class Episode(TypedDict):
|
||||
|
||||
@@ -1,13 +1,34 @@
|
||||
import os
|
||||
|
||||
import requests
|
||||
from yt_dlp.utils.networking import random_user_agent
|
||||
|
||||
from ...constants import APP_CACHE_DIR
|
||||
from .providers_store import ProviderStore
|
||||
|
||||
|
||||
class AnimeProvider:
|
||||
session: requests.Session
|
||||
|
||||
PROVIDER = ""
|
||||
USER_AGENT = random_user_agent()
|
||||
HEADERS = {}
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.session = requests.session()
|
||||
def __init__(self, cache_requests, use_persistent_provider_store) -> None:
|
||||
if cache_requests.lower() == "true":
|
||||
from ..common.requests_cacher import CachedRequestsSession
|
||||
|
||||
self.session = CachedRequestsSession(
|
||||
os.path.join(APP_CACHE_DIR, "cached_requests.db")
|
||||
)
|
||||
else:
|
||||
self.session = requests.session()
|
||||
self.session.headers.update({"User-Agent": self.USER_AGENT, **self.HEADERS})
|
||||
if use_persistent_provider_store.lower() == "true":
|
||||
self.store = ProviderStore(
|
||||
"persistent",
|
||||
self.PROVIDER,
|
||||
os.path.join(APP_CACHE_DIR, "anime_providers_store.db"),
|
||||
)
|
||||
else:
|
||||
self.store = ProviderStore("memory")
|
||||
|
||||
@@ -41,7 +41,9 @@ class ParseAnchorAndImgTag(HTMLParser):
|
||||
class HiAnimeApi(AnimeProvider):
|
||||
# HEADERS = {"Referer": "https://hianime.to/home"}
|
||||
|
||||
@debug_provider("HIANIME")
|
||||
PROVIDER = "hianime"
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def search_for_anime(self, anime_title: str, *args):
|
||||
query = quote_plus(anime_title)
|
||||
url = f"https://hianime.to/search?keyword={query}"
|
||||
@@ -77,157 +79,165 @@ class HiAnimeApi(AnimeProvider):
|
||||
anime_id = anime_link_data["data-id"]
|
||||
title = anime_link_data["title"]
|
||||
|
||||
results.append(
|
||||
{
|
||||
"availableEpisodes": list(range(1, episodes)),
|
||||
"id": anime_id,
|
||||
"title": title,
|
||||
"poster": image_link,
|
||||
}
|
||||
)
|
||||
self.search_results = results
|
||||
return {"pageInfo": {}, "results": results}
|
||||
|
||||
@debug_provider("HIANIME")
|
||||
def get_anime(self, hianime_id, *args):
|
||||
anime_result = {}
|
||||
for anime in self.search_results:
|
||||
if anime["id"] == hianime_id:
|
||||
anime_result = anime
|
||||
break
|
||||
anime_url = f"https://hianime.to/ajax/v2/episode/list/{hianime_id}"
|
||||
response = self.session.get(anime_url, timeout=10)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
hianime_anime_page = response_json["html"]
|
||||
episodes_info_container_html = get_element_html_by_class(
|
||||
"ss-list", hianime_anime_page
|
||||
)
|
||||
episodes_info_html_list = get_elements_html_by_class(
|
||||
"ep-item", episodes_info_container_html
|
||||
)
|
||||
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
|
||||
episodes_info_dicts = [
|
||||
extract_attributes(episode_dict)
|
||||
for episode_dict in episodes_info_html_list
|
||||
]
|
||||
episodes = [episode["data-number"] for episode in episodes_info_dicts]
|
||||
self.episodes_info = [
|
||||
{
|
||||
"id": episode["data-id"],
|
||||
"title": (
|
||||
(episode["title"] or "").replace(
|
||||
f"Episode {episode['data-number']}", ""
|
||||
)
|
||||
or anime_result["title"]
|
||||
)
|
||||
+ f"; Episode {episode['data-number']}",
|
||||
"episode": episode["data-number"],
|
||||
}
|
||||
for episode in episodes_info_dicts
|
||||
]
|
||||
return {
|
||||
"id": hianime_id,
|
||||
"availableEpisodesDetail": {
|
||||
"dub": episodes,
|
||||
"sub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"poster": anime_result["poster"],
|
||||
"title": anime_result["title"],
|
||||
"episodes_info": self.episodes_info,
|
||||
result = {
|
||||
"availableEpisodes": list(range(1, episodes)),
|
||||
"id": anime_id,
|
||||
"title": title,
|
||||
"poster": image_link,
|
||||
}
|
||||
|
||||
@debug_provider("HIANIME")
|
||||
def get_episode_streams(
|
||||
self, anime_id, anime_title, episode, translation_type, *args
|
||||
):
|
||||
episode_details = [
|
||||
episode_details
|
||||
for episode_details in self.episodes_info
|
||||
if episode_details["episode"] == episode
|
||||
]
|
||||
if not episode_details:
|
||||
return
|
||||
episode_details = episode_details[0]
|
||||
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
|
||||
response = self.session.get(episode_url)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
episode_page_html = response_json["html"]
|
||||
servers_containers_html = get_elements_html_by_class(
|
||||
"ps__-list", episode_page_html
|
||||
)
|
||||
if not servers_containers_html:
|
||||
return
|
||||
# sub servers
|
||||
try:
|
||||
servers_html_sub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[0]
|
||||
results.append(result)
|
||||
|
||||
self.store.set(result["id"], "search_result", result)
|
||||
return {"pageInfo": {}, "results": results}
|
||||
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_anime(self, hianime_id, *args):
|
||||
anime_result = {}
|
||||
if d := self.store.get(str(hianime_id), "search_result"):
|
||||
anime_result = d
|
||||
anime_url = f"https://hianime.to/ajax/v2/episode/list/{hianime_id}"
|
||||
response = self.session.get(anime_url, timeout=10)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
hianime_anime_page = response_json["html"]
|
||||
episodes_info_container_html = get_element_html_by_class(
|
||||
"ss-list", hianime_anime_page
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: sub not found")
|
||||
servers_html_sub = None
|
||||
|
||||
# dub servers
|
||||
try:
|
||||
servers_html_dub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[1]
|
||||
episodes_info_html_list = get_elements_html_by_class(
|
||||
"ep-item", episodes_info_container_html
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: dub not found")
|
||||
servers_html_dub = None
|
||||
|
||||
if translation_type == "dub":
|
||||
servers_html = servers_html_dub
|
||||
else:
|
||||
servers_html = servers_html_sub
|
||||
if not servers_html:
|
||||
return
|
||||
|
||||
@debug_provider("HIANIME")
|
||||
def _get_server(server_name, server_html):
|
||||
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
|
||||
servers_info = extract_attributes(server_html)
|
||||
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
|
||||
embed_response = self.session.get(embed_url)
|
||||
if embed_response.ok:
|
||||
embed_json = embed_response.json()
|
||||
raw_link_to_streams = embed_json["link"]
|
||||
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
|
||||
if not match:
|
||||
return
|
||||
provider_domain = match.group(1)
|
||||
embed_type = match.group(2)
|
||||
episode_number = match.group(3)
|
||||
source_id = match.group(4)
|
||||
|
||||
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
|
||||
link_to_streams_response = self.session.get(link_to_streams)
|
||||
if link_to_streams_response.ok:
|
||||
juicy_streams_json: "HiAnimeStream" = (
|
||||
link_to_streams_response.json()
|
||||
# keys: [ data-number: episode_number, data-id: episode_id, title: episode_title , href:episode_page_url]
|
||||
episodes_info_dicts = [
|
||||
extract_attributes(episode_dict)
|
||||
for episode_dict in episodes_info_html_list
|
||||
]
|
||||
episodes = [episode["data-number"] for episode in episodes_info_dicts]
|
||||
episodes_info = [
|
||||
{
|
||||
"id": episode["data-id"],
|
||||
"title": (
|
||||
(episode["title"] or "").replace(
|
||||
f"Episode {episode['data-number']}", ""
|
||||
)
|
||||
or anime_result["title"]
|
||||
)
|
||||
return {
|
||||
"headers": {},
|
||||
"subtitles": [
|
||||
{
|
||||
"url": track["file"],
|
||||
"language": track["label"],
|
||||
}
|
||||
for track in juicy_streams_json["tracks"]
|
||||
if track["kind"] == "captions"
|
||||
],
|
||||
"server": server_name,
|
||||
"episode_title": episode_details["title"],
|
||||
"links": give_random_quality(
|
||||
[
|
||||
{"link": link["file"], "type": link["type"]}
|
||||
for link in juicy_streams_json["sources"]
|
||||
]
|
||||
),
|
||||
}
|
||||
+ f"; Episode {episode['data-number']}",
|
||||
"episode": episode["data-number"],
|
||||
}
|
||||
for episode in episodes_info_dicts
|
||||
]
|
||||
self.store.set(
|
||||
str(hianime_id),
|
||||
"anime_info",
|
||||
episodes_info,
|
||||
)
|
||||
return {
|
||||
"id": hianime_id,
|
||||
"availableEpisodesDetail": {
|
||||
"dub": episodes,
|
||||
"sub": episodes,
|
||||
"raw": episodes,
|
||||
},
|
||||
"poster": anime_result["poster"],
|
||||
"title": anime_result["title"],
|
||||
"episodes_info": episodes_info,
|
||||
}
|
||||
|
||||
for server_name, server_html in zip(cycle(SERVERS_AVAILABLE), servers_html):
|
||||
if server := _get_server(server_name, server_html):
|
||||
yield server
|
||||
@debug_provider(PROVIDER.upper())
|
||||
def get_episode_streams(self, anime_id, episode, translation_type, *args):
|
||||
if d := self.store.get(str(anime_id), "anime_info"):
|
||||
episodes_info = d
|
||||
episode_details = [
|
||||
episode_details
|
||||
for episode_details in episodes_info
|
||||
if episode_details["episode"] == episode
|
||||
]
|
||||
if not episode_details:
|
||||
return
|
||||
episode_details = episode_details[0]
|
||||
episode_url = f"https://hianime.to/ajax/v2/episode/servers?episodeId={episode_details['id']}"
|
||||
response = self.session.get(episode_url)
|
||||
if response.ok:
|
||||
response_json = response.json()
|
||||
episode_page_html = response_json["html"]
|
||||
servers_containers_html = get_elements_html_by_class(
|
||||
"ps__-list", episode_page_html
|
||||
)
|
||||
if not servers_containers_html:
|
||||
return
|
||||
# sub servers
|
||||
try:
|
||||
servers_html_sub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[0]
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: sub not found")
|
||||
servers_html_sub = None
|
||||
|
||||
# dub servers
|
||||
try:
|
||||
servers_html_dub = get_elements_html_by_class(
|
||||
"server-item", servers_containers_html[1]
|
||||
)
|
||||
except Exception:
|
||||
logger.warning("HiAnime: dub not found")
|
||||
servers_html_dub = None
|
||||
|
||||
if translation_type == "dub":
|
||||
servers_html = servers_html_dub
|
||||
else:
|
||||
servers_html = servers_html_sub
|
||||
if not servers_html:
|
||||
return
|
||||
|
||||
@debug_provider(self.PROVIDER.upper())
|
||||
def _get_server(server_name, server_html):
|
||||
# keys: [ data-type: translation_type, data-id: embed_id, data-server-id: server_id ]
|
||||
servers_info = extract_attributes(server_html)
|
||||
embed_url = f"https://hianime.to/ajax/v2/episode/sources?id={servers_info['data-id']}"
|
||||
embed_response = self.session.get(embed_url)
|
||||
if embed_response.ok:
|
||||
embed_json = embed_response.json()
|
||||
raw_link_to_streams = embed_json["link"]
|
||||
match = LINK_TO_STREAMS_REGEX.match(raw_link_to_streams)
|
||||
if not match:
|
||||
return
|
||||
provider_domain = match.group(1)
|
||||
embed_type = match.group(2)
|
||||
episode_number = match.group(3)
|
||||
source_id = match.group(4)
|
||||
|
||||
link_to_streams = f"https://{provider_domain}/embed-{embed_type}/ajax/e-{episode_number}/getSources?id={source_id}"
|
||||
link_to_streams_response = self.session.get(link_to_streams)
|
||||
if link_to_streams_response.ok:
|
||||
juicy_streams_json: "HiAnimeStream" = (
|
||||
link_to_streams_response.json()
|
||||
)
|
||||
# TODO: Hianime decided to fucking encrypt shit
|
||||
# so got to fix it later
|
||||
return {
|
||||
"headers": {},
|
||||
"subtitles": [
|
||||
{
|
||||
"url": track["file"],
|
||||
"language": track["label"],
|
||||
}
|
||||
for track in juicy_streams_json["tracks"]
|
||||
if track["kind"] == "captions"
|
||||
],
|
||||
"server": server_name,
|
||||
"episode_title": episode_details["title"],
|
||||
"links": give_random_quality(
|
||||
[
|
||||
{"link": link["file"]}
|
||||
for link in juicy_streams_json["tracks"]
|
||||
]
|
||||
),
|
||||
}
|
||||
|
||||
for server_name, server_html in zip(
|
||||
cycle(SERVERS_AVAILABLE), servers_html
|
||||
):
|
||||
if server := _get_server(server_name, server_html):
|
||||
yield server
|
||||
|
||||
@@ -29,8 +29,9 @@ EXTRACT_USEFUL_INFO_PATTERN_2 = re.compile(

class NyaaApi(AnimeProvider):
    search_results: SearchResults
    PROVIDER = "nyaa"

    @debug_provider("NYAA")
    @debug_provider(PROVIDER.upper())
    def search_for_anime(self, user_query: str, *args, **_):
        self.search_results = search_for_anime_with_anilist(
            user_query, True
@@ -38,7 +39,7 @@ class NyaaApi(AnimeProvider):
        self.user_query = user_query
        return self.search_results

    @debug_provider("NYAA")
    @debug_provider(PROVIDER.upper())
    def get_anime(self, anilist_id: str, *_):
        for anime in self.search_results["results"]:
            if anime["id"] == anilist_id:
@@ -54,11 +55,10 @@ class NyaaApi(AnimeProvider):
            },
        }

    @debug_provider("NYAA")
    @debug_provider(PROVIDER.upper())
    def get_episode_streams(
        self,
        anime_id: str,
        anime_title: str,
        episode_number: str,
        translation_type: str,
        trusted_only=bool(int(os.environ.get("FA_NYAA_TRUSTED_ONLY", "0"))),
@@ -66,6 +66,7 @@ class NyaaApi(AnimeProvider):
        sort_by="seeders",
        *args,
    ):
        anime_title = self.titles[0]
        logger.debug(f"Searching nyaa for query: '{anime_title} {episode_number}'")
        servers = {}
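One detail worth noting about the trusted_only default above: as a function default it is evaluated once, when the method is defined at import time, and it expects a numeric string. A small sketch of how that expression behaves (it is the same expression the default uses, evaluated directly):

    import os

    os.environ["FA_NYAA_TRUSTED_ONLY"] = "1"
    print(bool(int(os.environ.get("FA_NYAA_TRUSTED_ONLY", "0"))))  # True
    # Non-numeric values such as "true" would raise ValueError inside int().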
114 fastanime/libs/anime_provider/providers_store.py Normal file
@@ -0,0 +1,114 @@
import json
import logging
import time

logger = logging.getLogger(__name__)


class ProviderStoreDB:
    def __init__(
        self,
        provider_name,
        cache_db_path: str,
        max_lifetime: int = 604800,
        max_size: int = (1024**2) * 10,
        table_name: str = "fastanime_providers_store",
        clean_db=False,
    ):
        from ..common.sqlitedb_helper import SqliteDB

        self.cache_db_path = cache_db_path
        self.clean_db = clean_db
        self.provider_name = provider_name
        self.max_lifetime = max_lifetime
        self.max_size = max_size
        self.table_name = table_name
        self.sqlite_db_connection = SqliteDB(self.cache_db_path)

        # Prepare the store table if it doesn't exist
        self._create_store_table()

    def _create_store_table(self):
        """Create the store table if it doesn't exist."""
        with self.sqlite_db_connection as conn:
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {self.table_name} (
                    id TEXT,
                    data_type TEXT,
                    provider_name TEXT,
                    data TEXT,
                    cache_expiry INTEGER
                )"""
            )

    def get(self, id: str, data_type: str, default=None):
        with self.sqlite_db_connection as conn:
            cursor = conn.cursor()
            cursor.execute(
                f"""
                SELECT
                    data
                FROM {self.table_name}
                WHERE
                    id = ?
                    AND data_type = ?
                    AND provider_name = ?
                    AND cache_expiry > ?
                """,
                (id, data_type, self.provider_name, int(time.time())),
            )
            cached_data = cursor.fetchone()

            if cached_data:
                logger.debug("Found existing request in cache")
                (json_data,) = cached_data
                return json.loads(json_data)
            return default

    def set(self, id: str, data_type: str, data):
        with self.sqlite_db_connection as connection:
            cursor = connection.cursor()
            cursor.execute(
                f"""
                INSERT INTO {self.table_name}
                VALUES (?, ?, ?, ?, ?)
                """,
                (
                    id,
                    data_type,
                    self.provider_name,
                    json.dumps(data),
                    int(time.time()) + self.max_lifetime,
                ),
            )


class ProviderStoreMem:
    def __init__(self) -> None:
        from collections import defaultdict

        self._store = defaultdict(dict)

    def get(self, id: str, data_type: str, default=None):
        # Fall back to the default instead of raising KeyError on a cache miss
        return self._store[id].get(data_type, default)

    def set(self, id: str, data_type: str, data):
        self._store[id][data_type] = data


def ProviderStore(store_type, *args, **kwargs):
    if store_type == "persistent":
        return ProviderStoreDB(*args, **kwargs)
    else:
        return ProviderStoreMem()


if __name__ == "__main__":
    store = ProviderStore("persistent", "test_provider", "provider_store")
    store.set("123", "test", {"hello": "world"})
    print(store.get("123", "test"))
    print("-------------------------------")
    store = ProviderStore("memory")
    store.set("1", "test", {"hello": "world"})
    print(store.get("1", "test"))
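For context, the HiAnime hunk earlier reads back "anime_info" records through this store. A minimal round-trip sketch, assuming the package layout matches the file path above; the record shape is illustrative, not the provider's actual schema:

    from fastanime.libs.anime_provider.providers_store import ProviderStore

    store = ProviderStore("memory")  # pass "persistent" plus a provider name and db path for SQLite

    # What a provider might stash after fetching an anime page.
    store.set("170942", "anime_info", [{"episode": "1", "id": "128549", "title": "Episode 1"}])

    # Later lookups fall back to the default on a cache miss.
    print(store.get("170942", "anime_info", default=[]))
    print(store.get("0", "anime_info", default=[]))  # []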
216 fastanime/libs/anime_provider/yugen/api.py Normal file
@@ -0,0 +1,216 @@
import base64
import re
from itertools import cycle

from yt_dlp.utils import (
    extract_attributes,
    get_element_by_attribute,
    get_element_text_and_html_by_tag,
    get_elements_text_and_html_by_attribute,
)
from yt_dlp.utils.traversal import get_element_html_by_attribute

from ..base_provider import AnimeProvider
from ..decorators import debug_provider
from .constants import SEARCH_URL, YUGEN_ENDPOINT


# ** Adapted from anipy-cli **
class YugenApi(AnimeProvider):
    """
    Provides a fast and effective interface to the yugenanime.tv site.
    """

    PROVIDER = "yugen"
    api_endpoint = YUGEN_ENDPOINT
    # HEADERS = {
    #     "Referer": ALLANIME_REFERER,
    # }

    @debug_provider(PROVIDER.upper())
    def search_for_anime(
        self,
        user_query: str,
        translation_type: str = "sub",
        nsfw=True,
        unknown=True,
        **kwargs,
    ):
        results = []
        has_next = True
        page = 0
        while has_next:
            page += 1
            response = self.session.get(
                SEARCH_URL, params={"q": user_query, "page": page}
            )
            search_results = response.json()
            has_next = search_results["hasNext"]

            results_html = search_results["query"]
            anime = get_elements_text_and_html_by_attribute(
                "class", "anime-meta", results_html, tag="a"
            )
            id_regex = re.compile(r"(\d+)\/([^\/]+)")
            for _a in anime:
                if not _a:
                    continue
                a = extract_attributes(_a[1])

                if not a:
                    continue
                uri = a["href"]
                identifier = id_regex.search(uri)  # pyright:ignore
                if identifier is None:
                    continue

                if len(identifier.groups()) != 2:
                    continue

                identifier = base64.b64encode(
                    f"{identifier.group(1)}/{identifier.group(2)}".encode()
                ).decode()

                anime_title = a["title"]
                languages = {"sub": 1, "dub": 0}
                excl = get_element_by_attribute(
                    "class", "ani-exclamation", _a[1], tag="div"
                )
                if excl is not None:
                    if "dub" in excl.lower():
                        languages["dub"] = 1

                results.append(
                    {
                        "id": identifier,
                        "title": anime_title,
                        "availableEpisodes": languages,
                    }
                )

        return {
            "pageInfo": {"total": len(results)},
            "results": results,
        }

    @debug_provider(PROVIDER.upper())
    def get_anime(self, anime_id: str, **kwargs):
        identifier = base64.b64decode(anime_id).decode()
        response = self.session.get(f"{YUGEN_ENDPOINT}/anime/{identifier}")
        html_page = response.text
        data_map = {
            "id": anime_id,
            "title": None,
            "poster": None,
            "genres": [],
            "synopsis": None,
            "release_year": None,
            "status": None,
            "otherTitles": [],
            "availableEpisodesDetail": {},
        }

        sub_match = re.search(
            r'<div class="ap-.+?">Episodes</div><span class="description" .+?>(\d+)</span></div>',
            html_page,
        )

        if sub_match:
            eps = int(sub_match.group(1))
            data_map["availableEpisodesDetail"]["sub"] = list(map(str, range(1, eps + 1)))

        dub_match = re.search(
            r'<div class="ap-.+?">Episodes \(Dub\)</div><span class="description" .+?>(\d+)</span></div>',
            html_page,
        )

        if dub_match:
            eps = int(dub_match.group(1))
            data_map["availableEpisodesDetail"]["dub"] = list(map(str, range(1, eps + 1)))

        name = get_element_text_and_html_by_tag("h1", html_page)
        if name is not None:
            data_map["title"] = name[0].strip()

        synopsis = get_element_by_attribute("class", "description", html_page, tag="p")
        if synopsis is not None:
            data_map["synopsis"] = synopsis

        # FIXME: This is not working because yt-dlp is too strict about also finding a closing tag
        try:
            image = get_element_html_by_attribute(
                "class", "cover", html_page, tag="img"
            )
            img_attrs = extract_attributes(image)
            if img_attrs is not None:
                data_map["image"] = img_attrs.get("src")
        except Exception:
            pass

        data = get_elements_text_and_html_by_attribute(
            "class", "data", html_page, tag="div"
        )
        for d in data:
            title = get_element_text_and_html_by_tag("div", d[1])
            desc = get_element_text_and_html_by_tag("span", d[1])
            if title is None or desc is None:
                continue
            title = title[0]
            desc = desc[0]
            if title in ["Native", "Romaji"]:
                data_map["otherTitles"].append(desc)
            elif title == "Synonyms":
                data_map["otherTitles"].extend(desc.split(","))
            elif title == "Premiered":
                try:
                    data_map["release_year"] = int(desc.split()[-1])
                except (ValueError, TypeError):
                    pass
            elif title == "Status":
                data_map["status"] = desc
            elif title == "Genres":
                data_map["genres"].extend([g.strip() for g in desc.split(",")])

        return data_map

    @debug_provider(PROVIDER.upper())
    def get_episode_streams(
        self, anime_id, episode_number: str, translation_type="sub"
    ):
        """Get the streams of an episode.

        Args:
            anime_id: the base64-encoded yugen identifier
            episode_number: the episode to fetch
            translation_type: "sub" or "dub"

        Yields:
            a server dictionary with the stream links
        """

        identifier = base64.b64decode(anime_id).decode()

        id_num, anime_title = identifier.split("/")
        if translation_type == "dub":
            video_query = f"{id_num}|{episode_number}|dub"
        else:
            video_query = f"{id_num}|{episode_number}"

        res = self.session.post(
            f"{YUGEN_ENDPOINT}/api/embed/",
            data={
                "id": base64.b64encode(video_query.encode()).decode(),
                "ac": "0",
            },
            headers={"x-requested-with": "XMLHttpRequest"},
        )
        res = res.json()
        yield {
            "server": "gogoanime",
            "episode_title": f"{anime_title}; Episode {episode_number}",
            "headers": {},
            "subtitles": [],
            "links": [
                {"quality": quality, "link": link}
                for quality, link in zip(
                    cycle(["1080", "720", "480", "360"]), res["hls"]
                )
            ],
        }
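The provider's anime_id is just the site's numeric id and URL slug joined with a slash and base64-encoded, which is why both get_anime and get_episode_streams start by decoding it. A quick illustration with a made-up identifier:

    import base64

    identifier = "123/spy-x-family"  # made-up id/slug pair
    anime_id = base64.b64encode(identifier.encode()).decode()
    print(anime_id)                             # the opaque token the provider hands out as anime_id
    print(base64.b64decode(anime_id).decode())  # back to "123/spy-x-family"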
5 fastanime/libs/anime_provider/yugen/constants.py Normal file
@@ -0,0 +1,5 @@

YUGEN_ENDPOINT: str = "https://yugenanime.tv"

SEARCH_URL = YUGEN_ENDPOINT + "/api/discover/"
SERVERS_AVAILABLE = ["gogoanime"]
@@ -176,11 +176,17 @@ query ($query: String) {
                    if not anime_result["status"] == "RELEASING"
                    and anime_result["episodes"]
                    else (
                        anime_result["nextAiringEpisode"]["episode"] - 1
                        if anime_result["nextAiringEpisode"]
                        else 0
                        (
                            anime_result["nextAiringEpisode"]["episode"]
                            - 1
                            if anime_result["nextAiringEpisode"]
                            else 0
                        )
                        if not anime_result["episodes"]
                        else anime_result["episodes"]
                    )
                ),
            )
            + 1,
        ),
    )
),
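This hunk is hard to follow out of context; read on its own, the reworked branch appears to reduce to something like the following helper (field names are taken from the hunk, everything else is an assumed restatement, ignoring the + 1 applied further up the expression):

    def episodes_count(anime_result: dict) -> int:
        # Prefer the explicit episode count when the API provides one.
        if anime_result["episodes"]:
            return anime_result["episodes"]
        # Otherwise fall back to the last episode that has already aired, or 0.
        if anime_result["nextAiringEpisode"]:
            return anime_result["nextAiringEpisode"]["episode"] - 1
        return 0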
209 fastanime/libs/common/requests_cacher.py Normal file
@@ -0,0 +1,209 @@
import json
import logging
import os
import re
import time
from datetime import datetime
from urllib.parse import urlencode

import requests

from .sqlitedb_helper import SqliteDB

logger = logging.getLogger(__name__)

caching_mimetypes = {
    "application": {
        "json",
        "xml",
        "x-www-form-urlencoded",
        "x-javascript",
        "javascript",
    },
    "text": {"html", "css", "javascript", "plain", "xml", "xsl", "x-javascript"},
}


class CachedRequestsSession(requests.Session):
    __request_functions__ = (
        "get",
        "options",
        "head",
        "post",
        "put",
        "patch",
        "delete",
    )

    def __new__(cls, *args, **kwargs):
        def caching_params(name: str):
            def wrapper(self, *args, **kwargs):
                return cls.request(self, name, *args, **kwargs)

            return wrapper

        for func in cls.__request_functions__:
            setattr(cls, func, caching_params(func))

        return super().__new__(cls)

    def __init__(
        self,
        cache_db_path: str,
        max_lifetime: int = 259200,
        max_size: int = (1024**2) * 10,
        table_name: str = "fastanime_requests_cache",
        clean_db=False,
        *args,
        **kwargs,
    ):
        super().__init__(*args, **kwargs)

        self.cache_db_path = cache_db_path
        self.max_lifetime = max_lifetime
        self.max_size = max_size
        self.table_name = table_name
        self.sqlite_db_connection = SqliteDB(self.cache_db_path)

        # Prepare the cache table if it doesn't exist
        self._create_cache_table()

    def _create_cache_table(self):
        """Create cache table if it doesn't exist."""
        with self.sqlite_db_connection as conn:
            conn.execute(
                f"""
                CREATE TABLE IF NOT EXISTS {self.table_name} (
                    url TEXT,
                    status_code INTEGER,
                    request_headers TEXT,
                    response_headers TEXT,
                    data BLOB,
                    redirection_policy INT,
                    cache_expiry INTEGER
                )"""
            )

    def request(
        self,
        method,
        url,
        params=None,
        force_caching=False,
        fresh=int(os.environ.get("FASTANIME_FRESH_REQUESTS", 0)),
        *args,
        **kwargs,
    ):
        if params:
            url += "?" + urlencode(params)

        redirection_policy = int(kwargs.get("force_redirects", False))

        with self.sqlite_db_connection as conn:
            cursor = conn.cursor()
            time_before_access_db = datetime.now()

            logger.debug("Checking for existing request in cache")
            cursor.execute(
                f"""
                SELECT
                    status_code,
                    request_headers,
                    response_headers,
                    data,
                    redirection_policy
                FROM {self.table_name}
                WHERE
                    url = ?
                    AND redirection_policy = ?
                    AND cache_expiry > ?
                """,
                (url, redirection_policy, int(time.time())),
            )
            cached_request = cursor.fetchone()
            time_after_access_db = datetime.now()

            if cached_request and not fresh:
                logger.debug("Found existing request in cache")
                (
                    status_code,
                    request_headers,
                    response_headers,
                    data,
                    redirection_policy,
                ) = cached_request

                response = requests.Response()
                response.headers.update(json.loads(response_headers))
                response.status_code = status_code
                response._content = data

                if "timeout" in kwargs:
                    kwargs.pop("timeout")
                if "headers" in kwargs:
                    kwargs.pop("headers")
                _request = requests.Request(
                    method, url, headers=json.loads(request_headers), *args, **kwargs
                )
                response.request = _request.prepare()
                response.elapsed = time_after_access_db - time_before_access_db

                return response

            # Perform the request and cache it
            response = super().request(method, url, *args, **kwargs)
            if response.ok and (
                force_caching
                or self.is_content_type_cachable(
                    response.headers.get("content-type"), caching_mimetypes
                )
                and len(response.content) < self.max_size
            ):
                logger.debug("Caching the current request")
                cursor.execute(
                    f"""
                    INSERT INTO {self.table_name}
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        url,
                        response.status_code,
                        json.dumps(dict(response.request.headers)),
                        json.dumps(dict(response.headers)),
                        response.content,
                        redirection_policy,
                        int(time.time()) + self.max_lifetime,
                    ),
                )

            return response

    @staticmethod
    def is_content_type_cachable(content_type, caching_mimetypes):
        """Checks whether the given content type is supported by the cacher"""
        if content_type is None:
            return True

        mime, contents = content_type.split("/")

        contents = re.sub(r";.*$", "", contents)

        return mime in caching_mimetypes and any(
            content in caching_mimetypes[mime] for content in contents.split("+")
        )


if __name__ == "__main__":
    with CachedRequestsSession("cache.db") as session:
        response = session.get(
            "https://google.com",
        )

        response_b = session.get(
            "https://google.com",
        )

        print("A: ", response.elapsed)
        print("B: ", response_b.elapsed)

        print(response_b.text[0:30])
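A quick check of the cacheability rule above, assuming the module path matches the file header; JSON and HTML responses pass, while a binary stream type does not:

    from fastanime.libs.common.requests_cacher import (
        CachedRequestsSession,
        caching_mimetypes,
    )

    print(CachedRequestsSession.is_content_type_cachable("application/json; charset=utf-8", caching_mimetypes))  # True
    print(CachedRequestsSession.is_content_type_cachable("text/html", caching_mimetypes))                        # True
    print(CachedRequestsSession.is_content_type_cachable("video/mp4", caching_mimetypes))                        # False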
34 fastanime/libs/common/sqlitedb_helper.py Normal file
@@ -0,0 +1,34 @@
import logging
import sqlite3
import time

logger = logging.getLogger(__name__)


class SqliteDB:
    def __init__(self, db_path: str) -> None:
        self.db_path = db_path
        self.connection = sqlite3.connect(self.db_path)
        logger.debug("Enabling WAL mode for concurrent access")
        self.connection.execute("PRAGMA journal_mode=WAL;")
        self.connection.close()
        self.connection = None

    def __enter__(self):
        logger.debug("Starting new connection...")
        start_time = time.time()
        self.connection = sqlite3.connect(self.db_path)
        logger.debug(
            "Successfully got a new connection in {} seconds".format(
                time.time() - start_time
            )
        )
        return self.connection

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.connection:
            logger.debug("Closing connection to cache db")
            self.connection.commit()
            self.connection.close()
            self.connection = None
            logger.debug("Successfully closed connection to cache db")
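A minimal usage sketch of the helper above, assuming the package layout matches the file path; each with block opens a fresh connection and commits and closes it on exit:

    from fastanime.libs.common.sqlitedb_helper import SqliteDB

    db = SqliteDB("example.db")  # enables WAL once at construction

    with db as conn:
        conn.execute("CREATE TABLE IF NOT EXISTS kv (key TEXT, value TEXT)")
        conn.execute("INSERT INTO kv VALUES (?, ?)", ("greeting", "hello"))

    with db as conn:
        print(conn.execute("SELECT value FROM kv WHERE key = ?", ("greeting",)).fetchall())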
@@ -49,7 +49,7 @@ class FZF:
        "--info=hidden",
        "--layout=reverse",
        "--height=100%",
        "--bind=right:accept",
        "--bind=right:accept,ctrl-/:toggle-preview,ctrl-space:toggle-wrap+toggle-preview-wrap",
        "--no-margin",
        "+m",
        "-i",
@@ -11,6 +11,7 @@ class RofiApi:
    ROFI_EXECUTABLE = which("rofi")

    rofi_theme = ""
    rofi_theme_preview = ""
    rofi_theme_confirm = ""
    rofi_theme_input = ""

@@ -21,8 +22,8 @@ class RofiApi:
            raise Exception("Rofi not found")

        args = [self.ROFI_EXECUTABLE]
        if self.rofi_theme:
            args.extend(["-no-config", "-theme", self.rofi_theme])
        if self.rofi_theme_preview:
            args.extend(["-no-config", "-theme", self.rofi_theme_preview])
        args.extend(["-p", f"{prompt_text.title()}", "-i", "-show-icons", "-dmenu"])
        result = subprocess.run(
            args,
61 flake.lock generated Normal file
@@ -0,0 +1,61 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1731676054,
        "narHash": "sha256-OZiZ3m8SCMfh3B6bfGC/Bm4x3qc1m2SVEAlkV6iY7Yg=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "5e4fbfb6b3de1aa2872b76d49fafc942626e2add",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
60 flake.nix Normal file
@@ -0,0 +1,60 @@
{
  description = "FastAnime Project Flake";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils }: flake-utils.lib.eachDefaultSystem (system:
    let
      pkgs = import nixpkgs { inherit system; };

      python = pkgs.python312;
      pythonPackages = python.pkgs;
      fastanimeEnv = pythonPackages.buildPythonApplication {
        pname = "fastanime";
        version = "2.8.0";

        src = ./.;

        preBuild = ''
          sed -i 's/rich>=13.9.2/rich>=13.8.1/' pyproject.toml
        '';

        # Add runtime dependencies
        propagatedBuildInputs = with pythonPackages; [
          click
          inquirerpy
          requests
          rich
          thefuzz
          yt-dlp
          dbus-python
          hatchling
          plyer
          mpv
          fastapi
        ];

        # Ensure compatibility with the pyproject.toml
        format = "pyproject";
      };

    in
    {
      packages.default = fastanimeEnv;

      # DevShell for development
      devShells.default = pkgs.mkShell {
        buildInputs = [
          fastanimeEnv
          pythonPackages.hatchling
          pkgs.mpv
          pkgs.libmpv
          pkgs.fzf
          pkgs.rofi
        ];
      };
    });
}
11 make_release Executable file
@@ -0,0 +1,11 @@
#! /usr/bin/env sh
CLI_DIR="$(dirname "$(realpath "$0")")"
VERSION=$1
[ -z "$VERSION" ] && echo no version provided && exit 1
[ "$VERSION" = "current" ] && fastanime --version && exit 0
sed -i "s/^version.*/version = \"$VERSION\"/" "$CLI_DIR/pyproject.toml" &&
  sed -i "s/__version__.*/__version__ = \"v$VERSION\"/" "$CLI_DIR/fastanime/__init__.py" &&
  git stage "$CLI_DIR/pyproject.toml" "$CLI_DIR/fastanime/__init__.py" &&
  git commit -m "chore: bump version (v$VERSION)" &&
  git push &&
  gh release create "v$VERSION"
1391 poetry.lock generated
File diff suppressed because it is too large
65 pyinstaller.spec Normal file
@@ -0,0 +1,65 @@
# -*- mode: python ; coding: utf-8 -*-
from PyInstaller.utils.hooks import collect_data_files, collect_submodules

block_cipher = None

# Collect all required data files
datas = [
    ('fastanime/assets/*', 'fastanime/assets'),
]

# Collect all required hidden imports
hiddenimports = [
    'click',
    'rich',
    'requests',
    'yt_dlp',
    'python_mpv',
    'fuzzywuzzy',
    'fastanime',
] + collect_submodules('fastanime')

a = Analysis(
    ['./fastanime/fastanime.py'],  # Changed entry point
    pathex=[],
    binaries=[],
    datas=datas,
    hiddenimports=hiddenimports,
    hookspath=[],
    hooksconfig={},
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    strip=True,  # Strip debug information
    optimize=2  # Optimize bytecode, noarchive=False
)

pyz = PYZ(
    a.pure,
    a.zipped_data,
    optimize=2  # Optimize bytecode, cipher=block_cipher
)

exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    [],
    name='fastanime',
    debug=False,
    bootloader_ignore_signals=False,
    strip=True,
    upx=True,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=True,
    disable_windowed_traceback=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
    icon='fastanime/assets/logo.ico'
)
@@ -1,42 +1,36 @@
[tool.poetry]
[project]
name = "fastanime"
version = "2.5.6.dev1"
version = "2.8.0"
description = "A browser anime site experience from the terminal"
authors = ["Benextempest <benextempest@gmail.com>"]
license = "UNLICENSE"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "click>=8.1.7",
    "inquirerpy>=0.3.4",
    "requests>=2.32.3",
    "rich>=13.9.2",
    "thefuzz>=0.22.1",
    "yt-dlp>=2024.10.7",
]

[tool.poetry.dependencies]
python = "^3.10"
yt-dlp = "^2024.5.27"
thefuzz = "^0.22.1"
requests = "^2.32.3"
rich = { version = "^13.7.1", optional = false }
click = { version = "^8.1.7", optional = false }
inquirerpy = { version = "^0.3.4", optional = false }
mpv = { version = "^1.0.7", optional = true }
plyer = { version = "^2.1.0", optional = true }

lbry-libtorrent = "^1.2.4"
[tool.poetry.extras]
full = ["plyer", "mpv"]
# cli = ["rich", "click", "inquirerpy"]
mpv = ["mpv"]
notifications = ["plyer"]

[tool.poetry.group.dev.dependencies]
black = "^24.4.2"
isort = "^5.13.2"
pytest = "^8.2.2"
ruff = "^0.4.10"
pre-commit = "^3.7.1"
autoflake = "^2.3.1"
tox = "^4.16.0"

pyright = "^1.1.374"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.poetry.scripts]
[project.scripts]
fastanime = 'fastanime:FastAnime'

[project.optional-dependencies]
standard = ["fastapi[standard]>=0.115.0", "mpv>=1.0.7", "plyer>=2.1.0"]
api = ["fastapi[standard]>=0.115.0"]
notifications = ["plyer>=2.1.0"]
mpv = ["mpv>=1.0.7"]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.uv]
dev-dependencies = [
    "pyinstaller>=6.11.1",
    "pyright>=1.1.384",
    "pytest>=8.3.3",
    "ruff>=0.6.9",
]
18 shell.nix Normal file
@@ -0,0 +1,18 @@
let
  pkgs = import <nixpkgs> {};
in pkgs.mkShell {
  packages = [
    (pkgs.python3.withPackages (python-pkgs: [
      python-pkgs.yt-dlp
      python-pkgs.dbus-python
      python-pkgs.requests
      python-pkgs.rich
      python-pkgs.click
      python-pkgs.inquirerpy
      python-pkgs.mpv
      python-pkgs.fastapi
      python-pkgs.thefuzz
      python-pkgs.plyer
    ]))
  ];
}
@@ -6,7 +6,7 @@ from fastanime.cli import run_cli

@pytest.fixture
def runner():
    return CliRunner()
    return CliRunner(env={"FASTANIME_CACHE_REQUESTS": "false"})


def test_main_help(runner: CliRunner):
18 tox.ini
@@ -5,23 +5,23 @@ env_list = lint, pyright, py{310,311}

[testenv]
description = run unit tests
deps = poetry
deps = uv
commands =
    poetry install --all-extras
    poetry run pytest
    uv sync --dev --all-extras
    uv run pytest

[testenv:lint]
description = run linters
skip_install = true
deps = poetry
deps = uv
commands =
    poetry install --all-extras
    poetry run black .
    uv sync --dev --all-extras
    uv run ruff format .

[testenv:pyright]
description = run type checking
skip_install = true
deps = poetry
deps = uv
commands =
    poetry install --no-root --all-extras
    poetry run pyright
    uv sync --dev --all-extras
    uv run pyright