Mirror of https://github.com/Benexl/FastAnime.git
Synced 2026-01-02 07:50:04 -08:00

Compare commits: v3.3.3 ... contributi — 43 commits
| SHA1 |
|---|
| beacf4fb2d |
| 92eb1f12b2 |
| efa1340e41 |
| ac7e90acdf |
| 8c5b066019 |
| a826f391c1 |
| 6a31f4191f |
| b8f77d80e9 |
| 6192252d10 |
| efed80f4dc |
| e49baed46f |
| 6e26ac500d |
| 5db33d2fa0 |
| 0524af6e26 |
| a2fc9e442d |
| f9ca8bbd79 |
| dd9d9695e7 |
| c9d948ae4b |
| b9766af11a |
| 9d72a50916 |
| acb14d025c |
| ba9b170ba8 |
| ecc4de6ae6 |
| e065c8e8fc |
| 32df0503d0 |
| 11449378e9 |
| 8837c542f2 |
| eb8c443775 |
| b052ee8300 |
| f684f561df |
| 7ed45ce07e |
| 10d1211388 |
| efa6f4d142 |
| 0ca63dd765 |
| b62d878a0e |
| bcc5e7df8e |
| df8e925eec |
| 9d9fa55b69 |
| 42f7e1d4e2 |
| 7f4a1f265a |
| 12ef447eaf |
| 75b1b8fab4 |
| 6f4155dd65 |
`.github/workflows/release-binaries.yml` (vendored, new file, +152)

```yaml
name: Build Release Binaries

on:
  release:
    types: [published]
  workflow_dispatch:
    inputs:
      tag:
        description: "Tag/version to build (leave empty for latest)"
        required: false
        type: string

permissions:
  contents: write

jobs:
  build:
    strategy:
      fail-fast: false
      matrix:
        include:
          - os: ubuntu-latest
            target: linux
            asset_name: viu-linux-x86_64
            executable: viu
          - os: windows-latest
            target: windows
            asset_name: viu-windows-x86_64.exe
            executable: viu.exe
          - os: macos-latest
            target: macos
            asset_name: viu-macos-x86_64
            executable: viu

    runs-on: ${{ matrix.os }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.tag || github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true

      - name: Install system dependencies (Linux)
        if: runner.os == 'Linux'
        run: |
          sudo apt-get update
          sudo apt-get install -y libdbus-1-dev libglib2.0-dev

      - name: Install dependencies
        run: uv sync --all-extras --all-groups

      - name: Build executable with PyInstaller
        run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean

      - name: Rename executable
        shell: bash
        run: mv dist/${{ matrix.executable }} dist/${{ matrix.asset_name }}

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.asset_name }}
          path: dist/${{ matrix.asset_name }}
          if-no-files-found: error

      - name: Upload to Release
        if: github.event_name == 'release'
        uses: softprops/action-gh-release@v2
        with:
          files: dist/${{ matrix.asset_name }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Build for macOS ARM (Apple Silicon)
  build-macos-arm:
    runs-on: macos-14

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.tag || github.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"

      - name: Install uv
        uses: astral-sh/setup-uv@v3
        with:
          enable-cache: true

      - name: Install dependencies
        run: uv sync --all-extras --all-groups

      - name: Build executable with PyInstaller
        run: uv run pyinstaller bundle/pyinstaller.spec --distpath dist --workpath build/pyinstaller --clean

      - name: Rename executable
        run: mv dist/viu dist/viu-macos-arm64

      - name: Upload artifact
        uses: actions/upload-artifact@v4
        with:
          name: viu-macos-arm64
          path: dist/viu-macos-arm64
          if-no-files-found: error

      - name: Upload to Release
        if: github.event_name == 'release'
        uses: softprops/action-gh-release@v2
        with:
          files: dist/viu-macos-arm64
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  # Create checksums after all builds complete
  checksums:
    needs: [build, build-macos-arm]
    runs-on: ubuntu-latest
    if: github.event_name == 'release'

    steps:
      - name: Download all artifacts
        uses: actions/download-artifact@v4
        with:
          path: artifacts
          merge-multiple: true

      - name: Generate checksums
        run: |
          cd artifacts
          sha256sum * > SHA256SUMS.txt
          cat SHA256SUMS.txt

      - name: Upload checksums to Release
        uses: softprops/action-gh-release@v2
        with:
          files: artifacts/SHA256SUMS.txt
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
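The `checksums` job publishes `SHA256SUMS.txt` in `sha256sum`'s two-space `<hash>  <name>` format, so a downloaded asset can be verified offline. A minimal sketch, assuming the asset and the sums file sit in the current directory (the file name used in the example is just one the matrix above produces):

```python
import hashlib
from pathlib import Path


def verify_asset(asset: Path, sums: Path = Path("SHA256SUMS.txt")) -> bool:
    """Compare a release asset against its entry in SHA256SUMS.txt."""
    digest = hashlib.sha256(asset.read_bytes()).hexdigest()
    for line in sums.read_text().splitlines():
        expected, _, name = line.partition("  ")  # sha256sum uses two spaces
        if name.strip() == asset.name:
            return expected == digest
    return False  # asset not listed


if __name__ == "__main__":
    print(verify_asset(Path("viu-linux-x86_64")))
```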
`.github/workflows/stale.yml` (vendored, 6 lines changed)

```diff
@@ -1,9 +1,9 @@
 name: Mark Stale Issues and Pull Requests

 on:
-  schedule:
-    # Runs every day at 6:30 UTC
-    - cron: "30 6 * * *"
+  # schedule:
+  #   # Runs every day at 6:30 UTC
+  #   - cron: "30 6 * * *"
   # Allows you to run this workflow manually from the Actions tab for testing
   workflow_dispatch:
```
`README.md` (83 lines changed)

`@@ -114,38 +114,78 @@` — the Termux install section is rewritten and expanded; the updated section:

#### Termux
You may need to have Rust installed; see this issue: https://github.com/pydantic/pydantic-core/issues/1012#issuecomment-2511269688.

```bash
# Recommended (with pip, for more control)
pkg install python
pkg install rust # required because of pydantic

# NOTE: order matters

# get pydantic from the termux user repository
pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/

# the above will take a while; add -v if you want to see more output and feel like sth is happening lol
pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/ -v

# now you can install viu
pip install viu-media

# you may need to install pydantic manually in case the above fails
python -m pip install pydantic --extra-index-url https://termux-user-repository.github.io/pypi/

# === optional deps ===
# if you have reached here, awesome lol :)

# yt-dlp for downloading m3u8 and hls streams
pip install yt-dlp[default,curl-cffi]

# you may also need ffmpeg for processing the videos
pkg install ffmpeg

# you may need to install lxml and plyer manually, e.g.
python -m pip install lxml --extra-index-url https://termux-user-repository.github.io/pypi/

# tip: if you also want yt functionality
pip install yt-dlp-ejs

# for faster fuzzy search
pip install thefuzz

# if you want faster scraping, though barely noticeable lol
pip install lxml --extra-index-url https://termux-user-repository.github.io/pypi/

# Alternative: with uv (may work, no promises)
pkg install uv
# you require a js runtime, e.g. the recommended one
pkg install deno

uv tool install viu-media

# and to add yt-dlp only you can do
uv tool install viu-media --with yt-dlp[default,curl-cffi]

# or, though it may fail because of lxml and plyer (in that case install them manually):
uv tool install viu-media[standard]
# if compilation fails you need to have
pkg install libxml2 libxslt

# == ui setup ==
pkg install fzf

# then enable fzf in the config
viu --selector fzf config --update

# if you want previews as well, specify the preview option
# though images aren't that pretty lol, so you can stick to text over full
viu --preview text config --update

# if you set preview to full you need a terminal image renderer
pkg install chafa

# == player setup ==
# for this you need to strictly install from the play store
# search for mpv or vlc (recommended, since it has a nicer ui)
# the only limitation is that currently it's not possible to pass headers to the android players
# through android intents
# so use servers like sharepoint and wixmp
# though this is not an issue when it comes to downloading ;)
# if you have installed mpv/vlc using 'pkg', uninstall it

# okay, now you are all set; i promise the hassle is worth it lol :)
# posted a video of it working to motivate you
# note: i recorded it from waydroid, which is android for linux, sort of like an emulator (bluestacks for example)
```

https://github.com/user-attachments/assets/0c628421-a439-4dea-91bb-7153e8f20ccf

#### Using pipx (for isolated environments)
````diff
@@ -181,7 +221,7 @@ Get up and running in three simple steps:
    ```bash
    viu anilist auth
    ```
-   This will open your browser. Authorize the app and paste the obtained token back into the terminal.
+   This will open your browser. Authorize the app and paste the obtained token back into the terminal. Alternatively, you can pass the token directly as an argument, or provide a path to a text file containing the token.

 2. **Launch the Interactive TUI:**
    ```bash
````
````diff
@@ -362,6 +402,9 @@ You can run the background worker as a systemd service for persistence.
    systemctl --user daemon-reload
    systemctl --user enable --now viu-worker.service
    ```

+## Projects using it
+**[Inazuma](https://github.com/viu-media/Inazuma)** - the official GUI wrapper over viu, built with KivyMD
+
 ## Contributing
````
`bundle/pyinstaller.spec`

```diff
@@ -1,26 +1,46 @@
 # -*- mode: python ; coding: utf-8 -*-
 import sys
 from PyInstaller.utils.hooks import collect_data_files, collect_submodules

 block_cipher = None

 # Platform-specific settings
 is_windows = sys.platform == 'win32'
 is_macos = sys.platform == 'darwin'

 # Collect all required data files
 datas = [
-    ('viu/assets/*', 'viu/assets'),
+    ('../viu_media/assets', 'viu_media/assets'),
 ]

 # Collect all required hidden imports
+# Include viu_media and all its submodules to ensure menu modules are bundled
 hiddenimports = [
     'click',
     'rich',
     'requests',
     'yt_dlp',
     'python_mpv',
     'fuzzywuzzy',
-    'viu',
-] + collect_submodules('viu')
+    'viu_media',
+    'viu_media.cli.interactive.menu',
+    'viu_media.cli.interactive.menu.media',
+    # Explicit menu modules (PyInstaller doesn't always pick these up)
+    'viu_media.cli.interactive.menu.media.downloads',
+    'viu_media.cli.interactive.menu.media.download_episodes',
+    'viu_media.cli.interactive.menu.media.dynamic_search',
+    'viu_media.cli.interactive.menu.media.episodes',
+    'viu_media.cli.interactive.menu.media.main',
+    'viu_media.cli.interactive.menu.media.media_actions',
+    'viu_media.cli.interactive.menu.media.media_airing_schedule',
+    'viu_media.cli.interactive.menu.media.media_characters',
+    'viu_media.cli.interactive.menu.media.media_review',
+    'viu_media.cli.interactive.menu.media.player_controls',
+    'viu_media.cli.interactive.menu.media.play_downloads',
+    'viu_media.cli.interactive.menu.media.provider_search',
+    'viu_media.cli.interactive.menu.media.results',
+    'viu_media.cli.interactive.menu.media.servers',
+] + collect_submodules('viu_media')

 a = Analysis(
-    ['./viu/viu.py'],  # Changed entry point
+    ['../viu_media/viu.py'],
     pathex=[],
     binaries=[],
     datas=datas,
@@ -32,16 +52,18 @@ a = Analysis(
     win_no_prefer_redirects=False,
     win_private_assemblies=False,
     cipher=block_cipher,
-    strip=True,  # Strip debug information
-    optimize=2,  # Optimize bytecode
     noarchive=False,
 )

 pyz = PYZ(
     a.pure,
     a.zipped_data,
-    optimize=2,  # Optimize bytecode
     cipher=block_cipher,
 )

+# Icon path - only use .ico on Windows
+icon_path = '../viu_media/assets/icons/logo.ico' if is_windows else None
+
 exe = EXE(
     pyz,
     a.scripts,
@@ -52,7 +74,7 @@ exe = EXE(
     name='viu',
     debug=False,
     bootloader_ignore_signals=False,
-    strip=True,
+    strip=not is_windows,  # strip doesn't work well on Windows without proper tools
     upx=True,
     upx_exclude=[],
     runtime_tmpdir=None,
@@ -61,5 +83,5 @@ exe = EXE(
     target_arch=None,
     codesign_identity=None,
     entitlements_file=None,
-    icon='viu/assets/logo.ico'
+    icon=icon_path,
 )
```
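The long explicit `hiddenimports` list exists because PyInstaller's static analysis only follows literal `import` statements. If the interactive menu resolves its screens by name at runtime (a pattern sketched below; this loader function is hypothetical, not the project's actual code), those modules never appear in the import graph and must be declared by hand or collected via `collect_submodules`:

```python
import importlib


def load_menu_screen(name: str):
    # Dynamic imports like this are invisible to PyInstaller's analysis,
    # which is why each menu module is listed in hiddenimports above.
    return importlib.import_module(f"viu_media.cli.interactive.menu.media.{name}")
```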
`pyproject.toml`

```diff
@@ -1,16 +1,16 @@
 [project]
 name = "viu-media"
-version = "3.3.3"
+version = "3.3.6"
 description = "A browser anime site experience from the terminal"
 license = "UNLICENSE"
 readme = "README.md"
 requires-python = ">=3.11"
 dependencies = [
-  "click>=8.1.7",
-  "httpx>=0.28.1",
-  "inquirerpy>=0.3.4",
-  "pydantic>=2.11.7",
-  "rich>=13.9.2",
+    "click>=8.1.7",
+    "httpx>=0.28.1",
+    "inquirerpy>=0.3.4",
+    "pydantic>=2.11.7",
+    "rich>=13.9.2",
 ]

 [project.scripts]
@@ -18,32 +18,27 @@ viu = 'viu_media:Cli'

 [project.optional-dependencies]
 standard = [
-  "thefuzz>=0.22.1",
-  "yt-dlp>=2025.7.21",
-  "pycryptodomex>=3.23.0",
-  "pypiwin32; sys_platform == 'win32'",    # For Windows-specific functionality
-  "pyobjc; sys_platform == 'darwin'",      # For macOS-specific functionality
-  "dbus-python; sys_platform == 'linux'",  # For Linux-specific functionality (e.g., notifications)
-  "plyer>=2.1.0",
-  "lxml>=6.0.0"
+    "thefuzz>=0.22.1",
+    "yt-dlp>=2025.7.21",
+    "pycryptodomex>=3.23.0",
+    "pypiwin32; sys_platform == 'win32'",    # For Windows-specific functionality
+    "pyobjc; sys_platform == 'darwin'",      # For macOS-specific functionality
+    "dbus-python; sys_platform == 'linux'",  # For Linux-specific functionality (e.g., notifications)
+    "plyer>=2.1.0",
+    "lxml>=6.0.0",
 ]
 notifications = [
-    "dbus-python>=1.4.0",
     "pypiwin32; sys_platform == 'win32'",  # For Windows-specific functionality
     "pyobjc; sys_platform == 'darwin'",    # For macOS-specific functionality
+    "dbus-python>=1.4.0; sys_platform == 'linux'",
     "plyer>=2.1.0",
 ]
-mpv = [
-    "mpv>=1.0.7",
-]
+mpv = ["mpv>=1.0.7"]
 torrent = ["libtorrent>=2.0.11"]
 lxml = ["lxml>=6.0.0"]
 discord = ["pypresence>=4.3.0"]
-download = [
-    "pycryptodomex>=3.23.0",
-    "yt-dlp>=2025.7.21",
-]
-torrents = [
-    "libtorrent>=2.0.11",
-]
+download = ["pycryptodomex>=3.23.0", "yt-dlp>=2025.7.21"]
+torrents = ["libtorrent>=2.0.11"]

 [build-system]
 requires = ["hatchling"]
@@ -51,12 +46,12 @@ build-backend = "hatchling.build"

 [dependency-groups]
 dev = [
-  "pre-commit>=4.0.1",
-  "pyinstaller>=6.11.1",
-  "pyright>=1.1.384",
-  "pytest>=8.3.3",
-  "pytest-httpx>=0.35.0",
-  "ruff>=0.6.9",
+    "pre-commit>=4.0.1",
+    "pyinstaller>=6.11.1",
+    "pyright>=1.1.384",
+    "pytest>=8.3.3",
+    "pytest-httpx>=0.35.0",
+    "ruff>=0.6.9",
 ]

 [tool.pytest.ini_options]
```
`tests/cli/commands/anilist/commands/test_auth.py` (new file, +284)

```python
from unittest.mock import MagicMock, patch

import pytest
from click.testing import CliRunner

from viu_media.cli.commands.anilist.commands.auth import auth


@pytest.fixture
def runner():
    return CliRunner()


@pytest.fixture
def mock_config():
    config = MagicMock()
    config.user.interactive = True
    return config


@pytest.fixture
def mock_auth_service():
    with patch("viu_media.cli.service.auth.AuthService") as mock:
        yield mock


@pytest.fixture
def mock_feedback_service():
    with patch("viu_media.cli.service.feedback.FeedbackService") as mock:
        yield mock


@pytest.fixture
def mock_selector():
    with patch("viu_media.libs.selectors.selector.create_selector") as mock:
        yield mock


@pytest.fixture
def mock_api_client():
    with patch("viu_media.libs.media_api.api.create_api_client") as mock:
        yield mock


@pytest.fixture
def mock_webbrowser():
    with patch("viu_media.cli.commands.anilist.commands.auth.webbrowser") as mock:
        yield mock


def test_auth_with_token_argument(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
):
    """Test 'viu anilist auth <token>'."""
    api_client_instance = mock_api_client.return_value
    profile_mock = MagicMock()
    profile_mock.name = "testuser"
    api_client_instance.authenticate.return_value = profile_mock

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, ["test_token"], obj=mock_config)

    assert result.exit_code == 0
    mock_api_client.assert_called_with("anilist", mock_config)
    api_client_instance.authenticate.assert_called_with("test_token")
    auth_service_instance.save_user_profile.assert_called_with(
        profile_mock, "test_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as testuser! ✨")


def test_auth_with_token_file(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
    tmp_path,
):
    """Test 'viu anilist auth <path/to/token.txt>'."""
    token_file = tmp_path / "token.txt"
    token_file.write_text("file_token")

    api_client_instance = mock_api_client.return_value
    profile_mock = MagicMock()
    profile_mock.name = "testuser"
    api_client_instance.authenticate.return_value = profile_mock

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, [str(token_file)], obj=mock_config)

    assert result.exit_code == 0
    mock_api_client.assert_called_with("anilist", mock_config)
    api_client_instance.authenticate.assert_called_with("file_token")
    auth_service_instance.save_user_profile.assert_called_with(
        profile_mock, "file_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as testuser! ✨")


def test_auth_with_empty_token_file(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
    tmp_path,
):
    """Test 'viu anilist auth' with an empty token file."""
    token_file = tmp_path / "token.txt"
    token_file.write_text("")

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, [str(token_file)], obj=mock_config)

    assert result.exit_code == 0
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.error.assert_called_with(f"Token file is empty: {token_file}")


def test_auth_interactive(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
    mock_webbrowser,
):
    """Test 'viu anilist auth' interactive mode."""
    mock_webbrowser.open.return_value = True

    selector_instance = mock_selector.return_value
    selector_instance.ask.return_value = "interactive_token"

    api_client_instance = mock_api_client.return_value
    profile_mock = MagicMock()
    profile_mock.name = "testuser"
    api_client_instance.authenticate.return_value = profile_mock

    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, [], obj=mock_config)

    assert result.exit_code == 0
    selector_instance.ask.assert_called_with("Enter your AniList Access Token")
    api_client_instance.authenticate.assert_called_with("interactive_token")
    auth_service_instance.save_user_profile.assert_called_with(
        profile_mock, "interactive_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as testuser! ✨")


def test_auth_status_logged_in(
    runner, mock_config, mock_auth_service, mock_feedback_service
):
    """Test 'viu anilist auth --status' when logged in."""
    auth_service_instance = mock_auth_service.return_value
    user_data_mock = MagicMock()
    user_data_mock.user_profile = "testuser"
    auth_service_instance.get_auth.return_value = user_data_mock

    result = runner.invoke(auth, ["--status"], obj=mock_config)

    assert result.exit_code == 0
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Logged in as: testuser")


def test_auth_status_logged_out(
    runner, mock_config, mock_auth_service, mock_feedback_service
):
    """Test 'viu anilist auth --status' when logged out."""
    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.get_auth.return_value = None

    result = runner.invoke(auth, ["--status"], obj=mock_config)

    assert result.exit_code == 0
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.error.assert_called_with("Not logged in.")


def test_auth_logout(
    runner, mock_config, mock_auth_service, mock_feedback_service, mock_selector
):
    """Test 'viu anilist auth --logout'."""
    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = True

    result = runner.invoke(auth, ["--logout"], obj=mock_config)

    assert result.exit_code == 0
    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.clear_user_profile.assert_called_once()
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("You have been logged out.")


def test_auth_logout_cancel(
    runner, mock_config, mock_auth_service, mock_feedback_service, mock_selector
):
    """Test 'viu anilist auth --logout' when user cancels."""
    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = False

    result = runner.invoke(auth, ["--logout"], obj=mock_config)

    assert result.exit_code == 0
    auth_service_instance = mock_auth_service.return_value
    auth_service_instance.clear_user_profile.assert_not_called()


def test_auth_already_logged_in_relogin_yes(
    runner,
    mock_config,
    mock_auth_service,
    mock_feedback_service,
    mock_selector,
    mock_api_client,
):
    """Test 'viu anilist auth' when already logged in and user chooses to relogin."""
    auth_service_instance = mock_auth_service.return_value
    auth_profile_mock = MagicMock()
    auth_profile_mock.user_profile.name = "testuser"
    auth_service_instance.get_auth.return_value = auth_profile_mock

    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = True
    selector_instance.ask.return_value = "new_token"

    api_client_instance = mock_api_client.return_value
    new_profile_mock = MagicMock()
    new_profile_mock.name = "newuser"
    api_client_instance.authenticate.return_value = new_profile_mock

    result = runner.invoke(auth, [], obj=mock_config)

    assert result.exit_code == 0
    selector_instance.confirm.assert_called_with(
        "You are already logged in as testuser. Would you like to relogin"
    )
    auth_service_instance.save_user_profile.assert_called_with(
        new_profile_mock, "new_token"
    )
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_called_with("Successfully logged in as newuser! ✨")


def test_auth_already_logged_in_relogin_no(
    runner, mock_config, mock_auth_service, mock_feedback_service, mock_selector
):
    """Test 'viu anilist auth' when already logged in and user chooses not to relogin."""
    auth_service_instance = mock_auth_service.return_value
    auth_profile_mock = MagicMock()
    auth_profile_mock.user_profile.name = "testuser"
    auth_service_instance.get_auth.return_value = auth_profile_mock

    selector_instance = mock_selector.return_value
    selector_instance.confirm.return_value = False

    result = runner.invoke(auth, [], obj=mock_config)

    assert result.exit_code == 0
    auth_service_instance.save_user_profile.assert_not_called()
    feedback_instance = mock_feedback_service.return_value
    feedback_instance.info.assert_not_called()
```
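All of these tests lean on the same Click testing pattern: `CliRunner.invoke` runs the command in-process, and the `obj=` keyword seeds `ctx.obj`, which is how the `auth` command receives its config. A stripped-down illustration of just that mechanism (the toy command below is hypothetical, not part of the project):

```python
import click
from click.testing import CliRunner


@click.command()
@click.pass_obj
def whoami(config):
    """Toy command that reads the context object, like `auth` reads config."""
    click.echo(f"interactive={config['interactive']}")


def test_whoami():
    # obj= becomes ctx.obj, which @click.pass_obj hands to the command.
    result = CliRunner().invoke(whoami, [], obj={"interactive": True})
    assert result.exit_code == 0
    assert "interactive=True" in result.output
```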
New empty package markers:

- `tests/libs/__init__.py` (new file, empty)
- `tests/libs/media_api/__init__.py` (new file, empty)
- `tests/libs/media_api/anilist/__init__.py` (new file, empty)
`tests/libs/media_api/anilist/test_mapper.py` (new file, +54)

```python
from typing import Any

from viu_media.libs.media_api.anilist.mapper import to_generic_user_profile
from viu_media.libs.media_api.anilist.types import AnilistViewerData
from viu_media.libs.media_api.types import UserProfile


def test_to_generic_user_profile_success():
    data: AnilistViewerData = {
        "data": {
            "Viewer": {
                "id": 123,
                "name": "testuser",
                "avatar": {
                    "large": "https://example.com/avatar.png",
                    "medium": "https://example.com/avatar_medium.png",
                    "extraLarge": "https://example.com/avatar_extraLarge.png",
                    "small": "https://example.com/avatar_small.png",
                },
                "bannerImage": "https://example.com/banner.png",
                "token": "test_token",
            }
        }
    }
    profile = to_generic_user_profile(data)
    assert isinstance(profile, UserProfile)
    assert profile.id == 123
    assert profile.name == "testuser"
    assert profile.avatar_url == "https://example.com/avatar.png"
    assert profile.banner_url == "https://example.com/banner.png"


def test_to_generic_user_profile_data_none():
    data: Any = {"data": None}
    profile = to_generic_user_profile(data)
    assert profile is None


def test_to_generic_user_profile_no_data_key():
    data: Any = {"errors": [{"message": "Invalid token"}]}
    profile = to_generic_user_profile(data)
    assert profile is None


def test_to_generic_user_profile_no_viewer_key():
    data: Any = {"data": {"Page": {}}}
    profile = to_generic_user_profile(data)
    assert profile is None


def test_to_generic_user_profile_viewer_none():
    data: Any = {"data": {"Viewer": None}}
    profile = to_generic_user_profile(data)
    assert profile is None
```
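The four failure-path tests pin down one contract: any missing or null level of the AniList response must yield `None` rather than a `KeyError`. A sketch of the defensive-access shape they imply (illustrative only; the real `to_generic_user_profile` returns a `UserProfile` model, and its internals are not shown in this diff):

```python
from typing import Any, Optional


def viewer_or_none(data: Any) -> Optional[dict]:
    """Walk data -> "data" -> "Viewer", returning None on any gap."""
    if not isinstance(data, dict):
        return None
    inner = data.get("data")
    if not isinstance(inner, dict):
        return None
    viewer = inner.get("Viewer")
    return viewer if isinstance(viewer, dict) else None


# Mirrors the failure cases the tests above assert on.
assert viewer_or_none({"data": None}) is None
assert viewer_or_none({"errors": [{"message": "Invalid token"}]}) is None
assert viewer_or_none({"data": {"Page": {}}}) is None
assert viewer_or_none({"data": {"Viewer": None}}) is None
```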
`uv.lock` (generated; 98 lines changed)

Lockfile refresh. Version bumps, each with correspondingly updated sdist/wheel URLs, hashes, and sizes:

| Package | Old | New |
|---|---|---|
| filelock | 3.20.0 | 3.20.1 |
| nodeenv | 1.9.1 | 1.10.0 |
| platformdirs | 4.5.0 | 4.5.1 |
| pre-commit | 4.5.0 | 4.5.1 |
| pyinstaller-hooks-contrib | 2025.10 | 2025.11 |
| pytest | 8.4.2 | 9.0.2 |
| pytest-httpx | 0.35.0 | 0.36.0 |
| ruff | 0.14.7 | 0.14.10 |
| viu-media | 3.3.3 | 3.3.6 |
| yt-dlp | 2025.11.12 | 2025.12.8 |

The `viu-media` entry also picks up the new platform markers from `pyproject.toml`:

```diff
@@ -3768,8 +3768,10 @@
 notifications = [
-    { name = "dbus-python" },
+    { name = "dbus-python", marker = "sys_platform == 'linux'" },
     { name = "plyer" },
+    { name = "pyobjc", marker = "sys_platform == 'darwin'" },
+    { name = "pypiwin32", marker = "sys_platform == 'win32'" },
 ]
@@ -3801,8 +3803,8 @@
-    { name = "dbus-python", marker = "extra == 'notifications'", specifier = ">=1.4.0" },
+    { name = "dbus-python", marker = "sys_platform == 'linux' and extra == 'notifications'", specifier = ">=1.4.0" },
     { name = "dbus-python", marker = "sys_platform == 'linux' and extra == 'standard'" },
@@ -3815,7 +3817,9 @@
+    { name = "pyobjc", marker = "sys_platform == 'darwin' and extra == 'notifications'" },
     { name = "pyobjc", marker = "sys_platform == 'darwin' and extra == 'standard'" },
+    { name = "pypiwin32", marker = "sys_platform == 'win32' and extra == 'notifications'" },
     { name = "pypiwin32", marker = "sys_platform == 'win32' and extra == 'standard'" },
```
@@ -5,7 +5,8 @@
    "Dungeon ni Deai o Motomeru no wa Machigatte Iru Darouka": "Dungeon ni Deai wo Motomeru no wa Machigatteiru Darou ka",
    "Hazurewaku no \"Joutai Ijou Skill\" de Saikyou ni Natta Ore ga Subete wo Juurin suru made": "Hazure Waku no [Joutai Ijou Skill] de Saikyou ni Natta Ore ga Subete wo Juurin Suru made",
    "Re:Zero kara Hajimeru Isekai Seikatsu Season 3": "Re:Zero kara Hajimeru Isekai Seikatsu 3rd Season",
    "Hanka×Hanka (2011)": "Hunter × Hunter (2011)"
    "Hanka×Hanka (2011)": "Hunter × Hunter (2011)",
    "Burichi -": "bleach"
  },
  "hianime": {
    "My Star": "Oshi no Ko"

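The mapping above is plain JSON keyed by provider name, so resolving a provider title to its canonical media-API title is a single dictionary lookup. A minimal sketch of that resolution (the loader function and fallback behaviour here are illustrative, not code from this changeset):

import json
from pathlib import Path

def normalize_title(provider: str, title: str, path: Path) -> str:
    # Unknown titles fall through unchanged, so lookups are always safe.
    mappings = json.loads(path.read_text(encoding="utf-8"))
    return mappings.get(provider, {}).get(title, title)

# e.g. normalize_title("allanime", "Hanka×Hanka (2011)", Path("normalizer.json"))
# would return "Hunter × Hunter (2011)" given the entries above.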
323 viu_media/assets/scripts/fzf/_filter_parser.py (new file)
@@ -0,0 +1,323 @@
#!/usr/bin/env python3
"""
Filter Parser for Dynamic Search

This module provides a parser for the special filter syntax used in dynamic search.
Filter syntax allows users to add filters inline with their search query.

SYNTAX:
    @filter:value          - Apply a filter with the given value
    @filter:value1,value2  - Apply multiple values (for array filters)
    @filter:!value         - Exclude/negate a filter value

SUPPORTED FILTERS:
    @genre:action,comedy   - Filter by genres
    @genre:!hentai         - Exclude genre
    @status:airing         - Filter by status (airing, finished, upcoming, cancelled, hiatus)
    @year:2024             - Filter by season year
    @season:winter         - Filter by season (winter, spring, summer, fall)
    @format:tv,movie       - Filter by format (tv, movie, ova, ona, special, music)
    @sort:score            - Sort by (score, popularity, trending, title, date)
    @score:>80             - Minimum score
    @score:<50             - Maximum score
    @popularity:>10000     - Minimum popularity
    @onlist                - Only show anime on user's list
    @onlist:false          - Only show anime NOT on user's list

EXAMPLES:
    "naruto @genre:action @status:finished"
    "isekai @year:2024 @season:winter @sort:score"
    "@genre:action,adventure @status:airing"
    "romance @genre:!hentai @format:tv,movie"
"""

import re
from typing import Any, Dict, List, Optional, Tuple

# Mapping of user-friendly filter names to GraphQL variable names
FILTER_ALIASES = {
    # Status aliases
    "airing": "RELEASING",
    "releasing": "RELEASING",
    "finished": "FINISHED",
    "completed": "FINISHED",
    "upcoming": "NOT_YET_RELEASED",
    "not_yet_released": "NOT_YET_RELEASED",
    "unreleased": "NOT_YET_RELEASED",
    "cancelled": "CANCELLED",
    "canceled": "CANCELLED",
    "hiatus": "HIATUS",
    "paused": "HIATUS",
    # Format aliases
    "tv": "TV",
    "tv_short": "TV_SHORT",
    "tvshort": "TV_SHORT",
    "movie": "MOVIE",
    "film": "MOVIE",
    "ova": "OVA",
    "ona": "ONA",
    "special": "SPECIAL",
    "music": "MUSIC",
    # Season aliases
    "winter": "WINTER",
    "spring": "SPRING",
    "summer": "SUMMER",
    "fall": "FALL",
    "autumn": "FALL",
    # Sort aliases
    "score": "SCORE_DESC",
    "score_desc": "SCORE_DESC",
    "score_asc": "SCORE",
    "popularity": "POPULARITY_DESC",
    "popularity_desc": "POPULARITY_DESC",
    "popularity_asc": "POPULARITY",
    "trending": "TRENDING_DESC",
    "trending_desc": "TRENDING_DESC",
    "trending_asc": "TRENDING",
    "title": "TITLE_ROMAJI",
    "title_desc": "TITLE_ROMAJI_DESC",
    "date": "START_DATE_DESC",
    "date_desc": "START_DATE_DESC",
    "date_asc": "START_DATE",
    "newest": "START_DATE_DESC",
    "oldest": "START_DATE",
    "favourites": "FAVOURITES_DESC",
    "favorites": "FAVOURITES_DESC",
    "episodes": "EPISODES_DESC",
}

# Genre name normalization (lowercase -> proper case)
GENRE_NAMES = {
    "action": "Action",
    "adventure": "Adventure",
    "comedy": "Comedy",
    "drama": "Drama",
    "ecchi": "Ecchi",
    "fantasy": "Fantasy",
    "horror": "Horror",
    "mahou_shoujo": "Mahou Shoujo",
    "mahou": "Mahou Shoujo",
    "magical_girl": "Mahou Shoujo",
    "mecha": "Mecha",
    "music": "Music",
    "mystery": "Mystery",
    "psychological": "Psychological",
    "romance": "Romance",
    "sci-fi": "Sci-Fi",
    "scifi": "Sci-Fi",
    "sci_fi": "Sci-Fi",
    "slice_of_life": "Slice of Life",
    "sol": "Slice of Life",
    "sports": "Sports",
    "supernatural": "Supernatural",
    "thriller": "Thriller",
    "hentai": "Hentai",
}

# Filter pattern: @key:value or @key (boolean flags)
FILTER_PATTERN = re.compile(r"@(\w+)(?::([^\s]+))?", re.IGNORECASE)

# Comparison operators for numeric filters
COMPARISON_PATTERN = re.compile(r"^([<>]=?)?(\d+)$")


def normalize_value(value: str, value_type: str) -> str:
    """Normalize a filter value based on its type."""
    value_lower = value.lower().strip()

    if value_type == "genre":
        return GENRE_NAMES.get(value_lower, value.title())
    elif value_type in ("status", "format", "season", "sort"):
        return FILTER_ALIASES.get(value_lower, value.upper())

    return value


def parse_value_list(value_str: str) -> Tuple[List[str], List[str]]:
    """
    Parse a comma-separated value string, separating includes from excludes.

    Returns:
        Tuple of (include_values, exclude_values)
    """
    includes = []
    excludes = []

    for val in value_str.split(","):
        val = val.strip()
        if not val:
            continue
        if val.startswith("!"):
            excludes.append(val[1:])
        else:
            includes.append(val)

    return includes, excludes


def parse_comparison(value: str) -> Tuple[Optional[str], Optional[int]]:
    """
    Parse a comparison value like ">80" or "<50".

    Returns:
        Tuple of (operator, number) or (None, None) if invalid
    """
    match = COMPARISON_PATTERN.match(value)
    if match:
        operator = match.group(1) or ">"  # Default to greater than
        number = int(match.group(2))
        return operator, number
    return None, None


def parse_filters(query: str) -> Tuple[str, Dict[str, Any]]:
    """
    Parse a search query and extract filter directives.

    Args:
        query: The full search query including filter syntax

    Returns:
        Tuple of (clean_query, filters_dict)
        - clean_query: The query with filter syntax removed
        - filters_dict: Dictionary of GraphQL variables to apply
    """
    filters: Dict[str, Any] = {}

    # Find all filter matches
    matches = list(FILTER_PATTERN.finditer(query))

    for match in matches:
        filter_name = match.group(1).lower()
        filter_value = match.group(2)  # May be None for boolean flags

        # Handle different filter types
        if filter_name == "genre":
            if filter_value:
                includes, excludes = parse_value_list(filter_value)
                if includes:
                    normalized = [normalize_value(v, "genre") for v in includes]
                    filters.setdefault("genre_in", []).extend(normalized)
                if excludes:
                    normalized = [normalize_value(v, "genre") for v in excludes]
                    filters.setdefault("genre_not_in", []).extend(normalized)

        elif filter_name == "status":
            if filter_value:
                includes, excludes = parse_value_list(filter_value)
                if includes:
                    normalized = [normalize_value(v, "status") for v in includes]
                    filters.setdefault("status_in", []).extend(normalized)
                if excludes:
                    normalized = [normalize_value(v, "status") for v in excludes]
                    filters.setdefault("status_not_in", []).extend(normalized)

        elif filter_name == "format":
            if filter_value:
                includes, _ = parse_value_list(filter_value)
                if includes:
                    normalized = [normalize_value(v, "format") for v in includes]
                    filters.setdefault("format_in", []).extend(normalized)

        elif filter_name == "year":
            if filter_value:
                try:
                    filters["seasonYear"] = int(filter_value)
                except ValueError:
                    pass  # Invalid year, skip

        elif filter_name == "season":
            if filter_value:
                filters["season"] = normalize_value(filter_value, "season")

        elif filter_name == "sort":
            if filter_value:
                sort_val = normalize_value(filter_value, "sort")
                filters["sort"] = [sort_val]

        elif filter_name == "score":
            if filter_value:
                op, num = parse_comparison(filter_value)
                if num is not None:
                    if op in (">", ">="):
                        filters["averageScore_greater"] = num
                    elif op in ("<", "<="):
                        filters["averageScore_lesser"] = num

        elif filter_name == "popularity":
            if filter_value:
                op, num = parse_comparison(filter_value)
                if num is not None:
                    if op in (">", ">="):
                        filters["popularity_greater"] = num
                    elif op in ("<", "<="):
                        filters["popularity_lesser"] = num

        elif filter_name == "onlist":
            if filter_value is None or filter_value.lower() in ("true", "yes", "1"):
                filters["on_list"] = True
            elif filter_value.lower() in ("false", "no", "0"):
                filters["on_list"] = False

        elif filter_name == "tag":
            if filter_value:
                includes, excludes = parse_value_list(filter_value)
                if includes:
                    # Tags use title case typically
                    normalized = [v.replace("_", " ").title() for v in includes]
                    filters.setdefault("tag_in", []).extend(normalized)
                if excludes:
                    normalized = [v.replace("_", " ").title() for v in excludes]
                    filters.setdefault("tag_not_in", []).extend(normalized)

    # Remove filter syntax from query to get clean search text
    clean_query = FILTER_PATTERN.sub("", query).strip()
    # Clean up multiple spaces
    clean_query = re.sub(r"\s+", " ", clean_query).strip()

    return clean_query, filters


def get_help_text() -> str:
    """Return a help string describing the filter syntax."""
    return """
╭─────────────────── Filter Syntax Help ───────────────────╮
│                                                           │
│  @genre:action,comedy       Filter by genres              │
│  @genre:!hentai             Exclude genre                 │
│  @status:airing             Status: airing, finished,     │
│                             upcoming, cancelled, hiatus   │
│  @year:2024                 Filter by year                │
│  @season:winter             winter, spring, summer, fall  │
│  @format:tv,movie           tv, movie, ova, ona, special  │
│  @sort:score                score, popularity, trending,  │
│                             date, title, newest, oldest   │
│  @score:>80                 Minimum score                 │
│  @score:<50                 Maximum score                 │
│  @popularity:>10000         Minimum popularity            │
│  @onlist                    Only on your list             │
│  @onlist:false              Not on your list              │
│  @tag:isekai,reincarnation  Filter by tags                │
│                                                           │
│  Examples:                                                │
│    naruto @genre:action @status:finished                  │
│    @genre:action,adventure @year:2024 @sort:score         │
│    isekai @season:winter @year:2024                       │
│                                                           │
╰──────────────────────────────────────────────────────────╯
""".strip()


if __name__ == "__main__":
    # Test the parser
    import json
    import sys

    if len(sys.argv) > 1:
        test_query = " ".join(sys.argv[1:])
        clean, filters = parse_filters(test_query)
        print(f"Original: {test_query}")
        print(f"Clean query: {clean}")
        print(f"Filters: {json.dumps(filters, indent=2)}")
    else:
        print(get_help_text())
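Since the module ships its own __main__ harness, the parser is easy to exercise directly. A short usage sketch of parse_filters as defined above (the expected output follows from the alias tables in the file):

from _filter_parser import parse_filters

clean, filters = parse_filters("isekai @genre:action,!ecchi @year:2024 @score:>80")
# clean   -> "isekai"
# filters -> {"genre_in": ["Action"], "genre_not_in": ["Ecchi"],
#             "seasonYear": 2024, "averageScore_greater": 80}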
@@ -45,6 +45,15 @@ def format_number(num):
    return f"{num:,}"


def format_score_stars(score):
    """Format score as stars out of 6."""
    if score is None:
        return "N/A"
    # Convert 0-100 score to 0-6 stars, capped at 6 for consistency
    stars = min(round(score * 6 / 100), 6)
    return "⭐" * stars + f" ({score}/100)"


def format_date(date_obj):
    """Format date object to string."""
    if not date_obj or date_obj == "null":
@@ -342,31 +351,68 @@ def main():
    # Extract data
    status = media.get("status", "Unknown")
    format_type = media.get("format", "Unknown")
    episodes = media.get("episodes", "?")
    episodes = media.get("episodes", "??")
    duration = media.get("duration")
    duration_str = f"{duration} min" if duration else "Unknown"
    duration_str = f"{duration} min/ep" if duration else "Unknown"

    score = media.get("averageScore")
    score_str = f"{score}/100" if score else "N/A"
    score_str = format_score_stars(score)

    favourites = format_number(media.get("favourites", 0))
    popularity = format_number(media.get("popularity", 0))

    genres = ", ".join(media.get("genres", [])[:5]) or "Unknown"
    genres = ", ".join(media.get("genres", [])) or "Unknown"

    start_date = format_date(media.get("startDate"))
    end_date = format_date(media.get("endDate"))

    studios_list = media.get("studios", {}).get("nodes", [])
    studios = ", ".join([s.get("name", "") for s in studios_list[:3]]) or "Unknown"
    # Studios are those with isAnimationStudio=true
    studios = ", ".join([s["name"] for s in studios_list if s.get("name") and s.get("isAnimationStudio")]) or "N/A"
    # Producers are those with isAnimationStudio=false
    producers = ", ".join([s["name"] for s in studios_list if s.get("name") and not s.get("isAnimationStudio")]) or "N/A"

    synonyms_list = media.get("synonyms", [])
    synonyms = ", ".join(synonyms_list[:3]) or "N/A"
    # Include romaji in synonyms if different from title
    romaji = title_obj.get("romaji")
    if romaji and romaji != title and romaji not in synonyms_list:
        synonyms_list = [romaji] + synonyms_list
    synonyms = ", ".join(synonyms_list) or "N/A"

    # Tags
    tags_list = media.get("tags", [])
    tags = ", ".join([t.get("name", "") for t in tags_list if t.get("name")]) or "N/A"

    # Next airing episode
    next_airing = media.get("nextAiringEpisode")
    if next_airing:
        next_ep = next_airing.get("episode", "?")
        airing_at = next_airing.get("airingAt")
        if airing_at:
            from datetime import datetime
            try:
                dt = datetime.fromtimestamp(airing_at)
                next_episode_str = f"Episode {next_ep} on {dt.strftime('%A, %d %B %Y at %H:%M')}"
            except (ValueError, OSError):
                next_episode_str = f"Episode {next_ep}"
        else:
            next_episode_str = f"Episode {next_ep}"
    else:
        next_episode_str = "N/A"

    # User list status
    media_list_entry = media.get("mediaListEntry")
    if media_list_entry:
        user_status = media_list_entry.get("status", "NOT_ON_LIST")
        user_progress = f"Episode {media_list_entry.get('progress', 0)}"
    else:
        user_status = "NOT_ON_LIST"
        user_progress = "0"

    description = media.get("description", "No description available.")
    description = strip_markdown(description)

    # Print sections matching media_info.py structure
    # Print sections matching media_info.py structure exactly
    rows = [
        ("Score", score_str),
        ("Favorites", favourites),
@@ -376,16 +422,17 @@ def main():

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 0, 0)
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("Episodes", str(episodes)),
        ("Duration", duration_str),
        ("Next Episode", next_episode_str),
    ]

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 0, 0)
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("Genres", genres),
@@ -394,7 +441,16 @@ def main():

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 0, 0)
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("List Status", user_status),
        ("Progress", user_progress),
    ]

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("Start Date", start_date),
@@ -403,15 +459,16 @@ def main():

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 0, 0)
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("Studios", studios),
        ("Producers", producers),
    ]

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 0, 0)
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("Synonyms", synonyms),
@@ -419,7 +476,15 @@ def main():

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 0, 0)
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    rows = [
        ("Tags", tags),
    ]

    print_rule(SEPARATOR_COLOR)
    for key, value in rows:
        print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)

    print_rule(SEPARATOR_COLOR)
    print(wrap_text(description, term_width))

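The star conversion introduced above is plain proportional rounding from the 0-100 AniList scale down to 0-6 stars. A quick worked check of the formula (values chosen for illustration):

# stars = min(round(score * 6 / 100), 6)
#   score = 83  -> round(4.98) = 5 stars
#   score = 100 -> round(6.00) = 6 stars (the min() cap only matters above 100)
#   score = 8   -> round(0.48) = 0 stars, leaving just " (8/100)"
assert min(round(83 * 6 / 100), 6) == 5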
@@ -67,6 +67,7 @@ for key, value in rows:

rows = [
    ("Studios", "{STUDIOS}"),
    ("Producers", "{PRODUCERS}"),
]

print_rule(SEPARATOR_COLOR)

@@ -5,6 +5,18 @@
# This script is a template for dynamic search functionality in fzf.
# The placeholders in curly braces, like {GRAPHQL_ENDPOINT} are dynamically
# filled by Python using .replace() during runtime.
#
# FILTER SYNTAX:
#   @genre:action,comedy      Filter by genres
#   @genre:!hentai            Exclude genre
#   @status:airing            Status: airing, finished, upcoming, cancelled, hiatus
#   @year:2024                Filter by year
#   @season:winter            winter, spring, summer, fall
#   @format:tv,movie          tv, movie, ova, ona, special
#   @sort:score               score, popularity, trending, date, title
#   @score:>80 / @score:<50   Min/max score
#   @onlist / @onlist:false   Filter by list status
#   @tag:isekai               Filter by tags

import json
import sys
@@ -12,9 +24,13 @@ from pathlib import Path
from urllib import request
from urllib.error import URLError

# Import the filter parser
from _filter_parser import parse_filters

# --- Template Variables (Injected by Python) ---
GRAPHQL_ENDPOINT = "{GRAPHQL_ENDPOINT}"
SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
LAST_QUERY_FILE = Path("{LAST_QUERY_FILE}")
AUTH_HEADER = "{AUTH_HEADER}"

# The GraphQL query is injected as a properly escaped JSON string
@@ -22,17 +38,29 @@ GRAPHQL_QUERY = "{GRAPHQL_QUERY}"

# --- Get Query from fzf ---
# fzf passes the current query as the first argument when using --bind change:reload
QUERY = sys.argv[1] if len(sys.argv) > 1 else ""
RAW_QUERY = sys.argv[1] if len(sys.argv) > 1 else ""

# If query is empty, exit with empty results
if not QUERY.strip():
    print("")
# Parse the query to extract filters and clean search text
QUERY, PARSED_FILTERS = parse_filters(RAW_QUERY)

# If query is empty and no filters, show help hint
if not RAW_QUERY.strip():
    print("💡 Tip: Use @genre:action @status:airing for filters (type @help for syntax)")
    sys.exit(0)

# Show filter help if requested
if RAW_QUERY.strip().lower() in ("@help", "@?", "@h"):
    from _filter_parser import get_help_text
    print(get_help_text())
    sys.exit(0)

# If we only have filters (no search text), that's valid - we'll search with filters only
# But if we have neither query nor filters, we already showed the help hint above


def make_graphql_request(
    endpoint: str, query: str, variables: dict, auth_token: str = ""
) -> dict | None:
) -> tuple[dict | None, str | None]:
    """
    Make a GraphQL request to the specified endpoint.

@@ -43,7 +71,7 @@ def make_graphql_request(
        auth_token: Optional authorization token (Bearer token)

    Returns:
        Response JSON as a dictionary, or None if request fails
        Tuple of (Response JSON, error message) - one will be None
    """
    payload = {"query": query, "variables": variables}

@@ -61,10 +89,13 @@ def make_graphql_request(
    )

        with request.urlopen(req, timeout=10) as response:
            return json.loads(response.read().decode("utf-8"))
    except (URLError, json.JSONDecodeError, Exception) as e:
        print(f"❌ Request failed: {e}", file=sys.stderr)
        return None
            return json.loads(response.read().decode("utf-8")), None
    except URLError as e:
        return None, f"Network error: {e.reason}"
    except json.JSONDecodeError as e:
        return None, f"Invalid response: {e}"
    except Exception as e:
        return None, f"Request error: {e}"


def extract_title(media_item: dict) -> str:
@@ -90,34 +121,67 @@ def main():
    # Ensure parent directory exists
    SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)

    # Create GraphQL variables
    # Base GraphQL variables
    variables = {
        "query": QUERY,
        "type": "ANIME",
        "per_page": 50,
        "genre_not_in": ["Hentai"],
        "genre_not_in": ["Hentai"],  # Default exclusion
    }

    # Add search query if provided
    if QUERY:
        variables["query"] = QUERY

    # Apply parsed filters from the filter syntax
    for key, value in PARSED_FILTERS.items():
        # Handle array merging for _in and _not_in fields
        if key.endswith("_in") or key.endswith("_not_in"):
            if key in variables:
                # Merge arrays, avoiding duplicates
                existing = set(variables[key])
                existing.update(value)
                variables[key] = list(existing)
            else:
                variables[key] = value
        else:
            variables[key] = value

    # Make the GraphQL request
    response = make_graphql_request(
    response, error = make_graphql_request(
        GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER
    )

    if response is None:
        print("❌ Search failed")
    if error:
        print(f"❌ {error}")
        # Also show what we tried to search for debugging
        print(f" Query: {QUERY or '(none)'}")
        print(f" Filters: {json.dumps(PARSED_FILTERS) if PARSED_FILTERS else '(none)'}")
        sys.exit(1)

    if response is None:
        print("❌ Search failed: No response received")
        sys.exit(1)

    # Check for GraphQL errors first (these come in the response body)
    if "errors" in response:
        errors = response["errors"]
        if errors:
            # Extract error messages
            error_msgs = [e.get("message", str(e)) for e in errors]
            print(f"❌ API Error: {'; '.join(error_msgs)}")
            # Show variables for debugging
            print(f" Filters used: {json.dumps(PARSED_FILTERS, indent=2) if PARSED_FILTERS else '(none)'}")
            sys.exit(1)

    # Save the raw response for later processing by dynamic_search.py
    try:
        with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f:
            json.dump(response, f, ensure_ascii=False, indent=2)
        # Also save the raw query so it can be restored when going back
        with open(LAST_QUERY_FILE, "w", encoding="utf-8") as f:
            f.write(RAW_QUERY)
    except IOError as e:
        print(f"❌ Failed to save results: {e}", file=sys.stderr)
        sys.exit(1)

    # Parse and display results
    if "errors" in response:
        print(f"❌ Search error: {response['errors']}")
        print(f"❌ Failed to save results: {e}")
        sys.exit(1)

    # Navigate the response structure
@@ -126,7 +190,9 @@ def main():
    media_list = page.get("media", [])

    if not media_list:
        print("❌ No results found")
        print("🔍 No results found")
        if PARSED_FILTERS:
            print(" Try adjusting your filters")
        sys.exit(0)

    # Output titles for fzf (one per line)
@@ -141,5 +207,5 @@ if __name__ == "__main__":
    except KeyboardInterrupt:
        sys.exit(0)
    except Exception as e:
        print(f"❌ Unexpected error: {e}", file=sys.stderr)
        print(f"❌ Unexpected error: {type(e).__name__}: {e}")
        sys.exit(1)

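make_graphql_request now returns a (data, error) pair instead of printing and returning None, which lets the caller decide how to render failures. A minimal sketch of consuming that contract (the endpoint and variables are illustrative):

response, error = make_graphql_request(
    "https://graphql.anilist.co", GRAPHQL_QUERY, {"query": "naruto", "type": "ANIME"}, ""
)
if error is not None:
    print(f"❌ {error}")  # exactly one of the pair is None
elif response is not None:
    media = response.get("data", {}).get("Page", {}).get("media", [])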
@@ -189,7 +189,7 @@ You can disable this message by turning off the welcome_screen option in the con
    ):
        import subprocess

        _cli_cmd_name="viu" if not shutil.which("viu-media") else "viu-media"
        _cli_cmd_name = "viu" if not shutil.which("viu-media") else "viu-media"
        cmd = [_cli_cmd_name, "config", "--update"]
        print(f"running '{' '.join(cmd)}'...")
        subprocess.run(cmd)

@@ -1,25 +1,72 @@
import click
import webbrowser
from pathlib import Path
import click

from .....core.config.model import AppConfig


def _get_token(feedback, selector, token_input: str | None) -> str | None:
    """
    Retrieves the authentication token from a file path, a direct string, or an interactive prompt.
    """
    if token_input:
        path = Path(token_input)
        if path.is_file():
            try:
                token = path.read_text().strip()
                if not token:
                    feedback.error(f"Token file is empty: {path}")
                    return None
                return token
            except Exception as e:
                feedback.error(f"Error reading token from file: {e}")
                return None
        return token_input

    from .....core.constants import ANILIST_AUTH

    open_success = webbrowser.open(ANILIST_AUTH, new=2)
    if open_success:
        feedback.info("Your browser has been opened to obtain an AniList token.")
        feedback.info(
            f"Or you can visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    else:
        feedback.warning(
            f"Failed to open the browser. Please visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    feedback.info(
        "After authorizing, copy the token from the address bar and paste it below."
    )
    return selector.ask("Enter your AniList Access Token")


@click.command(help="Login to your AniList account to enable progress tracking.")
@click.option("--status", "-s", is_flag=True, help="Check current login status.")
@click.option("--logout", "-l", is_flag=True, help="Log out and erase credentials.")
@click.argument("token_input", required=False, type=str)
@click.pass_obj
def auth(config: AppConfig, status: bool, logout: bool):
    """Handles user authentication and credential management."""
    from .....core.constants import ANILIST_AUTH
def auth(config: AppConfig, status: bool, logout: bool, token_input: str | None):
    """
    Handles user authentication and credential management.

    This command allows you to log in to your AniList account to enable
    progress tracking and other features.

    You can provide your authentication token in three ways:
    1. Interactively: Run the command without arguments to open a browser
       and be prompted to paste the token.
    2. As an argument: Pass the token string directly to the command.
       $ viu anilist auth "your_token_here"
    3. As a file: Pass the path to a text file containing the token.
       $ viu anilist auth /path/to/token.txt
    """
    from .....libs.media_api.api import create_api_client
    from .....libs.selectors.selector import create_selector
    from ....service.auth import AuthService
    from ....service.feedback import FeedbackService

    auth_service = AuthService("anilist")
    feedback = FeedbackService(config)
    selector = create_selector(config)
    feedback.clear_console()

    if status:
        user_data = auth_service.get_auth()
@@ -29,6 +76,11 @@ def auth(config: AppConfig, status: bool, logout: bool):
        feedback.error("Not logged in.")
        return

    from .....libs.selectors.selector import create_selector

    selector = create_selector(config)
    feedback.clear_console()

    if logout:
        if selector.confirm("Are you sure you want to log out and erase your token?"):
            auth_service.clear_user_profile()
@@ -40,27 +92,14 @@ def auth(config: AppConfig, status: bool, logout: bool):
        f"You are already logged in as {auth_profile.user_profile.name}. Would you like to re-login?"
    ):
        return
    api_client = create_api_client("anilist", config)
    token = _get_token(feedback, selector, token_input)

    open_success = webbrowser.open(ANILIST_AUTH, new=2)
    if open_success:
        feedback.info("Your browser has been opened to obtain an AniList token.")
        feedback.info(
            f"or you can visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    else:
        feedback.warning(
            f"Failed to open the browser. Please visit the site manually [magenta][link={ANILIST_AUTH}]here[/link][/magenta]."
        )
    feedback.info(
        "After authorizing, copy the token from the address bar and paste it below."
    )

    token = selector.ask("Enter your AniList Access Token")
    if not token:
        feedback.error("Login cancelled.")
    if not token_input:
        feedback.error("Login cancelled.")
        return

    api_client = create_api_client("anilist", config)
    # Use the API client to validate the token and get profile info
    profile = api_client.authenticate(token.strip())


@@ -30,6 +30,9 @@ from ...core.config import AppConfig
    \b
    # view the current contents of your config
    viu config --view
    \b
    # clear cached GitHub authentication token
    viu config --clear-github-auth
    """,
)
@click.option("--path", "-p", help="Print the config location and exit", is_flag=True)
@@ -60,6 +63,11 @@ from ...core.config import AppConfig
    is_flag=True,
    help="Start the interactive configuration wizard.",
)
@click.option(
    "--clear-github-auth",
    is_flag=True,
    help="Clear cached GitHub authentication token.",
)
@click.pass_obj
def config(
    user_config: AppConfig,
@@ -69,12 +77,18 @@ def config(
    generate_desktop_entry,
    update,
    interactive,
    clear_github_auth,
):
    from ...core.constants import USER_CONFIG
    from ..config.editor import InteractiveConfigEditor
    from ..config.generate import generate_config_toml_from_app_model

    if path:
    if clear_github_auth:
        from ..service.github import GitHubContributionService

        GitHubContributionService.clear_cached_auth_static()
        click.echo("GitHub authentication cache cleared.")
    elif path:
        print(USER_CONFIG)
    elif view:
        from rich.console import Console

@@ -71,7 +71,7 @@ class ConfigLoader:

        return app_config

    def load(self, update: Dict = {}) -> AppConfig:
    def load(self, update: Dict = {}, allow_setup=True) -> AppConfig:
        """
        Loads the configuration and returns a populated, validated AppConfig object.

@@ -84,7 +84,7 @@ class ConfigLoader:
        Raises:
            ConfigError: If the configuration file contains validation or parsing errors.
        """
        if not self.config_path.exists():
        if not self.config_path.exists() and allow_setup:
            return self._handle_first_run()

        try:

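The new allow_setup flag lets non-interactive callers bypass the first-run wizard. A sketch of that use (the import path for ConfigLoader is an assumption; it is not shown in this diff):

from viu_media.core.config import ConfigLoader

loader = ConfigLoader()
# With allow_setup=False a missing config file skips _handle_first_run()
# and proceeds to the normal load/validation path instead of prompting.
config = loader.load(allow_setup=False)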
1 viu_media/cli/interactive/menu/__init__.py (new file)
@@ -0,0 +1 @@
# Menu package for interactive session
18 viu_media/cli/interactive/menu/media/__init__.py (new file)
@@ -0,0 +1,18 @@
# Media menu modules
# Explicit module list for PyInstaller compatibility
__all__ = [
    "downloads",
    "download_episodes",
    "dynamic_search",
    "episodes",
    "main",
    "media_actions",
    "media_airing_schedule",
    "media_characters",
    "media_review",
    "player_controls",
    "play_downloads",
    "provider_search",
    "results",
    "servers",
]
@@ -1,9 +1,10 @@
import json
import logging
import sys
import shutil
from pathlib import Path

from .....core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from .....core.utils.detect import get_python_executable
from .....libs.media_api.params import MediaSearchParams
from ...session import Context, session
from ...state import InternalDirective, MediaApiState, MenuName, State
@@ -12,8 +13,36 @@ logger = logging.getLogger(__name__)

SEARCH_CACHE_DIR = APP_CACHE_DIR / "previews" / "dynamic-search"
SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json"
LAST_QUERY_FILE = SEARCH_CACHE_DIR / "last_query.txt"
RESTORE_MODE_FILE = SEARCH_CACHE_DIR / ".restore_mode"
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(encoding="utf-8")
FILTER_PARSER_SCRIPT = FZF_SCRIPTS_DIR / "_filter_parser.py"


def _load_cached_titles() -> list[str]:
    """Load titles from cached search results for display in fzf."""
    if not SEARCH_RESULTS_FILE.exists():
        return []

    try:
        with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
            data = json.load(f)

        media_list = data.get("data", {}).get("Page", {}).get("media", [])
        titles = []
        for media in media_list:
            title_obj = media.get("title", {})
            title = (
                title_obj.get("english")
                or title_obj.get("romaji")
                or title_obj.get("native")
                or "Unknown"
            )
            titles.append(title)
        return titles
    except (IOError, json.JSONDecodeError):
        return []


@session.menu
@@ -25,6 +54,12 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
    # Ensure cache directory exists
    SEARCH_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    # Check if we're in restore mode (coming back from media_actions)
    restore_mode = RESTORE_MODE_FILE.exists()
    if restore_mode:
        # Clear the restore flag
        RESTORE_MODE_FILE.unlink(missing_ok=True)

    # Read the GraphQL search query
    from .....libs.media_api.anilist import gql

@@ -44,6 +79,7 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
        "GRAPHQL_ENDPOINT": "https://graphql.anilist.co",
        "GRAPHQL_QUERY": search_query_json,
        "SEARCH_RESULTS_FILE": SEARCH_RESULTS_FILE.as_posix(),
        "LAST_QUERY_FILE": LAST_QUERY_FILE.as_posix(),
        "AUTH_HEADER": auth_header,
    }

@@ -54,12 +90,34 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
    search_script_file = SEARCH_CACHE_DIR / "search.py"
    search_script_file.write_text(search_command, encoding="utf-8")

    # Copy the filter parser module to the cache directory
    # This is required for the search script to import it
    filter_parser_dest = SEARCH_CACHE_DIR / "_filter_parser.py"
    if FILTER_PARSER_SCRIPT.exists():
        shutil.copy2(FILTER_PARSER_SCRIPT, filter_parser_dest)

    # Make the search script executable by calling it with python3
    # fzf will pass the query as {q} which becomes the first argument
    search_command_final = (
        f"{Path(sys.executable).as_posix()} {search_script_file.as_posix()} {{q}}"
        f"{Path(get_python_executable()).as_posix()} {search_script_file.as_posix()} {{q}}"
    )

    # Header hint for filter syntax
    filter_hint = "💡 Filters: @genre:action @status:airing @year:2024 @sort:score (type @help for more)"

    # Only load previous query if we're in restore mode (coming back from media_actions)
    initial_query = None
    cached_results = None
    if restore_mode:
        # Load previous query
        if LAST_QUERY_FILE.exists():
            try:
                initial_query = LAST_QUERY_FILE.read_text(encoding="utf-8").strip()
            except IOError:
                pass
        # Load cached results to display immediately without network request
        cached_results = _load_cached_titles()

    try:
        # Prepare preview functionality
        preview_command = None
@@ -73,11 +131,17 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
                prompt="Search Anime",
                search_command=search_command_final,
                preview=preview_command,
                header=filter_hint,
                initial_query=initial_query,
                initial_results=cached_results,
            )
        else:
            choice = ctx.selector.search(
                prompt="Search Anime",
                search_command=search_command_final,
                header=filter_hint,
                initial_query=initial_query,
                initial_results=cached_results,
            )
    except NotImplementedError:
        feedback.error("Dynamic search is not supported by your current selector")
@@ -116,6 +180,9 @@ def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
        logger.error(f"Could not find selected media for choice: {choice}")
        return InternalDirective.MAIN

    # Set restore mode flag so we can restore state when user goes back
    RESTORE_MODE_FILE.touch()

    # Navigate to media actions with the selected item
    return State(
        menu_name=MenuName.MEDIA_ACTIONS,

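The menu hands fzf a reload command containing {q}, so fzf re-runs the generated search.py on every keystroke and swaps in its stdout. A sketch of the flags a fzf-backed selector might build from search_command_final (this is an assumption about the selector internals, not code from this diff):

import subprocess

def fzf_dynamic_search(search_command_final: str, header: str) -> str:
    args = [
        "fzf",
        "--disabled",  # fzf does no local filtering; the reload supplies results
        "--prompt", "Search Anime: ",
        "--header", header,
        "--bind", f"change:reload:{search_command_final}",
    ]
    # fzf draws on the terminal and writes the accepted line to stdout.
    result = subprocess.run(args, capture_output=True, text=True)
    return result.stdout.strip()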
@@ -73,6 +73,21 @@ def provider_search(ctx: Context, state: State) -> State | InternalDirective:
        update_user_normalizer_json(
            chosen_title, media_title, config.general.provider.value
        )

        # Offer to submit the mapping to GitHub
        if selector.confirm(
            "Would you like to contribute this mapping to the project on GitHub?"
        ):
            from ....service.github import GitHubContribution

            contribution = GitHubContribution(
                provider_name=config.general.provider.value,
                provider_title=chosen_title,
                media_api_title=media_title,
                anilist_id=media_item.id if hasattr(media_item, "id") else None,
            )
            ctx.github.submit_contribution(contribution)

    selected_provider_anime = provider_results_map[chosen_title]

    with feedback.progress(

@@ -1,6 +1,7 @@
import importlib
import importlib.util
import logging
import os
import pkgutil
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Callable, List, Optional, Union

@@ -16,6 +17,7 @@ if TYPE_CHECKING:
    from ...libs.selectors.base import BaseSelector
    from ..service.auth import AuthService
    from ..service.feedback import FeedbackService
    from ..service.github import GitHubContributionService
    from ..service.player import PlayerService
    from ..service.registry import MediaRegistryService
    from ..service.session import SessionsService
@@ -91,6 +93,7 @@ class Context:
    _session: Optional["SessionsService"] = None
    _auth: Optional["AuthService"] = None
    _player: Optional["PlayerService"] = None
    _github: Optional["GitHubContributionService"] = None

    @property
    def provider(self) -> "BaseAnimeProvider":
@@ -190,6 +193,17 @@ class Context:
            self._auth = AuthService(self.config.general.media_api)
        return self._auth

    @property
    def github(self) -> "GitHubContributionService":
        if not self._github:
            from ..service.github.service import GitHubContributionService

            self._github = GitHubContributionService(
                selector=self.selector,
                feedback=self.feedback,
            )
        return self._github


MenuFunction = Callable[[Context, State], Union[State, InternalDirective]]

@@ -309,30 +323,46 @@ class Session:
        return decorator

    def load_menus_from_folder(self, package: str):
        package_path = MENUS_DIR / package
        package_name = package_path.name
        logger.debug(f"Loading menus from '{package_path}'...")
        """Load menu modules from a subfolder.

        Uses pkgutil to discover modules for regular Python, and falls back
        to the package's __all__ list for PyInstaller frozen executables.
        """
        full_package_name = f"viu_media.cli.interactive.menu.{package}"
        logger.debug(f"Loading menus from package '{full_package_name}'...")

        for filename in os.listdir(package_path):
            if filename.endswith(".py") and not filename.startswith("__"):
                module_name = filename[:-3]
                full_module_name = (
                    f"viu_media.cli.interactive.menu.{package_name}.{module_name}"
        try:
            # Import the parent package first
            parent_package = importlib.import_module(full_package_name)
        except ImportError as e:
            logger.error(f"Failed to import menu package '{full_package_name}': {e}")
            return

        # Try pkgutil first (works in regular Python)
        package_path = getattr(parent_package, "__path__", None)
        module_names = []

        if package_path:
            module_names = [
                name for _, name, ispkg in pkgutil.iter_modules(package_path)
                if not ispkg and not name.startswith("_")
            ]

        # Fallback to __all__ for PyInstaller frozen executables
        if not module_names:
            module_names = getattr(parent_package, "__all__", [])
            logger.debug(f"Using __all__ fallback with {len(module_names)} modules")

        for module_name in module_names:
            full_module_name = f"{full_package_name}.{module_name}"
            try:
                # Simply importing the module will execute it,
                # which runs the @session.menu decorators
                importlib.import_module(full_module_name)
            except Exception as e:
                logger.error(
                    f"Failed to load menu module '{full_module_name}': {e}"
                )
                file_path = package_path / filename

                try:
                    spec = importlib.util.spec_from_file_location(
                        full_module_name, file_path
                    )
                    if spec and spec.loader:
                        module = importlib.util.module_from_spec(spec)
                        # The act of executing the module runs the @session.menu decorators
                        spec.loader.exec_module(module)
                except Exception as e:
                    logger.error(
                        f"Failed to load menu module '{full_module_name}': {e}"
                    )


# Create a single, global instance of the Session to be imported by menu modules.

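The loader above trusts pkgutil when modules are visible on disk and falls back to the hand-maintained __all__ list inside PyInstaller bundles, where iter_modules can come back empty. A standalone sketch of that two-step discovery (running it outside the app is purely illustrative):

import importlib
import pkgutil

pkg = importlib.import_module("viu_media.cli.interactive.menu.media")
discovered = [
    name
    for _, name, ispkg in pkgutil.iter_modules(getattr(pkg, "__path__", []))
    if not ispkg and not name.startswith("_")
]
# Frozen executables may discover nothing; __all__ is then the source of truth.
module_names = discovered or list(getattr(pkg, "__all__", []))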
4 viu_media/cli/service/github/__init__.py (new file)
@@ -0,0 +1,4 @@
from .model import AuthMethod, GitHubContribution, GitHubPRResponse
from .service import GitHubContributionService

__all__ = ["GitHubContributionService", "GitHubContribution", "AuthMethod", "GitHubPRResponse"]
66 viu_media/cli/service/github/model.py (new file)
@@ -0,0 +1,66 @@
from enum import Enum
from typing import Optional

from pydantic import BaseModel, Field


class AuthMethod(str, Enum):
    """Authentication method for GitHub API."""

    BROWSER = "browser"
    GH_CLI = "gh"


class GitHubAuth(BaseModel):
    """Stored GitHub authentication credentials."""

    access_token: str
    token_type: str = "bearer"
    scope: str = ""


class GitHubContribution(BaseModel):
    """Represents a normalizer mapping contribution."""

    provider_name: str = Field(..., description="The provider name (e.g., 'allanime')")
    provider_title: str = Field(
        ..., description="The title as it appears on the provider"
    )
    media_api_title: str = Field(..., description="The normalized media API title")
    anilist_id: Optional[int] = Field(
        default=None, description="Optional AniList ID for reference"
    )


class GitHubPRResponse(BaseModel):
    """Response from GitHub API when creating a pull request."""

    id: int
    number: int
    html_url: str
    title: str
    state: str


class GitHubUser(BaseModel):
    """GitHub user information."""

    login: str
    id: int


class GitHubRepo(BaseModel):
    """GitHub repository information."""

    full_name: str
    default_branch: str
    fork: bool = False


class GitHubFileContent(BaseModel):
    """GitHub file content response."""

    sha: str
    content: str
    encoding: str = "base64"
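A quick sketch of how these pydantic models are meant to round-trip (the field values are illustrative; pydantic validates types on construction):

from viu_media.cli.service.github.model import GitHubContribution

contribution = GitHubContribution(
    provider_name="allanime",
    provider_title="Hanka×Hanka (2011)",
    media_api_title="Hunter × Hunter (2011)",
    anilist_id=11061,  # illustrative ID
)
payload = contribution.model_dump()  # plain dict, safe for logging or JSON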
674 viu_media/cli/service/github/service.py (new file)
@@ -0,0 +1,674 @@
"""
GitHub Contribution Service

Provides functionality to submit normalizer mappings to the viu repository
via Pull Request, using either browser-based OAuth or the GitHub CLI (gh).
"""

import base64
import json
import logging
import shutil
import subprocess
import time
import webbrowser
from typing import TYPE_CHECKING, Optional

import httpx

from ....core.constants import APP_DATA_DIR, AUTHOR, CLI_NAME
from ....core.utils.file import AtomicWriter, FileLock
from ....core.utils.normalizer import USER_NORMALIZER_JSON
from .model import (
    AuthMethod,
    GitHubAuth,
    GitHubContribution,
    GitHubFileContent,
    GitHubPRResponse,
    GitHubRepo,
    GitHubUser,
)

if TYPE_CHECKING:
    from ....libs.selectors.base import BaseSelector
    from ...service.feedback import FeedbackService

logger = logging.getLogger(__name__)

# GitHub OAuth configuration
GITHUB_CLIENT_ID = "Iv23liXUYWot4d4Zvjxa"  # Register your OAuth app on GitHub
GITHUB_OAUTH_SCOPES = "public_repo"
GITHUB_API_BASE = "https://api.github.com"

# Repository information
REPO_OWNER = AUTHOR
REPO_NAME = "viu"  # Must match GitHub repo name exactly (case-sensitive)
NORMALIZER_FILE_PATH = "viu_media/assets/normalizer.json"

AUTH_FILE = APP_DATA_DIR / "github_auth.json"


class GitHubContributionService:
    """Service for submitting normalizer mappings to GitHub."""

    def __init__(
        self,
        selector: "BaseSelector",
        feedback: Optional["FeedbackService"] = None,
    ):
        self.selector = selector
        self.feedback = feedback
        self._lock = FileLock(APP_DATA_DIR / "github_auth.lock")
        self._http_client = httpx.Client(
            headers={
                "Accept": "application/json",
                "User-Agent": f"{CLI_NAME}/1.0",
            },
            timeout=30.0,
            follow_redirects=True,  # Follow redirects for all request types
        )

    def __del__(self):
        """Cleanup HTTP client."""
        if hasattr(self, "_http_client"):
            self._http_client.close()

    def is_gh_cli_available(self) -> bool:
        """Check if GitHub CLI (gh) is installed and available."""
        return shutil.which("gh") is not None

    def is_gh_cli_authenticated(self) -> bool:
        """Check if GitHub CLI is authenticated."""
        if not self.is_gh_cli_available():
            return False
        try:
            result = subprocess.run(
                ["gh", "auth", "status"],
                capture_output=True,
                text=True,
                timeout=10,
            )
            return result.returncode == 0
        except (subprocess.SubprocessError, OSError):
            return False

    def get_available_auth_methods(self) -> list[AuthMethod]:
        """Get list of available authentication methods."""
        methods = [AuthMethod.BROWSER]
        if self.is_gh_cli_available():
            methods.insert(0, AuthMethod.GH_CLI)  # Prefer gh CLI if available
        return methods

    def prompt_auth_method(self) -> Optional[AuthMethod]:
        """
        Prompt user to select their preferred authentication method.

        Returns:
            Selected AuthMethod or None if cancelled.
        """
        methods = self.get_available_auth_methods()

        choices = []
        for method in methods:
            if method == AuthMethod.GH_CLI:
                status = "✓ authenticated" if self.is_gh_cli_authenticated() else ""
                choices.append(f"gh CLI {status}".strip())
            else:
                choices.append("Browser (OAuth)")

        choices.append("Cancel")

        choice = self.selector.choose(
            prompt="Select GitHub authentication method",
            choices=choices,
        )

        if not choice or choice == "Cancel":
            return None

        if choice.startswith("gh CLI"):
            return AuthMethod.GH_CLI
        return AuthMethod.BROWSER
    def submit_contribution(
        self,
        contribution: GitHubContribution,
        auth_method: Optional[AuthMethod] = None,
    ) -> Optional[str]:
        """
        Submit a normalizer mapping contribution to GitHub as a Pull Request.

        This will:
        1. Fork the repository (if not already forked)
        2. Create a new branch with the updated normalizer.json
        3. Open a Pull Request to the upstream repository

        Args:
            contribution: The mapping contribution to submit.
            auth_method: The authentication method to use. If None, will prompt.

        Returns:
            URL of the created PR, or None if failed.
        """
        if auth_method is None:
            auth_method = self.prompt_auth_method()
            if auth_method is None:
                return None

        if auth_method == AuthMethod.GH_CLI:
            return self._submit_pr_via_gh_cli(contribution)
        else:
            return self._submit_pr_via_api(contribution)

    def _get_user_normalizer_content(self) -> Optional[dict]:
        """Read the user's local normalizer.json file."""
        if not USER_NORMALIZER_JSON.exists():
            self._log_error(
                f"Local normalizer.json not found at {USER_NORMALIZER_JSON}"
            )
            return None

        try:
            with USER_NORMALIZER_JSON.open("r", encoding="utf-8") as f:
                return json.load(f)
        except (json.JSONDecodeError, OSError) as e:
            self._log_error(f"Failed to read normalizer.json: {e}")
            return None

    def _submit_pr_via_gh_cli(
        self, contribution: GitHubContribution
    ) -> Optional[str]:
        """Submit PR using GitHub CLI."""
        if not self.is_gh_cli_available():
            self._log_error("GitHub CLI (gh) is not installed")
            return None

        if not self.is_gh_cli_authenticated():
            self._log_info("GitHub CLI not authenticated. Running 'gh auth login'...")
            try:
                subprocess.run(["gh", "auth", "login"], check=True)
            except subprocess.SubprocessError:
                self._log_error("Failed to authenticate with GitHub CLI")
                return None

        # Read local normalizer content
        normalizer_content = self._get_user_normalizer_content()
        if not normalizer_content:
            return None

        # Get current username
        try:
            result = subprocess.run(
                ["gh", "api", "user", "--jq", ".login"],
                capture_output=True,
                text=True,
                timeout=30,
            )
            if result.returncode != 0:
                self._log_error("Failed to get GitHub username")
                return None
            username = result.stdout.strip()
        except subprocess.SubprocessError as e:
            self._log_error(f"Failed to get username: {e}")
            return None

        # Fork the repository if not already forked
        self._log_info("Ensuring fork exists...")
        try:
            subprocess.run(
                ["gh", "repo", "fork", f"{REPO_OWNER}/{REPO_NAME}", "--clone=false"],
                capture_output=True,
                text=True,
                timeout=60,
            )
        except subprocess.SubprocessError:
            pass  # Fork may already exist, continue

        # Create branch name
        branch_name = f"normalizer/{contribution.provider_name}-{int(time.time())}"

        # Create the PR using gh pr create with the file content
        title = self._format_pr_title(contribution)
        body = self._format_pr_body(contribution)

        # We need to create the branch and commit via API since gh doesn't support this directly
        # Fall back to API method for the actual PR creation
        self._log_info("Creating pull request...")

        # Get token from gh CLI
        try:
            result = subprocess.run(
                ["gh", "auth", "token"],
                capture_output=True,
                text=True,
                timeout=10,
            )
            if result.returncode != 0:
                self._log_error("Failed to get auth token from gh CLI")
                return None
            token = result.stdout.strip()
        except subprocess.SubprocessError as e:
            self._log_error(f"Failed to get token: {e}")
            return None

        return self._create_pr_via_api(contribution, token, normalizer_content)

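_perform_device_flow_auth is referenced below but defined outside this excerpt; a minimal sketch of the standard GitHub device-flow handshake it presumably performs (the endpoints are GitHub's documented OAuth device-flow URLs; error handling is simplified):

import time
import httpx

def device_flow_token(client_id: str, scope: str = "public_repo") -> str | None:
    # Step 1: request a device code / user code pair.
    start = httpx.post(
        "https://github.com/login/device/code",
        data={"client_id": client_id, "scope": scope},
        headers={"Accept": "application/json"},
    ).json()
    print(f"Visit {start['verification_uri']} and enter code {start['user_code']}")
    # Step 2: poll the token endpoint at the server-advertised interval.
    while True:
        time.sleep(start.get("interval", 5))
        poll = httpx.post(
            "https://github.com/login/oauth/access_token",
            data={
                "client_id": client_id,
                "device_code": start["device_code"],
                "grant_type": "urn:ietf:params:oauth:grant-type:device_code",
            },
            headers={"Accept": "application/json"},
        ).json()
        if "access_token" in poll:
            return poll["access_token"]
        if poll.get("error") not in ("authorization_pending", "slow_down"):
            return None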
def _submit_pr_via_api(self, contribution: GitHubContribution) -> Optional[str]:
|
||||
"""Submit PR using browser-based OAuth and GitHub API."""
|
||||
# Authenticate
|
||||
auth = self._load_cached_auth()
|
||||
|
||||
if not auth or not self._validate_token(auth.access_token):
|
||||
auth = self._perform_device_flow_auth()
|
||||
if not auth:
|
||||
self._log_error("Failed to authenticate with GitHub")
|
||||
return None
|
||||
self._save_auth(auth)
|
||||
|
||||
# Read local normalizer content
|
||||
normalizer_content = self._get_user_normalizer_content()
|
||||
if not normalizer_content:
|
||||
return None
|
||||
|
||||
return self._create_pr_via_api(contribution, auth.access_token, normalizer_content)
|
||||
|
||||
    def _create_pr_via_api(
        self,
        contribution: GitHubContribution,
        token: str,
        normalizer_content: dict,
    ) -> Optional[str]:
        """Create a Pull Request via GitHub API."""
        headers = {"Authorization": f"Bearer {token}"}

        # Step 1: Get current user
        self._log_info("Getting user info...")
        try:
            response = self._http_client.get(
                f"{GITHUB_API_BASE}/user", headers=headers
            )
            response.raise_for_status()
            user = GitHubUser.model_validate(response.json())
        except httpx.HTTPError as e:
            self._log_error(f"Failed to get user info: {e}")
            return None

        # Step 2: Fork the repository (if not already forked)
        self._log_info("Ensuring fork exists...")
        fork_exists = False
        fork_full_name = ""

        try:
            # Check if fork exists by listing user's forks of the repo
            response = self._http_client.get(
                f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/forks",
                headers=headers,
            )
            response.raise_for_status()
            forks = response.json()

            # Find user's fork
            user_fork = next(
                (f for f in forks if f["owner"]["login"].lower() == user.login.lower()),
                None,
            )

            if user_fork:
                fork_full_name = user_fork["full_name"]
                fork_exists = True
            else:
                # Create fork
                self._log_info("Creating fork...")
                response = self._http_client.post(
                    f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/forks",
                    headers=headers,
                )
                response.raise_for_status()
                fork_data = response.json()
                fork_full_name = fork_data["full_name"]
                # Wait for fork to be ready
                time.sleep(5)
        except httpx.HTTPError as e:
            self._log_error(f"Failed to create/check fork: {e}")
            return None

        self._log_info(f"Using fork: {fork_full_name}")

        # Step 3: Get the default branch SHA from upstream
        self._log_info("Getting upstream branch info...")
        try:
            response = self._http_client.get(
                f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/git/ref/heads/master",
                headers=headers,
            )
            response.raise_for_status()
            base_sha = response.json()["object"]["sha"]
        except httpx.HTTPError as e:
            self._log_error(f"Failed to get base branch: {e}")
            return None

        # Step 3.5: Sync fork with upstream if it already existed
        if fork_exists:
            self._log_info("Syncing fork with upstream...")
            try:
                response = self._http_client.post(
                    f"{GITHUB_API_BASE}/repos/{fork_full_name}/merge-upstream",
                    headers=headers,
                    json={"branch": "master"},
                )
                # 409 means already up to date, which is fine
                if response.status_code not in (200, 409):
                    response.raise_for_status()
            except httpx.HTTPError as e:
                self._log_info(f"Could not sync fork (continuing anyway): {e}")

        # Step 4: Create a new branch in the fork
        branch_name = f"normalizer/{contribution.provider_name}-{int(time.time())}"
        self._log_info(f"Creating branch: {branch_name}")

        try:
            response = self._http_client.post(
                f"{GITHUB_API_BASE}/repos/{fork_full_name}/git/refs",
                headers=headers,
                json={"ref": f"refs/heads/{branch_name}", "sha": base_sha},
            )
            response.raise_for_status()
        except httpx.HTTPStatusError as e:
            error_detail = ""
            try:
                error_detail = str(e.response.json())
            except Exception:
                pass
            self._log_error(f"Failed to create branch: {e} {error_detail}")
            return None
        except httpx.HTTPError as e:
            self._log_error(f"Failed to create branch: {e}")
            return None

        # Step 5: Get current normalizer.json from the fork's new branch to get SHA
        self._log_info("Fetching current normalizer.json...")
        try:
            response = self._http_client.get(
                f"{GITHUB_API_BASE}/repos/{fork_full_name}/contents/{NORMALIZER_FILE_PATH}",
                headers=headers,
                params={"ref": branch_name},
            )
            response.raise_for_status()
            file_info = GitHubFileContent.model_validate(response.json())
            file_sha = file_info.sha

            # Decode existing content and merge with user's mappings
            existing_content = json.loads(
                base64.b64decode(file_info.content).decode("utf-8")
            )

            # Merge: user's normalizer takes precedence
            merged_content = existing_content.copy()
            for provider, mappings in normalizer_content.items():
                if provider not in merged_content:
                    merged_content[provider] = {}
                merged_content[provider].update(mappings)

        except httpx.HTTPError as e:
            self._log_error(f"Failed to get normalizer.json: {e}")
            return None

        # Step 6: Update the file in the fork
        self._log_info("Committing changes...")
        new_content = json.dumps(merged_content, indent=2, ensure_ascii=False)
        encoded_content = base64.b64encode(new_content.encode("utf-8")).decode("utf-8")

        commit_message = (
            f"feat(normalizer): add mapping for '{contribution.provider_title}'\n\n"
            f"Provider: {contribution.provider_name}\n"
            f"Maps: {contribution.provider_title} -> {contribution.media_api_title}"
        )

        try:
            response = self._http_client.put(
                f"{GITHUB_API_BASE}/repos/{fork_full_name}/contents/{NORMALIZER_FILE_PATH}",
                headers=headers,
                json={
                    "message": commit_message,
                    "content": encoded_content,
                    "sha": file_sha,
                    "branch": branch_name,
                },
            )
            response.raise_for_status()
        except httpx.HTTPStatusError as e:
            error_detail = ""
            try:
                error_detail = str(e.response.json())
            except Exception:
                pass
            self._log_error(f"Failed to commit changes: {e} {error_detail}")
            return None
        except httpx.HTTPError as e:
            self._log_error(f"Failed to commit changes: {e}")
            return None

        # Step 7: Create the Pull Request
        self._log_info("Creating pull request...")
        title = self._format_pr_title(contribution)
        body = self._format_pr_body(contribution)

        try:
            response = self._http_client.post(
                f"{GITHUB_API_BASE}/repos/{REPO_OWNER}/{REPO_NAME}/pulls",
                headers=headers,
                json={
                    "title": title,
                    "body": body,
                    "head": f"{user.login}:{branch_name}",
                    "base": "master",
                },
            )
            response.raise_for_status()
            pr = GitHubPRResponse.model_validate(response.json())
            self._log_success(f"Created PR #{pr.number}: {pr.html_url}")
            return pr.html_url

        except httpx.HTTPStatusError as e:
            error_detail = ""
            try:
                error_json = e.response.json()
                error_detail = error_json.get("message", "")
                # GitHub includes detailed errors in 'errors' array
                if "errors" in error_json:
                    errors = error_json["errors"]
                    error_detail += " | " + str(errors)
            except Exception:
                pass
            self._log_error(f"Failed to create PR: {e} {error_detail}")
            return None
        except httpx.HTTPError as e:
            self._log_error(f"Failed to create PR: {e}")
            return None
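    # --- Illustrative sketch (not part of the source above): the base64
    # round-trip that Steps 5 and 6 rely on. The GitHub contents API returns
    # and accepts file bodies as base64 text; the merge mirrors the precedence
    # rule above. The mapping values are made up.
    #
    # import base64
    # import json
    #
    # fetched = {"content": base64.b64encode(b'{"allanime": {}}').decode("utf-8")}
    # existing = json.loads(base64.b64decode(fetched["content"]).decode("utf-8"))
    #
    # local = {"allanime": {"some title": "some canonical title"}}  # hypothetical
    # merged = existing.copy()
    # for provider, mappings in local.items():
    #     merged.setdefault(provider, {}).update(mappings)  # local mappings win
    #
    # encoded = base64.b64encode(
    #     json.dumps(merged, indent=2, ensure_ascii=False).encode("utf-8")
    # ).decode("utf-8")  # ready for the PUT /contents payload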
    def _format_pr_title(self, contribution: GitHubContribution) -> str:
        """Format the PR title."""
        return (
            f"feat(normalizer): add mapping for '{contribution.provider_title}' "
            f"({contribution.provider_name})"
        )

    def _format_pr_body(self, contribution: GitHubContribution) -> str:
        """Format the PR body."""
        return f"""## Normalizer Mapping Contribution

This PR adds a new title mapping to the normalizer.

### Mapping Details

| Field | Value |
|-------|-------|
| **Provider** | `{contribution.provider_name}` |
| **Provider Title** | `{contribution.provider_title}` |
| **Media API Title** | `{contribution.media_api_title}` |
| **AniList ID** | {contribution.anilist_id or 'N/A'} |

### Changes

This PR updates `{NORMALIZER_FILE_PATH}` with the following mapping:

```json
"{contribution.provider_title}": "{contribution.media_api_title.lower()}"
```

---
*Submitted automatically via {CLI_NAME} CLI*
"""
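    # --- Illustrative sketch (not part of the source above): what
    # _format_pr_title renders for a hypothetical contribution. Field values
    # here are made up.
    #
    # class FakeContribution:
    #     provider_name = "allanime"
    #     provider_title = "Frieren: Beyond Journey's End"
    #
    # c = FakeContribution()
    # title = (
    #     f"feat(normalizer): add mapping for '{c.provider_title}' "
    #     f"({c.provider_name})"
    # )
    # print(title)
    # # feat(normalizer): add mapping for 'Frieren: Beyond Journey's End' (allanime)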
    def _perform_device_flow_auth(self) -> Optional[GitHubAuth]:
        """
        Perform GitHub Device Flow authentication.

        This is more reliable for CLI apps than the web redirect flow.
        """
        self._log_info("Starting GitHub authentication...")

        # Request device code
        try:
            response = self._http_client.post(
                "https://github.com/login/device/code",
                data={
                    "client_id": GITHUB_CLIENT_ID,
                    "scope": GITHUB_OAUTH_SCOPES,
                },
                headers={"Accept": "application/json"},
            )
            response.raise_for_status()
            data = response.json()
        except httpx.HTTPError as e:
            self._log_error(f"Failed to start authentication: {e}")
            return None

        device_code = data.get("device_code")
        user_code = data.get("user_code")
        verification_uri = data.get("verification_uri")
        expires_in = data.get("expires_in", 900)
        interval = data.get("interval", 5)

        if not all([device_code, user_code, verification_uri]):
            self._log_error("Invalid response from GitHub")
            return None

        # Show user the code and open browser
        self._log_info(f"\n🔑 Your code: {user_code}")
        self._log_info(f"Opening {verification_uri} in your browser...")
        self._log_info("Enter the code above to authenticate.\n")

        webbrowser.open(verification_uri)

        # Poll for token (time is already imported at module level)
        start_time = time.time()
        while time.time() - start_time < expires_in:
            time.sleep(interval)

            try:
                token_response = self._http_client.post(
                    "https://github.com/login/oauth/access_token",
                    data={
                        "client_id": GITHUB_CLIENT_ID,
                        "device_code": device_code,
                        "grant_type": "urn:ietf:params:oauth:grant-type:device_code",
                    },
                    headers={"Accept": "application/json"},
                )
                token_data = token_response.json()

                if "access_token" in token_data:
                    self._log_success("Authentication successful!")
                    return GitHubAuth(
                        access_token=token_data["access_token"],
                        token_type=token_data.get("token_type", "bearer"),
                        scope=token_data.get("scope", ""),
                    )

                error = token_data.get("error")
                if error == "authorization_pending":
                    continue
                elif error == "slow_down":
                    interval += 5
                elif error == "expired_token":
                    self._log_error("Authentication expired. Please try again.")
                    return None
                elif error == "access_denied":
                    self._log_error("Authentication denied by user.")
                    return None
                else:
                    self._log_error(f"Authentication error: {error}")
                    return None

            except httpx.HTTPError:
                continue

        self._log_error("Authentication timed out. Please try again.")
        return None
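    # --- Illustrative sketch (not part of the source above): the response
    # shapes the device-flow poll handles, per GitHub's OAuth device flow
    # documentation. All values are examples, not real codes or tokens.
    #
    # device_code_response = {
    #     "device_code": "3584d83530557fdd1f46af8289938c8ef79f9dc5",
    #     "user_code": "WDJB-MJHT",
    #     "verification_uri": "https://github.com/login/device",
    #     "expires_in": 899,
    #     "interval": 5,
    # }
    # pending = {"error": "authorization_pending"}        # keep polling
    # throttled = {"error": "slow_down", "interval": 10}  # back off, then poll
    # success = {
    #     "access_token": "gho_example",  # placeholder, not a real token
    #     "token_type": "bearer",
    #     "scope": "public_repo",  # example scope
    # }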
    def _validate_token(self, token: str) -> bool:
        """Check if a GitHub token is still valid."""
        try:
            response = self._http_client.get(
                f"{GITHUB_API_BASE}/user",
                headers={"Authorization": f"Bearer {token}"},
            )
            return response.status_code == 200
        except httpx.HTTPError:
            return False

    def _load_cached_auth(self) -> Optional[GitHubAuth]:
        """Load cached GitHub authentication."""
        if not AUTH_FILE.exists():
            return None

        try:
            with AUTH_FILE.open("r", encoding="utf-8") as f:
                data = json.load(f)
                return GitHubAuth.model_validate(data)
        except (json.JSONDecodeError, ValueError):
            return None

    def _save_auth(self, auth: GitHubAuth) -> None:
        """Save GitHub authentication to cache."""
        APP_DATA_DIR.mkdir(parents=True, exist_ok=True)
        with self._lock:
            with AtomicWriter(AUTH_FILE) as f:
                json.dump(auth.model_dump(), f, indent=2)

    def clear_cached_auth(self) -> None:
        """Clear cached GitHub authentication."""
        if AUTH_FILE.exists():
            AUTH_FILE.unlink()
            logger.info("Cleared GitHub authentication cache")

    @staticmethod
    def clear_cached_auth_static() -> None:
        """Clear cached GitHub authentication (static method for CLI use)."""
        if AUTH_FILE.exists():
            AUTH_FILE.unlink()
            logger.info("Cleared GitHub authentication cache")

    def _log_info(self, message: str) -> None:
        """Log info message."""
        if self.feedback:
            self.feedback.info(message)
        else:
            logger.info(message)

    def _log_success(self, message: str) -> None:
        """Log success message."""
        if self.feedback:
            self.feedback.success(message)
        else:
            logger.info(message)

    def _log_error(self, message: str) -> None:
        """Log error message."""
        if self.feedback:
            self.feedback.error(message)
        else:
            logger.error(message)
@@ -57,6 +57,9 @@ class MPVIPCClient:
     def connect(self, timeout: float = 5.0) -> None:
         """Connect to MPV IPC socket and start the reader thread."""
+        if not hasattr(socket, "AF_UNIX"):
+            raise MPVIPCError("Unix domain sockets are unavailable on this platform")
+
         start_time = time.time()
         while time.time() - start_time < timeout:
             try:

@@ -299,6 +302,10 @@ class MpvIPCPlayer(BaseIPCPlayer):
     def _play_with_ipc(self, player: BasePlayer, params: PlayerParams) -> PlayerResult:
         """Play media using MPV IPC."""
         try:
+            if not hasattr(socket, "AF_UNIX"):
+                raise MPVIPCError(
+                    "MPV IPC requires Unix domain sockets, which are unavailable on this platform."
+                )
             self._start_mpv_process(player, params)
             self._connect_ipc()
             self._setup_event_handling()
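# --- Illustrative sketch (not part of the diff): the platform guard both
# hunks above add. socket.AF_UNIX is only defined where CPython exposes Unix
# domain sockets (Linux, macOS), so hasattr is the portable check.
#
# import socket
#
# def supports_unix_sockets() -> bool:
#     return hasattr(socket, "AF_UNIX")
#
# if not supports_unix_sockets():
#     print("MPV IPC unavailable: no Unix domain socket support on this platform")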
@@ -2,7 +2,6 @@ import logging
 from pathlib import Path
 import re
 from hashlib import sha256
-import sys
 from typing import Dict, List, Optional
 
 import httpx
@@ -11,6 +10,7 @@ from viu_media.core.utils import formatter
 
 from ...core.config import AppConfig
 from ...core.constants import APP_CACHE_DIR, SCRIPTS_DIR
+from ...core.utils.detect import get_python_executable
 from ...core.utils.file import AtomicWriter
 from ...libs.media_api.types import (
     AiringScheduleResult,
@@ -327,7 +327,7 @@ def get_anime_preview(
     preview_file.write_text(preview_script, encoding="utf-8")
 
     preview_script_final = (
-        f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}"
+        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
     )
     return preview_script_final
@@ -387,7 +387,7 @@ def get_episode_preview(
     preview_file.write_text(preview_script, encoding="utf-8")
 
     preview_script_final = (
-        f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}"
+        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
     )
     return preview_script_final
@@ -435,7 +435,7 @@ def get_character_preview(choice_map: Dict[str, Character], config: AppConfig) -
     preview_file.write_text(preview_script, encoding="utf-8")
 
     preview_script_final = (
-        f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}"
+        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
     )
     return preview_script_final
@@ -483,7 +483,7 @@ def get_review_preview(choice_map: Dict[str, MediaReview], config: AppConfig) ->
     preview_file.write_text(preview_script, encoding="utf-8")
 
     preview_script_final = (
-        f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}"
+        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
     )
     return preview_script_final
@@ -599,7 +599,7 @@ def get_dynamic_anime_preview(config: AppConfig) -> str:
 
     # Return the command to execute the preview script
     preview_script_final = (
-        f"{Path(sys.executable).as_posix()} {preview_file.as_posix()} {{}}"
+        f"{Path(get_python_executable()).as_posix()} {preview_file.as_posix()} {{}}"
     )
     return preview_script_final
@@ -189,7 +189,12 @@ class PreviewCacheWorker(ManagedBackgroundWorker):
             ),
             "STUDIOS": formatter.shell_safe(
                 formatter.format_list_with_commas(
-                    [t.name for t in media_item.studios if t.name]
+                    [t.name for t in media_item.studios if t.name and t.is_animation_studio]
                 )
             ),
+            "PRODUCERS": formatter.shell_safe(
+                formatter.format_list_with_commas(
+                    [t.name for t in media_item.studios if t.name and not t.is_animation_studio]
+                )
+            ),
             "SYNONYMNS": formatter.shell_safe(
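# --- Illustrative sketch (not part of the diff): how the STUDIOS/PRODUCERS
# split above behaves for a hypothetical media item. The Studio shape mirrors
# the fields used in the hunk; the names are made up.
#
# from dataclasses import dataclass
#
# @dataclass
# class Studio:
#     name: str
#     is_animation_studio: bool
#
# studios = [Studio("MAPPA", True), Studio("Aniplex", False)]
# print([s.name for s in studios if s.name and s.is_animation_studio])      # ['MAPPA']
# print([s.name for s in studios if s.name and not s.is_animation_studio])  # ['Aniplex']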
@@ -132,6 +132,7 @@ APP_SERVICE = "Configuration for the background download service."
 APP_FZF = "Settings for the FZF selector interface."
 APP_ROFI = "Settings for the Rofi selector interface."
 APP_MPV = "Configuration for the MPV media player."
+APP_VLC = "Configuration for the VLC media player."
 APP_MEDIA_REGISTRY = "Configuration for the media registry."
 APP_SESSIONS = "Configuration for sessions."

@@ -534,6 +534,7 @@ class AppConfig(BaseModel):
         description=desc.APP_ROFI,
     )
     mpv: MpvConfig = Field(default_factory=MpvConfig, description=desc.APP_MPV)
+    vlc: VlcConfig = Field(default_factory=VlcConfig, description=desc.APP_VLC)
     media_registry: MediaRegistryConfig = Field(
         default_factory=MediaRegistryConfig, description=desc.APP_MEDIA_REGISTRY
     )

@@ -9,6 +9,7 @@ class DownloadParams:
     episode_title: str
     silent: bool
     progress_hooks: list[Callable] = field(default_factory=list)
+    logger: object | None = None
     vid_format: str = "best"
     force_unknown_ext: bool = False
     verbose: bool = False

@@ -30,6 +30,9 @@ class YtDLPDownloader(BaseDownloader):
         sub_paths = []
         merged_path = None
 
+        logger.debug(f"Starting download for URL: {params.url}")
+        logger.debug(f"Using Headers: {params.headers}")
+
         if TORRENT_REGEX.match(params.url):
             from .torrents import download_torrent_with_webtorrent_cli
 
@@ -91,6 +94,7 @@ class YtDLPDownloader(BaseDownloader):
             else tuple(),
             "progress_hooks": params.progress_hooks,
             "nocheckcertificate": params.no_check_certificate,
+            "logger": params.logger,
         }
         opts = opts
         if params.force_ffmpeg or params.hls_use_mpegts or params.hls_use_h264:
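# --- Illustrative sketch (not part of the diff): how the "logger" key added
# to the options dict above is consumed. yt-dlp routes its output to any
# object exposing debug/info/warning/error methods passed as opts["logger"],
# per its documented embedding interface. The logger name is made up.
#
# import logging
#
# class YtdlpLoggerAdapter:
#     def __init__(self, logger: logging.Logger) -> None:
#         self._logger = logger
#
#     def debug(self, msg: str) -> None:
#         self._logger.debug(msg)
#
#     def info(self, msg: str) -> None:
#         self._logger.info(msg)
#
#     def warning(self, msg: str) -> None:
#         self._logger.warning(msg)
#
#     def error(self, msg: str) -> None:
#         self._logger.error(msg)
#
# # opts = {..., "logger": YtdlpLoggerAdapter(logging.getLogger("viu.download"))}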
@@ -56,3 +56,30 @@ def is_running_kitty_terminal() -> bool:
 
 def has_fzf() -> bool:
     return True if shutil.which("fzf") else False
+
+
+def is_frozen() -> bool:
+    """Check if running as a PyInstaller frozen executable."""
+    return getattr(sys, "frozen", False)
+
+
+def get_python_executable() -> str:
+    """
+    Get the Python executable path.
+
+    In frozen (PyInstaller) apps, sys.executable points to the .exe,
+    so we need to find the system Python instead.
+
+    Returns:
+        Path to a Python executable.
+    """
+    if is_frozen():
+        # We're in a frozen app - find system Python
+        for python_name in ["python3", "python", "py"]:
+            python_path = shutil.which(python_name)
+            if python_path:
+                return python_path
+        # Fallback - this likely won't work but is the best we can do
+        return "python"
+    else:
+        return sys.executable
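# --- Illustrative sketch (not part of the diff): why the preview hunks
# earlier switched from sys.executable to get_python_executable(). In a
# PyInstaller bundle, sys.executable is the bundled binary itself, so spawning
# it as "python script.py" would re-launch the app instead of running the
# script.
#
# import sys
#
# frozen = getattr(sys, "frozen", False)
# print("frozen app" if frozen else f"plain interpreter: {sys.executable}")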
@@ -184,13 +184,22 @@ def format_score(score: Optional[float]) -> str:
 
 def shell_safe(text: Optional[str]) -> str:
     """
-    Escapes a string for safe inclusion in a shell script,
-    specifically for use within double quotes. It escapes backticks,
-    double quotes, and dollar signs.
+    Escapes a string for safe inclusion in a Python script string literal.
+    This is used when generating Python cache scripts with embedded text content.
+
+    For Python triple-quoted strings, we need to:
+    - Escape backslashes first (so existing backslashes don't interfere)
+    - Escape triple quotes (to not break the string literal)
+    - Remove or replace problematic characters
     """
     if not text:
         return ""
-    return text.replace("`", "\\`").replace('"', '\\"').replace("$", "\\$")
+    # Escape backslashes first
+    result = text.replace("\\", "\\\\")
+    # Escape triple quotes (both types) for Python triple-quoted string literals
+    result = result.replace('"""', r'\"\"\"')
+    result = result.replace("'''", r"\'\'\'")
+    return result
 
 
 def extract_episode_number(title: str) -> Optional[float]:
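# --- Illustrative sketch (not part of the diff): the new shell_safe behavior
# on text containing backslashes and triple quotes, traced by hand.
#
# text = 'A\\B followed by """ and \'\'\''
# result = text.replace("\\", "\\\\")
# result = result.replace('"""', r'\"\"\"')
# result = result.replace("'''", r"\'\'\'")
# print(result)  # A\\B followed by \"\"\" and \'\'\'
# # Embedded in a generated script as f'"""{result}"""', the literal now parses.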
@@ -50,15 +50,10 @@ def _load_normalizer_data() -> Dict[str, Dict[str, str]]:
 def update_user_normalizer_json(
     provider_title: str, media_api_title: str, provider_name: str
 ):
-    import time
-
     from .file import AtomicWriter
 
-    print(
-        "UPDATING USER NORMALIZER JSON. PLEASE CONTRIBUTE TO THE PROJECT BY OPENING A PR ON GITHUB TO MERGE YOUR NORMALIZER JSON TO MAIN. MAEMOTTE KANSHA SHIMASU :)"
-    )
-    print(f"NORMALIZER JSON PATH IS: {USER_NORMALIZER_JSON}")
-    time.sleep(5)
+    logger.info(f"Updating user normalizer JSON at: {USER_NORMALIZER_JSON}")
 
     if not _normalizer_cache:
         raise RuntimeError(
             "Fatal _normalizer_cache missing this should not be the case : (. Please report"
@@ -323,7 +323,14 @@ def to_generic_user_list_result(data: AnilistMediaLists) -> Optional[MediaSearch
 def to_generic_user_profile(data: AnilistViewerData) -> Optional[UserProfile]:
     """Maps a raw AniList viewer response to a generic UserProfile."""
 
-    viewer_data: Optional[AnilistCurrentlyLoggedInUser] = data["data"]["Viewer"]
+    data_node = data.get("data")
+    if not data_node:
+        return None
+
+    viewer_data: Optional[AnilistCurrentlyLoggedInUser] = data_node.get("Viewer")
 
     if not viewer_data:
         return None
 
     return UserProfile(
         id=viewer_data["id"],
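# --- Illustrative sketch (not part of the diff): payload shapes the hardened
# mapper above now tolerates. Keys mirror AniList's GraphQL viewer response;
# the values are made up.
#
# payload_ok = {"data": {"Viewer": {"id": 1, "name": "example"}}}
# payload_no_viewer = {"data": {"Viewer": None}}               # logged-out session
# payload_error = {"errors": [{"message": "Invalid token"}]}   # no "data" at all
#
# for p in (payload_ok, payload_no_viewer, payload_error):
#     data_node = p.get("data")
#     viewer = data_node.get("Viewer") if data_node else None
#     print(viewer)  # dict, then None, then None - no KeyError on the last two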
@@ -52,7 +52,7 @@ class MpvPlayer(BasePlayer):
         if TORRENT_REGEX.match(params.url) and detect.is_running_in_termux():
             raise ViuError("Unable to play torrents on termux")
         elif params.syncplay and detect.is_running_in_termux():
-            raise ViuError("Unable to play torrents on termux")
+            raise ViuError("Unable to play with syncplay on termux")
         elif detect.is_running_in_termux():
             return self._play_on_mobile(params)
         else:
@@ -41,6 +41,10 @@ class PlayerFactory:
             from .mpv.player import MpvPlayer
 
             return MpvPlayer(config.mpv)
+        elif player_name == "vlc":
+            from .vlc.player import VlcPlayer
+
+            return VlcPlayer(config.vlc)
         raise NotImplementedError(
             f"Configuration logic for player '{player_name}' not implemented in factory."
         )
@@ -46,10 +46,11 @@ class VlcPlayer(BasePlayer):
         Returns:
             PlayerResult: Information about the playback session.
         """
+        if not self.executable:
+            raise ViuError("VLC executable not found in PATH.")
+
         if TORRENT_REGEX.match(params.url) and detect.is_running_in_termux():
             raise ViuError("Unable to play torrents on termux")
         elif params.syncplay and detect.is_running_in_termux():
             raise ViuError("Unable to play with syncplay on termux")
         elif detect.is_running_in_termux():
             return self._play_on_mobile(params)
         else:
             return self._play_on_desktop(params)

@@ -116,6 +117,9 @@ class VlcPlayer(BasePlayer):
         Returns:
             PlayerResult: Information about the playback session.
         """
+        if not self.executable:
+            raise ViuError("VLC executable not found in PATH.")
+
         if TORRENT_REGEX.search(params.url):
             return self._stream_on_desktop_with_webtorrent_cli(params)
@@ -3,6 +3,8 @@ import re
 ANIMEPAHE = "animepahe.si"
 ANIMEPAHE_BASE = f"https://{ANIMEPAHE}"
 ANIMEPAHE_ENDPOINT = f"{ANIMEPAHE_BASE}/api"
+CDN_PROVIDER = "kwik.cx"
+CDN_PROVIDER_BASE = f"https://{CDN_PROVIDER}"
 
 SERVERS_AVAILABLE = ["kwik"]
 REQUEST_HEADERS = {
@@ -25,7 +27,7 @@ SERVER_HEADERS = {
     "Accept-Encoding": "Utf-8",
     "DNT": "1",
     "Connection": "keep-alive",
-    "Referer": "https://animepahe.si/",
+    "Referer": ANIMEPAHE_BASE + "/",
     "Upgrade-Insecure-Requests": "1",
     "Sec-Fetch-Dest": "iframe",
     "Sec-Fetch-Mode": "navigate",
@@ -33,5 +35,22 @@ SERVER_HEADERS = {
     "Priority": "u=4",
     "TE": "trailers",
 }
 
+STREAM_HEADERS = {
+    # "Host": "vault-16.owocdn.top", # This will have to be the actual host of the stream (behind Kwik)
+    "Accept": "*/*",
+    "Accept-Language": "en-US,en;q=0.5",
+    "Accept-Encoding": "gzip, deflate, br, zstd",
+    "Origin": CDN_PROVIDER_BASE,
+    "Sec-GPC": "1",
+    "Connection": "keep-alive",
+    "Referer": CDN_PROVIDER_BASE + "/",
+    "Sec-Fetch-Dest": "empty",
+    "Sec-Fetch-Mode": "cors",
+    "Sec-Fetch-Site": "cross-site",
+    "TE": "trailers",
+}
+
 
 JUICY_STREAM_REGEX = re.compile(r"source='(.*)';")
 KWIK_RE = re.compile(r"Player\|(.+?)'")
@@ -88,6 +88,7 @@ def map_to_server(
     episode: AnimeEpisodeInfo,
     translation_type: str,
     stream_links: list[tuple[str, str]],
+    headers: dict[str, str],
 ) -> Server:
     links = [
         EpisodeStream(
@@ -97,4 +98,6 @@
         )
         for link in stream_links
     ]
-    return Server(name="kwik", links=links, episode_title=episode.title)
+    return Server(
+        name="kwik", links=links, episode_title=episode.title, headers=headers
+    )
@@ -1,6 +1,7 @@
 import logging
 from functools import lru_cache
 from typing import Iterator, Optional
+from urllib.parse import urlparse
 
 from ..base import BaseAnimeProvider
 from ..params import AnimeParams, EpisodeStreamsParams, SearchParams
@@ -9,9 +10,11 @@ from ..utils.debug import debug_provider
 from .constants import (
     ANIMEPAHE_BASE,
     ANIMEPAHE_ENDPOINT,
+    CDN_PROVIDER,
     JUICY_STREAM_REGEX,
     REQUEST_HEADERS,
     SERVER_HEADERS,
+    STREAM_HEADERS,
 )
 from .extractor import process_animepahe_embed_page
 from .mappers import map_to_anime_result, map_to_search_results, map_to_server
@@ -132,6 +135,7 @@ class AnimePahe(BaseAnimeProvider):
         quality = None
         translation_type = None
         stream_links = []
+        stream_host = None
 
         # TODO: better document the scraping process
         for res_dict in res_dicts:
@@ -170,13 +174,21 @@
                 continue
             logger.debug(f"Found juicy stream: {juicy_stream.group(1)}")
             juicy_stream = juicy_stream.group(1)
+            stream_host = urlparse(juicy_stream).hostname
             quality = res_dict["resolution"]
             logger.debug(f"Found quality: {quality}")
             translation_type = data_audio
             stream_links.append((quality, juicy_stream))
 
         if translation_type and stream_links:
-            yield map_to_server(episode, translation_type, stream_links)
+            headers = {
+                "User-Agent": self.client.headers["User-Agent"],
+                "Host": stream_host or CDN_PROVIDER,
+                **STREAM_HEADERS,
+            }
+            yield map_to_server(
+                episode, translation_type, stream_links, headers=headers
+            )
 
     @lru_cache()
     def _get_episode_info(
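# --- Illustrative sketch (not part of the diff): how stream_host is derived
# above. The URL is a made-up example of a Kwik-style CDN stream; the real
# host varies per episode, which is why the Host header is computed at
# scrape time and falls back to CDN_PROVIDER ("kwik.cx") when parsing fails.
#
# from urllib.parse import urlparse
#
# juicy_stream = "https://vault-16.example-cdn.top/stream/uwu.m3u8"  # hypothetical
# stream_host = urlparse(juicy_stream).hostname
# print(stream_host)  # vault-16.example-cdn.top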
@@ -88,6 +88,8 @@ class BaseSelector(ABC):
         *,
         preview: Optional[str] = None,
         header: Optional[str] = None,
+        initial_query: Optional[str] = None,
+        initial_results: Optional[List[str]] = None,
     ) -> str | None:
         """
         Provides dynamic search functionality that reloads results based on user input.
@@ -97,6 +99,8 @@
             search_command: The command to execute for searching/reloading results.
             preview: An optional command or string for a preview window.
             header: An optional header to display above the choices.
+            initial_query: An optional initial query to pre-populate the search.
+            initial_results: Optional list of results to display initially (avoids network request).
 
         Returns:
             The string of the chosen item.
@@ -117,26 +117,42 @@ class FzfSelector(BaseSelector):
         lines = result.stdout.strip().splitlines()
         return lines[-1] if lines else (default or "")
 
-    def search(self, prompt, search_command, *, preview=None, header=None):
+    def search(self, prompt, search_command, *, preview=None, header=None, initial_query=None, initial_results=None):
         """Enhanced search using fzf's --reload flag for dynamic search."""
+        # Build the header with optional custom header line
+        display_header = self.header
+        if header:
+            display_header = f"{self.header}\n{header}"
+
         commands = [
             self.executable,
             "--prompt",
             f"{prompt.title()}: ",
             "--header",
-            self.header,
+            display_header,
             "--header-first",
             "--disabled",  # Disable local filtering - rely on external search command
             "--bind",
             f"change:reload({search_command})",
             "--ansi",
         ]
 
+        # If there's an initial query, set it
+        if initial_query:
+            commands.extend(["--query", initial_query])
+        # Only trigger reload on start if we don't have cached results
+        if not initial_results:
+            commands.extend(["--bind", f"start:reload({search_command})"])
+
         if preview:
             commands.extend(["--preview", preview])
 
+        # Use cached results as initial input if provided (avoids network request)
+        fzf_input = "\n".join(initial_results) if initial_results else ""
+
         result = subprocess.run(
             commands,
-            input="",
+            input=fzf_input,
             stdout=subprocess.PIPE,
             text=True,
             encoding="utf-8",
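# --- Illustrative sketch (not part of the diff): the argv the updated
# search() assembles for a call with cached results. The search_command and
# query values are made up for illustration.
#
# search_command = "viu search --dump-titles {q}"  # hypothetical reload command
# commands = [
#     "fzf",
#     "--prompt", "Anime: ",
#     "--header", "Viu\nRecent results",
#     "--header-first",
#     "--disabled",
#     "--bind", f"change:reload({search_command})",
#     "--ansi",
#     "--query", "frieren",
#     # no start:reload bind - cached initial_results are piped in via stdin
# ]
# print(" ".join(commands))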