Mirror of https://github.com/Benexl/FastAnime.git (synced 2025-12-12 07:40:41 -08:00)
feat: improve text display for dynamic search
@@ -23,33 +23,38 @@ try:
         strip_markdown,
         wrap_text,
     )
 
     ANSI_UTILS_AVAILABLE = True
 except ImportError:
     ANSI_UTILS_AVAILABLE = False
 
     # Fallback if _ansi_utils is not available
     def get_terminal_width():
         return int(os.environ.get("FZF_PREVIEW_COLUMNS", "80"))
 
     def print_rule(sep_color):
         r, g, b = map(int, sep_color.split(","))
         width = get_terminal_width()
         print(f"\x1b[38;2;{r};{g};{b}m" + ("─" * width) + "\x1b[0m")
 
     def print_table_row(key, value, header_color, _key_width, _value_width):
         r, g, b = map(int, header_color.split(","))
         print(f"\x1b[38;2;{r};{g};{b};1m{key}\x1b[0m: {value}")
 
     def strip_markdown(text):
         import re
-        text = re.sub(r'\*\*(.+?)\*\*', r'\1', text)
-        text = re.sub(r'__(.+?)__', r'\1', text)
-        text = re.sub(r'\*(.+?)\*', r'\1', text)
-        text = re.sub(r'_(.+?)_', r'\1', text)
+        text = re.sub(r"\*\*(.+?)\*\*", r"\1", text)
+        text = re.sub(r"__(.+?)__", r"\1", text)
+        text = re.sub(r"\*(.+?)\*", r"\1", text)
+        text = re.sub(r"_(.+?)_", r"\1", text)
         return text
 
     def wrap_text(text, width):
         import textwrap
-        return '\n'.join(textwrap.wrap(text, width))
+        return "\n".join(textwrap.wrap(text, width))
 
 
 # --- Template Variables (Injected by Python) ---
 SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")
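
As a sanity check on the re-quoted regexes, a minimal standalone sketch of the two fallback helpers above (the sample string is invented for illustration):

    import re
    import textwrap

    def strip_markdown(text):
        # Same substitutions as the fallback above: drop bold/italic markers,
        # keeping the inner text via the \1 backreference.
        text = re.sub(r"\*\*(.+?)\*\*", r"\1", text)
        text = re.sub(r"__(.+?)__", r"\1", text)
        text = re.sub(r"\*(.+?)\*", r"\1", text)
        text = re.sub(r"_(.+?)_", r"\1", text)
        return text

    def wrap_text(text, width):
        # textwrap.wrap returns a list of lines; join them for printing.
        return "\n".join(textwrap.wrap(text, width))

    sample = "A **bold** claim and an _italic_ aside."  # invented input
    print(wrap_text(strip_markdown(sample), 20))
    # A bold claim and an
    # italic aside.
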
@@ -76,11 +81,11 @@ def format_date(date_obj):
     """Format date object to string."""
     if not date_obj or date_obj == "null":
         return "N/A"
 
     year = date_obj.get("year")
     month = date_obj.get("month")
     day = date_obj.get("day")
 
     if not year:
         return "N/A"
     if month and day:
@@ -94,22 +99,22 @@ def get_media_from_results(title):
     """Find media item in search results by title."""
     if not SEARCH_RESULTS_FILE.exists():
         return None
 
     try:
         with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
             data = json.load(f)
 
         media_list = data.get("data", {}).get("Page", {}).get("media", [])
 
         for media in media_list:
             title_obj = media.get("title", {})
             eng = title_obj.get("english")
             rom = title_obj.get("romaji")
             nat = title_obj.get("native")
 
             if title in (eng, rom, nat):
                 return media
 
         return None
     except Exception as e:
         print(f"Error reading search results: {e}", file=sys.stderr)
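
The lookup above assumes AniList's data → Page → media response shape in the cache file. A small self-contained sketch of that shape and the same matching logic (the payload is an invented example, not real cached data):

    import json
    from pathlib import Path

    # Invented stand-in for the cached search-results payload.
    sample = {
        "data": {
            "Page": {
                "media": [
                    {
                        "title": {
                            "english": "Cowboy Bebop",
                            "romaji": "Cowboy Bebop",
                            "native": "カウボーイビバップ",
                        }
                    }
                ]
            }
        }
    }

    cache = Path("current_search_results.json")
    cache.write_text(json.dumps(sample), encoding="utf-8")

    data = json.loads(cache.read_text(encoding="utf-8"))
    for media in data.get("data", {}).get("Page", {}).get("media", []):
        t = media.get("title", {})
        # A selected fzf line matches if it equals any of the three variants.
        if "Cowboy Bebop" in (t.get("english"), t.get("romaji"), t.get("native")):
            print("matched:", t.get("romaji"))
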
@@ -121,7 +126,7 @@ def download_image(url: str, output_path: Path) -> bool:
     try:
         # Try using urllib (stdlib)
         from urllib import request
 
         req = request.Request(url, headers={"User-Agent": "viu/1.0"})
         with request.urlopen(req, timeout=5) as response:
             data = response.read()
@@ -141,10 +146,10 @@ def get_terminal_dimensions():
     """Get terminal dimensions from FZF environment."""
     fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
     fzf_lines = os.environ.get("FZF_PREVIEW_LINES")
 
     if fzf_cols and fzf_lines:
         return int(fzf_cols), int(fzf_lines)
 
     try:
         rows, cols = (
             subprocess.check_output(
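
The hunk is cut off inside the subprocess call; a plausible completion of get_terminal_dimensions, assuming the conventional `stty size` fallback (the command and the 80x24 default are assumptions, not confirmed by this diff):

    import os
    import subprocess

    def get_terminal_dimensions():
        # Inside an fzf preview, fzf exports the preview pane size.
        fzf_cols = os.environ.get("FZF_PREVIEW_COLUMNS")
        fzf_lines = os.environ.get("FZF_PREVIEW_LINES")
        if fzf_cols and fzf_lines:
            return int(fzf_cols), int(fzf_lines)

        # Outside fzf, fall back to `stty size`, which prints "rows cols".
        try:
            rows, cols = subprocess.check_output(["stty", "size"]).split()
            return int(cols), int(rows)
        except (subprocess.CalledProcessError, FileNotFoundError, ValueError):
            return 80, 24  # assumed conservative default
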
@@ -313,36 +318,41 @@ def main():
     if not SELECTED_TITLE:
         print("No selection")
         return
 
     # Get the media data from cached search results
     media = get_media_from_results(SELECTED_TITLE)
 
     if not media:
         print("Loading preview...")
         return
 
     term_width = get_terminal_width()
 
     # Extract media information
     title_obj = media.get("title", {})
-    title = title_obj.get("english") or title_obj.get("romaji") or title_obj.get("native") or "Unknown"
+    title = (
+        title_obj.get("english")
+        or title_obj.get("romaji")
+        or title_obj.get("native")
+        or "Unknown"
+    )
 
     # Show image if in image or full mode
     if PREVIEW_MODE in ("image", "full"):
         cover_image = media.get("coverImage", {}).get("large", "")
         if cover_image:
             # Ensure image cache directory exists
             IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)
 
             # Generate hash matching the preview worker pattern
             # Use "anime-" prefix and hash of just the title (no KEY prefix for dynamic search)
             hash_id = f"anime-{sha256(SELECTED_TITLE.encode('utf-8')).hexdigest()}"
             image_file = IMAGE_CACHE_DIR / f"{hash_id}.png"
 
             # Download image if not cached
             if not image_file.exists():
                 download_image(cover_image, image_file)
 
             # Try to render the image
             if image_file.exists():
                 fzf_image_preview(str(image_file))
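
The cache-key scheme is easy to verify in isolation; this sketch mirrors the hash_id construction above (the title and directory are example values):

    from hashlib import sha256
    from pathlib import Path

    selected_title = "Cowboy Bebop"          # example value of SELECTED_TITLE
    image_cache_dir = Path("/tmp/previews")  # stand-in for IMAGE_CACHE_DIR

    # "anime-" prefix plus sha256 of the bare title, so the dynamic-search
    # preview and the preview worker derive the same file name.
    hash_id = f"anime-{sha256(selected_title.encode('utf-8')).hexdigest()}"
    image_file = image_cache_dir / f"{hash_id}.png"
    print(image_file.name)
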
@@ -350,44 +360,44 @@ def main():
             else:
                 print("🖼️ Loading image...")
             print()
 
     # Show text info if in text or full mode
     if PREVIEW_MODE in ("text", "full"):
         # Separator line
         r, g, b = map(int, SEPARATOR_COLOR.split(","))
         separator = f"\x1b[38;2;{r};{g};{b}m" + ("─" * term_width) + "\x1b[0m"
         print(separator, flush=True)
 
         # Title centered
         print(title.center(term_width))
 
         # Extract data
         status = media.get("status", "Unknown")
         format_type = media.get("format", "Unknown")
         episodes = media.get("episodes", "?")
         duration = media.get("duration")
         duration_str = f"{duration} min" if duration else "Unknown"
 
         score = media.get("averageScore")
         score_str = f"{score}/100" if score else "N/A"
 
         favourites = format_number(media.get("favourites", 0))
         popularity = format_number(media.get("popularity", 0))
 
         genres = ", ".join(media.get("genres", [])[:5]) or "Unknown"
 
         start_date = format_date(media.get("startDate"))
         end_date = format_date(media.get("endDate"))
 
         studios_list = media.get("studios", {}).get("nodes", [])
         studios = ", ".join([s.get("name", "") for s in studios_list[:3]]) or "Unknown"
 
         synonyms_list = media.get("synonyms", [])
         synonyms = ", ".join(synonyms_list[:3]) or "N/A"
 
         description = media.get("description", "No description available.")
         description = strip_markdown(description)
 
         # Print sections matching media_info.py structure
         rows = [
             ("Score", score_str),
@@ -395,72 +405,72 @@ def main():
             ("Popularity", popularity),
             ("Status", status),
         ]
 
         print_rule(SEPARATOR_COLOR)
         for key, value in rows:
             if ANSI_UTILS_AVAILABLE:
                 print_table_row(key, value, HEADER_COLOR, 0, 0)
             else:
                 print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
 
         rows = [
             ("Episodes", str(episodes)),
             ("Duration", duration_str),
         ]
 
         print_rule(SEPARATOR_COLOR)
         for key, value in rows:
             if ANSI_UTILS_AVAILABLE:
                 print_table_row(key, value, HEADER_COLOR, 0, 0)
             else:
                 print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
 
         rows = [
             ("Genres", genres),
             ("Format", format_type),
         ]
 
         print_rule(SEPARATOR_COLOR)
         for key, value in rows:
             if ANSI_UTILS_AVAILABLE:
                 print_table_row(key, value, HEADER_COLOR, 0, 0)
             else:
                 print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
 
         rows = [
             ("Start Date", start_date),
             ("End Date", end_date),
         ]
 
         print_rule(SEPARATOR_COLOR)
         for key, value in rows:
             if ANSI_UTILS_AVAILABLE:
                 print_table_row(key, value, HEADER_COLOR, 0, 0)
             else:
                 print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
 
         rows = [
             ("Studios", studios),
         ]
 
         print_rule(SEPARATOR_COLOR)
         for key, value in rows:
             if ANSI_UTILS_AVAILABLE:
                 print_table_row(key, value, HEADER_COLOR, 0, 0)
             else:
                 print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
 
         rows = [
             ("Synonyms", synonyms),
         ]
 
         print_rule(SEPARATOR_COLOR)
         for key, value in rows:
             if ANSI_UTILS_AVAILABLE:
                 print_table_row(key, value, HEADER_COLOR, 0, 0)
             else:
                 print_table_row(key, value, HEADER_COLOR, 15, term_width - 20)
 
         print_rule(SEPARATOR_COLOR)
         print(wrap_text(description, term_width))
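
For reference, print_rule and print_table_row emit 24-bit ANSI SGR sequences; a standalone sketch with an example "r,g,b" string (the color value is illustrative):

    def demo_row(key, value, color="255,170,0"):  # example RGB triple
        r, g, b = map(int, color.split(","))
        # \x1b[38;2;R;G;Bm selects a truecolor foreground; ;1 adds bold;
        # \x1b[0m resets all attributes.
        print(f"\x1b[38;2;{r};{g};{b};1m{key}\x1b[0m: {value}")

    demo_row("Score", "84/100")
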
@@ -31,40 +31,36 @@ if not QUERY.strip():
     sys.exit(0)
 
 
-def make_graphql_request(endpoint: str, query: str, variables: dict, auth_token: str = "") -> dict | None:
+def make_graphql_request(
+    endpoint: str, query: str, variables: dict, auth_token: str = ""
+) -> dict | None:
     """
     Make a GraphQL request to the specified endpoint.
 
     Args:
         endpoint: GraphQL API endpoint URL
         query: GraphQL query string
         variables: Query variables as a dictionary
         auth_token: Optional authorization token (Bearer token)
 
     Returns:
         Response JSON as a dictionary, or None if request fails
     """
-    payload = {
-        "query": query,
-        "variables": variables
-    }
-
-    headers = {
-        "Content-Type": "application/json",
-        "User-Agent": "viu/1.0"
-    }
+    payload = {"query": query, "variables": variables}
+
+    headers = {"Content-Type": "application/json", "User-Agent": "viu/1.0"}
 
     if auth_token:
         headers["Authorization"] = auth_token
 
     try:
         req = request.Request(
             endpoint,
             data=json.dumps(payload).encode("utf-8"),
             headers=headers,
-            method="POST"
+            method="POST",
         )
 
         with request.urlopen(req, timeout=10) as response:
             return json.loads(response.read().decode("utf-8"))
     except (URLError, json.JSONDecodeError, Exception) as e:
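
A usage sketch for the reformatted helper, reusing make_graphql_request as defined above. The endpoint and query are illustrative assumptions: AniList's public endpoint is https://graphql.anilist.co, but the script's GRAPHQL_ENDPOINT and GRAPHQL_QUERY are injected as template variables and are not shown in this diff:

    ENDPOINT = "https://graphql.anilist.co"  # assumed endpoint for illustration
    SEARCH_QUERY = """
    query ($query: String, $type: MediaType, $per_page: Int) {
      Page(perPage: $per_page) {
        media(search: $query, type: $type) {
          title { english romaji native }
        }
      }
    }
    """

    result = make_graphql_request(
        ENDPOINT, SEARCH_QUERY, {"query": "bebop", "type": "ANIME", "per_page": 5}
    )
    if result and "errors" not in result:
        for media in result["data"]["Page"]["media"]:
            print(media["title"]["romaji"])
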
@@ -75,46 +71,43 @@ def make_graphql_request(endpoint: str, query: str, variables: dict, auth_token:
 def extract_title(media_item: dict) -> str:
     """
     Extract the best available title from a media item.
 
     Args:
         media_item: Media object from GraphQL response
 
     Returns:
         Title string (english > romaji > native > "Unknown")
     """
     title_obj = media_item.get("title", {})
     return (
-        title_obj.get("english") or
-        title_obj.get("romaji") or
-        title_obj.get("native") or
-        "Unknown"
+        title_obj.get("english")
+        or title_obj.get("romaji")
+        or title_obj.get("native")
+        or "Unknown"
     )
 
 
 def main():
     # Ensure parent directory exists
     SEARCH_RESULTS_FILE.parent.mkdir(parents=True, exist_ok=True)
 
     # Create GraphQL variables
     variables = {
         "query": QUERY,
         "type": "ANIME",
         "per_page": 50,
-        "genre_not_in": ["Hentai"]
+        "genre_not_in": ["Hentai"],
     }
 
     # Make the GraphQL request
     response = make_graphql_request(
-        GRAPHQL_ENDPOINT,
-        GRAPHQL_QUERY,
-        variables,
-        AUTH_HEADER
+        GRAPHQL_ENDPOINT, GRAPHQL_QUERY, variables, AUTH_HEADER
     )
 
     if response is None:
         print("❌ Search failed")
         sys.exit(1)
 
     # Save the raw response for later processing by dynamic_search.py
     try:
         with open(SEARCH_RESULTS_FILE, "w", encoding="utf-8") as f:

@@ -122,21 +115,21 @@ def main():
     except IOError as e:
         print(f"❌ Failed to save results: {e}", file=sys.stderr)
         sys.exit(1)
 
     # Parse and display results
     if "errors" in response:
         print(f"❌ Search error: {response['errors']}")
         sys.exit(1)
 
     # Navigate the response structure
     data = response.get("data", {})
     page = data.get("Page", {})
     media_list = page.get("media", [])
 
     if not media_list:
         print("❌ No results found")
         sys.exit(0)
 
     # Output titles for fzf (one per line)
     for media in media_list:
         title = extract_title(media)
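
The reordered or-chain keeps the same precedence, english > romaji > native > "Unknown"; AniList returns null for missing title variants, so each None simply falls through. A tiny check against the function above:

    print(extract_title({"title": {"english": None, "romaji": "Shingeki no Kyojin"}}))
    # Shingeki no Kyojin -- None from "english" falls through the or-chain
    print(extract_title({}))
    # Unknown
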
@@ -12,9 +12,7 @@ logger = logging.getLogger(__name__)
 SEARCH_CACHE_DIR = APP_CACHE_DIR / "search"
 SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json"
 FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
-SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(
-    encoding="utf-8"
-)
+SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.py").read_text(encoding="utf-8")
 
 
 @session.menu
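
SEARCH_TEMPLATE_SCRIPT holds the raw text of fzf/search.py, whose placeholders (such as {SEARCH_RESULTS_FILE} above) are filled in before the script runs. A hedged sketch of that substitution; the replace call and the cache path are assumptions for illustration, not the project's actual injection code:

    from pathlib import Path

    # Excerpt of the template as it appears in this diff.
    template = 'SEARCH_RESULTS_FILE = Path("{SEARCH_RESULTS_FILE}")\n'

    # Hypothetical substitution step; the real mechanism is not shown here.
    results_path = Path.home() / ".cache" / "search" / "current_search_results.json"
    rendered = template.replace("{SEARCH_RESULTS_FILE}", str(results_path))
    print(rendered)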