feat: dynamic search
fastanime/assets/scripts/fzf/dynamic_preview.template.sh (new file, 304 lines)
@@ -0,0 +1,304 @@
#!/bin/bash
#
# FZF Dynamic Preview Script Template
#
# This script handles previews for dynamic search results by parsing the JSON
# search results file and extracting info for the selected item.
# The placeholders in curly braces are dynamically filled by Python using .replace()

WIDTH=${FZF_PREVIEW_COLUMNS:-80}
IMAGE_RENDERER="{IMAGE_RENDERER}"
SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
IMAGE_CACHE_PATH="{IMAGE_CACHE_PATH}"
INFO_CACHE_PATH="{INFO_CACHE_PATH}"
PATH_SEP="{PATH_SEP}"

# Color codes injected by Python
C_TITLE="{C_TITLE}"
C_KEY="{C_KEY}"
C_VALUE="{C_VALUE}"
C_RULE="{C_RULE}"
RESET="{RESET}"

# Selected item from fzf
SELECTED_ITEM="$1"

generate_sha256() {
  local input="$1"
  if command -v sha256sum &>/dev/null; then
    echo -n "$input" | sha256sum | awk '{print $1}'
  elif command -v shasum &>/dev/null; then
    echo -n "$input" | shasum -a 256 | awk '{print $1}'
  elif command -v sha256 &>/dev/null; then
    echo -n "$input" | sha256 | awk '{print $1}'
  elif command -v openssl &>/dev/null; then
    echo -n "$input" | openssl dgst -sha256 | awk '{print $2}'
  else
    echo -n "$input" | base64 | tr '/+' '_-' | tr -d '\n'
  fi
}
fzf_preview() {
  file=$1
  dim=${FZF_PREVIEW_COLUMNS}x${FZF_PREVIEW_LINES}
  if [ "$dim" = x ]; then
    dim=$(stty size </dev/tty | awk "{print \$2 \"x\" \$1}")
  fi
  if ! [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$KITTY_WINDOW_ID" ] && [ "$((FZF_PREVIEW_TOP + FZF_PREVIEW_LINES))" -eq "$(stty size </dev/tty | awk "{print \$1}")" ]; then
    dim=${FZF_PREVIEW_COLUMNS}x$((FZF_PREVIEW_LINES - 1))
  fi

  if [ "$IMAGE_RENDERER" = "icat" ] && [ -z "$GHOSTTY_BIN_DIR" ]; then
    if command -v kitten >/dev/null 2>&1; then
      kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    elif command -v icat >/dev/null 2>&1; then
      icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    else
      kitty icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    fi
  elif [ -n "$GHOSTTY_BIN_DIR" ]; then
    if command -v kitten >/dev/null 2>&1; then
      kitten icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    elif command -v icat >/dev/null 2>&1; then
      icat --clear --transfer-mode=memory --unicode-placeholder --stdin=no --place="$dim@0x0" "$file" | sed "\$d" | sed "$(printf "\$s/\$/\033[m/")"
    else
      chafa -s "$dim" "$file"
    fi
  elif command -v chafa >/dev/null 2>&1; then
    case "$PLATFORM" in
    android) chafa -s "$dim" "$file" ;;
    windows) chafa -f sixel -s "$dim" "$file" ;;
    *) chafa -s "$dim" "$file" ;;
    esac
    echo
  elif command -v imgcat >/dev/null; then
    imgcat -W "${dim%%x*}" -H "${dim##*x}" "$file"
  else
    echo "Please install a terminal image viewer:"
    echo "either icat (for kitty and wezterm), imgcat, or chafa"
  fi
}

print_kv() {
  local key="$1"
  local value="$2"
  local key_len=${#key}
  local value_len=${#value}
  local multiplier="${3:-1}"

  local padding_len=$((WIDTH - key_len - 2 - value_len * multiplier))

  if [ "$padding_len" -lt 1 ]; then
    padding_len=1
    value=$(echo "$value" | fold -s -w "$((WIDTH - key_len - 3))")
    printf "${C_KEY}%s:${RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
  else
    printf "${C_KEY}%s:${RESET}%*s%s\\n" "$key" "$padding_len" "" " $value"
  fi
}

draw_rule() {
  ll=2
  while [ $ll -le $FZF_PREVIEW_COLUMNS ]; do
    echo -n -e "${C_RULE}─${RESET}"
    ((ll++))
  done
  echo
}

clean_html() {
  echo "$1" | sed 's/<[^>]*>//g' | sed 's/&lt;/</g' | sed 's/&gt;/>/g' | sed 's/&amp;/\&/g' | sed 's/&quot;/"/g' | sed "s/&#039;/'/g"
}

format_date() {
  local date_obj="$1"
  if [ "$date_obj" = "null" ] || [ -z "$date_obj" ]; then
    echo "N/A"
    return
  fi

  # Extract year, month, day from the date object
  if command -v jq >/dev/null 2>&1; then
    year=$(echo "$date_obj" | jq -r '.year // "N/A"' 2>/dev/null || echo "N/A")
    month=$(echo "$date_obj" | jq -r '.month // ""' 2>/dev/null || echo "")
    day=$(echo "$date_obj" | jq -r '.day // ""' 2>/dev/null || echo "")
  else
    year=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('year', 'N/A'))" 2>/dev/null || echo "N/A")
    month=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('month', ''))" 2>/dev/null || echo "")
    day=$(echo "$date_obj" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('day', ''))" 2>/dev/null || echo "")
  fi

  if [ "$year" = "N/A" ] || [ "$year" = "null" ]; then
    echo "N/A"
  elif [ -n "$month" ] && [ "$month" != "null" ] && [ -n "$day" ] && [ "$day" != "null" ]; then
    echo "$day/$month/$year"
  elif [ -n "$month" ] && [ "$month" != "null" ]; then
    echo "$month/$year"
  else
    echo "$year"
  fi
}

# If no selection or search results file doesn't exist, show placeholder
if [ -z "$SELECTED_ITEM" ] || [ ! -f "$SEARCH_RESULTS_FILE" ]; then
  echo "${C_TITLE}Dynamic Search Preview${RESET}"
  draw_rule
  echo "Type to search for anime..."
  echo "Results will appear here as you type."
  exit 0
fi

# Parse the search results JSON and find the matching item
if command -v jq >/dev/null 2>&1; then
  # Use jq for faster and more reliable JSON parsing
  MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | jq --arg selected "$SELECTED_ITEM" '
    .data.Page.media[]? |
    select(
      ((.title.english // .title.romaji // .title.native // "Unknown") +
      " (" + (.startDate.year // "Unknown" | tostring) + ") " +
      "[" + (.status // "Unknown") + "] - " +
      ((.genres[:3] // []) | join(", ") | if . == "" then "Unknown" else . end)
      ) == $selected
    )
  ' 2>/dev/null)
else
  # Fallback to Python for JSON parsing
  MEDIA_DATA=$(cat "$SEARCH_RESULTS_FILE" | python3 -c "
import json
import sys

try:
    data = json.load(sys.stdin)
    selected_item = '''$SELECTED_ITEM'''

    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
        sys.exit(1)

    media_list = data['data']['Page']['media']

    for media in media_list:
        title = media.get('title', {})
        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
        status = media.get('status', 'Unknown')
        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'

        display_format = f'{english_title} ({year}) [{status}] - {genres}'

        if selected_item.strip() == display_format.strip():
            json.dump(media, sys.stdout, indent=2)
            sys.exit(0)

    sys.exit(1)
except Exception as e:
    print(f'Error: {e}', file=sys.stderr)
    sys.exit(1)
" 2>/dev/null)
fi

# If we couldn't find the media data, show error
if [ $? -ne 0 ] || [ -z "$MEDIA_DATA" ]; then
  echo "${C_TITLE}Preview Error${RESET}"
  draw_rule
  echo "Could not load preview data for:"
  echo "$SELECTED_ITEM"
  exit 0
fi

# Extract information from the media data
if command -v jq >/dev/null 2>&1; then
  # Use jq for faster extraction
  TITLE=$(echo "$MEDIA_DATA" | jq -r '.title.english // .title.romaji // .title.native // "Unknown"' 2>/dev/null || echo "Unknown")
  STATUS=$(echo "$MEDIA_DATA" | jq -r '.status // "Unknown"' 2>/dev/null || echo "Unknown")
  FORMAT=$(echo "$MEDIA_DATA" | jq -r '.format // "Unknown"' 2>/dev/null || echo "Unknown")
  EPISODES=$(echo "$MEDIA_DATA" | jq -r '.episodes // "Unknown"' 2>/dev/null || echo "Unknown")
  DURATION=$(echo "$MEDIA_DATA" | jq -r 'if .duration then "\(.duration) min" else "Unknown" end' 2>/dev/null || echo "Unknown")
  SCORE=$(echo "$MEDIA_DATA" | jq -r 'if .averageScore then "\(.averageScore)/100" else "N/A" end' 2>/dev/null || echo "N/A")
  FAVOURITES=$(echo "$MEDIA_DATA" | jq -r '.favourites // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
  POPULARITY=$(echo "$MEDIA_DATA" | jq -r '.popularity // 0' 2>/dev/null | sed ':a;s/\B[0-9]\{3\}\>/,&/;ta' || echo "0")
  GENRES=$(echo "$MEDIA_DATA" | jq -r '(.genres[:5] // []) | join(", ") | if . == "" then "Unknown" else . end' 2>/dev/null || echo "Unknown")
  DESCRIPTION=$(echo "$MEDIA_DATA" | jq -r '.description // "No description available."' 2>/dev/null || echo "No description available.")

  # Get start and end dates as JSON objects
  START_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.startDate' 2>/dev/null || echo "null")
  END_DATE_OBJ=$(echo "$MEDIA_DATA" | jq -c '.endDate' 2>/dev/null || echo "null")

  # Get cover image URL
  COVER_IMAGE=$(echo "$MEDIA_DATA" | jq -r '.coverImage.large // ""' 2>/dev/null || echo "")
else
  # Fallback to Python for extraction
  TITLE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); title=data.get('title',{}); print(title.get('english') or title.get('romaji') or title.get('native', 'Unknown'))" 2>/dev/null || echo "Unknown")
  STATUS=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('status', 'Unknown'))" 2>/dev/null || echo "Unknown")
  FORMAT=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('format', 'Unknown'))" 2>/dev/null || echo "Unknown")
  EPISODES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('episodes', 'Unknown'))" 2>/dev/null || echo "Unknown")
  DURATION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); duration=data.get('duration'); print(f'{duration} min' if duration else 'Unknown')" 2>/dev/null || echo "Unknown")
  SCORE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); score=data.get('averageScore'); print(f'{score}/100' if score else 'N/A')" 2>/dev/null || echo "N/A")
  FAVOURITES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('favourites', 0):,}\")" 2>/dev/null || echo "0")
  POPULARITY=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(f\"{data.get('popularity', 0):,}\")" 2>/dev/null || echo "0")
  GENRES=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(', '.join(data.get('genres', [])[:5]))" 2>/dev/null || echo "Unknown")
  DESCRIPTION=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); print(data.get('description', 'No description available.'))" 2>/dev/null || echo "No description available.")

  # Get start and end dates
  START_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('startDate'), sys.stdout)" 2>/dev/null || echo "null")
  END_DATE_OBJ=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); json.dump(data.get('endDate'), sys.stdout)" 2>/dev/null || echo "null")

  # Get cover image URL
  COVER_IMAGE=$(echo "$MEDIA_DATA" | python3 -c "import json, sys; data=json.load(sys.stdin); cover=data.get('coverImage',{}); print(cover.get('large', ''))" 2>/dev/null || echo "")
fi

# Format the dates
START_DATE=$(format_date "$START_DATE_OBJ")
END_DATE=$(format_date "$END_DATE_OBJ")

# Generate cache hash for this item
CACHE_HASH=$(generate_sha256 "dynamic_search_$TITLE")

# Try to show image if available
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "image" ]; then
  image_file="${IMAGE_CACHE_PATH}${PATH_SEP}${CACHE_HASH}.png"

  # If image not cached and we have a URL, try to download it quickly
  if [ ! -f "$image_file" ] && [ -n "$COVER_IMAGE" ]; then
    if command -v curl >/dev/null 2>&1; then
      # Quick download with timeout
      curl -s -m 3 -L "$COVER_IMAGE" -o "$image_file" 2>/dev/null || rm -f "$image_file" 2>/dev/null
    fi
  fi

  if [ -f "$image_file" ]; then
    fzf_preview "$image_file"
  else
    echo "🖼️ Loading image..."
  fi
  echo
fi

# Display text info if configured
if [ "{PREVIEW_MODE}" = "full" ] || [ "{PREVIEW_MODE}" = "text" ]; then
  draw_rule
  print_kv "Title" "$TITLE"
  draw_rule

  print_kv "Score" "$SCORE"
  print_kv "Favourites" "$FAVOURITES"
  print_kv "Popularity" "$POPULARITY"
  print_kv "Status" "$STATUS"

  draw_rule

  print_kv "Episodes" "$EPISODES"
  print_kv "Duration" "$DURATION"
  print_kv "Format" "$FORMAT"

  draw_rule

  print_kv "Genres" "$GENRES"
  print_kv "Start Date" "$START_DATE"
  print_kv "End Date" "$END_DATE"

  draw_rule

  # Clean and display description
  CLEAN_DESCRIPTION=$(clean_html "$DESCRIPTION")
  echo "$CLEAN_DESCRIPTION" | fold -s -w "$WIDTH"
fi
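
Note: the jq filter above and the menu code later in this commit rebuild the same display string when matching the selected fzf line. A minimal Python sketch of that format follows; the helper name and the example values are illustrative, not part of the commit.

# Illustrative sketch of the "Title (Year) [Status] - Genres" line shown in fzf
def display_line(media: dict) -> str:
    title = media.get("title") or {}
    name = title.get("english") or title.get("romaji") or title.get("native") or "Unknown"
    year = (media.get("startDate") or {}).get("year") or "Unknown"
    status = media.get("status") or "Unknown"
    genres = ", ".join((media.get("genres") or [])[:3]) or "Unknown"
    return f"{name} ({year}) [{status}] - {genres}"

print(display_line({
    "title": {"romaji": "Cowboy Bebop"},
    "startDate": {"year": 1998},
    "status": "FINISHED",
    "genres": ["Action", "Adventure", "Drama"],
}))
# -> Cowboy Bebop (1998) [FINISHED] - Action, Adventure, Drama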
@@ -1,77 +1,122 @@
-fetch_anime_for_fzf() {
-  local search_term="$1"
-  if [ -z "$search_term" ]; then exit 0; fi
+#!/bin/bash
+#
+# FZF Dynamic Search Script Template
+#
+# This script is a template for dynamic search functionality in fzf.
+# The placeholders in curly braces, like {QUERY}, are dynamically filled by Python using .replace()

-  local query='
-  query ($search: String) {
-    Page(page: 1, perPage: 25) {
-      media(search: $search, type: ANIME, sort: [SEARCH_MATCH]) {
-        id
-        title { romaji english }
-        meanScore
-        format
-        status
-      }
-    }
-  }
-  '
+# Configuration variables (injected by Python)
+GRAPHQL_ENDPOINT="{GRAPHQL_ENDPOINT}"
+CACHE_DIR="{CACHE_DIR}"
+SEARCH_RESULTS_FILE="{SEARCH_RESULTS_FILE}"
+AUTH_HEADER="{AUTH_HEADER}"

-  local json_payload
-  json_payload=$(jq -n --arg query "$query" --arg search "$search_term" \
-    '{query: $query, variables: {search: $search}}')
+# Get the current query from fzf
+QUERY="$1"

-  curl --silent \
-    --header "Content-Type: application/json" \
-    --header "Accept: application/json" \
-    --request POST \
-    --data "$json_payload" \
-    https://graphql.anilist.co |
-    jq -r '.data.Page.media[]? | select(.title.romaji) |
-      "\(.title.english // .title.romaji) | Score: \(.meanScore // "N/A") | ID: \(.id)"'
+# If query is empty, exit with empty results
+if [ -z "$QUERY" ]; then
+  echo ""
+  exit 0
+fi

+# Create GraphQL variables
+VARIABLES=$(cat <<EOF
+{
+  "query": "$QUERY",
+  "type": "ANIME",
+  "per_page": 50,
+  "genre_not_in": ["Hentai"]
+}
-fetch_anime_details() {
-  local anime_id
-  anime_id=$(echo "$1" | sed -n 's/.*ID: \([0-9]*\).*/\1/p')
-  if [ -z "$anime_id" ]; then
-    echo "Select an item to see details..."
-    return
-  fi
+EOF
+)

-  local query='
-  query ($id: Int) {
-    Media(id: $id, type: ANIME) {
-      title { romaji english }
-      description(asHtml: false)
-      genres
-      meanScore
-      episodes
-      status
-      format
-      season
-      seasonYear
-      studios(isMain: true) { nodes { name } }
-    }
-  }
-  '
-  local json_payload
-  json_payload=$(jq -n --arg query "$query" --argjson id "$anime_id" \
-    '{query: $query, variables: {id: $id}}')
+# The GraphQL query is injected here as a properly escaped string
+GRAPHQL_QUERY='{GRAPHQL_QUERY}'

-  # Fetch and format details for the preview window
-  curl --silent \
-    --header "Content-Type: application/json" \
-    --header "Accept: application/json" \
-    --request POST \
-    --data "$json_payload" \
-    https://graphql.anilist.co |
-    jq -r '
-      .data.Media |
-      "Title: \(.title.english // .title.romaji)\n" +
-      "Score: \(.meanScore // "N/A") | Episodes: \(.episodes // "N/A")\n" +
-      "Status: \(.status // "N/A") | Format: \(.format // "N/A")\n" +
-      "Season: \(.season // "N/A") \(.seasonYear // "")\n" +
-      "Genres: \(.genres | join(", "))\n" +
-      "Studio: \(.studios.nodes[0].name // "N/A")\n\n" +
-      "\(.description | gsub("<br><br>"; "\n\n") | gsub("<[^>]*>"; "") | gsub("&quot;"; "\""))"
-    '
+# Create the GraphQL request payload
+PAYLOAD=$(cat <<EOF
+{
+  "query": $GRAPHQL_QUERY,
+  "variables": $VARIABLES
+}
+EOF
+)

+# Make the GraphQL request and save raw results
+if [ -n "$AUTH_HEADER" ]; then
+  RESPONSE=$(curl -s -X POST \
+    -H "Content-Type: application/json" \
+    -H "Authorization: $AUTH_HEADER" \
+    -d "$PAYLOAD" \
+    "$GRAPHQL_ENDPOINT")
+else
+  RESPONSE=$(curl -s -X POST \
+    -H "Content-Type: application/json" \
+    -d "$PAYLOAD" \
+    "$GRAPHQL_ENDPOINT")
+fi

+# Check if the request was successful
+if [ $? -ne 0 ] || [ -z "$RESPONSE" ]; then
+  echo "❌ Search failed"
+  exit 1
+fi

+# Save the raw response for later processing
+echo "$RESPONSE" > "$SEARCH_RESULTS_FILE"

+# Parse and display results
+if command -v jq >/dev/null 2>&1; then
+  # Use jq for faster and more reliable JSON parsing
+  echo "$RESPONSE" | jq -r '
+    if .errors then
+      "❌ Search error: " + (.errors | tostring)
+    elif (.data.Page.media // []) | length == 0 then
+      "❌ No results found"
+    else
+      .data.Page.media[] |
+      ((.title.english // .title.romaji // .title.native // "Unknown") +
+      " (" + (.startDate.year // "Unknown" | tostring) + ") " +
+      "[" + (.status // "Unknown") + "] - " +
+      ((.genres[:3] // []) | join(", ") | if . == "" then "Unknown" else . end))
+    end
+  ' 2>/dev/null || echo "❌ Parse error"
+else
+  # Fallback to Python for JSON parsing
+  echo "$RESPONSE" | python3 -c "
+import json
+import sys
+
+try:
+    data = json.load(sys.stdin)
+
+    if 'errors' in data:
+        print('❌ Search error: ' + str(data['errors']))
+        sys.exit(1)
+
+    if 'data' not in data or 'Page' not in data['data'] or 'media' not in data['data']['Page']:
+        print('❌ No results found')
+        sys.exit(0)
+
+    media_list = data['data']['Page']['media']
+
+    if not media_list:
+        print('❌ No results found')
+        sys.exit(0)
+
+    for media in media_list:
+        title = media.get('title', {})
+        english_title = title.get('english') or title.get('romaji') or title.get('native', 'Unknown')
+        year = media.get('startDate', {}).get('year', 'Unknown') if media.get('startDate') else 'Unknown'
+        status = media.get('status', 'Unknown')
+        genres = ', '.join(media.get('genres', [])[:3]) or 'Unknown'
+
+        # Format: Title (Year) [Status] - Genres
+        print(f'{english_title} ({year}) [{status}] - {genres}')
+
+except Exception as e:
+    print(f'❌ Parse error: {str(e)}')
+    sys.exit(1)
+"
+fi
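
For reference, a rough Python equivalent of the request the new template's curl command sends. SEARCH_MEDIA_QUERY below is only a simplified stand-in for the real query text injected as {GRAPHQL_QUERY}; the variable names are taken from the VARIABLES heredoc above, everything else is illustrative.

# Hedged sketch of the same GraphQL request, using httpx (already a project dependency)
from httpx import Client

SEARCH_MEDIA_QUERY = """
query ($query: String, $type: MediaType, $per_page: Int, $genre_not_in: [String]) {
  Page(perPage: $per_page) {
    media(search: $query, type: $type, genre_not_in: $genre_not_in) {
      id
      title { romaji english }
      status
      startDate { year }
      genres
    }
  }
}
"""  # placeholder; the real query lives in the project's gql assets

payload = {
    "query": SEARCH_MEDIA_QUERY,
    "variables": {"query": "one piece", "type": "ANIME", "per_page": 50, "genre_not_in": ["Hentai"]},
}

with Client() as client:
    response = client.post("https://graphql.anilist.co", json=payload)
    for media in response.json()["data"]["Page"]["media"]:
        print(media["title"]["romaji"])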
fastanime/cli/interactive/menu/media/dynamic_search.py (new file, 150 lines)
@@ -0,0 +1,150 @@
import json
import logging
import os
import tempfile
from pathlib import Path

from .....core.constants import APP_CACHE_DIR, SCRIPTS_DIR
from .....libs.media_api.params import MediaSearchParams
from ...session import Context, session
from ...state import InternalDirective, MediaApiState, MenuName, State

logger = logging.getLogger(__name__)

SEARCH_CACHE_DIR = APP_CACHE_DIR / "search"
SEARCH_RESULTS_FILE = SEARCH_CACHE_DIR / "current_search_results.json"
FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
SEARCH_TEMPLATE_SCRIPT = (FZF_SCRIPTS_DIR / "search.template.sh").read_text(
    encoding="utf-8"
)


@session.menu
def dynamic_search(ctx: Context, state: State) -> State | InternalDirective:
    """Dynamic search menu that provides real-time search results."""
    feedback = ctx.service.feedback
    feedback.clear_console()

    # Ensure cache directory exists
    SEARCH_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    # Read the GraphQL search query
    from .....libs.media_api.anilist import gql

    search_query = gql.SEARCH_MEDIA.read_text(encoding="utf-8")
    # Properly escape the GraphQL query for JSON
    search_query_escaped = json.dumps(search_query)

    # Prepare the search script
    auth_header = ""
    if ctx.media_api.is_authenticated() and hasattr(ctx.media_api, "token"):
        auth_header = f"Bearer {ctx.media_api.token}"

    # Create a temporary search script
    with tempfile.NamedTemporaryFile(mode="w", suffix=".sh", delete=False) as temp_script:
        script_content = SEARCH_TEMPLATE_SCRIPT

        replacements = {
            "GRAPHQL_ENDPOINT": "https://graphql.anilist.co",
            "GRAPHQL_QUERY": search_query_escaped,
            "CACHE_DIR": str(SEARCH_CACHE_DIR),
            "SEARCH_RESULTS_FILE": str(SEARCH_RESULTS_FILE),
            "AUTH_HEADER": auth_header,
        }

        for key, value in replacements.items():
            script_content = script_content.replace(f"{{{key}}}", str(value))

        temp_script.write(script_content)
        temp_script_path = temp_script.name

    try:
        # Make the script executable
        os.chmod(temp_script_path, 0o755)

        # Use the selector's search functionality
        try:
            # Prepare preview functionality
            preview_command = None
            if ctx.config.general.preview != "none":
                from ....utils.preview import create_preview_context

                with create_preview_context() as preview_ctx:
                    preview_command = preview_ctx.get_dynamic_anime_preview(ctx.config)

                    choice = ctx.selector.search(
                        prompt="Search Anime",
                        search_command=f"bash {temp_script_path} {{q}}",
                        preview=preview_command,
                        header="Type to search for anime dynamically",
                    )
            else:
                choice = ctx.selector.search(
                    prompt="Search Anime",
                    search_command=f"bash {temp_script_path} {{q}}",
                    header="Type to search for anime dynamically",
                )
        except NotImplementedError:
            feedback.error("Dynamic search is not supported by your current selector")
            feedback.info("Please use the regular search option or switch to the fzf selector")
            return InternalDirective.MAIN

        if not choice:
            return InternalDirective.MAIN

        # Read the cached search results
        if not SEARCH_RESULTS_FILE.exists():
            logger.error("Search results file not found")
            return InternalDirective.MAIN

        try:
            with open(SEARCH_RESULTS_FILE, "r", encoding="utf-8") as f:
                raw_data = json.load(f)

            # Transform the raw data into MediaSearchResult
            search_result = ctx.media_api.transform_raw_search_data(raw_data)

            if not search_result or not search_result.media:
                feedback.info("No results found")
                return InternalDirective.MAIN

            # Find the selected media item by matching the choice with the displayed format
            selected_media = None
            for media_item in search_result.media:
                title = media_item.title.english or media_item.title.romaji or media_item.title.native or "Unknown"
                year = media_item.start_date.year if media_item.start_date else "Unknown"
                status = media_item.status.value if media_item.status else "Unknown"
                genres = ", ".join([genre.value for genre in media_item.genres[:3]]) if media_item.genres else "Unknown"

                display_format = f"{title} ({year}) [{status}] - {genres}"

                if choice.strip() == display_format.strip():
                    selected_media = media_item
                    break

            if not selected_media:
                logger.error(f"Could not find selected media for choice: {choice}")
                return InternalDirective.MAIN

            # Navigate to media actions with the selected item
            return State(
                menu_name=MenuName.MEDIA_ACTIONS,
                media_api=MediaApiState(
                    search_result={selected_media.id: selected_media},
                    media_id=selected_media.id,
                    search_params=MediaSearchParams(),
                    page_info=search_result.page_info,
                ),
            )

        except (json.JSONDecodeError, KeyError, Exception) as e:
            logger.error(f"Error processing search results: {e}")
            feedback.error("Failed to process search results")
            return InternalDirective.MAIN

    finally:
        # Clean up the temporary script
        try:
            os.unlink(temp_script_path)
        except OSError:
            pass
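
A note on the reload wiring above: the doubled braces in the f-string leave a literal {q} in the command, which fzf later expands to the current query on every keystroke. A small sketch with a made-up temp path:

# Sketch only; the temp path is hypothetical, {q} is fzf's own query placeholder
temp_script_path = "/tmp/tmpabc123.sh"
search_command = f"bash {temp_script_path} {{q}}"
assert search_command == "bash /tmp/tmpabc123.sh {q}"
# fzf then runs roughly: --bind "change:reload(bash /tmp/tmpabc123.sh {q})",
# so every keystroke re-executes the rendered script with the new query as $1.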
@@ -39,6 +39,7 @@ def main(ctx: Context, state: State) -> State | InternalDirective:
                ctx, state, UserMediaListStatus.PLANNING
            ),
            f"{'🔎 ' if icons else ''}Search": _create_search_media_list(ctx, state),
            f"{'🔍 ' if icons else ''}Dynamic Search": _create_dynamic_search_action(ctx, state),
            f"{'🏠 ' if icons else ''}Downloads": _create_downloads_action(ctx, state),
            f"{'🔔 ' if icons else ''}Recently Updated": _create_media_list_action(
                ctx, state, MediaSort.UPDATED_AT_DESC
@@ -228,3 +229,12 @@ def _create_downloads_action(ctx: Context, state: State) -> MenuAction:
        return State(menu_name=MenuName.DOWNLOADS)

    return action


def _create_dynamic_search_action(ctx: Context, state: State) -> MenuAction:
    """Create action to navigate to the dynamic search menu."""

    def action():
        return State(menu_name=MenuName.DYNAMIC_SEARCH)

    return action

@@ -40,6 +40,7 @@ class MenuName(Enum):
    SESSION_MANAGEMENT = "SESSION_MANAGEMENT"
    MEDIA_ACTIONS = "MEDIA_ACTIONS"
    DOWNLOADS = "DOWNLOADS"
    DYNAMIC_SEARCH = "DYNAMIC_SEARCH"


class StateModel(BaseModel):

@@ -21,6 +21,9 @@ FZF_SCRIPTS_DIR = SCRIPTS_DIR / "fzf"
TEMPLATE_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "preview.template.sh").read_text(
    encoding="utf-8"
)
DYNAMIC_PREVIEW_SCRIPT = (FZF_SCRIPTS_DIR / "dynamic_preview.template.sh").read_text(
    encoding="utf-8"
)

EPISODE_PATTERN = re.compile(r"^Episode\s+(\d+)\s-\s.*")

@@ -79,6 +82,12 @@ class PreviewContext:
            self._manager = _get_preview_manager()
        return get_episode_preview(episodes, media_item, config)

    def get_dynamic_anime_preview(self, config: AppConfig) -> str:
        """Get dynamic anime preview script for search functionality."""
        if not self._manager:
            self._manager = _get_preview_manager()
        return get_dynamic_anime_preview(config)

    def cancel_all_tasks(self) -> int:
        """Cancel all running preview tasks."""
        if not self._manager:
@@ -213,6 +222,61 @@ def get_episode_preview(
    return preview_script


def get_dynamic_anime_preview(config: AppConfig) -> str:
    """
    Generate dynamic anime preview script for search functionality.

    This is different from the regular anime preview because:
    1. We don't have media items upfront
    2. The preview needs to work with search results as they come in
    3. Preview is handled entirely in shell by parsing JSON results

    Args:
        config: Application configuration

    Returns:
        Preview script content for fzf dynamic search
    """
    # Ensure cache directories exist
    IMAGES_CACHE_DIR.mkdir(parents=True, exist_ok=True)
    INFO_CACHE_DIR.mkdir(parents=True, exist_ok=True)

    HEADER_COLOR = config.fzf.preview_header_color.split(",")
    SEPARATOR_COLOR = config.fzf.preview_separator_color.split(",")

    # Use the dynamic preview script template
    preview_script = DYNAMIC_PREVIEW_SCRIPT

    # We need to return the path to the search results file
    from ...core.constants import APP_CACHE_DIR

    search_cache_dir = APP_CACHE_DIR / "search"
    search_results_file = search_cache_dir / "current_search_results.json"

    # Prepare values to inject into the template
    path_sep = "\\" if PLATFORM == "win32" else "/"

    # Format the template with the dynamic values
    replacements = {
        "PREVIEW_MODE": config.general.preview,
        "IMAGE_CACHE_PATH": str(IMAGES_CACHE_DIR),
        "INFO_CACHE_PATH": str(INFO_CACHE_DIR),
        "PATH_SEP": path_sep,
        "IMAGE_RENDERER": config.general.image_renderer,
        "SEARCH_RESULTS_FILE": str(search_results_file),
        # Color codes
        "C_TITLE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_KEY": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_VALUE": ansi.get_true_fg(HEADER_COLOR, bold=True),
        "C_RULE": ansi.get_true_fg(SEPARATOR_COLOR, bold=True),
        "RESET": ansi.RESET,
    }

    for key, value in replacements.items():
        preview_script = preview_script.replace(f"{{{key}}}", value)

    return preview_script


def _get_preview_manager() -> PreviewWorkerManager:
    """Get or create the global preview worker manager."""
    global _preview_manager

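
The color placeholders above are filled with ANSI true-color escapes built from the comma-separated R,G,B values in the fzf config. A sketch of what get_true_fg plausibly emits; the real implementation in the ansi module may differ in detail, so treat this as an assumption for illustration only.

# Assumed behaviour of ansi.get_true_fg: a bold true-color foreground escape
def get_true_fg(rgb: list[str], bold: bool = False) -> str:
    r, g, b = (int(c) for c in rgb)
    prefix = "\x1b[1;" if bold else "\x1b["
    return f"{prefix}38;2;{r};{g};{b}m"

RESET = "\x1b[0m"

# e.g. config.fzf.preview_header_color = "215,0,95"
print(f"{get_true_fg(['215', '0', '95'], bold=True)}Title{RESET}")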
@@ -260,6 +260,22 @@ class AniListApi(BaseApiClient):
        )
        return response.json() if response else None

    def transform_raw_search_data(self, raw_data: Dict) -> Optional[MediaSearchResult]:
        """
        Transform raw AniList API response data into a MediaSearchResult.

        Args:
            raw_data: Raw response data from the AniList GraphQL API

        Returns:
            MediaSearchResult object or None if transformation fails
        """
        try:
            return mapper.to_generic_search_result(raw_data)
        except Exception as e:
            logger.error(f"Failed to transform raw search data: {e}")
            return None


if __name__ == "__main__":
    from httpx import Client

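
For orientation, an illustrative shape of the raw_data dict this method receives, which is the same JSON the search script writes to SEARCH_RESULTS_FILE; the field values below are made up.

# Hypothetical example input for transform_raw_search_data
raw_data = {
    "data": {
        "Page": {
            "pageInfo": {"total": 1, "currentPage": 1, "hasNextPage": False},
            "media": [
                {
                    "id": 1,
                    "title": {"english": "Cowboy Bebop", "romaji": "Cowboy Bebop"},
                    "status": "FINISHED",
                    "startDate": {"year": 1998, "month": 4, "day": 3},
                    "genres": ["Action", "Adventure", "Drama"],
                }
            ],
        }
    }
}
# api.transform_raw_search_data(raw_data) -> MediaSearchResult, or None if the mapper fails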
@@ -78,3 +78,16 @@ class BaseApiClient(abc.ABC):
        self, params: MediaAiringScheduleParams
    ) -> Optional[Dict]:
        pass

    @abc.abstractmethod
    def transform_raw_search_data(self, raw_data: Dict) -> Optional[MediaSearchResult]:
        """
        Transform raw API response data into a MediaSearchResult.

        Args:
            raw_data: Raw response data from the API

        Returns:
            MediaSearchResult object or None if transformation fails
        """
        pass

@@ -104,3 +104,28 @@ class BaseSelector(ABC):
            The string entered by the user.
        """
        pass

    def search(
        self,
        prompt: str,
        search_command: str,
        *,
        preview: Optional[str] = None,
        header: Optional[str] = None,
    ) -> str | None:
        """
        Provides dynamic search functionality that reloads results based on user input.

        Args:
            prompt: The message to display to the user.
            search_command: The command to execute for searching/reloading results.
            preview: An optional command or string for a preview window.
            header: An optional header to display above the choices.

        Returns:
            The string of the chosen item.

        Raises:
            NotImplementedError: If the selector doesn't support dynamic search.
        """
        raise NotImplementedError("Dynamic search is not supported by this selector")

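
Callers treat the NotImplementedError raised here as "this selector cannot do dynamic search" and fall back, which is exactly what dynamic_search.py does. A minimal sketch of that caller-side pattern; the script path is hypothetical.

from typing import Optional

def run_dynamic_search(selector) -> Optional[str]:
    """Caller-side pattern: fall back when the selector lacks dynamic search."""
    try:
        return selector.search(
            prompt="Search Anime",
            search_command="bash /tmp/search.sh {q}",  # hypothetical rendered script
            header="Type to search for anime dynamically",
        )
    except NotImplementedError:
        return None  # the caller then shows the regular, non-dynamic search menu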
@@ -114,3 +114,30 @@ class FzfSelector(BaseSelector):
        # The output contains the selection (if any) and the query on the last line
        lines = result.stdout.strip().splitlines()
        return lines[-1] if lines else (default or "")

    def search(self, prompt, search_command, *, preview=None, header=None):
        """Dynamic search using fzf's change:reload binding."""
        commands = [
            self.executable,
            "--prompt",
            f"{prompt.title()}: ",
            "--header",
            header or self.header,
            "--header-first",
            "--bind",
            f"change:reload({search_command})",
            "--ansi",
        ]

        if preview:
            commands.extend(["--preview", preview])

        result = subprocess.run(
            commands,
            input="",
            stdout=subprocess.PIPE,
            text=True,
        )
        if result.returncode != 0:
            return None
        return result.stdout.strip()
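
Illustration of the argv this method assembles for a call like search("search anime", "bash /tmp/fastanime_search.sh {q}"); the executable and script path are placeholders, and the preview value stands in for the rendered dynamic_preview script text.

# Hypothetical argv, shown as data only
preview_script = "echo preview goes here"  # placeholder for the rendered preview template
commands = [
    "fzf",
    "--prompt", "Search Anime: ",
    "--header", "Type to search for anime dynamically",
    "--header-first",
    "--bind", "change:reload(bash /tmp/fastanime_search.sh {q})",
    "--ansi",
    "--preview", preview_script,
]
# fzf starts with an empty list (input=""), and every keystroke fires the change
# event, which reloads the list from the search script's stdout.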