mirror of https://github.com/Benexl/FastAnime.git

refactor: service import paths
@@ -9,7 +9,7 @@ from ...libs.api.types import UserProfile
 logger = logging.getLogger(__name__)
 
 
-class AuthManager:
+class AuthService:
     """
     Handles loading, saving, and clearing of user credentials and profile data.
@@ -1,28 +1,3 @@
-"""
-Download tracking services for FastAnime.
-
-This module provides comprehensive download tracking and management capabilities
-including progress monitoring, queue management, and integration with watch history.
-"""
-
-from .manager import DownloadManager, get_download_manager
-from .models import (
-    DownloadIndex,
-    DownloadQueueItem,
-    EpisodeDownload,
-    MediaDownloadRecord,
-    MediaIndexEntry,
-)
-from .tracker import DownloadTracker, get_download_tracker
-
-__all__ = [
-    "DownloadManager",
-    "get_download_manager",
-    "DownloadTracker",
-    "get_download_tracker",
-    "EpisodeDownload",
-    "MediaDownloadRecord",
-    "DownloadIndex",
-    "MediaIndexEntry",
-    "DownloadQueueItem",
-]
+from .service import DownloadService
+
+__all__ = ["DownloadService"]
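The hunk above collapses the downloads package's public surface from a dozen re-exports to a single service class. For orientation, a minimal sketch of how a call site changes; the package path used here is an assumption, since this diff does not show file names:

    # before the refactor (hypothetical call site)
    from fastanime.cli.service.download import DownloadManager, get_download_manager

    # after the refactor (hypothetical call site)
    from fastanime.cli.service.download import DownloadService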
@@ -31,43 +31,43 @@ from .models import (
 logger = logging.getLogger(__name__)
 
 
-class DownloadManager:
+class DownloadService:
     """
     Core download manager using Pydantic models and integrating with existing infrastructure.
 
     Manages download tracking, queue operations, and storage with atomic operations
     and thread safety. Integrates with the existing downloader infrastructure.
     """
 
     def __init__(self, config: DownloadsConfig):
         self.config = config
         self.downloads_dir = config.downloads_dir
 
         # Storage directories
         self.tracking_dir = APP_DATA_DIR / "downloads"
         self.cache_dir = APP_CACHE_DIR / "downloads"
         self.media_dir = self.tracking_dir / "media"
 
         # File paths
         self.index_file = self.tracking_dir / "index.json"
         self.queue_file = self.tracking_dir / "queue.json"
 
         # Thread safety
         self._lock = threading.RLock()
         self._loaded_records: Dict[int, MediaDownloadRecord] = {}
         self._index: Optional[DownloadIndex] = None
         self._queue: Optional[DownloadQueue] = None
 
         # Initialize storage and downloader
         self._initialize_storage()
 
         # Use existing downloader infrastructure
         try:
             self.downloader = create_downloader(config)
         except Exception as e:
             logger.warning(f"Failed to initialize downloader: {e}")
             self.downloader = None
 
     def _initialize_storage(self) -> None:
         """Initialize storage directories and files."""
         try:
@@ -75,182 +75,199 @@ class DownloadManager:
             self.tracking_dir.mkdir(parents=True, exist_ok=True)
             self.media_dir.mkdir(parents=True, exist_ok=True)
             self.cache_dir.mkdir(parents=True, exist_ok=True)
 
             # Create subdirectories for cache
             (self.cache_dir / "thumbnails").mkdir(exist_ok=True)
             (self.cache_dir / "metadata").mkdir(exist_ok=True)
             (self.cache_dir / "temp").mkdir(exist_ok=True)
 
             # Initialize index if it doesn't exist
             if not self.index_file.exists():
                 self._create_empty_index()
 
             # Initialize queue if it doesn't exist
             if not self.queue_file.exists():
                 self._create_empty_queue()
 
         except Exception as e:
             logger.error(f"Failed to initialize download storage: {e}")
             raise
 
     def _create_empty_index(self) -> None:
         """Create an empty download index."""
         empty_index = DownloadIndex()
         self._save_index(empty_index)
 
     def _create_empty_queue(self) -> None:
         """Create an empty download queue."""
         empty_queue = DownloadQueue(max_size=self.config.queue_max_size)
         self._save_queue(empty_queue)
 
     def _load_index(self) -> DownloadIndex:
         """Load the download index with Pydantic validation."""
         if self._index is not None:
             return self._index
 
         try:
             if not self.index_file.exists():
                 self._create_empty_index()
 
-            with open(self.index_file, 'r', encoding='utf-8') as f:
+            with open(self.index_file, "r", encoding="utf-8") as f:
                 data = json.load(f)
 
             self._index = DownloadIndex.model_validate(data)
             return self._index
 
         except Exception as e:
             logger.error(f"Failed to load download index: {e}")
             # Create new empty index as fallback
             self._create_empty_index()
             return self._load_index()
 
     def _save_index(self, index: DownloadIndex) -> None:
         """Save index with atomic write operation."""
-        temp_file = self.index_file.with_suffix('.tmp')
+        temp_file = self.index_file.with_suffix(".tmp")
 
         try:
-            with open(temp_file, 'w', encoding='utf-8') as f:
-                json.dump(index.model_dump(), f, indent=2, ensure_ascii=False, default=str)
+            with open(temp_file, "w", encoding="utf-8") as f:
+                json.dump(
+                    index.model_dump(), f, indent=2, ensure_ascii=False, default=str
+                )
 
             # Atomic replace
             temp_file.replace(self.index_file)
             self._index = index
 
         except Exception as e:
             logger.error(f"Failed to save download index: {e}")
             if temp_file.exists():
                 temp_file.unlink()
             raise
 
     def _load_queue(self) -> DownloadQueue:
         """Load the download queue with Pydantic validation."""
         if self._queue is not None:
             return self._queue
 
         try:
             if not self.queue_file.exists():
                 self._create_empty_queue()
 
-            with open(self.queue_file, 'r', encoding='utf-8') as f:
+            with open(self.queue_file, "r", encoding="utf-8") as f:
                 data = json.load(f)
 
             self._queue = DownloadQueue.model_validate(data)
             return self._queue
 
         except Exception as e:
             logger.error(f"Failed to load download queue: {e}")
             # Create new empty queue as fallback
             self._create_empty_queue()
             return self._load_queue()
 
     def _save_queue(self, queue: DownloadQueue) -> None:
         """Save queue with atomic write operation."""
-        temp_file = self.queue_file.with_suffix('.tmp')
+        temp_file = self.queue_file.with_suffix(".tmp")
 
         try:
-            with open(temp_file, 'w', encoding='utf-8') as f:
-                json.dump(queue.model_dump(), f, indent=2, ensure_ascii=False, default=str)
+            with open(temp_file, "w", encoding="utf-8") as f:
+                json.dump(
+                    queue.model_dump(), f, indent=2, ensure_ascii=False, default=str
+                )
 
             # Atomic replace
             temp_file.replace(self.queue_file)
             self._queue = queue
 
         except Exception as e:
             logger.error(f"Failed to save download queue: {e}")
             if temp_file.exists():
                 temp_file.unlink()
             raise
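_save_index and _save_queue both follow the write-to-temp-then-rename pattern: the JSON is written to a sibling .tmp file and only swapped into place once the write succeeded. The same technique in isolation, as a self-contained sketch with illustrative names:

    import json
    from pathlib import Path

    def atomic_write_json(target: Path, payload: dict) -> None:
        # Write to a sibling temp file first so a crash mid-write
        # never leaves a truncated target file behind.
        temp = target.with_suffix(".tmp")
        try:
            with open(temp, "w", encoding="utf-8") as f:
                json.dump(payload, f, indent=2, ensure_ascii=False)
            # Path.replace is an atomic rename on POSIX filesystems.
            temp.replace(target)
        except Exception:
            if temp.exists():
                temp.unlink()
            raise

A call such as atomic_write_json(Path("index.json"), {"media": {}}) either fully succeeds or leaves the previous file intact.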
     def get_download_record(self, media_id: int) -> Optional[MediaDownloadRecord]:
         """Get download record for an anime with caching."""
         with self._lock:
             # Check cache first
             if media_id in self._loaded_records:
                 return self._loaded_records[media_id]
 
             try:
                 record_file = self.media_dir / f"{media_id}.json"
 
                 if not record_file.exists():
                     return None
 
-                with open(record_file, 'r', encoding='utf-8') as f:
+                with open(record_file, "r", encoding="utf-8") as f:
                     data = json.load(f)
 
                 record = MediaDownloadRecord.model_validate(data)
 
                 # Cache the record
                 self._loaded_records[media_id] = record
 
                 return record
 
             except Exception as e:
-                logger.error(f"Failed to load download record for media {media_id}: {e}")
+                logger.error(
+                    f"Failed to load download record for media {media_id}: {e}"
+                )
                 return None
 
     def save_download_record(self, record: MediaDownloadRecord) -> bool:
         """Save a download record with atomic operation."""
         with self._lock:
             try:
                 media_id = record.media_item.id
                 record_file = self.media_dir / f"{media_id}.json"
-                temp_file = record_file.with_suffix('.tmp')
+                temp_file = record_file.with_suffix(".tmp")
 
                 # Update last_modified timestamp
                 record.update_last_modified()
 
                 # Write to temp file first
-                with open(temp_file, 'w', encoding='utf-8') as f:
-                    json.dump(record.model_dump(), f, indent=2, ensure_ascii=False, default=str)
+                with open(temp_file, "w", encoding="utf-8") as f:
+                    json.dump(
+                        record.model_dump(),
+                        f,
+                        indent=2,
+                        ensure_ascii=False,
+                        default=str,
+                    )
 
                 # Atomic replace
                 temp_file.replace(record_file)
 
                 # Update cache
                 self._loaded_records[media_id] = record
 
                 # Update index
                 index = self._load_index()
                 index.add_media_entry(record)
                 self._save_index(index)
 
                 logger.debug(f"Saved download record for media {media_id}")
                 return True
 
             except Exception as e:
                 logger.error(f"Failed to save download record: {e}")
                 if temp_file.exists():
                     temp_file.unlink()
                 return False
 
-    def add_to_queue(self, media_item: MediaItem, episodes: List[int],
-                     quality: Optional[str] = None, priority: int = 0) -> bool:
+    def add_to_queue(
+        self,
+        media_item: MediaItem,
+        episodes: List[int],
+        quality: Optional[str] = None,
+        priority: int = 0,
+    ) -> bool:
         """Add episodes to download queue."""
         with self._lock:
             try:
                 queue = self._load_queue()
                 quality = quality or self.config.preferred_quality
 
                 success_count = 0
                 for episode in episodes:
                     queue_item = DownloadQueueItem(
@@ -258,46 +275,50 @@ class DownloadManager:
                         episode_number=episode,
                         priority=priority,
                         quality_preference=quality,
-                        max_retries=self.config.retry_attempts
+                        max_retries=self.config.retry_attempts,
                     )
 
                     if queue.add_item(queue_item):
                         success_count += 1
-                        logger.info(f"Added episode {episode} of {media_item.title.english or media_item.title.romaji} to download queue")
+                        logger.info(
+                            f"Added episode {episode} of {media_item.title.english or media_item.title.romaji} to download queue"
+                        )
 
                 if success_count > 0:
                     self._save_queue(queue)
 
                     # Create download record if it doesn't exist
                     if not self.get_download_record(media_item.id):
                         download_path = self.downloads_dir / self._sanitize_filename(
-                            media_item.title.english or media_item.title.romaji or f"Anime_{media_item.id}"
+                            media_item.title.english
+                            or media_item.title.romaji
+                            or f"Anime_{media_item.id}"
                         )
 
                         record = MediaDownloadRecord(
                             media_item=media_item,
                             download_path=download_path,
-                            preferred_quality=quality
+                            preferred_quality=quality,
                         )
                         self.save_download_record(record)
 
                 return success_count > 0
 
             except Exception as e:
                 logger.error(f"Failed to add episodes to queue: {e}")
                 return False
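A hypothetical usage sketch for the queueing API above; how a MediaItem is obtained is outside this diff, so it is only named here as a placeholder:

    service = DownloadService(config)   # config: a DownloadsConfig instance
    media_item = ...                    # a MediaItem from the provider layer

    queued = service.add_to_queue(
        media_item, episodes=[1, 2, 3], quality="1080p", priority=1
    )
    if queued:
        next_item = service.get_next_download()   # -> Optional[DownloadQueueItem]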
     def get_next_download(self) -> Optional[DownloadQueueItem]:
         """Get the next item from the download queue."""
         with self._lock:
             try:
                 queue = self._load_queue()
                 return queue.get_next_item()
 
             except Exception as e:
                 logger.error(f"Failed to get next download: {e}")
                 return None
 
     def mark_download_started(self, media_id: int, episode: int) -> bool:
         """Mark an episode download as started."""
         with self._lock:
@@ -305,190 +326,207 @@ class DownloadManager:
                 record = self.get_download_record(media_id)
                 if not record:
                     return False
 
                 # Create episode download entry
                 download_path = record.download_path / f"Episode_{episode:02d}.mkv"
 
                 episode_download = EpisodeDownload(
                     episode_number=episode,
                     file_path=download_path,
                     file_size=0,
                     quality=record.preferred_quality,
                     source_provider="unknown",  # Will be updated by actual downloader
-                    status="downloading"
+                    status="downloading",
                 )
 
                 # Update record
                 new_episodes = record.episodes.copy()
                 new_episodes[episode] = episode_download
 
                 updated_record = record.model_copy(update={"episodes": new_episodes})
                 self.save_download_record(updated_record)
 
                 return True
 
             except Exception as e:
                 logger.error(f"Failed to mark download started: {e}")
                 return False
 
-    def mark_download_completed(self, media_id: int, episode: int,
-                                file_path: Path, file_size: int,
-                                checksum: Optional[str] = None) -> bool:
+    def mark_download_completed(
+        self,
+        media_id: int,
+        episode: int,
+        file_path: Path,
+        file_size: int,
+        checksum: Optional[str] = None,
+    ) -> bool:
         """Mark an episode download as completed."""
         with self._lock:
             try:
                 record = self.get_download_record(media_id)
                 if not record or episode not in record.episodes:
                     return False
 
                 # Update episode download
                 episode_download = record.episodes[episode]
-                updated_episode = episode_download.model_copy(update={
-                    "file_path": file_path,
-                    "file_size": file_size,
-                    "status": "completed",
-                    "download_progress": 1.0,
-                    "checksum": checksum
-                })
+                updated_episode = episode_download.model_copy(
+                    update={
+                        "file_path": file_path,
+                        "file_size": file_size,
+                        "status": "completed",
+                        "download_progress": 1.0,
+                        "checksum": checksum,
+                    }
+                )
 
                 # Update record
                 new_episodes = record.episodes.copy()
                 new_episodes[episode] = updated_episode
 
                 updated_record = record.model_copy(update={"episodes": new_episodes})
                 self.save_download_record(updated_record)
 
                 # Remove from queue
                 queue = self._load_queue()
                 queue.remove_item(media_id, episode)
                 self._save_queue(queue)
 
-                logger.info(f"Marked episode {episode} of media {media_id} as completed")
+                logger.info(
+                    f"Marked episode {episode} of media {media_id} as completed"
+                )
                 return True
 
             except Exception as e:
                 logger.error(f"Failed to mark download completed: {e}")
                 return False
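Note that the mark_download_* methods never mutate a loaded record in place; they build a modified copy with Pydantic's model_copy(update=...), which keeps cached instances consistent. A standalone sketch of that pattern; the model here is illustrative, not one of the project's:

    from pydantic import BaseModel

    class Episode(BaseModel):
        number: int
        status: str = "queued"
        progress: float = 0.0

    ep = Episode(number=1)
    # model_copy returns a new instance with the given fields replaced,
    # leaving the original (and any cached reference to it) untouched.
    done = ep.model_copy(update={"status": "completed", "progress": 1.0})
    assert ep.status == "queued" and done.status == "completed"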
-    def mark_download_failed(self, media_id: int, episode: int, error_message: str) -> bool:
+    def mark_download_failed(
+        self, media_id: int, episode: int, error_message: str
+    ) -> bool:
         """Mark an episode download as failed."""
         with self._lock:
             try:
                 record = self.get_download_record(media_id)
                 if not record or episode not in record.episodes:
                     return False
 
                 # Update episode download
                 episode_download = record.episodes[episode]
-                updated_episode = episode_download.model_copy(update={
-                    "status": "failed",
-                    "error_message": error_message
-                })
+                updated_episode = episode_download.model_copy(
+                    update={"status": "failed", "error_message": error_message}
+                )
 
                 # Update record
                 new_episodes = record.episodes.copy()
                 new_episodes[episode] = updated_episode
 
                 updated_record = record.model_copy(update={"episodes": new_episodes})
                 self.save_download_record(updated_record)
 
-                logger.warning(f"Marked episode {episode} of media {media_id} as failed: {error_message}")
+                logger.warning(
+                    f"Marked episode {episode} of media {media_id} as failed: {error_message}"
+                )
                 return True
 
             except Exception as e:
                 logger.error(f"Failed to mark download failed: {e}")
                 return False
 
-    def list_downloads(self, status_filter: Optional[str] = None,
-                       limit: Optional[int] = None) -> List[MediaDownloadRecord]:
+    def list_downloads(
+        self, status_filter: Optional[str] = None, limit: Optional[int] = None
+    ) -> List[MediaDownloadRecord]:
         """List download records with optional filtering."""
         try:
             index = self._load_index()
             records = []
 
             media_ids = list(index.media_index.keys())
             if limit:
                 media_ids = media_ids[:limit]
 
             for media_id in media_ids:
                 record = self.get_download_record(media_id)
                 if record is None:
                     continue
 
                 if status_filter and record.status != status_filter:
                     continue
 
                 records.append(record)
 
             # Sort by last updated (most recent first)
             records.sort(key=lambda x: x.last_updated, reverse=True)
 
             return records
 
         except Exception as e:
             logger.error(f"Failed to list downloads: {e}")
             return []
 
     def cleanup_failed_downloads(self) -> int:
         """Clean up old failed downloads based on retention policy."""
         try:
             cutoff_date = datetime.now() - timedelta(days=self.config.retention_days)
             cleaned_count = 0
 
             for record in self.list_downloads():
                 episodes_to_remove = []
 
                 for episode_num, episode_download in record.episodes.items():
-                    if (episode_download.status == "failed" and
-                            episode_download.download_date < cutoff_date):
+                    if (
+                        episode_download.status == "failed"
+                        and episode_download.download_date < cutoff_date
+                    ):
                         episodes_to_remove.append(episode_num)
 
                 if episodes_to_remove:
                     new_episodes = record.episodes.copy()
                     for episode_num in episodes_to_remove:
                         del new_episodes[episode_num]
                         cleaned_count += 1
 
-                    updated_record = record.model_copy(update={"episodes": new_episodes})
+                    updated_record = record.model_copy(
+                        update={"episodes": new_episodes}
+                    )
                     self.save_download_record(updated_record)
 
             logger.info(f"Cleaned up {cleaned_count} failed downloads")
             return cleaned_count
 
         except Exception as e:
             logger.error(f"Failed to cleanup failed downloads: {e}")
             return 0
 
     def get_download_stats(self) -> Dict:
         """Get download statistics."""
         try:
             index = self._load_index()
 
             stats = {
                 "total_anime": index.media_count,
                 "total_episodes": index.total_episodes,
                 "total_size_gb": round(index.total_size_gb, 2),
                 "completion_stats": index.completion_stats,
-                "queue_size": len(self._load_queue().items)
+                "queue_size": len(self._load_queue().items),
             }
 
             return stats
 
         except Exception as e:
             logger.error(f"Failed to get download stats: {e}")
             return {}
 
     def _sanitize_filename(self, filename: str) -> str:
         """Sanitize filename for filesystem compatibility."""
         # Remove or replace invalid characters
         invalid_chars = '<>:"/\\|?*'
         for char in invalid_chars:
-            filename = filename.replace(char, '_')
+            filename = filename.replace(char, "_")
 
         # Limit length
         if len(filename) > 100:
             filename = filename[:100]
 
         return filename.strip()
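A quick worked example of the sanitizer above, calling the private helper directly purely for illustration:

    service = DownloadService(config)   # config: a DownloadsConfig instance
    assert service._sanitize_filename('Re:Zero <Season/2>') == "Re_Zero _Season_2_"

Each of the characters <>:"/\|?* becomes an underscore, and anything longer than 100 characters is truncated before the final strip().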
@@ -499,8 +537,8 @@ _download_manager: Optional[DownloadManager] = None
 def get_download_manager(config: DownloadsConfig) -> DownloadManager:
     """Get or create the global download manager instance."""
     global _download_manager
 
     if _download_manager is None:
         _download_manager = DownloadManager(config)
 
     return _download_manager
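get_download_manager is a lazy module-level singleton: the first call constructs the instance and every later call returns it, regardless of the config passed. A standalone sketch of the pattern with illustrative names:

    from typing import Optional

    class Service:
        def __init__(self, config: dict):
            self.config = config

    _service: Optional[Service] = None

    def get_service(config: dict) -> Service:
        global _service
        if _service is None:
            _service = Service(config)
        return _service

    a = get_service({"dir": "/tmp/a"})
    b = get_service({"dir": "/tmp/b"})   # same instance; the second config is ignored
    assert a is b

The design trade-off is worth noting: callers share one instance and one lock, but a config passed after the first call is silently ignored.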
@@ -0,0 +1,3 @@
+from .service import FeedbackService
+
+__all__ = ["FeedbackService"]

@@ -1,26 +1,3 @@
-"""
-Unified Media Registry for FastAnime.
-
-This module provides a unified system for tracking both watch history and downloads
-for anime, eliminating data duplication between separate systems.
-"""
-
-from .manager import MediaRegistryManager, get_media_registry
-from .models import (
-    EpisodeStatus,
-    MediaRecord,
-    MediaRegistryIndex,
-    UserMediaData,
-)
-from .tracker import MediaTracker, get_media_tracker
-
-__all__ = [
-    "MediaRegistryManager",
-    "get_media_registry",
-    "EpisodeStatus",
-    "MediaRecord",
-    "MediaRegistryIndex",
-    "UserMediaData",
-    "MediaTracker",
-    "get_media_tracker",
-]
+from .service import MediaRegistryService
+
+__all__ = ["MediaRegistryService"]

@@ -0,0 +1,3 @@
+from .service import SessionService
+
+__all__ = ["SessionService"]
@@ -2,6 +2,7 @@
 Session state management utilities for the interactive CLI.
+Provides comprehensive session save/resume functionality with error handling and metadata.
 """
 
 import json
 import logging
 from datetime import datetime

@@ -14,28 +15,28 @@ from ...interactive.state import State
 logger = logging.getLogger(__name__)
 
 # Session storage directory
 SESSIONS_DIR = APP_DATA_DIR / "sessions"
 AUTO_SAVE_FILE = SESSIONS_DIR / "auto_save.json"
 CRASH_BACKUP_FILE = SESSIONS_DIR / "crash_backup.json"
 
 
 class SessionMetadata:
     """Metadata for saved sessions."""
 
     def __init__(
         self,
         created_at: Optional[datetime] = None,
         last_saved: Optional[datetime] = None,
         session_name: Optional[str] = None,
         description: Optional[str] = None,
-        state_count: int = 0
+        state_count: int = 0,
     ):
         self.created_at = created_at or datetime.now()
         self.last_saved = last_saved or datetime.now()
         self.session_name = session_name
         self.description = description
         self.state_count = state_count
 
     def to_dict(self) -> dict:
         """Convert metadata to dictionary for JSON serialization."""
         return {

@@ -43,81 +44,85 @@ class SessionMetadata:
             "last_saved": self.last_saved.isoformat(),
             "session_name": self.session_name,
             "description": self.description,
-            "state_count": self.state_count
+            "state_count": self.state_count,
         }
 
     @classmethod
     def from_dict(cls, data: dict) -> "SessionMetadata":
         """Create metadata from dictionary."""
         return cls(
-            created_at=datetime.fromisoformat(data.get("created_at", datetime.now().isoformat())),
-            last_saved=datetime.fromisoformat(data.get("last_saved", datetime.now().isoformat())),
+            created_at=datetime.fromisoformat(
+                data.get("created_at", datetime.now().isoformat())
+            ),
+            last_saved=datetime.fromisoformat(
+                data.get("last_saved", datetime.now().isoformat())
+            ),
             session_name=data.get("session_name"),
             description=data.get("description"),
-            state_count=data.get("state_count", 0)
+            state_count=data.get("state_count", 0),
        )
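SessionMetadata serializes its datetimes as ISO-8601 strings, so to_dict and from_dict round-trip losslessly. A hypothetical round trip, assuming the class exactly as shown above:

    meta = SessionMetadata(session_name="evening", description="catch-up", state_count=4)
    payload = meta.to_dict()                     # every value is JSON-serializable
    restored = SessionMetadata.from_dict(payload)
    assert restored.created_at == meta.created_at
    assert restored.state_count == 4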
 class SessionData:
     """Complete session data including history and metadata."""
 
     def __init__(self, history: List[State], metadata: SessionMetadata):
         self.history = history
         self.metadata = metadata
 
     def to_dict(self) -> dict:
         """Convert session data to dictionary for JSON serialization."""
         return {
             "metadata": self.metadata.to_dict(),
             "history": [state.model_dump(mode="json") for state in self.history],
-            "format_version": "1.0"  # For future compatibility
+            "format_version": "1.0",  # For future compatibility
         }
 
     @classmethod
     def from_dict(cls, data: dict) -> "SessionData":
         """Create session data from dictionary."""
         metadata = SessionMetadata.from_dict(data.get("metadata", {}))
         history_data = data.get("history", [])
         history = []
 
         for state_dict in history_data:
             try:
                 state = State.model_validate(state_dict)
                 history.append(state)
             except Exception as e:
                 logger.warning(f"Skipping invalid state in session: {e}")
 
         return cls(history, metadata)
 
 
-class SessionManager:
+class SessionService:
     """Manages session save/resume functionality with comprehensive error handling."""
 
     def __init__(self):
         self._ensure_sessions_directory()
 
     def _ensure_sessions_directory(self):
         """Ensure the sessions directory exists."""
         SESSIONS_DIR.mkdir(parents=True, exist_ok=True)
 
     def save_session(
         self,
         history: List[State],
         file_path: Path,
         session_name: Optional[str] = None,
         description: Optional[str] = None,
-        feedback=None
+        feedback=None,
     ) -> bool:
         """
         Save session history to a JSON file with metadata.
 
         Args:
             history: List of session states
             file_path: Path to save the session
             session_name: Optional name for the session
             description: Optional description
             feedback: Optional feedback manager for user notifications
 
         Returns:
             True if successful, False otherwise
         """

@@ -126,40 +131,40 @@ class SessionManager:
             metadata = SessionMetadata(
                 session_name=session_name,
                 description=description,
-                state_count=len(history)
+                state_count=len(history),
             )
 
             # Create session data
             session_data = SessionData(history, metadata)
 
             # Save to file
-            with file_path.open('w', encoding='utf-8') as f:
+            with file_path.open("w", encoding="utf-8") as f:
                 json.dump(session_data.to_dict(), f, indent=2, ensure_ascii=False)
 
             if feedback:
                 feedback.success(
                     "Session saved successfully",
-                    f"Saved {len(history)} states to {file_path.name}"
+                    f"Saved {len(history)} states to {file_path.name}",
                 )
 
             logger.info(f"Session saved to {file_path} with {len(history)} states")
             return True
 
         except Exception as e:
             error_msg = f"Failed to save session: {e}"
             if feedback:
                 feedback.error("Failed to save session", str(e))
             logger.error(error_msg)
             return False
 
     def load_session(self, file_path: Path, feedback=None) -> Optional[List[State]]:
         """
         Load session history from a JSON file.
 
         Args:
             file_path: Path to the session file
             feedback: Optional feedback manager for user notifications
 
         Returns:
             List of states if successful, None otherwise
         """

@@ -167,26 +172,28 @@ class SessionManager:
             if feedback:
                 feedback.warning(
                     "Session file not found",
-                    f"The file {file_path.name} does not exist"
+                    f"The file {file_path.name} does not exist",
                 )
             logger.warning(f"Session file not found: {file_path}")
             return None
 
         try:
-            with file_path.open('r', encoding='utf-8') as f:
+            with file_path.open("r", encoding="utf-8") as f:
                 data = json.load(f)
 
             session_data = SessionData.from_dict(data)
 
             if feedback:
                 feedback.success(
                     "Session loaded successfully",
-                    f"Loaded {len(session_data.history)} states from {file_path.name}"
+                    f"Loaded {len(session_data.history)} states from {file_path.name}",
                 )
 
-            logger.info(f"Session loaded from {file_path} with {len(session_data.history)} states")
+            logger.info(
+                f"Session loaded from {file_path} with {len(session_data.history)} states"
+            )
             return session_data.history
 
         except json.JSONDecodeError as e:
             error_msg = f"Session file is corrupted: {e}"
             if feedback:

@@ -199,14 +206,14 @@ class SessionManager:
                 feedback.error("Failed to load session", str(e))
             logger.error(error_msg)
             return None
 
     def auto_save_session(self, history: List[State]) -> bool:
         """
         Auto-save session for crash recovery.
 
         Args:
             history: Current session history
 
         Returns:
             True if successful, False otherwise
         """

@@ -214,16 +221,16 @@ class SessionManager:
             history,
             AUTO_SAVE_FILE,
             session_name="Auto Save",
-            description="Automatically saved session"
+            description="Automatically saved session",
         )
 
     def create_crash_backup(self, history: List[State]) -> bool:
         """
         Create a crash backup of the current session.
 
         Args:
             history: Current session history
 
         Returns:
             True if successful, False otherwise
         """

@@ -231,25 +238,25 @@ class SessionManager:
             history,
             CRASH_BACKUP_FILE,
             session_name="Crash Backup",
-            description="Session backup created before potential crash"
+            description="Session backup created before potential crash",
         )
 
     def has_auto_save(self) -> bool:
         """Check if an auto-save file exists."""
         return AUTO_SAVE_FILE.exists()
 
     def has_crash_backup(self) -> bool:
         """Check if a crash backup file exists."""
         return CRASH_BACKUP_FILE.exists()
 
     def load_auto_save(self, feedback=None) -> Optional[List[State]]:
         """Load the auto-save session."""
         return self.load_session(AUTO_SAVE_FILE, feedback)
 
     def load_crash_backup(self, feedback=None) -> Optional[List[State]]:
         """Load the crash backup session."""
         return self.load_session(CRASH_BACKUP_FILE, feedback)
 
     def clear_auto_save(self) -> bool:
         """Clear the auto-save file."""
         try:

@@ -259,7 +266,7 @@ class SessionManager:
         except Exception as e:
             logger.error(f"Failed to clear auto-save: {e}")
             return False
 
     def clear_crash_backup(self) -> bool:
         """Clear the crash backup file."""
         try:

@@ -269,59 +276,63 @@ class SessionManager:
         except Exception as e:
             logger.error(f"Failed to clear crash backup: {e}")
             return False
 
     def list_saved_sessions(self) -> List[Dict[str, str]]:
         """
         List all saved session files with their metadata.
 
         Returns:
             List of dictionaries containing session information
         """
         sessions = []
 
         for session_file in SESSIONS_DIR.glob("*.json"):
             if session_file.name in ["auto_save.json", "crash_backup.json"]:
                 continue
 
             try:
-                with session_file.open('r', encoding='utf-8') as f:
+                with session_file.open("r", encoding="utf-8") as f:
                     data = json.load(f)
 
                 metadata = data.get("metadata", {})
-                sessions.append({
-                    "file": session_file.name,
-                    "path": str(session_file),
-                    "name": metadata.get("session_name", "Unnamed"),
-                    "description": metadata.get("description", "No description"),
-                    "created": metadata.get("created_at", "Unknown"),
-                    "last_saved": metadata.get("last_saved", "Unknown"),
-                    "state_count": metadata.get("state_count", 0)
-                })
+                sessions.append(
+                    {
+                        "file": session_file.name,
+                        "path": str(session_file),
+                        "name": metadata.get("session_name", "Unnamed"),
+                        "description": metadata.get("description", "No description"),
+                        "created": metadata.get("created_at", "Unknown"),
+                        "last_saved": metadata.get("last_saved", "Unknown"),
+                        "state_count": metadata.get("state_count", 0),
+                    }
+                )
             except Exception as e:
-                logger.warning(f"Failed to read session metadata from {session_file}: {e}")
+                logger.warning(
+                    f"Failed to read session metadata from {session_file}: {e}"
+                )
 
         # Sort by last saved time (newest first)
         sessions.sort(key=lambda x: x["last_saved"], reverse=True)
         return sessions
 
     def cleanup_old_sessions(self, max_sessions: int = 10) -> int:
         """
         Clean up old session files, keeping only the most recent ones.
 
         Args:
             max_sessions: Maximum number of sessions to keep
 
         Returns:
             Number of sessions deleted
         """
         sessions = self.list_saved_sessions()
 
         if len(sessions) <= max_sessions:
             return 0
 
         deleted_count = 0
         sessions_to_delete = sessions[max_sessions:]
 
         for session in sessions_to_delete:
             try:
                 Path(session["path"]).unlink()

@@ -329,5 +340,5 @@ class SessionManager:
                 logger.info(f"Deleted old session: {session['name']}")
             except Exception as e:
                 logger.error(f"Failed to delete session {session['name']}: {e}")
 
         return deleted_count
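Taken together, the class above gives a small save/resume lifecycle. A hypothetical usage sketch; how State objects are accumulated is outside this diff:

    service = SessionService()

    history = [...]                          # a List[State] built up by the interactive CLI
    service.save_session(history, SESSIONS_DIR / "tonight.json", session_name="tonight")
    service.auto_save_session(history)       # refresh the crash-recovery snapshot

    if service.has_auto_save():
        resumed = service.load_auto_save()   # -> Optional[List[State]]

    service.cleanup_old_sessions(max_sessions=10)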
@@ -0,0 +1,3 @@
+from .service import WatchHistoryService
+
+__all__ = ["WatchHistoryService"]
@@ -15,17 +15,17 @@ from .types import WatchHistoryData, WatchHistoryEntry
 logger = logging.getLogger(__name__)
 
 
-class WatchHistoryManager:
+class WatchHistoryService:
     """
     Manages local watch history storage and operations.
     Provides comprehensive watch history management with error handling.
     """
 
     def __init__(self, history_file_path: Path = USER_WATCH_HISTORY_PATH):
         self.history_file_path = history_file_path
         self._data: Optional[WatchHistoryData] = None
         self._ensure_history_file()
 
     def _ensure_history_file(self):
         """Ensure the watch history file and directory exist."""
         try:

@@ -34,78 +34,80 @@ class WatchHistoryManager:
                 # Create empty watch history file
                 empty_data = WatchHistoryData()
                 self._save_data(empty_data)
-                logger.info(f"Created new watch history file at {self.history_file_path}")
+                logger.info(
+                    f"Created new watch history file at {self.history_file_path}"
+                )
         except Exception as e:
             logger.error(f"Failed to ensure watch history file: {e}")
 
     def _load_data(self) -> WatchHistoryData:
         """Load watch history data from file."""
         if self._data is not None:
             return self._data
 
         try:
             if not self.history_file_path.exists():
                 self._data = WatchHistoryData()
                 return self._data
 
-            with self.history_file_path.open('r', encoding='utf-8') as f:
+            with self.history_file_path.open("r", encoding="utf-8") as f:
                 data = json.load(f)
 
             self._data = WatchHistoryData.from_dict(data)
             logger.debug(f"Loaded watch history with {len(self._data.entries)} entries")
             return self._data
 
         except json.JSONDecodeError as e:
             logger.error(f"Watch history file is corrupted: {e}")
             # Create backup of corrupted file
-            backup_path = self.history_file_path.with_suffix('.backup')
+            backup_path = self.history_file_path.with_suffix(".backup")
             self.history_file_path.rename(backup_path)
             logger.info(f"Corrupted file moved to {backup_path}")
 
             # Create new empty data
             self._data = WatchHistoryData()
             self._save_data(self._data)
             return self._data
 
         except Exception as e:
             logger.error(f"Failed to load watch history: {e}")
             self._data = WatchHistoryData()
             return self._data
 
     def _save_data(self, data: WatchHistoryData) -> bool:
         """Save watch history data to file."""
         try:
             # Create backup of existing file
             if self.history_file_path.exists():
-                backup_path = self.history_file_path.with_suffix('.bak')
+                backup_path = self.history_file_path.with_suffix(".bak")
                 self.history_file_path.rename(backup_path)
 
-            with self.history_file_path.open('w', encoding='utf-8') as f:
+            with self.history_file_path.open("w", encoding="utf-8") as f:
                 json.dump(data.to_dict(), f, indent=2, ensure_ascii=False)
 
             # Remove backup on successful save
-            backup_path = self.history_file_path.with_suffix('.bak')
+            backup_path = self.history_file_path.with_suffix(".bak")
             if backup_path.exists():
                 backup_path.unlink()
 
             logger.debug(f"Saved watch history with {len(data.entries)} entries")
             return True
 
         except Exception as e:
             logger.error(f"Failed to save watch history: {e}")
             # Restore backup if save failed
-            backup_path = self.history_file_path.with_suffix('.bak')
+            backup_path = self.history_file_path.with_suffix(".bak")
             if backup_path.exists():
                 backup_path.rename(self.history_file_path)
             return False
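Unlike the downloads service, _save_data uses a rename-to-.bak safety net rather than a temp-file write: the old file is set aside, the new contents are written, and the backup is either deleted on success or renamed back on failure. The same idea in isolation, with illustrative names:

    import json
    from pathlib import Path

    def save_with_backup(target: Path, payload: dict) -> bool:
        backup = target.with_suffix(".bak")
        try:
            if target.exists():
                target.rename(backup)        # set the old version aside
            with target.open("w", encoding="utf-8") as f:
                json.dump(payload, f, indent=2, ensure_ascii=False)
            if backup.exists():
                backup.unlink()              # success: drop the old version
            return True
        except Exception:
            if backup.exists():
                backup.rename(target)        # failure: put the old version back
            return False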
     def add_or_update_entry(
         self,
         media_item: MediaItem,
         episode: int = 0,
         progress: float = 0.0,
         status: str = "watching",
-        notes: str = ""
+        notes: str = "",
     ) -> bool:
         """Add or update a watch history entry."""
         try:

@@ -113,17 +115,17 @@ class WatchHistoryManager:
             entry = data.add_or_update_entry(media_item, episode, progress, status)
             if notes:
                 entry.notes = notes
 
             success = self._save_data(data)
             if success:
                 self._data = data  # Update cached data
                 logger.info(f"Updated watch history for {entry.get_display_title()}")
             return success
 
         except Exception as e:
             logger.error(f"Failed to add/update watch history entry: {e}")
             return False
 
     def get_entry(self, media_id: int) -> Optional[WatchHistoryEntry]:
         """Get a specific watch history entry."""
         try:

@@ -132,13 +134,13 @@ class WatchHistoryManager:
         except Exception as e:
             logger.error(f"Failed to get watch history entry: {e}")
             return None
 
     def remove_entry(self, media_id: int) -> bool:
         """Remove an entry from watch history."""
         try:
             data = self._load_data()
             removed = data.remove_entry(media_id)
 
             if removed:
                 success = self._save_data(data)
                 if success:

@@ -146,11 +148,11 @@ class WatchHistoryManager:
                     logger.info(f"Removed watch history entry for media ID {media_id}")
                 return success
             return False
 
         except Exception as e:
             logger.error(f"Failed to remove watch history entry: {e}")
             return False
 
     def get_all_entries(self) -> List[WatchHistoryEntry]:
         """Get all watch history entries."""
         try:

@@ -159,7 +161,7 @@ class WatchHistoryManager:
         except Exception as e:
             logger.error(f"Failed to get all entries: {e}")
             return []
 
     def get_entries_by_status(self, status: str) -> List[WatchHistoryEntry]:
         """Get entries by status (watching, completed, etc.)."""
         try:

@@ -168,7 +170,7 @@ class WatchHistoryManager:
         except Exception as e:
             logger.error(f"Failed to get entries by status: {e}")
             return []
 
     def get_recently_watched(self, limit: int = 10) -> List[WatchHistoryEntry]:
         """Get recently watched entries."""
         try:

@@ -177,7 +179,7 @@ class WatchHistoryManager:
         except Exception as e:
             logger.error(f"Failed to get recently watched: {e}")
             return []
 
     def search_entries(self, query: str) -> List[WatchHistoryEntry]:
         """Search entries by title."""
         try:

@@ -186,27 +188,26 @@ class WatchHistoryManager:
         except Exception as e:
             logger.error(f"Failed to search entries: {e}")
             return []
 
     def get_watching_entries(self) -> List[WatchHistoryEntry]:
         """Get entries that are currently being watched."""
         return self.get_entries_by_status("watching")
 
     def get_completed_entries(self) -> List[WatchHistoryEntry]:
         """Get completed entries."""
         return self.get_entries_by_status("completed")
 
-    def mark_episode_watched(self, media_id: int, episode: int, progress: float = 1.0) -> bool:
+    def mark_episode_watched(
+        self, media_id: int, episode: int, progress: float = 1.0
+    ) -> bool:
         """Mark a specific episode as watched."""
         entry = self.get_entry(media_id)
         if entry:
             return self.add_or_update_entry(
-                entry.media_item,
-                episode,
-                progress,
-                entry.status
+                entry.media_item, episode, progress, entry.status
             )
         return False
 
     def mark_completed(self, media_id: int) -> bool:
         """Mark an anime as completed."""
         entry = self.get_entry(media_id)

@@ -215,7 +216,7 @@ class WatchHistoryManager:
             data = self._load_data()
             return self._save_data(data)
         return False
 
     def change_status(self, media_id: int, new_status: str) -> bool:
         """Change the status of an entry."""
         entry = self.get_entry(media_id)

@@ -224,10 +225,10 @@ class WatchHistoryManager:
                 entry.media_item,
                 entry.last_watched_episode,
                 entry.watch_progress,
-                new_status
+                new_status,
             )
         return False
 
     def update_notes(self, media_id: int, notes: str) -> bool:
         """Update notes for an entry."""
         entry = self.get_entry(media_id)

@@ -237,10 +238,10 @@ class WatchHistoryManager:
                 entry.last_watched_episode,
                 entry.watch_progress,
                 entry.status,
-                notes
+                notes,
             )
         return False
 
     def get_stats(self) -> dict:
         """Get watch history statistics."""
         try:

@@ -255,33 +256,33 @@ class WatchHistoryManager:
                 "dropped": 0,
                 "paused": 0,
                 "total_episodes_watched": 0,
-                "last_updated": "Unknown"
+                "last_updated": "Unknown",
             }
 
     def export_history(self, export_path: Path) -> bool:
         """Export watch history to a file."""
         try:
             data = self._load_data()
-            with export_path.open('w', encoding='utf-8') as f:
+            with export_path.open("w", encoding="utf-8") as f:
                 json.dump(data.to_dict(), f, indent=2, ensure_ascii=False)
             logger.info(f"Exported watch history to {export_path}")
             return True
         except Exception as e:
             logger.error(f"Failed to export watch history: {e}")
             return False
 
     def import_history(self, import_path: Path, merge: bool = True) -> bool:
         """Import watch history from a file."""
         try:
             if not import_path.exists():
                 logger.error(f"Import file does not exist: {import_path}")
                 return False
 
-            with import_path.open('r', encoding='utf-8') as f:
+            with import_path.open("r", encoding="utf-8") as f:
                 import_data = json.load(f)
 
             imported_history = WatchHistoryData.from_dict(import_data)
 
             if merge:
                 # Merge with existing data
                 current_data = self._load_data()

@@ -291,17 +292,17 @@ class WatchHistoryManager:
             else:
                 # Replace existing data
                 success = self._save_data(imported_history)
 
             if success:
                 self._data = None  # Force reload on next access
                 logger.info(f"Imported watch history from {import_path}")
 
             return success
 
         except Exception as e:
             logger.error(f"Failed to import watch history: {e}")
             return False
 
     def clear_history(self) -> bool:
         """Clear all watch history."""
         try:
@@ -314,29 +315,37 @@ class WatchHistoryManager:
         except Exception as e:
             logger.error(f"Failed to clear watch history: {e}")
             return False
 
     def backup_history(self, backup_path: Optional[Path] = None) -> bool:
         """Create a backup of watch history."""
         try:
             if backup_path is None:
                 from datetime import datetime
 
-                timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
-                backup_path = self.history_file_path.parent / f"watch_history_backup_{timestamp}.json"
+                timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+                backup_path = (
+                    self.history_file_path.parent
+                    / f"watch_history_backup_{timestamp}.json"
+                )
 
             return self.export_history(backup_path)
         except Exception as e:
             logger.error(f"Failed to backup watch history: {e}")
             return False
 
     def get_entry_by_media_id(self, media_id: int) -> Optional[WatchHistoryEntry]:
         """Get watch history entry by media ID (alias for get_entry)."""
         return self.get_entry(media_id)
 
     def save_entry(self, entry: WatchHistoryEntry) -> bool:
         """Save a watch history entry (alias for add_or_update_entry)."""
-        return self.add_or_update_entry(entry.media_item, entry.status,
-                                        entry.last_watched_episode, entry.watch_progress)
+        # Arguments ordered to match add_or_update_entry(media_item, episode, progress, status).
+        return self.add_or_update_entry(
+            entry.media_item,
+            entry.last_watched_episode,
+            entry.watch_progress,
+            entry.status,
+        )
 
     def get_currently_watching(self) -> List[WatchHistoryEntry]:
         """Get entries that are currently being watched."""
         return self.get_watching_entries()
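A hypothetical end-to-end sketch of the service above; obtaining a MediaItem is outside this diff, so it is only named here as a placeholder:

    from pathlib import Path

    service = WatchHistoryService()          # defaults to USER_WATCH_HISTORY_PATH
    media_item = ...                         # a MediaItem from the provider layer

    service.add_or_update_entry(media_item, episode=3, progress=0.8)
    service.mark_episode_watched(media_item.id, episode=3)

    for entry in service.get_currently_watching():
        print(entry.get_display_title())

    service.backup_history()                 # timestamped copy next to the history file
    service.export_history(Path("export.json"))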