From b049904247836930e93f3bf43e4be137a220762e Mon Sep 17 00:00:00 2001 From: kyle nguyen Date: Mon, 8 Sep 2025 00:27:28 -0400 Subject: [PATCH 01/30] Complete session save/load roundtrip --- picard/options.py | 29 ++ picard/session/__init__.py | 0 picard/session/session.py | 486 +++++++++++++++++++++++++++++++ picard/tagger.py | 93 +++++- picard/ui/enums.py | 4 + picard/ui/mainwindow/__init__.py | 59 +++- picard/ui/mainwindow/actions.py | 24 ++ picard/ui/options/dialog.py | 1 + picard/ui/options/sessions.py | 84 ++++++ test/test_sessions.py | 210 +++++++++++++ 10 files changed, 983 insertions(+), 7 deletions(-) create mode 100644 picard/session/__init__.py create mode 100644 picard/session/session.py create mode 100644 picard/ui/options/sessions.py create mode 100644 test/test_sessions.py diff --git a/picard/options.py b/picard/options.py index 9a78dcc3f6..144e62f503 100644 --- a/picard/options.py +++ b/picard/options.py @@ -146,6 +146,8 @@ Option('persist', 'window_state', QtCore.QByteArray()) ListOption('persist', 'filters_FileTreeView', None) ListOption('persist', 'filters_AlbumTreeView', None) +TextOption('persist', 'last_session_path', '') +TextOption('persist', 'session_autosave_path', '') # picard/ui/metadatabox.py # @@ -491,6 +493,33 @@ def make_default_toolbar_layout(): Option('setting', 'file_renaming_scripts', {}) TextOption('setting', 'selected_file_naming_script_id', '', title=N_("Selected file naming script")) +# picard/ui/options/sessions.py +# Sessions +BoolOption( + 'setting', + 'session_safe_restore', + True, + title=N_("Preserve session placement and edits when loading sessions"), +) +BoolOption( + 'setting', + 'session_load_last_on_startup', + False, + title=N_("Load last saved session on startup"), +) +IntOption( + 'setting', + 'session_autosave_interval_min', + 0, + title=N_("Auto-save session every N minutes (0 disables)"), +) +BoolOption( + 'setting', + 'session_backup_on_crash', + True, + title=N_("Attempt to keep a session backup on 
unexpected shutdown"), +) + # picard/ui/searchdialog/album.py # Option('persist', 'albumsearchdialog_header_state', QtCore.QByteArray()) diff --git a/picard/session/__init__.py b/picard/session/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/picard/session/session.py b/picard/session/session.py new file mode 100644 index 0000000000..f8acedfa42 --- /dev/null +++ b/picard/session/session.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Session management for Picard. + +This module provides functionality to save and restore Picard sessions, +including file locations, metadata overrides, and configuration options. +Sessions allow users to preserve their work state across application restarts. + +Classes +------- +SessionItemLocation + Dataclass representing the location of a file within a session. + +Functions +--------- +export_session + Export current session data to a dictionary. +save_session_to_path + Save session data to a file. +load_session_from_path + Load session data from a file. 
+ +Notes +----- +Session files use the .mbps extension and contain JSON data with version +information, options, file locations, and metadata overrides. +""" + +from __future__ import annotations + +from contextlib import suppress +from dataclasses import dataclass +import json +from pathlib import Path +from typing import Any + +from PyQt6 import QtCore + +from picard.album import Album, NatAlbum +from picard.cluster import Cluster, UnclusteredFiles +from picard.config import get_config +from picard.file import File +from picard.metadata import Metadata + + +SESSION_FILE_EXTENSION = ".mbps" + + +@dataclass(frozen=True) +class SessionItemLocation: + """Location information for a file within a session. + + Parameters + ---------- + type : str + The type of location (e.g., "unclustered", "track", "album_unmatched", "cluster", "nat"). + album_id : str | None, optional + The MusicBrainz album ID if the file is associated with an album. + recording_id : str | None, optional + The MusicBrainz recording ID if the file is associated with a specific track. + cluster_title : str | None, optional + The cluster title for files in a cluster. + cluster_artist : str | None, optional + The cluster artist for files in a cluster. 
+ """ + + type: str + album_id: str | None = None + recording_id: str | None = None + cluster_title: str | None = None + cluster_artist: str | None = None + + +def _serialize_metadata_for_file(file: File) -> dict[str, list[Any]]: + # Store only user-visible tags, skip internal (~) ones + tags: dict[str, list[Any]] = {} + for key, values in file.metadata.rawitems(): + if key.startswith("~") or key == "length": + continue + # Copy as list to be JSON serializable + tags[key] = list(values) + return tags + + +def _deserialize_metadata(tags: dict[str, list[Any]]) -> Metadata: + md = Metadata() + for key, values in tags.items(): + md[key] = values + return md + + +def _as_list(values: Any) -> list[Any]: + if isinstance(values, (list, tuple)): + return list(values) + # Treat scalars / strings as single-value list + return [values] + + +def _detect_location(file: File) -> SessionItemLocation: + parent = file.parent_item + if parent is None: + return SessionItemLocation(type="unclustered") + + # File under a track (right pane) + if hasattr(parent, "album") and isinstance(parent.album, Album): + if isinstance(parent.album, NatAlbum): + # NAT special handling + return SessionItemLocation(type="nat", recording_id=parent.id) + # Track placement + if hasattr(parent, "id"): + return SessionItemLocation(type="track", album_id=parent.album.id, recording_id=parent.id) + # Fallback to album unmatched + return SessionItemLocation(type="album_unmatched", album_id=parent.album.id) + + # Unmatched files inside an album + if isinstance(parent, Cluster) and parent.related_album: + return SessionItemLocation(type="album_unmatched", album_id=parent.related_album.id) + + # Left pane cluster + if isinstance(parent, Cluster): + if isinstance(parent, UnclusteredFiles): + return SessionItemLocation(type="unclustered") + return SessionItemLocation( + type="cluster", + cluster_title=str(parent.metadata["album"]), + cluster_artist=str(parent.metadata["albumartist"]), + ) + + # Default + return 
SessionItemLocation(type="unclustered") + + +def export_session(tagger) -> dict[str, Any]: + """Export current session data to a dictionary. + + Parameters + ---------- + tagger + The Picard tagger instance to export session data from. + + Returns + ------- + dict[str, Any] + Dictionary containing session data with the following keys: + - version: Session format version (currently 1) + - options: Configuration options (rename_files, move_files, dont_write_tags) + - items: List of file items with paths and locations + - album_track_overrides: Track-level metadata overrides per album + - album_overrides: Album-level metadata overrides + + Notes + ----- + Only user-visible tags are exported, internal tags (starting with ~) are excluded. + The function captures manual metadata overrides made in the UI. + """ + config = get_config() + session: dict[str, Any] = { + "version": 1, + "options": { + "rename_files": bool(config.setting["rename_files"]), + "move_files": bool(config.setting["move_files"]), + "dont_write_tags": bool(config.setting["dont_write_tags"]), + }, + "items": [], + "album_track_overrides": {}, # album_id -> recording_id -> {tag: [values]} + "album_overrides": {}, # album_id -> {tag: [values]} + } + + for file in tagger.iter_all_files(): + loc = _detect_location(file) + entry: dict[str, Any] = { + "file_path": str(Path(file.filename)), + "location": { + k: v + for k, v in { + "type": loc.type, + "album_id": loc.album_id, + "recording_id": loc.recording_id, + "cluster_title": loc.cluster_title, + "cluster_artist": loc.cluster_artist, + }.items() + if v is not None + }, + } + # Persist unsaved tag changes + if not file.is_saved(): + entry["metadata"] = {"tags": _serialize_metadata_for_file(file)} + session["items"].append(entry) + + # Capture manual track-level overrides per album/track + album_overrides: dict[str, dict[str, dict[str, list[Any]]]] = {} + # Capture album-level overrides (e.g. 
albumartist) + album_meta_overrides: dict[str, dict[str, list[Any]]] = {} + EXCLUDED_OVERRIDE_TAGS = {"length", "~length"} + for album in tagger.albums.values(): + if isinstance(album, NatAlbum): + continue + overrides_for_album: dict[str, dict[str, list[Any]]] = {} + # Album-level diffs vs orig_metadata + album_diff = album.metadata.diff(album.orig_metadata) + if album_diff: + album_meta_overrides[album.id] = { + k: _as_list(v) for k, v in album_diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS + } + for track in album.tracks: + # The difference to scripted_metadata are user edits made in UI + diff = track.metadata.diff(track.scripted_metadata) + if diff: + # Convert to JSON-friendly dict; ensure values are lists of strings + overrides_for_album[track.id] = { + k: _as_list(v) for k, v in diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS + } + if overrides_for_album: + album_overrides[album.id] = overrides_for_album + if album_overrides: + session["album_track_overrides"] = album_overrides + if album_meta_overrides: + session["album_overrides"] = album_meta_overrides + return session + + +def save_session_to_path(tagger, path: str | Path) -> None: + """Save session data to a file. + + Parameters + ---------- + tagger + The Picard tagger instance to save session data from. + path : str | Path + The file path to save the session to. If the extension is not .mbps, + it will be automatically added. + + Notes + ----- + The session is saved as JSON with UTF-8 encoding and 2-space indentation. + If the file already exists, it will be overwritten. 
+ """ + p = Path(path) + if p.suffix.lower() != SESSION_FILE_EXTENSION: + p = p.with_suffix(SESSION_FILE_EXTENSION) + data = export_session(tagger) + p.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") + + +def _apply_saved_metadata_if_any(tagger, file_path_to_md: dict[Path, Metadata]) -> None: + # Try applying metadata after files have loaded + pending: list[Path] = [] + for fpath, md in file_path_to_md.items(): + file = tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + pending.append(fpath) + continue + with suppress(OSError, ValueError, AttributeError, KeyError): + # Preserve computed length from current metadata + md.length = file.metadata.length or file.orig_metadata.length + file.copy_metadata(md) + file.update() + + if pending: + QtCore.QTimer.singleShot( + 200, lambda: _apply_saved_metadata_if_any(tagger, {p: file_path_to_md[p] for p in pending}) + ) + + +def load_session_from_path(tagger, path: str | Path) -> None: + """Load session data from a file. + + Parameters + ---------- + tagger + The Picard tagger instance to load session data into. + path : str | Path + The file path to load the session from. + + Notes + ----- + This function will: + - Clear the current session + - Restore configuration options + - Load files to their original locations (unclustered, clusters, albums, tracks) + - Apply saved metadata overrides + - Handle NAT (Non-Album Track) items + + The function respects the session_safe_restore configuration setting + to prevent overwriting unsaved changes. 
+ """ + p = Path(path) + data = json.loads(p.read_text(encoding="utf-8")) + + # Close current session + tagger.clear_session() + # Respect user setting for safe restore (defaults enabled) + if get_config().setting['session_safe_restore']: + tagger._restoring_session = True + + # Restore quick option states (affect menu toggles) + opts = data.get("options", {}) + config = get_config() + config.setting["rename_files"] = bool(opts.get("rename_files", config.setting["rename_files"])) + config.setting["move_files"] = bool(opts.get("move_files", config.setting["move_files"])) + config.setting["dont_write_tags"] = bool(opts.get("dont_write_tags", config.setting["dont_write_tags"])) + + items = data.get("items", []) + track_overrides_by_album: dict[str, dict[str, dict[str, list[Any]]]] = data.get("album_track_overrides", {}) + album_meta_overrides: dict[str, dict[str, list[Any]]] = data.get("album_overrides", {}) + + # Group by placement target to leverage Tagger.add_files batching + by_unclustered: list[Path] = [] + by_cluster: dict[tuple[str, str], list[Path]] = {} + by_album: dict[str, dict[str, list[Path]]] = {} + nat_items: list[tuple[Path, str]] = [] # (path, recording_id) + + # Collect metadata to apply after loading + metadata_by_path: dict[Path, Metadata] = {} + + for it in items: + fpath = Path(it["file_path"]).expanduser() + loc = it.get("location", {}) + ltype = str(loc.get("type", "unclustered")) + md = it.get("metadata", {}) + if "tags" in md: + tags = {k: _as_list(v) for k, v in md["tags"].items()} + metadata_by_path[fpath] = _deserialize_metadata(tags) # type: ignore[arg-type] + + if ltype == "unclustered": + by_unclustered.append(fpath) + elif ltype == "cluster": + key = (str(loc.get("cluster_title", "")), str(loc.get("cluster_artist", ""))) + by_cluster.setdefault(key, []).append(fpath) + elif ltype in {"album_unmatched", "track"}: + album_id = str(loc.get("album_id")) + entry = by_album.setdefault(album_id, {"unmatched": [], "tracks": []}) + if ltype == 
"album_unmatched": + entry["unmatched"].append(fpath) + else: + entry["tracks"].append((fpath, str(loc.get("recording_id")))) + elif ltype == "nat": + nat_items.append((fpath, str(loc.get("recording_id")))) + else: + by_unclustered.append(fpath) + + # Helper to convert Paths to strings for Tagger.add_files + def _to_strs(paths: list[Path]) -> list[str]: + return [str(p) for p in paths] + + # Load albums upfront + loaded_albums: dict[str, Album] = {} + for album_id in by_album.keys() | set(track_overrides_by_album.keys()) | set(album_meta_overrides.keys()): + loaded_albums[album_id] = tagger.load_album(album_id) + + # Add unclustered files + if by_unclustered: + tagger.add_files(_to_strs(by_unclustered), target=tagger.unclustered_files) + + # Add cluster files + for (title, artist), paths in by_cluster.items(): + cluster = tagger.load_cluster(title, artist) + tagger.add_files(_to_strs(paths), target=cluster) + + # Add album files (both unmatched and those destined for tracks) + for album_id, groups in by_album.items(): + album = loaded_albums[album_id] + all_paths: list[Path] = list(groups["unmatched"]) + [fp for (fp, _rid) in groups["tracks"]] + if all_paths: + tagger.add_files(_to_strs(all_paths), target=album.unmatched_files) + + # Ensure album node is expanded/visible early + def _ensure_album_visible(a: Album): + def _run(): + a.update(update_tracks=True) + if a.ui_item: + a.ui_item.setExpanded(True) + + a.run_when_loaded(_run) + + _ensure_album_visible(album) + + # After album is loaded move files to their tracks, waiting for files to be ready + def _move_when_loaded(album: Album, track_specs: list[tuple[Path, str]]): + def _attempt_move(fpath: Path, rid: str): + file = tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + QtCore.QTimer.singleShot(150, lambda: _attempt_move(fpath, rid)) + return + rec_to_track = {t.id: t for t in album.tracks} + track = rec_to_track.get(rid) + if track is None: + # Album not ready yet, retry + 
QtCore.QTimer.singleShot(150, lambda: _attempt_move(fpath, rid)) + return + file.move(track) + + def _run(): + for fpath, rid in track_specs: + _attempt_move(fpath, rid) + + album.run_when_loaded(_run) + + if groups["tracks"]: + _move_when_loaded(album, groups["tracks"]) # type: ignore[arg-type] + + # Apply manual track-level overrides after album data has loaded + for album_id, track_overrides in track_overrides_by_album.items(): + album = loaded_albums.get(album_id) + if not album: + continue + + def _apply_overrides(a: Album, overrides: dict[str, dict[str, list[Any]]]): + def _run(): + track_by_id = {t.id: t for t in a.tracks} + for track_id, tags in overrides.items(): + tr = track_by_id.get(track_id) + if not tr: + continue + # Apply overrides to track metadata so columns reflect user edits + for tag, values in tags.items(): + # Never override computed lengths + if tag in {"length", "~length"}: + continue + tr.metadata[tag] = _as_list(values) + tr.update() + + a.run_when_loaded(_run) + + _apply_overrides(album, track_overrides) + + # Apply album-level overrides after album data has loaded + for album_id, overrides in album_meta_overrides.items(): + album = loaded_albums.get(album_id) + if not album: + continue + + def _apply_album_overrides(a: Album, tags: dict[str, list[Any]]): + def _run(): + for tag, values in tags.items(): + a.metadata[tag] = _as_list(values) + a.update(update_tracks=False) + + a.run_when_loaded(_run) + + _apply_album_overrides(album, overrides) + + # Handle NAT items + for fpath, rid in nat_items: + + def _move_nat(path: Path = fpath, recording_id: str = rid): + file = tagger.files.get(str(path)) + if not file or file.state == File.PENDING: + QtCore.QTimer.singleShot(200, lambda: _move_nat(path, recording_id)) + return + tagger.move_file_to_nat(file, recording_id) + + _move_nat() + + # Apply metadata edits after load completes (retry until loaded) + if metadata_by_path: + QtCore.QTimer.singleShot(200, lambda: 
_apply_saved_metadata_if_any(tagger, metadata_by_path)) + + # Unset restoring flag when all file loads and web requests finish + def _unset_when_idle(): + if not get_config().setting['session_safe_restore']: + return + if tagger._pending_files_count == 0 and not tagger.webservice.num_pending_web_requests: + tagger._restoring_session = False + else: + QtCore.QTimer.singleShot(200, _unset_when_idle) + + QtCore.QTimer.singleShot(200, _unset_when_idle) diff --git a/picard/tagger.py b/picard/tagger.py index 745d24caa2..7052d6e110 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -47,10 +47,13 @@ import argparse from collections import namedtuple +import contextlib from functools import partial from hashlib import blake2b +import json import logging import os +from pathlib import Path import platform import re import shutil @@ -371,6 +374,8 @@ def _init_tagger_entities(self): self.mbid_redirects = {} self.unclustered_files = UnclusteredFiles() self.nats = None + # When True, we are restoring a session; skip auto-matching by MBIDs + self._restoring_session = False def _init_ui(self, config): """Initialize User Interface / Main Window""" @@ -489,6 +494,34 @@ def iter_all_files(self): yield from self.iter_album_files() yield from self.clusters.iterfiles() + # ============================== + # Session export / import + # ============================== + def export_session(self) -> dict: + from picard import config as _cfg + from picard.session import export_session as _export_session + + # Expose config on self for session helpers + self.config = _cfg # type: ignore[attr-defined] + return _export_session(self) + + def import_session(self, data: dict) -> None: + # This method expects a file path usually; keep a convenience for future extensions + raise NotImplementedError + + def clear_session(self) -> None: + """Remove all files, clusters and albums from current UI state.""" + with self.window.ignore_selection_changes: + # Remove all albums (includes NAT) + for album 
in list(self.albums.values()): + self.remove_album(album) + # Remove all left-pane clusters + for cluster in list(self.clusters): + self.remove_cluster(cluster) + # Remove all unclustered files + if self.unclustered_files.files: + self.remove_files(list(self.unclustered_files.files)) + def _init_remote_commands(self): self.commands = RemoteCommands.commands() @@ -602,16 +635,61 @@ def exit(self): if self.stopping: return self.stopping = True + + # Best-effort crash/exit backup if enabled + with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): + config = get_config() + if config.setting['session_backup_on_crash']: + from picard.session import save_session_to_path + + path = config.persist['session_autosave_path'] or config.persist['last_session_path'] + if path: + save_session_to_path(self, path) + log.debug("Picard stopping") self.run_cleanup() QtCore.QCoreApplication.processEvents() def _run_init(self): + config = get_config() + # Load last session if configured + if config.setting['session_load_last_on_startup']: + last_path = config.persist['last_session_path'] + if last_path: + with contextlib.suppress(OSError, PermissionError, FileNotFoundError, json.JSONDecodeError, KeyError): + from picard.session import load_session_from_path + + load_session_from_path(self, last_path) + if self._to_load: self.load_to_picard(self._to_load) del self._to_load def run(self): + # Setup autosave if configured + config = get_config() + interval_min = int(config.setting['session_autosave_interval_min']) + if interval_min > 0: + from picard.session import save_session_to_path + + self._session_autosave_timer = QtCore.QTimer(self) + self._session_autosave_timer.setInterval(max(1, interval_min) * 60 * 1000) + + def _autosave(): + path = config.persist['session_autosave_path'] if 'session_autosave_path' in config.persist else None + if not path: + path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None + 
if not path: + path = Path(USER_DIR) / 'autosave.mbps' + config.persist['session_autosave_path'] = path + + with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): + # Best effort autosave; do not crash programme + save_session_to_path(self, path) + + self._session_autosave_timer.timeout.connect(_autosave) + self._session_autosave_timer.start() + self.update_browser_integration() self.window.show() QtCore.QTimer.singleShot(0, self._run_init) @@ -658,7 +736,7 @@ def _file_loaded(self, file, target=None, remove_file=False, unmatched_files=Non return file_moved = False - if not config.setting['ignore_file_mbids']: + if not config.setting['ignore_file_mbids'] and not getattr(self, '_restoring_session', False): recordingid = file.metadata.getall('musicbrainz_recordingid') recordingid = recordingid[0] if recordingid else '' is_valid_recordingid = mbid_validate(recordingid) @@ -689,7 +767,12 @@ def _file_loaded(self, file, target=None, remove_file=False, unmatched_files=Non unmatched_files.append(file) # fallback on analyze if nothing else worked - if not file_moved and config.setting['analyze_new_files'] and file.can_analyze: + if ( + not file_moved + and not getattr(self, '_restoring_session', False) + and config.setting['analyze_new_files'] + and file.can_analyze + ): log.debug("Trying to analyze %r …", file) self.analyze([file]) @@ -703,7 +786,11 @@ def move_file(self, file, target): Returns the actual target the files has been moved to or None """ if isinstance(target, Album): - self.move_files_to_album([file], album=target) + # During restore place into album's unmatched bucket without matching + if getattr(self, '_restoring_session', False): + file.move(target.unmatched_files) + else: + self.move_files_to_album([file], album=target) else: if isinstance(target, File) and target.parent_item: target = target.parent_item diff --git a/picard/ui/enums.py b/picard/ui/enums.py index f8cbd39190..888034d771 100644 --- 
a/picard/ui/enums.py +++ b/picard/ui/enums.py @@ -106,3 +106,7 @@ class MainAction(str, Enum): VIEW_HISTORY = 'view_history_action' VIEW_INFO = 'view_info_action' VIEW_LOG = 'view_log_action' + # Session management + SAVE_SESSION = 'save_session_action' + LOAD_SESSION = 'load_session_action' + CLOSE_SESSION = 'close_session_action' diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index c7e9a54c3f..c80b5b117a 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -298,10 +298,8 @@ def handle_settings_changed(self, name, old_value, new_value): self.actions[MainAction.ENABLE_MOVING].setChecked(new_value) elif name == 'dont_write_tags': self.actions[MainAction.ENABLE_TAG_SAVING].setChecked(not new_value) - elif name == 'save_images_to_tags': - self.actions[MainAction.ENABLE_SAVE_IMAGES_TO_TAGS].setChecked(new_value) - elif name == 'save_images_to_files': - self.actions[MainAction.ENABLE_SAVE_IMAGES_TO_FILES].setChecked(new_value) + # Note: image saving toggles are handled on their respective options pages. 
+ # see: https://github.com/metabrainz/picard/commit/a5d32b9e0986f057fb1d08b0b47ce3b6425ed087 elif name in {'file_renaming_scripts', 'selected_file_naming_script_id'}: self._make_script_selector_menu() @@ -667,6 +665,10 @@ def add_menu(menu_title, *args): MainAction.SAVE, MainAction.SUBMIT_ACOUSTID, '-', + MainAction.LOAD_SESSION, + MainAction.SAVE_SESSION, + MainAction.CLOSE_SESSION, + '-', MainAction.EXIT, ) @@ -1042,6 +1044,55 @@ def save(self): if proceed_with_save: self.tagger.save(self.selected_objects) + def save_session(self): + from picard.session import save_session_to_path + + from picard.ui.util import FileDialog + + config = get_config() + start_dir = config.persist['current_directory'] or os.path.expanduser('~') + path, _filter = FileDialog.getSaveFileName( + parent=self, + dir=start_dir, + filter=_("MusicBrainz Picard Session (*.mbps);;All files (*)"), + ) + if path: + try: + save_session_to_path(self.tagger, path) + config.persist['current_directory'] = os.path.dirname(path) + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + except Exception as e: + QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) + + def load_session(self): + from picard.session import load_session_from_path + + from picard.ui.util import FileDialog + + config = get_config() + start_dir = config.persist['current_directory'] or os.path.expanduser('~') + path, _filter = FileDialog.getOpenFileName( + parent=self, + dir=start_dir, + filter=_("MusicBrainz Picard Session (*.mbps);;All files (*)"), + ) + if path: + try: + load_session_from_path(self.tagger, path) + config.persist['current_directory'] = os.path.dirname(path) + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) + except Exception as e: + QtWidgets.QMessageBox.critical(self, _("Failed to load session"), str(e)) + + def close_session(self): + # Ask to 
save if unsaved files + if not self.show_quit_confirmation(): + return + # Clear current state + self.tagger.clear_session() + def remove_selected_objects(self): """Tell the tagger to remove the selected objects.""" self.panel.remove(self.selected_objects) diff --git a/picard/ui/mainwindow/actions.py b/picard/ui/mainwindow/actions.py index 5a7c0bad8e..bf3b0c9f6f 100644 --- a/picard/ui/mainwindow/actions.py +++ b/picard/ui/mainwindow/actions.py @@ -543,3 +543,27 @@ def _create_check_update_action(parent): else: action = None return action + + +@add_action(MainAction.SAVE_SESSION) +def _create_save_session_action(parent): + action = QtGui.QAction(icontheme.lookup('document-save'), _("Save Ses&sion…"), parent) + action.setStatusTip(_("Save the current session to a file")) + action.triggered.connect(parent.save_session) + return action + + +@add_action(MainAction.LOAD_SESSION) +def _create_load_session_action(parent): + action = QtGui.QAction(icontheme.lookup('document-open'), _("&Load Session…"), parent) + action.setStatusTip(_("Load a session file")) + action.triggered.connect(parent.load_session) + return action + + +@add_action(MainAction.CLOSE_SESSION) +def _create_close_session_action(parent): + action = QtGui.QAction(_("&Close Session"), parent) + action.setStatusTip(_("Close the current session")) + action.triggered.connect(parent.close_session) + return action diff --git a/picard/ui/options/dialog.py b/picard/ui/options/dialog.py index 8cd8142ff7..8454cb2b4e 100644 --- a/picard/ui/options/dialog.py +++ b/picard/ui/options/dialog.py @@ -92,6 +92,7 @@ renaming, renaming_compat, scripting, + sessions, tags, tags_compatibility_aac, tags_compatibility_ac3, diff --git a/picard/ui/options/sessions.py b/picard/ui/options/sessions.py new file mode 100644 index 0000000000..208bf9bd90 --- /dev/null +++ b/picard/ui/options/sessions.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz 
Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +from PyQt6 import QtWidgets + +from picard.config import get_config +from picard.extension_points.options_pages import register_options_page +from picard.i18n import N_, gettext as _ + +from picard.ui.options import OptionsPage + + +class SessionsOptionsPage(OptionsPage): + NAME = 'sessions' + TITLE = N_('Sessions') + PARENT = 'advanced' + SORT_ORDER = 90 + ACTIVE = True + + OPTIONS = ( + ('session_safe_restore', ['safe_restore_checkbox']), + ('session_load_last_on_startup', ['load_last_checkbox']), + ('session_autosave_interval_min', ['autosave_spin']), + ('session_backup_on_crash', ['backup_checkbox']), + ) + + def __init__(self, parent=None): + super().__init__(parent) + self.vbox = QtWidgets.QVBoxLayout(self) + + self.safe_restore_checkbox = QtWidgets.QCheckBox( + _('Honor local edits and placement on load (no auto-matching)') + ) + self.vbox.addWidget(self.safe_restore_checkbox) + + self.load_last_checkbox = QtWidgets.QCheckBox(_('Load last saved session on startup')) + self.vbox.addWidget(self.load_last_checkbox) + + autosave_layout = QtWidgets.QHBoxLayout() + self.autosave_label = QtWidgets.QLabel(_('Auto-save session every N minutes (0 disables)')) + self.autosave_spin = QtWidgets.QSpinBox() + self.autosave_spin.setRange(0, 1440) + 
autosave_layout.addWidget(self.autosave_label) + autosave_layout.addWidget(self.autosave_spin) + self.vbox.addLayout(autosave_layout) + + self.backup_checkbox = QtWidgets.QCheckBox(_('Attempt to keep a session backup on unexpected shutdown')) + self.vbox.addWidget(self.backup_checkbox) + + self.vbox.addStretch(1) + + def load(self): + config = get_config() + self.safe_restore_checkbox.setChecked(config.setting['session_safe_restore']) + self.load_last_checkbox.setChecked(config.setting['session_load_last_on_startup']) + self.autosave_spin.setValue(config.setting['session_autosave_interval_min']) + self.backup_checkbox.setChecked(config.setting['session_backup_on_crash']) + + def save(self): + config = get_config() + config.setting['session_safe_restore'] = self.safe_restore_checkbox.isChecked() + config.setting['session_load_last_on_startup'] = self.load_last_checkbox.isChecked() + config.setting['session_autosave_interval_min'] = int(self.autosave_spin.value()) + config.setting['session_backup_on_crash'] = self.backup_checkbox.isChecked() + + +register_options_page(SessionsOptionsPage) diff --git a/test/test_sessions.py b/test/test_sessions.py new file mode 100644 index 0000000000..28397818bc --- /dev/null +++ b/test/test_sessions.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +from pathlib import Path +from types import SimpleNamespace +from typing import Any + +import picard.config as picard_config +from picard.metadata import Metadata +from picard.session import export_session + +import pytest + + +class _StubFile: + def __init__(self, filename: str, metadata: Metadata, saved: bool, parent_item: Any = None) -> None: + self.filename = filename + self.metadata = metadata + self._saved = saved + self.parent_item = parent_item + + def is_saved(self) -> bool: + return self._saved + + +class _StubTrack: + def __init__(self, track_id: str, scripted: Metadata, current: Metadata) -> None: + self.id = track_id + self.scripted_metadata = scripted + self.metadata = current + + +class _StubAlbum: + def __init__(self, album_id: str, orig: Metadata, current: Metadata, tracks: list[_StubTrack]) -> None: + self.id = album_id + self.orig_metadata = orig + self.metadata = current + self.tracks = tracks + + +class _StubTagger: + def __init__(self, files: list[_StubFile], albums: dict[str, Any] | None = None) -> None: + self._files = files + self.albums = albums or {} + + def iter_all_files(self): + yield from self._files + + +@pytest.fixture(autouse=True) +def _fake_script_config(monkeypatch: pytest.MonkeyPatch) -> SimpleNamespace: + """Provide minimal config so functions accessing get_config() have settings.""" + + class _FakeSetting(dict): + def raw_value(self, name, qtype=None): + return self.get(name) + + def key(self, name): + return name + + cfg = SimpleNamespace(setting=_FakeSetting({'enabled_plugins': []}), sync=lambda: None) + import picard.config as picard_config_mod + import picard.extension_points as ext_points_mod + import picard.session as session_mod + + monkeypatch.setattr(picard_config_mod, 'get_config', lambda: cfg, 
raising=True) + monkeypatch.setattr(ext_points_mod, 'get_config', lambda: cfg, raising=True) + monkeypatch.setattr(session_mod, 'get_config', lambda: cfg, raising=True) + return cfg + + +@pytest.fixture() +def cfg_options() -> None: + cfg = picard_config.get_config() + # Ensure required keys exist with defaults + cfg.setting['rename_files'] = False + cfg.setting['move_files'] = False + cfg.setting['dont_write_tags'] = False + + +def test_export_session_empty(tmp_path: Path) -> None: + # Ensure options keys exist + cfg = picard_config.get_config() + cfg.setting['rename_files'] = False + cfg.setting['move_files'] = False + cfg.setting['dont_write_tags'] = True + + data = export_session(_StubTagger(files=[], albums={})) + assert isinstance(data, dict) + assert data['version'] == 1 + assert set(data['options'].keys()) == {'rename_files', 'move_files', 'dont_write_tags'} + assert data['options']['dont_write_tags'] is True + assert data['items'] == [] + + +@pytest.mark.parametrize('saved', [True, False]) +def test_export_session_includes_items_and_metadata_tags(cfg_options: None, tmp_path: Path, saved: bool) -> None: + m = Metadata() + m['title'] = 'Song' + m['artist'] = 'Artist' + m['~internal'] = 'x' + m['length'] = '123456' + f = _StubFile(filename=str(tmp_path / 'a.flac'), metadata=m, saved=saved, parent_item=None) + tagger = _StubTagger(files=[f]) + + data = export_session(tagger) + + assert isinstance(data['items'], list) and len(data['items']) == 1 + item = data['items'][0] + assert Path(item['file_path']).name == 'a.flac' + + loc = item['location'] + assert loc['type'] == 'unclustered' + assert 'album_id' not in loc and 'recording_id' not in loc + + if saved: + assert 'metadata' not in item + else: + # Only user-visible tags; internal and length excluded; values are lists + tags = item['metadata']['tags'] + assert set(tags.keys()) == {'title', 'artist'} + assert isinstance(tags['title'], list) and tags['title'] == ['Song'] + + +def 
test_export_session_options_reflect_config_flags(cfg_options: None) -> None: + cfg = picard_config.get_config() + cfg.setting['rename_files'] = True + cfg.setting['move_files'] = True + cfg.setting['dont_write_tags'] = True + + tagger = _StubTagger(files=[]) + data = export_session(tagger) + assert data['options'] == { + 'rename_files': True, + 'move_files': True, + 'dont_write_tags': True, + } + + +def test_export_session_captures_album_and_track_overrides(cfg_options: None, tmp_path: Path) -> None: + # File present to ensure items list not empty, but focus is on overrides capture + fm = Metadata() + fm['title'] = 'Song' + f = _StubFile(filename=str(tmp_path / 'b.mp3'), metadata=fm, saved=True, parent_item=None) + + # Album-level override (albumartist changed) + album_orig = Metadata() + album_orig['albumartist'] = 'Orig Artist' + album_cur = Metadata() + album_cur['albumartist'] = 'New Artist' + + # Track-level override vs scripted_metadata; exclude length + scripted = Metadata() + scripted['title'] = 'Old Title' + scripted['length'] = '1000' + track_cur = Metadata() + track_cur['title'] = 'New Title' + track_cur['length'] = '2000' # must be excluded + + tr = _StubTrack('track-1', scripted=scripted, current=track_cur) + alb = _StubAlbum('album-1', orig=album_orig, current=album_cur, tracks=[tr]) + tagger = _StubTagger(files=[f], albums={'album-1': alb}) + + data = export_session(tagger) + + # Track-level overrides captured and listified + atr = data['album_track_overrides'] + assert 'album-1' in atr and 'track-1' in atr['album-1'] + assert atr['album-1']['track-1'] == {'title': ['New Title']} + + # Album-level overrides captured and listified + aor = data['album_overrides'] + assert aor == {'album-1': {'albumartist': ['New Artist']}} + + +@pytest.mark.parametrize( + ("value", "expected"), + [ + ('Rock', ['Rock']), + (['Rock', 'Pop'], ['Rock', 'Pop']), + ], +) +def test_export_session_listifies_override_values(cfg_options: None, value: Any, expected: list[str]) -> 
None: + # Construct album with scalar/list diffs + album_orig = Metadata() + album_orig['genre'] = '' + album_cur = Metadata() + album_cur['genre'] = value + alb = _StubAlbum('album-X', orig=album_orig, current=album_cur, tracks=[]) + + tagger = _StubTagger(files=[], albums={'album-X': alb}) + data = export_session(tagger) + assert data['album_overrides'] == {'album-X': {'genre': expected}} From 16f21c79d77432df53ee12886d23a3e97c5669a2 Mon Sep 17 00:00:00 2001 From: kyle nguyen Date: Mon, 8 Sep 2025 01:55:38 -0400 Subject: [PATCH 02/30] Refactor sessions manager and add unit tests --- picard/session/__init__.py | 40 ++ picard/session/constants.py | 48 +++ picard/session/location_detector.py | 159 +++++++ picard/session/metadata_handler.py | 180 ++++++++ picard/session/retry_helper.py | 140 ++++++ picard/session/session.py | 486 --------------------- picard/session/session_data.py | 194 +++++++++ picard/session/session_exporter.py | 254 +++++++++++ picard/session/session_loader.py | 444 +++++++++++++++++++ picard/session/session_manager.py | 133 ++++++ picard/session/track_mover.py | 134 ++++++ picard/tagger.py | 8 +- picard/ui/mainwindow/__init__.py | 4 +- test/session/__init__.py | 21 + test/session/conftest.py | 445 +++++++++++++++++++ test/session/test_data.py | 244 +++++++++++ test/session/test_location_detector.py | 280 ++++++++++++ test/session/test_metadata_handler.py | 336 +++++++++++++++ test/session/test_retry_helper.py | 354 +++++++++++++++ test/session/test_session_constants.py | 60 +++ test/session/test_session_exporter.py | 452 +++++++++++++++++++ test/session/test_session_loader.py | 576 +++++++++++++++++++++++++ test/session/test_session_manager.py | 324 ++++++++++++++ test/{ => session}/test_sessions.py | 116 +++-- test/session/test_track_mover.py | 351 +++++++++++++++ 25 files changed, 5224 insertions(+), 559 deletions(-) create mode 100644 picard/session/constants.py create mode 100644 picard/session/location_detector.py create mode 100644 
picard/session/metadata_handler.py create mode 100644 picard/session/retry_helper.py delete mode 100644 picard/session/session.py create mode 100644 picard/session/session_data.py create mode 100644 picard/session/session_exporter.py create mode 100644 picard/session/session_loader.py create mode 100644 picard/session/session_manager.py create mode 100644 picard/session/track_mover.py create mode 100644 test/session/__init__.py create mode 100644 test/session/conftest.py create mode 100644 test/session/test_data.py create mode 100644 test/session/test_location_detector.py create mode 100644 test/session/test_metadata_handler.py create mode 100644 test/session/test_retry_helper.py create mode 100644 test/session/test_session_constants.py create mode 100644 test/session/test_session_exporter.py create mode 100644 test/session/test_session_loader.py create mode 100644 test/session/test_session_manager.py rename test/{ => session}/test_sessions.py (70%) create mode 100644 test/session/test_track_mover.py diff --git a/picard/session/__init__.py b/picard/session/__init__.py index e69de29bb2..aca65e7370 100644 --- a/picard/session/__init__.py +++ b/picard/session/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Session management package for Picard. + +This package provides functionality to save and restore Picard sessions, +including file locations, metadata overrides, and configuration options. +""" + +from picard.session.session_data import SessionItemLocation +from picard.session.session_manager import ( + export_session, + load_session_from_path, + save_session_to_path, +) + + +__all__ = [ + 'SessionItemLocation', + 'export_session', + 'load_session_from_path', + 'save_session_to_path', +] diff --git a/picard/session/constants.py b/picard/session/constants.py new file mode 100644 index 0000000000..48e2a3cb9f --- /dev/null +++ b/picard/session/constants.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Constants for session management. + +This module contains all constants used throughout the session management system, +including retry delays, file extensions, and excluded tags. 
+""" + + +class SessionConstants: + """Constants for session management operations.""" + + # File handling + SESSION_FILE_EXTENSION = ".mbps" + SESSION_FORMAT_VERSION = 1 + + # Retry delays in milliseconds + DEFAULT_RETRY_DELAY_MS = 200 + FAST_RETRY_DELAY_MS = 150 + + # Metadata handling + INTERNAL_TAG_PREFIX = "~" + EXCLUDED_OVERRIDE_TAGS = frozenset({"length", "~length"}) + + # Location types + LOCATION_UNCLUSTERED = "unclustered" + LOCATION_TRACK = "track" + LOCATION_ALBUM_UNMATCHED = "album_unmatched" + LOCATION_CLUSTER = "cluster" + LOCATION_NAT = "nat" diff --git a/picard/session/location_detector.py b/picard/session/location_detector.py new file mode 100644 index 0000000000..882ff18dd8 --- /dev/null +++ b/picard/session/location_detector.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Location detection for session management. + +This module handles detecting where files should be placed within a session, +separating the complex location detection logic from other concerns. 
+""" + +from __future__ import annotations + +from picard.album import Album, NatAlbum +from picard.cluster import Cluster, UnclusteredFiles +from picard.file import File +from picard.session.constants import SessionConstants +from picard.session.session_data import SessionItemLocation + + +class LocationDetector: + """Detects the location type of files in the session.""" + + def detect(self, file: File) -> SessionItemLocation: + """Detect where a file should be placed in the session. + + Parameters + ---------- + file : File + The file to detect the location for. + + Returns + ------- + SessionItemLocation + The location information for the file. + + Notes + ----- + This method analyzes the file's parent item to determine its proper + location within the session structure. + """ + parent = file.parent_item + if parent is None: + return self._unclustered_location() + + if self._is_track_parent(parent): + return self._detect_track_location(parent) + elif self._is_cluster_parent(parent): + return self._detect_cluster_location(parent) + else: + return self._unclustered_location() + + def _is_track_parent(self, parent: object) -> bool: + """Check if parent is a track (has album attribute). + + Parameters + ---------- + parent : object + The parent item to check. + + Returns + ------- + bool + True if parent is a track. + """ + return hasattr(parent, "album") and isinstance(parent.album, Album) + + def _is_cluster_parent(self, parent: object) -> bool: + """Check if parent is a cluster. + + Parameters + ---------- + parent : object + The parent item to check. + + Returns + ------- + bool + True if parent is a cluster. + """ + return isinstance(parent, Cluster) + + def _detect_track_location(self, parent: object) -> SessionItemLocation: + """Detect location for files under a track. + + Parameters + ---------- + parent : object + The track parent item. + + Returns + ------- + SessionItemLocation + The location information for the track. 
+ """ + if isinstance(parent.album, NatAlbum): + # NAT special handling + return SessionItemLocation(type=SessionConstants.LOCATION_NAT, recording_id=parent.id) + + # Track placement + if hasattr(parent, "id") and parent.id: + return SessionItemLocation( + type=SessionConstants.LOCATION_TRACK, album_id=parent.album.id, recording_id=parent.id + ) + + # Fallback to album unmatched + return SessionItemLocation(type=SessionConstants.LOCATION_ALBUM_UNMATCHED, album_id=parent.album.id) + + def _detect_cluster_location(self, parent: Cluster) -> SessionItemLocation: + """Detect location for files under a cluster. + + Parameters + ---------- + parent : Cluster + The cluster parent item. + + Returns + ------- + SessionItemLocation + The location information for the cluster. + """ + # Unmatched files inside an album + if parent.related_album: + return SessionItemLocation(type=SessionConstants.LOCATION_ALBUM_UNMATCHED, album_id=parent.related_album.id) + + # Left pane cluster + if isinstance(parent, UnclusteredFiles): + return self._unclustered_location() + + return SessionItemLocation( + type=SessionConstants.LOCATION_CLUSTER, + cluster_title=str(parent.metadata["album"]), + cluster_artist=str(parent.metadata["albumartist"]), + ) + + def _unclustered_location(self) -> SessionItemLocation: + """Create an unclustered location. + + Returns + ------- + SessionItemLocation + Location for unclustered files. 
+ """ + return SessionItemLocation(type=SessionConstants.LOCATION_UNCLUSTERED) diff --git a/picard/session/metadata_handler.py b/picard/session/metadata_handler.py new file mode 100644 index 0000000000..3afd658fd7 --- /dev/null +++ b/picard/session/metadata_handler.py @@ -0,0 +1,180 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Metadata handling for session management. + +This module provides utilities for serializing and deserializing metadata +for session files, with proper error handling and validation. +""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from picard.file import File +from picard.log import log +from picard.metadata import Metadata +from picard.session.constants import SessionConstants + + +class MetadataHandler: + """Handles metadata serialization and deserialization for sessions.""" + + @staticmethod + def serialize_metadata_for_file(file: File) -> dict[str, list[Any]]: + """Serialize metadata for a file, excluding internal tags. + + Parameters + ---------- + file : File + The file to serialize metadata for. + + Returns + ------- + dict[str, list[Any]] + Dictionary containing serialized metadata tags. 
+ + Notes + ----- + Only user-visible tags are serialized, internal tags (starting with ~) + and length are excluded. + """ + tags: dict[str, list[Any]] = {} + for key, values in file.metadata.rawitems(): + if key.startswith(SessionConstants.INTERNAL_TAG_PREFIX) or key == "length": + continue + # Copy as list to be JSON serializable + tags[key] = list(values) + return tags + + @staticmethod + def deserialize_metadata(tags: dict[str, list[Any]]) -> Metadata: + """Deserialize metadata from a dictionary. + + Parameters + ---------- + tags : dict[str, list[Any]] + Dictionary containing serialized metadata tags. + + Returns + ------- + Metadata + The deserialized metadata object. + """ + md = Metadata() + for key, values in tags.items(): + md[key] = values + return md + + @staticmethod + def as_list(values: Any) -> list[Any]: + """Convert values to a list format. + + Parameters + ---------- + values : Any + Values to convert to list format. + + Returns + ------- + list[Any] + List representation of the values. + + Notes + ----- + Treats scalars/strings as single-value lists for consistency. + """ + if isinstance(values, (list, tuple)): + return list(values) + # Treat scalars / strings as single-value list + return [values] + + @staticmethod + def safe_apply_metadata(file: File, metadata: Metadata) -> bool: + """Safely apply metadata to a file with proper error handling. + + Parameters + ---------- + file : File + The file to apply metadata to. + metadata : Metadata + The metadata to apply. + + Returns + ------- + bool + True if metadata was applied successfully, False otherwise. + + Notes + ----- + This method provides specific error handling instead of broad exception + suppression, with proper logging for debugging. 
+ """ + try: + # Preserve computed length from current metadata + metadata.length = file.metadata.length or file.orig_metadata.length + file.copy_metadata(metadata) + file.update() + return True + except (AttributeError, KeyError) as e: + log.warning(f"Failed to apply metadata to {file.filename}: {e}") + return False + except Exception as e: + log.error(f"Unexpected error applying metadata: {e}") + return False + + @staticmethod + def apply_saved_metadata_if_any(tagger: Any, file_path_to_md: dict[Path, Metadata]) -> None: + """Apply saved metadata to files when they are ready. + + Parameters + ---------- + tagger : Any + The Picard tagger instance. + file_path_to_md : dict[Path, Metadata] + Mapping of file paths to their metadata. + + Notes + ----- + This method retries applying metadata until files are loaded and ready. + Files that are still pending will be retried later. + """ + from picard.session.retry_helper import RetryHelper + + pending: list[Path] = [] + for fpath, md in file_path_to_md.items(): + file = tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + pending.append(fpath) + continue + + if not MetadataHandler.safe_apply_metadata(file, md): + # If metadata application failed, we might want to retry + pending.append(fpath) + + if pending: + RetryHelper.retry_until( + condition_fn=lambda: len(pending) == 0, + action_fn=lambda: MetadataHandler.apply_saved_metadata_if_any( + tagger, {p: file_path_to_md[p] for p in pending} + ), + delay_ms=SessionConstants.DEFAULT_RETRY_DELAY_MS, + ) diff --git a/picard/session/retry_helper.py b/picard/session/retry_helper.py new file mode 100644 index 0000000000..493cd3641a --- /dev/null +++ b/picard/session/retry_helper.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as 
published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Retry utility for session management operations. + +This module provides utilities for retrying operations until conditions are met, +replacing the scattered timer patterns throughout the session management code. +""" + +from __future__ import annotations + +from typing import Callable + +from PyQt6 import QtCore + +from picard.session.constants import SessionConstants + + +class RetryHelper: + """Utility for retrying operations until conditions are met.""" + + @staticmethod + def retry_until( + condition_fn: Callable[[], bool], + action_fn: Callable[[], None], + delay_ms: int = SessionConstants.DEFAULT_RETRY_DELAY_MS, + max_attempts: int | None = None, + ) -> None: + """Retry an action until a condition is met. + + Parameters + ---------- + condition_fn : Callable[[], bool] + Function that returns True when the condition is met. + action_fn : Callable[[], None] + Function to execute when the condition is met. + delay_ms : int, optional + Delay between retry attempts in milliseconds. Defaults to DEFAULT_RETRY_DELAY_MS. + max_attempts : int | None, optional + Maximum number of retry attempts. If None, retry indefinitely. + + Notes + ----- + This replaces the scattered QtCore.QTimer.singleShot patterns throughout + the session management code with a centralized retry mechanism. 
+ """ + attempts = [0] + + def attempt() -> None: + attempts[0] += 1 + if max_attempts and attempts[0] > max_attempts: + return + + if condition_fn(): + action_fn() + else: + QtCore.QTimer.singleShot(delay_ms, attempt) + + attempt() + + @staticmethod + def retry_until_file_ready( + file_getter: Callable[[], object | None], + action_fn: Callable[[], None], + delay_ms: int = SessionConstants.FAST_RETRY_DELAY_MS, + ) -> None: + """Retry an action until a file is ready (not PENDING state). + + Parameters + ---------- + file_getter : Callable[[], object | None] + Function that returns the file object or None. + action_fn : Callable[[], None] + Function to execute when the file is ready. + delay_ms : int, optional + Delay between retry attempts in milliseconds. Defaults to FAST_RETRY_DELAY_MS. + + Notes + ----- + This is a specialized version of retry_until for the common pattern + of waiting for files to be loaded and ready for operations. + """ + + def is_file_ready() -> bool: + file_obj = file_getter() + if not file_obj: + return False + # Check if file has a state attribute and it's not PENDING + return hasattr(file_obj, 'state') and file_obj.state != getattr(file_obj, 'PENDING', 0) + + RetryHelper.retry_until(is_file_ready, action_fn, delay_ms) + + @staticmethod + def retry_until_album_ready( + album_getter: Callable[[], object | None], + action_fn: Callable[[], None], + delay_ms: int = SessionConstants.FAST_RETRY_DELAY_MS, + ) -> None: + """Retry an action until an album is ready (has tracks loaded). + + Parameters + ---------- + album_getter : Callable[[], object | None] + Function that returns the album object or None. + action_fn : Callable[[], None] + Function to execute when the album is ready. + delay_ms : int, optional + Delay between retry attempts in milliseconds. Defaults to FAST_RETRY_DELAY_MS. + + Notes + ----- + This is a specialized version of retry_until for the common pattern + of waiting for albums to be loaded with their tracks. 
+ """ + + def is_album_ready() -> bool: + album = album_getter() + if not album: + return False + # Check if album has tracks loaded + return hasattr(album, 'tracks') and len(album.tracks) > 0 + + RetryHelper.retry_until(is_album_ready, action_fn, delay_ms) diff --git a/picard/session/session.py b/picard/session/session.py deleted file mode 100644 index f8acedfa42..0000000000 --- a/picard/session/session.py +++ /dev/null @@ -1,486 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Picard, the next-generation MusicBrainz tagger -# -# Copyright (C) 2025 The MusicBrainz Team -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - -"""Session management for Picard. - -This module provides functionality to save and restore Picard sessions, -including file locations, metadata overrides, and configuration options. -Sessions allow users to preserve their work state across application restarts. - -Classes -------- -SessionItemLocation - Dataclass representing the location of a file within a session. - -Functions ---------- -export_session - Export current session data to a dictionary. -save_session_to_path - Save session data to a file. -load_session_from_path - Load session data from a file. 
- -Notes ------ -Session files use the .mbps extension and contain JSON data with version -information, options, file locations, and metadata overrides. -""" - -from __future__ import annotations - -from contextlib import suppress -from dataclasses import dataclass -import json -from pathlib import Path -from typing import Any - -from PyQt6 import QtCore - -from picard.album import Album, NatAlbum -from picard.cluster import Cluster, UnclusteredFiles -from picard.config import get_config -from picard.file import File -from picard.metadata import Metadata - - -SESSION_FILE_EXTENSION = ".mbps" - - -@dataclass(frozen=True) -class SessionItemLocation: - """Location information for a file within a session. - - Parameters - ---------- - type : str - The type of location (e.g., "unclustered", "track", "album_unmatched", "cluster", "nat"). - album_id : str | None, optional - The MusicBrainz album ID if the file is associated with an album. - recording_id : str | None, optional - The MusicBrainz recording ID if the file is associated with a specific track. - cluster_title : str | None, optional - The cluster title for files in a cluster. - cluster_artist : str | None, optional - The cluster artist for files in a cluster. 
- """ - - type: str - album_id: str | None = None - recording_id: str | None = None - cluster_title: str | None = None - cluster_artist: str | None = None - - -def _serialize_metadata_for_file(file: File) -> dict[str, list[Any]]: - # Store only user-visible tags, skip internal (~) ones - tags: dict[str, list[Any]] = {} - for key, values in file.metadata.rawitems(): - if key.startswith("~") or key == "length": - continue - # Copy as list to be JSON serializable - tags[key] = list(values) - return tags - - -def _deserialize_metadata(tags: dict[str, list[Any]]) -> Metadata: - md = Metadata() - for key, values in tags.items(): - md[key] = values - return md - - -def _as_list(values: Any) -> list[Any]: - if isinstance(values, (list, tuple)): - return list(values) - # Treat scalars / strings as single-value list - return [values] - - -def _detect_location(file: File) -> SessionItemLocation: - parent = file.parent_item - if parent is None: - return SessionItemLocation(type="unclustered") - - # File under a track (right pane) - if hasattr(parent, "album") and isinstance(parent.album, Album): - if isinstance(parent.album, NatAlbum): - # NAT special handling - return SessionItemLocation(type="nat", recording_id=parent.id) - # Track placement - if hasattr(parent, "id"): - return SessionItemLocation(type="track", album_id=parent.album.id, recording_id=parent.id) - # Fallback to album unmatched - return SessionItemLocation(type="album_unmatched", album_id=parent.album.id) - - # Unmatched files inside an album - if isinstance(parent, Cluster) and parent.related_album: - return SessionItemLocation(type="album_unmatched", album_id=parent.related_album.id) - - # Left pane cluster - if isinstance(parent, Cluster): - if isinstance(parent, UnclusteredFiles): - return SessionItemLocation(type="unclustered") - return SessionItemLocation( - type="cluster", - cluster_title=str(parent.metadata["album"]), - cluster_artist=str(parent.metadata["albumartist"]), - ) - - # Default - return 
SessionItemLocation(type="unclustered") - - -def export_session(tagger) -> dict[str, Any]: - """Export current session data to a dictionary. - - Parameters - ---------- - tagger - The Picard tagger instance to export session data from. - - Returns - ------- - dict[str, Any] - Dictionary containing session data with the following keys: - - version: Session format version (currently 1) - - options: Configuration options (rename_files, move_files, dont_write_tags) - - items: List of file items with paths and locations - - album_track_overrides: Track-level metadata overrides per album - - album_overrides: Album-level metadata overrides - - Notes - ----- - Only user-visible tags are exported, internal tags (starting with ~) are excluded. - The function captures manual metadata overrides made in the UI. - """ - config = get_config() - session: dict[str, Any] = { - "version": 1, - "options": { - "rename_files": bool(config.setting["rename_files"]), - "move_files": bool(config.setting["move_files"]), - "dont_write_tags": bool(config.setting["dont_write_tags"]), - }, - "items": [], - "album_track_overrides": {}, # album_id -> recording_id -> {tag: [values]} - "album_overrides": {}, # album_id -> {tag: [values]} - } - - for file in tagger.iter_all_files(): - loc = _detect_location(file) - entry: dict[str, Any] = { - "file_path": str(Path(file.filename)), - "location": { - k: v - for k, v in { - "type": loc.type, - "album_id": loc.album_id, - "recording_id": loc.recording_id, - "cluster_title": loc.cluster_title, - "cluster_artist": loc.cluster_artist, - }.items() - if v is not None - }, - } - # Persist unsaved tag changes - if not file.is_saved(): - entry["metadata"] = {"tags": _serialize_metadata_for_file(file)} - session["items"].append(entry) - - # Capture manual track-level overrides per album/track - album_overrides: dict[str, dict[str, dict[str, list[Any]]]] = {} - # Capture album-level overrides (e.g. 
albumartist) - album_meta_overrides: dict[str, dict[str, list[Any]]] = {} - EXCLUDED_OVERRIDE_TAGS = {"length", "~length"} - for album in tagger.albums.values(): - if isinstance(album, NatAlbum): - continue - overrides_for_album: dict[str, dict[str, list[Any]]] = {} - # Album-level diffs vs orig_metadata - album_diff = album.metadata.diff(album.orig_metadata) - if album_diff: - album_meta_overrides[album.id] = { - k: _as_list(v) for k, v in album_diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS - } - for track in album.tracks: - # The difference to scripted_metadata are user edits made in UI - diff = track.metadata.diff(track.scripted_metadata) - if diff: - # Convert to JSON-friendly dict; ensure values are lists of strings - overrides_for_album[track.id] = { - k: _as_list(v) for k, v in diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS - } - if overrides_for_album: - album_overrides[album.id] = overrides_for_album - if album_overrides: - session["album_track_overrides"] = album_overrides - if album_meta_overrides: - session["album_overrides"] = album_meta_overrides - return session - - -def save_session_to_path(tagger, path: str | Path) -> None: - """Save session data to a file. - - Parameters - ---------- - tagger - The Picard tagger instance to save session data from. - path : str | Path - The file path to save the session to. If the extension is not .mbps, - it will be automatically added. - - Notes - ----- - The session is saved as JSON with UTF-8 encoding and 2-space indentation. - If the file already exists, it will be overwritten. 
- """ - p = Path(path) - if p.suffix.lower() != SESSION_FILE_EXTENSION: - p = p.with_suffix(SESSION_FILE_EXTENSION) - data = export_session(tagger) - p.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") - - -def _apply_saved_metadata_if_any(tagger, file_path_to_md: dict[Path, Metadata]) -> None: - # Try applying metadata after files have loaded - pending: list[Path] = [] - for fpath, md in file_path_to_md.items(): - file = tagger.files.get(str(fpath)) - if not file or file.state == File.PENDING: - pending.append(fpath) - continue - with suppress(OSError, ValueError, AttributeError, KeyError): - # Preserve computed length from current metadata - md.length = file.metadata.length or file.orig_metadata.length - file.copy_metadata(md) - file.update() - - if pending: - QtCore.QTimer.singleShot( - 200, lambda: _apply_saved_metadata_if_any(tagger, {p: file_path_to_md[p] for p in pending}) - ) - - -def load_session_from_path(tagger, path: str | Path) -> None: - """Load session data from a file. - - Parameters - ---------- - tagger - The Picard tagger instance to load session data into. - path : str | Path - The file path to load the session from. - - Notes - ----- - This function will: - - Clear the current session - - Restore configuration options - - Load files to their original locations (unclustered, clusters, albums, tracks) - - Apply saved metadata overrides - - Handle NAT (Non-Album Track) items - - The function respects the session_safe_restore configuration setting - to prevent overwriting unsaved changes. 
- """ - p = Path(path) - data = json.loads(p.read_text(encoding="utf-8")) - - # Close current session - tagger.clear_session() - # Respect user setting for safe restore (defaults enabled) - if get_config().setting['session_safe_restore']: - tagger._restoring_session = True - - # Restore quick option states (affect menu toggles) - opts = data.get("options", {}) - config = get_config() - config.setting["rename_files"] = bool(opts.get("rename_files", config.setting["rename_files"])) - config.setting["move_files"] = bool(opts.get("move_files", config.setting["move_files"])) - config.setting["dont_write_tags"] = bool(opts.get("dont_write_tags", config.setting["dont_write_tags"])) - - items = data.get("items", []) - track_overrides_by_album: dict[str, dict[str, dict[str, list[Any]]]] = data.get("album_track_overrides", {}) - album_meta_overrides: dict[str, dict[str, list[Any]]] = data.get("album_overrides", {}) - - # Group by placement target to leverage Tagger.add_files batching - by_unclustered: list[Path] = [] - by_cluster: dict[tuple[str, str], list[Path]] = {} - by_album: dict[str, dict[str, list[Path]]] = {} - nat_items: list[tuple[Path, str]] = [] # (path, recording_id) - - # Collect metadata to apply after loading - metadata_by_path: dict[Path, Metadata] = {} - - for it in items: - fpath = Path(it["file_path"]).expanduser() - loc = it.get("location", {}) - ltype = str(loc.get("type", "unclustered")) - md = it.get("metadata", {}) - if "tags" in md: - tags = {k: _as_list(v) for k, v in md["tags"].items()} - metadata_by_path[fpath] = _deserialize_metadata(tags) # type: ignore[arg-type] - - if ltype == "unclustered": - by_unclustered.append(fpath) - elif ltype == "cluster": - key = (str(loc.get("cluster_title", "")), str(loc.get("cluster_artist", ""))) - by_cluster.setdefault(key, []).append(fpath) - elif ltype in {"album_unmatched", "track"}: - album_id = str(loc.get("album_id")) - entry = by_album.setdefault(album_id, {"unmatched": [], "tracks": []}) - if ltype == 
"album_unmatched": - entry["unmatched"].append(fpath) - else: - entry["tracks"].append((fpath, str(loc.get("recording_id")))) - elif ltype == "nat": - nat_items.append((fpath, str(loc.get("recording_id")))) - else: - by_unclustered.append(fpath) - - # Helper to convert Paths to strings for Tagger.add_files - def _to_strs(paths: list[Path]) -> list[str]: - return [str(p) for p in paths] - - # Load albums upfront - loaded_albums: dict[str, Album] = {} - for album_id in by_album.keys() | set(track_overrides_by_album.keys()) | set(album_meta_overrides.keys()): - loaded_albums[album_id] = tagger.load_album(album_id) - - # Add unclustered files - if by_unclustered: - tagger.add_files(_to_strs(by_unclustered), target=tagger.unclustered_files) - - # Add cluster files - for (title, artist), paths in by_cluster.items(): - cluster = tagger.load_cluster(title, artist) - tagger.add_files(_to_strs(paths), target=cluster) - - # Add album files (both unmatched and those destined for tracks) - for album_id, groups in by_album.items(): - album = loaded_albums[album_id] - all_paths: list[Path] = list(groups["unmatched"]) + [fp for (fp, _rid) in groups["tracks"]] - if all_paths: - tagger.add_files(_to_strs(all_paths), target=album.unmatched_files) - - # Ensure album node is expanded/visible early - def _ensure_album_visible(a: Album): - def _run(): - a.update(update_tracks=True) - if a.ui_item: - a.ui_item.setExpanded(True) - - a.run_when_loaded(_run) - - _ensure_album_visible(album) - - # After album is loaded move files to their tracks, waiting for files to be ready - def _move_when_loaded(album: Album, track_specs: list[tuple[Path, str]]): - def _attempt_move(fpath: Path, rid: str): - file = tagger.files.get(str(fpath)) - if not file or file.state == File.PENDING: - QtCore.QTimer.singleShot(150, lambda: _attempt_move(fpath, rid)) - return - rec_to_track = {t.id: t for t in album.tracks} - track = rec_to_track.get(rid) - if track is None: - # Album not ready yet, retry - 
QtCore.QTimer.singleShot(150, lambda: _attempt_move(fpath, rid)) - return - file.move(track) - - def _run(): - for fpath, rid in track_specs: - _attempt_move(fpath, rid) - - album.run_when_loaded(_run) - - if groups["tracks"]: - _move_when_loaded(album, groups["tracks"]) # type: ignore[arg-type] - - # Apply manual track-level overrides after album data has loaded - for album_id, track_overrides in track_overrides_by_album.items(): - album = loaded_albums.get(album_id) - if not album: - continue - - def _apply_overrides(a: Album, overrides: dict[str, dict[str, list[Any]]]): - def _run(): - track_by_id = {t.id: t for t in a.tracks} - for track_id, tags in overrides.items(): - tr = track_by_id.get(track_id) - if not tr: - continue - # Apply overrides to track metadata so columns reflect user edits - for tag, values in tags.items(): - # Never override computed lengths - if tag in {"length", "~length"}: - continue - tr.metadata[tag] = _as_list(values) - tr.update() - - a.run_when_loaded(_run) - - _apply_overrides(album, track_overrides) - - # Apply album-level overrides after album data has loaded - for album_id, overrides in album_meta_overrides.items(): - album = loaded_albums.get(album_id) - if not album: - continue - - def _apply_album_overrides(a: Album, tags: dict[str, list[Any]]): - def _run(): - for tag, values in tags.items(): - a.metadata[tag] = _as_list(values) - a.update(update_tracks=False) - - a.run_when_loaded(_run) - - _apply_album_overrides(album, overrides) - - # Handle NAT items - for fpath, rid in nat_items: - - def _move_nat(path: Path = fpath, recording_id: str = rid): - file = tagger.files.get(str(path)) - if not file or file.state == File.PENDING: - QtCore.QTimer.singleShot(200, lambda: _move_nat(path, recording_id)) - return - tagger.move_file_to_nat(file, recording_id) - - _move_nat() - - # Apply metadata edits after load completes (retry until loaded) - if metadata_by_path: - QtCore.QTimer.singleShot(200, lambda: 
_apply_saved_metadata_if_any(tagger, metadata_by_path)) - - # Unset restoring flag when all file loads and web requests finish - def _unset_when_idle(): - if not get_config().setting['session_safe_restore']: - return - if tagger._pending_files_count == 0 and not tagger.webservice.num_pending_web_requests: - tagger._restoring_session = False - else: - QtCore.QTimer.singleShot(200, _unset_when_idle) - - QtCore.QTimer.singleShot(200, _unset_when_idle) diff --git a/picard/session/session_data.py b/picard/session/session_data.py new file mode 100644 index 0000000000..98855b06ee --- /dev/null +++ b/picard/session/session_data.py @@ -0,0 +1,194 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Data structures for session management. + +This module contains data classes and type definitions used throughout +the session management system. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path +from typing import Any + +from picard.metadata import Metadata + + +@dataclass(frozen=True) +class SessionItemLocation: + """Location information for a file within a session. 
@dataclass(frozen=True)
class SessionItemLocation:
    """Where a file sat in the UI when the session was captured.

    Attributes
    ----------
    type : str
        Location kind: "unclustered", "track", "album_unmatched",
        "cluster" or "nat".
    album_id : str | None
        MusicBrainz album ID when the file is associated with an album.
    recording_id : str | None
        MusicBrainz recording ID when the file is matched to a track.
    cluster_title : str | None
        Cluster title for files grouped in a cluster.
    cluster_artist : str | None
        Cluster artist for files grouped in a cluster.
    """

    type: str
    album_id: str | None = None
    recording_id: str | None = None
    cluster_title: str | None = None
    cluster_artist: str | None = None


@dataclass
class SessionOptions:
    """Snapshot of the option toggles stored with a session.

    Attributes
    ----------
    rename_files : bool
        Whether files are renamed during processing.
    move_files : bool
        Whether files are moved during processing.
    dont_write_tags : bool
        Whether tag writing is skipped.
    """

    rename_files: bool
    move_files: bool
    dont_write_tags: bool


@dataclass
class SessionItem:
    """One file entry inside a session.

    Attributes
    ----------
    file_path : Path
        Path of the file on disk.
    location : SessionItemLocation
        Where the file belongs in the UI.
    metadata : Metadata | None
        Optional unsaved metadata overrides for the file.
    """

    file_path: Path
    location: SessionItemLocation
    metadata: Metadata | None = None


@dataclass
class SessionData:
    """Complete parsed contents of a session file.

    Attributes
    ----------
    version : int
        Session format version.
    options : SessionOptions
        Option toggles stored with the session.
    items : list[SessionItem]
        File entries in the session.
    album_track_overrides : dict[str, dict[str, dict[str, list[Any]]]]
        Track-level metadata overrides keyed by album ID, then recording ID.
    album_overrides : dict[str, dict[str, list[Any]]]
        Album-level metadata overrides keyed by album ID.
    unmatched_albums : list[str]
        Album IDs that were loaded but had no files matched.
    """

    version: int
    options: SessionOptions
    items: list[SessionItem]
    album_track_overrides: dict[str, dict[str, dict[str, list[Any]]]]
    album_overrides: dict[str, dict[str, list[Any]]]
    unmatched_albums: list[str]


@dataclass
class GroupedItems:
    """Session items bucketed by their target location.

    Attributes
    ----------
    unclustered : list[Path]
        Files destined for the unclustered area.
    by_cluster : dict[tuple[str, str], list[Path]]
        Files keyed by cluster (title, artist) pair.
    by_album : dict[str, AlbumItems]
        Files keyed by album ID.
    nat_items : list[tuple[Path, str]]
        NAT entries as (path, recording_id) pairs.
    """

    unclustered: list[Path]
    by_cluster: dict[tuple[str, str], list[Path]]
    by_album: dict[str, AlbumItems]
    nat_items: list[tuple[Path, str]]


@dataclass
class AlbumItems:
    """Files belonging to a single album.

    Attributes
    ----------
    unmatched : list[Path]
        Files destined for the album's unmatched area.
    tracks : list[tuple[Path, str]]
        Files destined for specific tracks as (path, recording_id) pairs.
    """

    unmatched: list[Path]
    tracks: list[tuple[Path, str]]


@dataclass
class TrackOverrides:
    """Metadata overrides attached to one track.

    Attributes
    ----------
    track_id : str
        Recording ID of the track.
    overrides : dict[str, list[Any]]
        Tag overrides as tag-name to value-list.
    """

    track_id: str
    overrides: dict[str, list[Any]]


@dataclass
class AlbumOverrides:
    """Metadata overrides attached to one album.

    Attributes
    ----------
    album_id : str
        The album ID.
    overrides : dict[str, list[Any]]
        Tag overrides as tag-name to value-list.
    """

    album_id: str
    overrides: dict[str, list[Any]]
+""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from picard.album import NatAlbum +from picard.config import get_config +from picard.session.constants import SessionConstants +from picard.session.location_detector import LocationDetector +from picard.session.metadata_handler import MetadataHandler +from picard.session.session_data import SessionItemLocation + + +class SessionExporter: + """Handles exporting session data from the current Picard state.""" + + def __init__(self) -> None: + """Initialize the session exporter.""" + self.location_detector = LocationDetector() + + def export_session(self, tagger: Any) -> dict[str, Any]: + """Export current session data to a dictionary. + + Parameters + ---------- + tagger : Any + The Picard tagger instance to export session data from. + + Returns + ------- + dict[str, Any] + Dictionary containing session data with the following keys: + - version: Session format version (currently 1) + - options: Configuration options (rename_files, move_files, dont_write_tags) + - items: List of file items with paths and locations + - album_track_overrides: Track-level metadata overrides per album + - album_overrides: Album-level metadata overrides + - unmatched_albums: List of album IDs that are loaded but have no files matched + + Notes + ----- + Only user-visible tags are exported, internal tags (starting with ~) are excluded. + The function captures manual metadata overrides made in the UI. + Unmatched albums are preserved so they can be restored even when no files are matched to them. 
+ """ + config = get_config() + session_data = { + "version": SessionConstants.SESSION_FORMAT_VERSION, + "options": self._export_options(config), + "items": [], + "album_track_overrides": {}, + "album_overrides": {}, + "unmatched_albums": [], + "expanded_albums": [], + } + + # Export file items + for file in tagger.iter_all_files(): + item = self._export_file_item(file) + session_data["items"].append(item) + + # Export metadata overrides and unmatched albums + album_overrides, album_meta_overrides, unmatched_albums = self._export_metadata_overrides(tagger) + if album_overrides: + session_data["album_track_overrides"] = album_overrides + if album_meta_overrides: + session_data["album_overrides"] = album_meta_overrides + if unmatched_albums: + session_data["unmatched_albums"] = unmatched_albums + + # Export UI state (expanded albums) + expanded_albums = self._export_ui_state(tagger) + if expanded_albums: + session_data["expanded_albums"] = expanded_albums + + return session_data + + def _export_ui_state(self, tagger: Any) -> list[str]: + """Export UI expansion state for albums in album view. + + Parameters + ---------- + tagger : Any + The Picard tagger instance. + + Returns + ------- + list[str] + List of album IDs whose items are expanded in the album view. + """ + expanded: list[str] = [] + for album in tagger.albums.values(): + ui_item = getattr(album, "ui_item", None) + if ui_item is not None and ui_item.isExpanded(): + expanded.append(album.id) + return expanded + + def _export_options(self, config: Any) -> dict[str, bool]: + """Export configuration options. + + Parameters + ---------- + config : Any + The Picard configuration object. + + Returns + ------- + dict[str, bool] + Dictionary containing the relevant configuration options. 
+ """ + return { + "rename_files": bool(config.setting["rename_files"]), + "move_files": bool(config.setting["move_files"]), + "dont_write_tags": bool(config.setting["dont_write_tags"]), + } + + def _export_file_item(self, file: Any) -> dict[str, Any]: + """Export a single file item. + + Parameters + ---------- + file : Any + The file object to export. + + Returns + ------- + dict[str, Any] + Dictionary containing the file item data. + """ + loc = self.location_detector.detect(file) + entry = { + "file_path": str(Path(file.filename)), + "location": self._serialize_location(loc), + } + + # Persist unsaved tag changes + if not file.is_saved(): + entry["metadata"] = {"tags": MetadataHandler.serialize_metadata_for_file(file)} + + return entry + + def _serialize_location(self, location: SessionItemLocation) -> dict[str, Any]: + """Serialize a location object to a dictionary. + + Parameters + ---------- + location : SessionItemLocation + The location object to serialize. + + Returns + ------- + dict[str, Any] + Dictionary containing the location data. + """ + return { + k: v + for k, v in { + "type": location.type, + "album_id": location.album_id, + "recording_id": location.recording_id, + "cluster_title": location.cluster_title, + "cluster_artist": location.cluster_artist, + }.items() + if v is not None + } + + def _export_metadata_overrides( + self, tagger: Any + ) -> tuple[dict[str, dict[str, dict[str, list[Any]]]], dict[str, dict[str, list[Any]]], list[str]]: + """Export metadata overrides for albums and tracks. + + Parameters + ---------- + tagger : Any + The Picard tagger instance. + + Returns + ------- + tuple[dict, dict, list] + Tuple containing (album_track_overrides, album_overrides, unmatched_albums). 
+ """ + album_overrides: dict[str, dict[str, dict[str, list[Any]]]] = {} + album_meta_overrides: dict[str, dict[str, list[Any]]] = {} + unmatched_albums: list[str] = [] + + # Get all album IDs that have files matched to them + albums_with_files = set() + for file in tagger.iter_all_files(): + if hasattr(file, 'parent_item') and file.parent_item: + if hasattr(file.parent_item, 'album') and file.parent_item.album: + albums_with_files.add(file.parent_item.album.id) + + for album in tagger.albums.values(): + if isinstance(album, NatAlbum): + continue + + # Check if this album has any files matched to it + has_files = album.id in albums_with_files + + # Album-level diffs vs orig_metadata + album_diff = album.metadata.diff(album.orig_metadata) + if album_diff: + album_meta_overrides[album.id] = { + k: MetadataHandler.as_list(v) + for k, v in album_diff.rawitems() + if k not in SessionConstants.EXCLUDED_OVERRIDE_TAGS + } + + # Track-level overrides + overrides_for_album: dict[str, dict[str, list[Any]]] = {} + for track in album.tracks: + # The difference to scripted_metadata are user edits made in UI + diff = track.metadata.diff(track.scripted_metadata) + if diff: + overrides_for_album[track.id] = { + k: MetadataHandler.as_list(v) + for k, v in diff.rawitems() + if k not in SessionConstants.EXCLUDED_OVERRIDE_TAGS + } + + if overrides_for_album: + album_overrides[album.id] = overrides_for_album + + # If album has no files matched and no overrides, it's an unmatched album + if not has_files and not album_diff and not overrides_for_album: + unmatched_albums.append(album.id) + + return album_overrides, album_meta_overrides, unmatched_albums diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py new file mode 100644 index 0000000000..d54ebe5696 --- /dev/null +++ b/picard/session/session_loader.py @@ -0,0 +1,444 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This 
class SessionLoader:
    """Handles loading and restoring Picard sessions."""

    def __init__(self, tagger: Any) -> None:
        """Initialize the session loader.

        Parameters
        ----------
        tagger : Any
            The Picard tagger instance.
        """
        self.tagger = tagger
        self.track_mover = TrackMover(tagger)
        self.loaded_albums: dict[str, Album] = {}
        # Saved UI expansion state from session (None = not provided)
        self._saved_expanded_albums: set[str] | None = None

    def load_from_path(self, path: str | Path) -> None:
        """Main entry point for loading a session.

        Parameters
        ----------
        path : str | Path
            The file path to load the session from.

        Notes
        -----
        This method orchestrates the entire session loading process:
        1. Read and parse the session file
        2. Prepare the session (clear current, set flags)
        3. Restore configuration options
        4. Group items by location type
        5. Load items to their proper locations
        6. Apply metadata overrides
        7. Schedule metadata application
        """
        data = self._read_session_file(path)
        self._prepare_session(data)
        self._restore_options(data.get("options", {}))
        # Cache saved UI expansion state for later album updates
        self._saved_expanded_albums = set(data.get("expanded_albums", [])) if "expanded_albums" in data else None

        items = data.get("items", [])
        grouped_items = self._group_items_by_location(items)
        metadata_map = self._extract_metadata(items)

        self._load_items(grouped_items)
        self._load_unmatched_albums(data.get("unmatched_albums", []))
        self._apply_overrides(data)

        if metadata_map:
            self._schedule_metadata_application(metadata_map)

        # Restore UI state (expanded albums and file view roots)
        self._restore_ui_state(data)

    def _read_session_file(self, path: str | Path) -> dict[str, Any]:
        """Read and parse session file.

        Parameters
        ----------
        path : str | Path
            The file path to read.  (Annotation widened to match what
            `load_from_path` actually passes through.)

        Returns
        -------
        dict[str, Any]
            The parsed session data.

        Raises
        ------
        json.JSONDecodeError
            If the file contains invalid JSON.
        FileNotFoundError
            If the file does not exist.
        """
        p = Path(path)
        return json.loads(p.read_text(encoding="utf-8"))

    def _prepare_session(self, data: dict[str, Any]) -> None:
        """Prepare the session for loading.

        Parameters
        ----------
        data : dict[str, Any]
            The session data.
        """
        # Close current session
        self.tagger.clear_session()
        # Respect user setting for safe restore (defaults enabled)
        if get_config().setting['session_safe_restore']:
            self.tagger._restoring_session = True

    def _restore_options(self, options: dict[str, Any]) -> None:
        """Restore configuration options.

        Parameters
        ----------
        options : dict[str, Any]
            The options to restore.
        """
        config = get_config()
        config.setting["rename_files"] = bool(options.get("rename_files", config.setting["rename_files"]))
        config.setting["move_files"] = bool(options.get("move_files", config.setting["move_files"]))
        config.setting["dont_write_tags"] = bool(options.get("dont_write_tags", config.setting["dont_write_tags"]))

    def _group_items_by_location(self, items: list[dict[str, Any]]) -> GroupedItems:
        """Group items by their target location.

        Parameters
        ----------
        items : list[dict[str, Any]]
            List of session items.

        Returns
        -------
        GroupedItems
            Items grouped by location type.
        """
        by_unclustered: list[Path] = []
        by_cluster: dict[tuple[str, str], list[Path]] = {}
        by_album: dict[str, AlbumItems] = {}
        nat_items: list[tuple[Path, str]] = []

        for it in items:
            fpath = Path(it["file_path"]).expanduser()
            loc = it.get("location", {})
            ltype = str(loc.get("type", SessionConstants.LOCATION_UNCLUSTERED))

            if ltype == SessionConstants.LOCATION_UNCLUSTERED:
                by_unclustered.append(fpath)
            elif ltype == SessionConstants.LOCATION_CLUSTER:
                key = (str(loc.get("cluster_title", "")), str(loc.get("cluster_artist", "")))
                by_cluster.setdefault(key, []).append(fpath)
            elif ltype in {SessionConstants.LOCATION_ALBUM_UNMATCHED, SessionConstants.LOCATION_TRACK}:
                album_id = str(loc.get("album_id"))
                entry = by_album.setdefault(album_id, AlbumItems(unmatched=[], tracks=[]))
                if ltype == SessionConstants.LOCATION_ALBUM_UNMATCHED:
                    entry.unmatched.append(fpath)
                else:
                    entry.tracks.append((fpath, str(loc.get("recording_id"))))
            elif ltype == SessionConstants.LOCATION_NAT:
                nat_items.append((fpath, str(loc.get("recording_id"))))
            else:
                # Unknown location types fall back to the unclustered area
                by_unclustered.append(fpath)

        return GroupedItems(unclustered=by_unclustered, by_cluster=by_cluster, by_album=by_album, nat_items=nat_items)

    def _extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, Any]:
        """Extract metadata from session items.

        Parameters
        ----------
        items : list[dict[str, Any]]
            List of session items.

        Returns
        -------
        dict[Path, Any]
            Mapping of file paths to their metadata.
        """
        metadata_by_path: dict[Path, Any] = {}
        for it in items:
            fpath = Path(it["file_path"]).expanduser()
            md = it.get("metadata", {})
            if "tags" in md:
                tags = {k: MetadataHandler.as_list(v) for k, v in md["tags"].items()}
                metadata_by_path[fpath] = MetadataHandler.deserialize_metadata(tags)
        return metadata_by_path

    def _load_items(self, grouped_items: GroupedItems) -> None:
        """Load items to their proper locations.

        Parameters
        ----------
        grouped_items : GroupedItems
            Items grouped by location type.
        """
        # Load albums upfront
        self._load_albums(grouped_items)

        # Add unclustered files
        if grouped_items.unclustered:
            self.tagger.add_files([str(p) for p in grouped_items.unclustered], target=self.tagger.unclustered_files)

        # Add cluster files
        for (title, artist), paths in grouped_items.by_cluster.items():
            cluster = self.tagger.load_cluster(title, artist)
            self.tagger.add_files([str(p) for p in paths], target=cluster)

        # Add album files
        self._load_album_files(grouped_items.by_album)

        # Handle NAT items
        for fpath, rid in grouped_items.nat_items:
            self.track_mover.move_file_to_nat(fpath, rid)

    def _load_unmatched_albums(self, unmatched_album_ids: list[str]) -> None:
        """Load albums that have no files matched to them.

        Parameters
        ----------
        unmatched_album_ids : list[str]
            List of album IDs to load.
        """
        for album_id in unmatched_album_ids:
            if album_id not in self.loaded_albums:
                album = self.tagger.load_album(album_id)
                self.loaded_albums[album_id] = album
                # Ensure album becomes visible and expanded once loaded
                self._ensure_album_visible(album)

    def _load_albums(self, grouped_items: GroupedItems) -> None:
        """Load albums that will be needed.

        Parameters
        ----------
        grouped_items : GroupedItems
            Items grouped by location type.
        """
        for album_id in set(grouped_items.by_album.keys()):
            self.loaded_albums[album_id] = self.tagger.load_album(album_id)

    def _load_album_files(self, by_album: dict[str, AlbumItems]) -> None:
        """Load files into albums and move them to tracks.

        Parameters
        ----------
        by_album : dict[str, AlbumItems]
            Files grouped by album ID.
        """
        for album_id, groups in by_album.items():
            album = self.loaded_albums[album_id]
            all_paths = list(groups.unmatched) + [fp for (fp, _rid) in groups.tracks]
            if all_paths:
                self.tagger.add_files([str(p) for p in all_paths], target=album.unmatched_files)

            # Ensure album node is expanded/visible early
            self._ensure_album_visible(album)

            # Move files to their tracks
            if groups.tracks:
                self.track_mover.move_files_to_tracks(album, groups.tracks)

    def _ensure_album_visible(self, album: Album) -> None:
        """Ensure album node is expanded and visible.

        Parameters
        ----------
        album : Album
            The album to make visible.
        """

        def run() -> None:
            album.update(update_tracks=True)
            if album.ui_item:
                if self._saved_expanded_albums is not None:
                    album.ui_item.setExpanded(album.id in self._saved_expanded_albums)
                else:
                    # No saved UI state: default to expanded
                    album.ui_item.setExpanded(True)

        album.run_when_loaded(run)

    def _restore_ui_state(self, data: dict[str, Any]) -> None:
        """Restore saved UI expansion state.

        Parameters
        ----------
        data : dict[str, Any]
            The session data.
        """
        # Sessions written before expanded_albums existed carry no UI state;
        # bail out instead of collapsing every album via an empty set.
        if "expanded_albums" not in data:
            return
        expanded_albums = set(data["expanded_albums"])

        def set_expansions() -> None:
            # Album view: set expansion for albums we have
            for album_id, album in self.tagger.albums.items():
                ui_item = getattr(album, "ui_item", None)
                if ui_item is None:
                    continue
                ui_item.setExpanded(album_id in expanded_albums)

            # File view roots: keep default expansion for unmatched / clusters
            # (Optional future: persist these as well.)

        # Delay until after albums finished initial load to avoid toggling too early
        QtCore.QTimer.singleShot(SessionConstants.DEFAULT_RETRY_DELAY_MS, set_expansions)

    def _apply_overrides(self, data: dict[str, Any]) -> None:
        """Apply metadata overrides to albums and tracks.

        Parameters
        ----------
        data : dict[str, Any]
            The session data containing overrides.
        """
        track_overrides_by_album = data.get("album_track_overrides", {})
        album_meta_overrides = data.get("album_overrides", {})

        # Ensure albums referenced by overrides are loaded and visible
        referenced_album_ids = set(track_overrides_by_album.keys()) | set(album_meta_overrides.keys())
        for album_id in referenced_album_ids:
            if album_id not in self.loaded_albums:
                album = self.tagger.load_album(album_id)
                self.loaded_albums[album_id] = album
                self._ensure_album_visible(album)

        # Apply track-level overrides
        for album_id, track_overrides in track_overrides_by_album.items():
            album = self.loaded_albums.get(album_id)
            if album:
                self._apply_track_overrides(album, track_overrides)

        # Apply album-level overrides
        for album_id, overrides in album_meta_overrides.items():
            album = self.loaded_albums.get(album_id)
            if album:
                self._apply_album_overrides(album, overrides)

    def _apply_track_overrides(self, album: Album, overrides: dict[str, dict[str, list[Any]]]) -> None:
        """Apply track-level metadata overrides.

        Parameters
        ----------
        album : Album
            The album containing the tracks.
        overrides : dict[str, dict[str, list[Any]]]
            Track overrides by track ID.
        """

        def run() -> None:
            track_by_id = {t.id: t for t in album.tracks}
            for track_id, tags in overrides.items():
                tr = track_by_id.get(track_id)
                if not tr:
                    continue
                # Apply overrides to track metadata so columns reflect user edits
                for tag, values in tags.items():
                    # Never override computed lengths
                    if tag in SessionConstants.EXCLUDED_OVERRIDE_TAGS:
                        continue
                    tr.metadata[tag] = MetadataHandler.as_list(values)
                tr.update()

        album.run_when_loaded(run)

    def _apply_album_overrides(self, album: Album, overrides: dict[str, list[Any]]) -> None:
        """Apply album-level metadata overrides.

        Parameters
        ----------
        album : Album
            The album to apply overrides to.
        overrides : dict[str, list[Any]]
            Album-level overrides.
        """

        def run() -> None:
            for tag, values in overrides.items():
                album.metadata[tag] = MetadataHandler.as_list(values)
            album.update(update_tracks=False)

        album.run_when_loaded(run)

    def _schedule_metadata_application(self, metadata_map: dict[Path, Any]) -> None:
        """Schedule metadata application after files are loaded.

        Parameters
        ----------
        metadata_map : dict[Path, Any]
            Mapping of file paths to their metadata.
        """
        QtCore.QTimer.singleShot(
            SessionConstants.DEFAULT_RETRY_DELAY_MS,
            lambda: MetadataHandler.apply_saved_metadata_if_any(self.tagger, metadata_map),
        )
+ """ + if not get_config().setting['session_safe_restore']: + return + + if self.tagger._pending_files_count == 0 and not self.tagger.webservice.num_pending_web_requests: + self.tagger._restoring_session = False + else: + QtCore.QTimer.singleShot(SessionConstants.DEFAULT_RETRY_DELAY_MS, self._unset_restoring_flag_when_idle) + + def finalize_loading(self) -> None: + """Finalize the loading process. + + Notes + ----- + This method should be called after the main loading is complete + to handle cleanup tasks like unsetting the restoring flag. + """ + QtCore.QTimer.singleShot(SessionConstants.DEFAULT_RETRY_DELAY_MS, self._unset_restoring_flag_when_idle) diff --git a/picard/session/session_manager.py b/picard/session/session_manager.py new file mode 100644 index 0000000000..ece7e0b707 --- /dev/null +++ b/picard/session/session_manager.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Session management for Picard. + +This module provides functionality to save and restore Picard sessions, +including file locations, metadata overrides, and configuration options. +Sessions allow users to preserve their work state across application restarts. 
+ +This module serves as the main entry point for session operations, delegating +to specialized modules for specific functionality. + +Functions +--------- +export_session + Export current session data to a dictionary. +save_session_to_path + Save session data to a file. +load_session_from_path + Load session data from a file. + +Notes +----- +Session files use the .mbps extension and contain JSON data with version +information, options, file locations, and metadata overrides. +""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Any + +from picard.session.constants import SessionConstants +from picard.session.session_exporter import SessionExporter +from picard.session.session_loader import SessionLoader + + +def export_session(tagger: Any) -> dict[str, Any]: + """Export current session data to a dictionary. + + Parameters + ---------- + tagger : Any + The Picard tagger instance to export session data from. + + Returns + ------- + dict[str, Any] + Dictionary containing session data with the following keys: + - version: Session format version (currently 1) + - options: Configuration options (rename_files, move_files, dont_write_tags) + - items: List of file items with paths and locations + - album_track_overrides: Track-level metadata overrides per album + - album_overrides: Album-level metadata overrides + - unmatched_albums: List of album IDs that are loaded but have no files matched + + Notes + ----- + Only user-visible tags are exported, internal tags (starting with ~) are excluded. + The function captures manual metadata overrides made in the UI. + Unmatched albums are preserved so they can be restored even when no files are matched to them. + """ + exporter = SessionExporter() + return exporter.export_session(tagger) + + +def save_session_to_path(tagger: Any, path: str | Path) -> None: + """Save session data to a file. 
+ + Parameters + ---------- + tagger : Any + The Picard tagger instance to save session data from. + path : str | Path + The file path to save the session to. If the extension is not .mbps, + it will be automatically added. + + Notes + ----- + The session is saved as JSON with UTF-8 encoding and 2-space indentation. + If the file already exists, it will be overwritten. + """ + p = Path(path) + if p.suffix.lower() != SessionConstants.SESSION_FILE_EXTENSION: + p = p.with_suffix(SessionConstants.SESSION_FILE_EXTENSION) + data = export_session(tagger) + p.parent.mkdir(parents=True, exist_ok=True) + p.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") + + +def load_session_from_path(tagger: Any, path: str | Path) -> None: + """Load session data from a file. + + Parameters + ---------- + tagger : Any + The Picard tagger instance to load session data into. + path : str | Path + The file path to load the session from. + + Notes + ----- + This function will: + - Clear the current session + - Restore configuration options + - Load files to their original locations (unclustered, clusters, albums, tracks) + - Apply saved metadata overrides + - Handle NAT (Non-Album Track) items + + The function respects the session_safe_restore configuration setting + to prevent overwriting unsaved changes. + """ + loader = SessionLoader(tagger) + loader.load_from_path(path) + loader.finalize_loading() diff --git a/picard/session/track_mover.py b/picard/session/track_mover.py new file mode 100644 index 0000000000..92acec48bd --- /dev/null +++ b/picard/session/track_mover.py @@ -0,0 +1,134 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Track movement functionality for session management. + +This module handles moving files to their designated tracks when loading sessions, +separating the complex file-to-track movement logic from other concerns. +""" + +from __future__ import annotations + +from pathlib import Path +from typing import Any + +from picard.album import Album +from picard.file import File +from picard.session.constants import SessionConstants +from picard.session.retry_helper import RetryHelper + + +class TrackMover: + """Handles moving files to their target tracks.""" + + def __init__(self, tagger: Any) -> None: + """Initialize the track mover. + + Parameters + ---------- + tagger : Any + The Picard tagger instance. + """ + self.tagger = tagger + + def move_files_to_tracks(self, album: Album, track_specs: list[tuple[Path, str]]) -> None: + """Move files to their designated tracks when ready. + + Parameters + ---------- + album : Album + The album containing the tracks. + track_specs : list[tuple[Path, str]] + List of (file_path, recording_id) tuples to move. + + Notes + ----- + This method schedules file moves when both the file and track are ready. + It uses the retry helper to wait for proper conditions. + """ + + def run_when_album_ready() -> None: + for fpath, rid in track_specs: + self._schedule_move(fpath, rid, album) + + album.run_when_loaded(run_when_album_ready) + + def _schedule_move(self, fpath: Path, recording_id: str, album: Album) -> None: + """Schedule a file move when both file and track are ready. 
+ + Parameters + ---------- + fpath : Path + The file path to move. + recording_id : str + The recording ID of the target track. + album : Album + The album containing the track. + """ + + def attempt_move() -> None: + file = self.tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + return + + rec_to_track = {t.id: t for t in album.tracks} + track = rec_to_track.get(recording_id) + if track is None: + return + + file.move(track) + + def is_ready() -> bool: + file = self.tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + return False + + rec_to_track = {t.id: t for t in album.tracks} + track = rec_to_track.get(recording_id) + return track is not None + + RetryHelper.retry_until( + condition_fn=is_ready, action_fn=attempt_move, delay_ms=SessionConstants.FAST_RETRY_DELAY_MS + ) + + def move_file_to_nat(self, fpath: Path, recording_id: str) -> None: + """Move a file to NAT (Non-Album Track) when ready. + + Parameters + ---------- + fpath : Path + The file path to move. + recording_id : str + The recording ID for the NAT. 
+ """ + + def attempt_nat_move() -> None: + file = self.tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + return + self.tagger.move_file_to_nat(file, recording_id) + + def is_file_ready() -> bool: + file = self.tagger.files.get(str(fpath)) + return file is not None and file.state != File.PENDING + + RetryHelper.retry_until( + condition_fn=is_file_ready, action_fn=attempt_nat_move, delay_ms=SessionConstants.DEFAULT_RETRY_DELAY_MS + ) diff --git a/picard/tagger.py b/picard/tagger.py index 7052d6e110..56d52cee52 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -499,7 +499,7 @@ def iter_all_files(self): # ============================== def export_session(self) -> dict: from picard import config as _cfg - from picard.session import export_session as _export_session + from picard.session.session_manager import export_session as _export_session # Expose config on self for session helpers self.config = _cfg # type: ignore[attr-defined] @@ -640,7 +640,7 @@ def exit(self): with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): config = get_config() if config.setting['session_backup_on_crash']: - from picard.session import save_session_to_path + from picard.session.session_manager import save_session_to_path path = config.persist['session_autosave_path'] or config.persist['last_session_path'] if path: @@ -657,7 +657,7 @@ def _run_init(self): last_path = config.persist['last_session_path'] if last_path: with contextlib.suppress(OSError, PermissionError, FileNotFoundError, json.JSONDecodeError, KeyError): - from picard.session import load_session_from_path + from picard.session.session_manager import load_session_from_path load_session_from_path(self, last_path) @@ -670,7 +670,7 @@ def run(self): config = get_config() interval_min = int(config.setting['session_autosave_interval_min']) if interval_min > 0: - from picard.session import save_session_to_path + from picard.session.session_manager import 
save_session_to_path self._session_autosave_timer = QtCore.QTimer(self) self._session_autosave_timer.setInterval(max(1, interval_min) * 60 * 1000) diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index c80b5b117a..457c70ff59 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -1045,7 +1045,7 @@ def save(self): self.tagger.save(self.selected_objects) def save_session(self): - from picard.session import save_session_to_path + from picard.session.session_manager import save_session_to_path from picard.ui.util import FileDialog @@ -1066,7 +1066,7 @@ def save_session(self): QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) def load_session(self): - from picard.session import load_session_from_path + from picard.session.session_manager import load_session_from_path from picard.ui.util import FileDialog diff --git a/test/session/__init__.py b/test/session/__init__.py new file mode 100644 index 0000000000..7078156b9a --- /dev/null +++ b/test/session/__init__.py @@ -0,0 +1,21 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for session management package for Picard.""" diff --git a/test/session/conftest.py b/test/session/conftest.py new file mode 100644 index 0000000000..31feba23d3 --- /dev/null +++ b/test/session/conftest.py @@ -0,0 +1,445 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Fixtures and mocks for tests in session management package for Picard.""" + +from pathlib import Path +from types import SimpleNamespace +from typing import Any +from unittest.mock import Mock + +from picard.album import Album, NatAlbum +from picard.cluster import Cluster, UnclusteredFiles +import picard.config as picard_config +from picard.file import File +from picard.metadata import Metadata +from picard.session.location_detector import LocationDetector +from picard.session.session_data import AlbumItems, SessionItemLocation +from picard.session.session_exporter import SessionExporter +from picard.session.session_loader import SessionLoader +from picard.session.track_mover import TrackMover + +import pytest + + +# ============================================================================= +# Stub Classes +# ============================================================================= + + +class _StubFile: + """Stub file class for testing.""" + + def __init__(self, filename: str, metadata: Metadata, saved: bool, parent_item: Any = None) -> None: + self.filename = filename + self.metadata = metadata + self._saved = saved + self.parent_item = parent_item + + def is_saved(self) -> bool: + return self._saved + + +class _StubTrack: + """Stub track class for testing.""" + + def __init__(self, track_id: str, scripted: Metadata, current: Metadata) -> None: + self.id = track_id + self.scripted_metadata = scripted + self.metadata = current + + +class _StubAlbum: + """Stub album class for testing.""" + + def __init__(self, album_id: str, orig: Metadata, current: Metadata, tracks: list[_StubTrack]) -> None: + self.id = album_id + self.orig_metadata = orig + self.metadata = current + self.tracks = tracks + + +class _StubTagger: + """Stub tagger class for testing.""" + + def __init__(self, files: list[_StubFile], albums: dict[str, Any] | None = None) -> None: + self._files = files + self.albums = albums or {} + + def iter_all_files(self): + yield from self._files + + 
+# ============================================================================= +# Configuration Fixtures +# ============================================================================= + + +@pytest.fixture(autouse=True) +def _fake_script_config(monkeypatch: pytest.MonkeyPatch) -> SimpleNamespace: + """Provide minimal config so functions accessing get_config() have settings.""" + + class _FakeSetting(dict): + def raw_value(self, name, qtype=None): + return self.get(name) + + def key(self, name): + return name + + cfg = SimpleNamespace(setting=_FakeSetting({'enabled_plugins': []}), sync=lambda: None) + import picard.config as picard_config_mod + import picard.extension_points as ext_points_mod + import picard.session.session_exporter as session_exporter_mod + import picard.session.session_loader as session_loader_mod + + monkeypatch.setattr(picard_config_mod, 'get_config', lambda: cfg, raising=True) + monkeypatch.setattr(ext_points_mod, 'get_config', lambda: cfg, raising=True) + monkeypatch.setattr(session_exporter_mod, 'get_config', lambda: cfg, raising=True) + monkeypatch.setattr(session_loader_mod, 'get_config', lambda: cfg, raising=True) + return cfg + + +@pytest.fixture() +def cfg_options() -> None: + """Ensure required config keys exist with defaults.""" + cfg = picard_config.get_config() + # Ensure required keys exist with defaults + cfg.setting['rename_files'] = False + cfg.setting['move_files'] = False + cfg.setting['dont_write_tags'] = False + + +# ============================================================================= +# Mock Objects +# ============================================================================= + + +@pytest.fixture +def mock_file() -> Mock: + """Provide a mock file object.""" + file_mock = Mock(spec=File) + file_mock.filename = "/test/file.mp3" + return file_mock + + +@pytest.fixture +def mock_file_with_metadata() -> Mock: + """Provide a mock file with metadata.""" + file_mock = Mock(spec=File) + metadata = Metadata() + 
metadata["title"] = "Test Song" + metadata["artist"] = "Test Artist" + metadata["~internal"] = "internal_value" + metadata["length"] = "123456" + file_mock.metadata = metadata + return file_mock + + +@pytest.fixture +def mock_tagger() -> Mock: + """Provide a mock tagger instance.""" + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {} + return tagger_mock + + +@pytest.fixture +def mock_album() -> Mock: + """Provide a mock album instance.""" + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.tracks = [] + return album_mock + + +@pytest.fixture +def mock_nat_album() -> Mock: + """Provide a mock NAT album instance.""" + nat_album_mock = Mock(spec=NatAlbum) + nat_album_mock.id = "nat-album-123" + return nat_album_mock + + +@pytest.fixture +def mock_cluster() -> Mock: + """Provide a mock cluster instance.""" + cluster_mock = Mock(spec=Cluster) + cluster_mock.related_album = None + cluster_mock.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + return cluster_mock + + +@pytest.fixture +def mock_unclustered_files() -> Mock: + """Provide a mock UnclusteredFiles instance.""" + unclustered_mock = Mock(spec=UnclusteredFiles) + unclustered_mock.related_album = None + return unclustered_mock + + +@pytest.fixture +def mock_track() -> Mock: + """Provide a mock track instance.""" + track_mock = Mock() + track_mock.id = "recording-123" + track_mock.metadata = Metadata() + track_mock.scripted_metadata = Metadata() + return track_mock + + +# ============================================================================= +# Session Component Fixtures +# ============================================================================= + + +@pytest.fixture +def location_detector() -> LocationDetector: + """Provide a LocationDetector instance.""" + return LocationDetector() + + +@pytest.fixture +def session_exporter() -> SessionExporter: + """Provide a SessionExporter instance.""" + return SessionExporter() + + 
+@pytest.fixture +def session_loader() -> SessionLoader: + """Provide a SessionLoader instance.""" + tagger_mock = Mock() + return SessionLoader(tagger_mock) + + +@pytest.fixture +def track_mover() -> TrackMover: + """Provide a TrackMover instance.""" + tagger_mock = Mock() + return TrackMover(tagger_mock) + + +# ============================================================================= +# Test Data Fixtures +# ============================================================================= + + +@pytest.fixture +def sample_metadata() -> Metadata: + """Provide sample metadata for testing.""" + metadata = Metadata() + metadata["title"] = "Test Song" + metadata["artist"] = "Test Artist" + metadata["album"] = "Test Album" + metadata["~internal"] = "internal_value" + metadata["length"] = "123456" + return metadata + + +@pytest.fixture +def sample_session_item_location() -> SessionItemLocation: + """Provide a sample SessionItemLocation for testing.""" + return SessionItemLocation(type="track", album_id="album-123", recording_id="recording-456") + + +@pytest.fixture +def sample_album_items() -> AlbumItems: + """Provide sample AlbumItems for testing.""" + return AlbumItems(unmatched=[Path("/test/unmatched.mp3")], tracks=[(Path("/test/track.mp3"), "recording-456")]) + + +@pytest.fixture +def sample_session_data() -> dict[str, Any]: + """Provide sample session data for testing.""" + return { + "version": 1, + "options": { + "rename_files": True, + "move_files": False, + "dont_write_tags": True, + }, + "items": [ + { + "file_path": "/test/file1.mp3", + "location": {"type": "unclustered"}, + }, + { + "file_path": "/test/file2.mp3", + "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, + "metadata": {"tags": {"title": ["Test Song"]}}, + }, + ], + "album_track_overrides": {"album-123": {"track-456": {"title": ["New Title"]}}}, + "album_overrides": {"album-123": {"albumartist": ["New Artist"]}}, + "unmatched_albums": ["album-789"], + 
"expanded_albums": ["album-123"], + } + + +# ============================================================================= +# Utility Functions +# ============================================================================= + + +def create_stub_file(filename: str, metadata: Metadata, saved: bool = False, parent_item: Any = None) -> _StubFile: + """Create a stub file for testing.""" + return _StubFile(filename, metadata, saved, parent_item) + + +def create_stub_track(track_id: str, scripted: Metadata, current: Metadata) -> _StubTrack: + """Create a stub track for testing.""" + return _StubTrack(track_id, scripted, current) + + +def create_stub_album(album_id: str, orig: Metadata, current: Metadata, tracks: list[_StubTrack]) -> _StubAlbum: + """Create a stub album for testing.""" + return _StubAlbum(album_id, orig, current, tracks) + + +def create_stub_tagger(files: list[_StubFile], albums: dict[str, Any] | None = None) -> _StubTagger: + """Create a stub tagger for testing.""" + return _StubTagger(files, albums) + + +def create_mock_album_with_tracks(album_id: str, track_count: int = 2) -> Mock: + """Create a mock album with specified number of tracks.""" + album_mock = Mock(spec=Album) + album_mock.id = album_id + album_mock.metadata = Metadata() + album_mock.orig_metadata = Metadata() + album_mock.tracks = [] + + for i in range(track_count): + track_mock = Mock() + track_mock.id = f"recording-{i + 1}" + track_mock.metadata = Metadata() + track_mock.scripted_metadata = Metadata() + album_mock.tracks.append(track_mock) + + return album_mock + + +def create_mock_file_with_parent(filename: str, parent_type: str = "track", album_id: str = "album-123") -> Mock: + """Create a mock file with specified parent type.""" + file_mock = Mock(spec=File) + file_mock.filename = filename + file_mock.is_saved.return_value = False + file_mock.metadata = Metadata() + + if parent_type == "track": + mock_album = Mock(spec=Album) + mock_album.id = album_id + + mock_track = Mock() + 
mock_track.album = mock_album + mock_track.id = "recording-456" + file_mock.parent_item = mock_track + elif parent_type == "cluster": + mock_cluster = Mock(spec=Cluster) + mock_cluster.related_album = None + mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + file_mock.parent_item = mock_cluster + elif parent_type == "nat": + mock_album = Mock(spec=NatAlbum) + mock_album.id = "nat-album-123" + + mock_track = Mock() + mock_track.album = mock_album + mock_track.id = "recording-456" + file_mock.parent_item = mock_track + else: + file_mock.parent_item = None + + return file_mock + + +def create_session_items_with_locations() -> list[dict[str, Any]]: + """Create a list of session items with different location types.""" + return [ + { + "file_path": "/test/unclustered.mp3", + "location": {"type": "unclustered"}, + }, + { + "file_path": "/test/cluster.mp3", + "location": {"type": "cluster", "cluster_title": "Album", "cluster_artist": "Artist"}, + }, + { + "file_path": "/test/track.mp3", + "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, + }, + { + "file_path": "/test/unmatched.mp3", + "location": {"type": "album_unmatched", "album_id": "album-789"}, + }, + { + "file_path": "/test/nat.mp3", + "location": {"type": "nat", "recording_id": "recording-999"}, + }, + ] + + +# ============================================================================= +# Mock Fixtures for Patches +# ============================================================================= + + +@pytest.fixture +def mock_get_config() -> Mock: + """Provide a mock get_config function.""" + return Mock() + + +@pytest.fixture +def mock_single_shot() -> Mock: + """Provide a mock QTimer.singleShot function.""" + return Mock() + + +# ============================================================================= +# Patch Helpers +# ============================================================================= + + +def patch_get_config(monkeypatch: 
pytest.MonkeyPatch, **settings) -> Mock: + """Patch get_config with specified settings.""" + config_mock = Mock() + config_mock.setting = { + "rename_files": False, + "move_files": False, + "dont_write_tags": False, + "session_safe_restore": True, + **settings, + } + + import picard.session.session_exporter as session_exporter_mod + import picard.session.session_loader as session_loader_mod + + monkeypatch.setattr(session_exporter_mod, 'get_config', lambda: config_mock, raising=True) + monkeypatch.setattr(session_loader_mod, 'get_config', lambda: config_mock, raising=True) + + return config_mock + + +def patch_qtimer_singleshot(monkeypatch: pytest.MonkeyPatch) -> Mock: + """Patch QtCore.QTimer.singleShot for testing.""" + mock_single_shot = Mock() + monkeypatch.setattr('PyQt6.QtCore.QTimer.singleShot', mock_single_shot) + return mock_single_shot diff --git a/test/session/test_data.py b/test/session/test_data.py new file mode 100644 index 0000000000..54e94d8a8f --- /dev/null +++ b/test/session/test_data.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for session data structures.""" + +from pathlib import Path + +from picard.metadata import Metadata +from picard.session.session_data import ( + AlbumItems, + AlbumOverrides, + GroupedItems, + SessionData, + SessionItem, + SessionItemLocation, + SessionOptions, + TrackOverrides, +) + +import pytest + + +# ============================================================================= +# SessionItemLocation Tests +# ============================================================================= + + +@pytest.mark.parametrize( + ("location_type", "album_id", "recording_id", "cluster_title", "cluster_artist"), + [ + ("unclustered", None, None, None, None), + ("track", "album-123", "recording-456", None, None), + ("album_unmatched", "album-789", None, None, None), + ("cluster", None, None, "Album Title", "Artist Name"), + ("nat", None, "recording-999", None, None), + ], +) +def test_session_item_location_creation( + location_type: str, + album_id: str | None, + recording_id: str | None, + cluster_title: str | None, + cluster_artist: str | None, +) -> None: + """Test SessionItemLocation creation with various parameters.""" + location = SessionItemLocation( + type=location_type, + album_id=album_id, + recording_id=recording_id, + cluster_title=cluster_title, + cluster_artist=cluster_artist, + ) + + assert location.type == location_type + assert location.album_id == album_id + assert location.recording_id == recording_id + assert location.cluster_title == cluster_title + assert location.cluster_artist == cluster_artist + + +def test_session_item_location_immutable() -> None: + """Test that SessionItemLocation is immutable.""" + location = SessionItemLocation(type="test") + + with pytest.raises(AttributeError): + location.type = "modified" + + +# ============================================================================= +# SessionOptions Tests +# ============================================================================= + + +@pytest.mark.parametrize( + 
("rename_files", "move_files", "dont_write_tags"), + [ + (True, True, True), + (False, False, False), + (True, False, True), + (False, True, False), + ], +) +def test_session_options_creation(rename_files: bool, move_files: bool, dont_write_tags: bool) -> None: + """Test SessionOptions creation with various boolean combinations.""" + options = SessionOptions( + rename_files=rename_files, + move_files=move_files, + dont_write_tags=dont_write_tags, + ) + + assert options.rename_files == rename_files + assert options.move_files == move_files + assert options.dont_write_tags == dont_write_tags + + +# ============================================================================= +# SessionItem Tests +# ============================================================================= + + +def test_session_item_creation() -> None: + """Test SessionItem creation with metadata.""" + file_path = Path("/test/file.mp3") + location = SessionItemLocation(type="track", album_id="album-123", recording_id="recording-456") + metadata = Metadata() + metadata["title"] = "Test Song" + + item = SessionItem(file_path=file_path, location=location, metadata=metadata) + + assert item.file_path == file_path + assert item.location == location + assert item.metadata == metadata + + +def test_session_item_creation_without_metadata() -> None: + """Test SessionItem creation without metadata.""" + file_path = Path("/test/file.mp3") + location = SessionItemLocation(type="unclustered") + + item = SessionItem(file_path=file_path, location=location) + + assert item.file_path == file_path + assert item.location == location + assert item.metadata is None + + +# ============================================================================= +# SessionData Tests +# ============================================================================= + + +def test_session_data_creation() -> None: + """Test SessionData creation with all components.""" + options = SessionOptions(rename_files=True, move_files=False, 
dont_write_tags=True) + location = SessionItemLocation(type="track", album_id="album-123") + item = SessionItem(file_path=Path("/test/file.mp3"), location=location) + + data = SessionData( + version=1, + options=options, + items=[item], + album_track_overrides={"album-123": {"track-456": {"title": ["New Title"]}}}, + album_overrides={"album-123": {"albumartist": ["New Artist"]}}, + unmatched_albums=["album-789"], + ) + + assert data.version == 1 + assert data.options == options + assert data.items == [item] + assert data.album_track_overrides == {"album-123": {"track-456": {"title": ["New Title"]}}} + assert data.album_overrides == {"album-123": {"albumartist": ["New Artist"]}} + assert data.unmatched_albums == ["album-789"] + + +# ============================================================================= +# GroupedItems Tests +# ============================================================================= + + +def test_grouped_items_creation() -> None: + """Test GroupedItems creation with all components.""" + unclustered = [Path("/test/unclustered.mp3")] + by_cluster = {("Album", "Artist"): [Path("/test/cluster.mp3")]} + by_album = { + "album-123": AlbumItems( + unmatched=[Path("/test/unmatched.mp3")], tracks=[(Path("/test/track.mp3"), "recording-456")] + ) + } + nat_items = [(Path("/test/nat.mp3"), "recording-789")] + + grouped = GroupedItems( + unclustered=unclustered, + by_cluster=by_cluster, + by_album=by_album, + nat_items=nat_items, + ) + + assert grouped.unclustered == unclustered + assert grouped.by_cluster == by_cluster + assert grouped.by_album == by_album + assert grouped.nat_items == nat_items + + +# ============================================================================= +# AlbumItems Tests +# ============================================================================= + + +def test_album_items_creation() -> None: + """Test AlbumItems creation with unmatched files and tracks.""" + unmatched = [Path("/test/unmatched1.mp3"), 
Path("/test/unmatched2.mp3")] + tracks = [(Path("/test/track1.mp3"), "recording-123"), (Path("/test/track2.mp3"), "recording-456")] + + album_items = AlbumItems(unmatched=unmatched, tracks=tracks) + + assert album_items.unmatched == unmatched + assert album_items.tracks == tracks + + +# ============================================================================= +# TrackOverrides Tests +# ============================================================================= + + +def test_track_overrides_creation() -> None: + """Test TrackOverrides creation.""" + overrides = {"title": ["New Title"], "artist": ["New Artist"]} + + track_overrides = TrackOverrides(track_id="recording-123", overrides=overrides) + + assert track_overrides.track_id == "recording-123" + assert track_overrides.overrides == overrides + + +# ============================================================================= +# AlbumOverrides Tests +# ============================================================================= + + +def test_album_overrides_creation() -> None: + """Test AlbumOverrides creation.""" + overrides = {"albumartist": ["New Artist"], "album": ["New Album"]} + + album_overrides = AlbumOverrides(album_id="album-123", overrides=overrides) + + assert album_overrides.album_id == "album-123" + assert album_overrides.overrides == overrides diff --git a/test/session/test_location_detector.py b/test/session/test_location_detector.py new file mode 100644 index 0000000000..b33fe435a0 --- /dev/null +++ b/test/session/test_location_detector.py @@ -0,0 +1,280 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for location detector.""" + +from unittest.mock import Mock + +from picard.album import Album, NatAlbum +from picard.cluster import Cluster, UnclusteredFiles +from picard.file import File +from picard.session.constants import SessionConstants +from picard.session.location_detector import LocationDetector + +import pytest + + +@pytest.fixture +def location_detector() -> LocationDetector: + """Provide a LocationDetector instance.""" + return LocationDetector() + + +@pytest.fixture +def mock_file() -> Mock: + """Provide a mock file object.""" + file_mock = Mock(spec=File) + file_mock.filename = "/test/file.mp3" + return file_mock + + +def test_location_detector_unclustered_file(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for unclustered files.""" + mock_file.parent_item = None + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_UNCLUSTERED + assert location.album_id is None + assert location.recording_id is None + + +def test_location_detector_track_file(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for files under tracks.""" + mock_album = Mock(spec=Album) + mock_album.id = "album-123" + + mock_track = Mock() + mock_track.album = mock_album + mock_track.id = "recording-456" + mock_file.parent_item = mock_track + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_TRACK + assert location.album_id == 
"album-123" + assert location.recording_id == "recording-456" + + +def test_location_detector_nat_file(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for NAT files.""" + mock_album = Mock(spec=NatAlbum) + mock_album.id = "nat-album-123" + + mock_track = Mock() + mock_track.album = mock_album + mock_track.id = "recording-456" + mock_file.parent_item = mock_track + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_NAT + assert location.recording_id == "recording-456" + assert location.album_id is None + + +def test_location_detector_cluster_file(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for files under clusters.""" + mock_album = Mock(spec=Album) + mock_album.id = "album-123" + + mock_cluster = Mock(spec=Cluster) + mock_cluster.related_album = mock_album + mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + mock_file.parent_item = mock_cluster + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_ALBUM_UNMATCHED + assert location.album_id == "album-123" + + +def test_location_detector_unclustered_files_cluster(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for UnclusteredFiles cluster.""" + mock_cluster = Mock(spec=UnclusteredFiles) + mock_cluster.related_album = None + mock_file.parent_item = mock_cluster + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_UNCLUSTERED + + +def test_location_detector_regular_cluster(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for regular clusters.""" + mock_cluster = Mock(spec=Cluster) + mock_cluster.related_album = None + mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + mock_file.parent_item = mock_cluster + + location = 
location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_CLUSTER + assert location.cluster_title == "Test Album" + assert location.cluster_artist == "Test Artist" + + +def test_location_detector_track_without_id(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for tracks without ID.""" + mock_album = Mock(spec=Album) + mock_album.id = "album-123" + + mock_track = Mock() + mock_track.album = mock_album + # No id attribute + del mock_track.id + mock_file.parent_item = mock_track + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_ALBUM_UNMATCHED + assert location.album_id == "album-123" + + +def test_location_detector_unknown_parent(location_detector: LocationDetector, mock_file: Mock) -> None: + """Test location detection for unknown parent types.""" + mock_file.parent_item = Mock() # Not a track or cluster + + location = location_detector.detect(mock_file) + + assert location.type == SessionConstants.LOCATION_UNCLUSTERED + + +def test_location_detector_is_track_parent(location_detector: LocationDetector) -> None: + """Test _is_track_parent method.""" + # Valid track parent + mock_track = Mock() + mock_album = Mock(spec=Album) + mock_track.album = mock_album + + assert location_detector._is_track_parent(mock_track) is True + + # Invalid track parent - no album + mock_track_no_album = Mock() + mock_track_no_album.album = None + + assert location_detector._is_track_parent(mock_track_no_album) is False + + # Invalid track parent - album not Album instance + mock_track_wrong_album = Mock() + mock_track_wrong_album.album = Mock() # Not Album instance + + assert location_detector._is_track_parent(mock_track_wrong_album) is False + + +def test_location_detector_is_cluster_parent(location_detector: LocationDetector) -> None: + """Test _is_cluster_parent method.""" + # Valid cluster parent + mock_cluster = Mock(spec=Cluster) + assert 
location_detector._is_cluster_parent(mock_cluster) is True + + # Invalid cluster parent + mock_not_cluster = Mock() + assert location_detector._is_cluster_parent(mock_not_cluster) is False + + +def test_location_detector_detect_track_location_nat(location_detector: LocationDetector) -> None: + """Test _detect_track_location for NAT albums.""" + mock_album = Mock(spec=NatAlbum) + mock_track = Mock() + mock_track.album = mock_album + mock_track.id = "recording-123" + + location = location_detector._detect_track_location(mock_track) + + assert location.type == SessionConstants.LOCATION_NAT + assert location.recording_id == "recording-123" + + +def test_location_detector_detect_track_location_regular(location_detector: LocationDetector) -> None: + """Test _detect_track_location for regular albums.""" + mock_album = Mock(spec=Album) + mock_album.id = "album-123" + mock_track = Mock() + mock_track.album = mock_album + mock_track.id = "recording-456" + + location = location_detector._detect_track_location(mock_track) + + assert location.type == SessionConstants.LOCATION_TRACK + assert location.album_id == "album-123" + assert location.recording_id == "recording-456" + + +def test_location_detector_detect_track_location_no_id(location_detector: LocationDetector) -> None: + """Test _detect_track_location for tracks without ID.""" + mock_album = Mock(spec=Album) + mock_album.id = "album-123" + mock_track = Mock() + mock_track.album = mock_album + # No id attribute + del mock_track.id + + location = location_detector._detect_track_location(mock_track) + + assert location.type == SessionConstants.LOCATION_ALBUM_UNMATCHED + assert location.album_id == "album-123" + + +def test_location_detector_detect_cluster_location_with_related_album(location_detector: LocationDetector) -> None: + """Test _detect_cluster_location with related album.""" + mock_album = Mock(spec=Album) + mock_album.id = "album-123" + mock_cluster = Mock(spec=Cluster) + mock_cluster.related_album = mock_album + 
+ location = location_detector._detect_cluster_location(mock_cluster) + + assert location.type == SessionConstants.LOCATION_ALBUM_UNMATCHED + assert location.album_id == "album-123" + + +def test_location_detector_detect_cluster_location_unclustered_files(location_detector: LocationDetector) -> None: + """Test _detect_cluster_location with UnclusteredFiles.""" + mock_cluster = Mock(spec=UnclusteredFiles) + mock_cluster.related_album = None + + location = location_detector._detect_cluster_location(mock_cluster) + + assert location.type == SessionConstants.LOCATION_UNCLUSTERED + + +def test_location_detector_detect_cluster_location_regular_cluster(location_detector: LocationDetector) -> None: + """Test _detect_cluster_location with regular cluster.""" + mock_cluster = Mock(spec=Cluster) + mock_cluster.related_album = None + mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + + location = location_detector._detect_cluster_location(mock_cluster) + + assert location.type == SessionConstants.LOCATION_CLUSTER + assert location.cluster_title == "Test Album" + assert location.cluster_artist == "Test Artist" + + +def test_location_detector_unclustered_location(location_detector: LocationDetector) -> None: + """Test _unclustered_location method.""" + location = location_detector._unclustered_location() + + assert location.type == SessionConstants.LOCATION_UNCLUSTERED + assert location.album_id is None + assert location.recording_id is None + assert location.cluster_title is None + assert location.cluster_artist is None diff --git a/test/session/test_metadata_handler.py b/test/session/test_metadata_handler.py new file mode 100644 index 0000000000..56a80a6ec6 --- /dev/null +++ b/test/session/test_metadata_handler.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms 
of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for metadata handler.""" + +from pathlib import Path +from typing import Any +from unittest.mock import Mock, patch + +from picard.file import File +from picard.metadata import Metadata +from picard.session.metadata_handler import MetadataHandler + +import pytest + + +@pytest.fixture +def mock_file_with_metadata() -> Mock: + """Provide a mock file with metadata.""" + file_mock = Mock(spec=File) + metadata = Metadata() + metadata["title"] = "Test Song" + metadata["artist"] = "Test Artist" + metadata["~internal"] = "internal_value" + metadata["length"] = "123456" + file_mock.metadata = metadata + return file_mock + + +def test_serialize_metadata_for_file(mock_file_with_metadata: Mock) -> None: + """Test metadata serialization excluding internal tags.""" + tags = MetadataHandler.serialize_metadata_for_file(mock_file_with_metadata) + + assert "title" in tags + assert "artist" in tags + assert "~internal" not in tags + assert "length" not in tags + assert tags["title"] == ["Test Song"] + assert tags["artist"] == ["Test Artist"] + + +def test_serialize_metadata_empty_file() -> None: + """Test metadata serialization for file with no metadata.""" + file_mock = Mock(spec=File) + metadata = Mock(spec=Metadata) + metadata.rawitems.return_value = [] + file_mock.metadata = metadata + + tags = MetadataHandler.serialize_metadata_for_file(file_mock) + + 
assert tags == {} + + +def test_serialize_metadata_with_multiple_values() -> None: + """Test metadata serialization with multiple values per tag.""" + file_mock = Mock(spec=File) + metadata = Mock(spec=Metadata) + metadata.rawitems.return_value = [ + ("genre", ["Rock", "Pop"]), + ("artist", ["Single Artist"]), + ] + file_mock.metadata = metadata + + tags = MetadataHandler.serialize_metadata_for_file(file_mock) + + assert tags["genre"] == ["Rock", "Pop"] + assert tags["artist"] == ["Single Artist"] + + +def test_deserialize_metadata() -> None: + """Test metadata deserialization.""" + tags = {"title": ["Test Song"], "artist": ["Test Artist"]} + + metadata = MetadataHandler.deserialize_metadata(tags) + + assert metadata["title"] == "Test Song" + assert metadata["artist"] == "Test Artist" + + +def test_deserialize_metadata_empty() -> None: + """Test metadata deserialization with empty tags.""" + metadata = MetadataHandler.deserialize_metadata({}) + + assert len(metadata) == 0 + + +def test_deserialize_metadata_with_multiple_values() -> None: + """Test metadata deserialization with multiple values per tag.""" + tags = {"genre": ["Rock", "Pop"], "artist": ["Artist 1", "Artist 2"]} + + metadata = MetadataHandler.deserialize_metadata(tags) + + assert metadata["genre"] == "Rock; Pop" + assert metadata["artist"] == "Artist 1; Artist 2" + + +@pytest.mark.parametrize( + ("values", "expected"), + [ + ("single_value", ["single_value"]), + (["list", "values"], ["list", "values"]), + (("tuple", "values"), ["tuple", "values"]), + (123, [123]), + (None, [None]), + ([], []), + ((), []), + ], +) +def test_as_list(values: Any, expected: list[Any]) -> None: + """Test as_list conversion with various input types.""" + result = MetadataHandler.as_list(values) + assert result == expected + + +@patch('picard.log.log') +def test_safe_apply_metadata_success(mock_log: Mock) -> None: + """Test successful metadata application.""" + file_mock = Mock(spec=File) + file_mock.metadata = Mock() + 
file_mock.metadata.length = 123456 + file_mock.orig_metadata = Mock() + file_mock.orig_metadata.length = 789012 + + metadata = Metadata() + metadata["title"] = "New Title" + + result = MetadataHandler.safe_apply_metadata(file_mock, metadata) + + assert result is True + file_mock.copy_metadata.assert_called_once_with(metadata) + file_mock.update.assert_called_once() + assert metadata.length == 123456 + + +@patch('picard.log.log') +def test_safe_apply_metadata_success_with_none_length(mock_log: Mock) -> None: + """Test successful metadata application with None length.""" + file_mock = Mock(spec=File) + file_mock.metadata = Mock() + file_mock.metadata.length = None + file_mock.orig_metadata = Mock() + file_mock.orig_metadata.length = 789012 + + metadata = Metadata() + metadata["title"] = "New Title" + + result = MetadataHandler.safe_apply_metadata(file_mock, metadata) + + assert result is True + file_mock.copy_metadata.assert_called_once_with(metadata) + file_mock.update.assert_called_once() + assert metadata.length == 789012 + + +@patch('picard.session.metadata_handler.log') +def test_safe_apply_metadata_attribute_error(mock_log: Mock) -> None: + """Test metadata application with AttributeError.""" + file_mock = Mock(spec=File) + file_mock.filename = "/test/file.mp3" + file_mock.metadata = Mock() + file_mock.metadata.length = None + file_mock.orig_metadata = Mock() + file_mock.orig_metadata.length = 789012 + file_mock.copy_metadata.side_effect = AttributeError("Test error") + + metadata = Metadata() + + result = MetadataHandler.safe_apply_metadata(file_mock, metadata) + + assert result is False + mock_log.warning.assert_called_once() + assert "Test error" in str(mock_log.warning.call_args) + + +@patch('picard.session.metadata_handler.log') +def test_safe_apply_metadata_key_error(mock_log: Mock) -> None: + """Test metadata application with KeyError.""" + file_mock = Mock(spec=File) + file_mock.filename = "/test/file.mp3" + file_mock.metadata = Mock() + 
file_mock.metadata.length = None + file_mock.orig_metadata = Mock() + file_mock.orig_metadata.length = 789012 + file_mock.copy_metadata.side_effect = KeyError("Test error") + + metadata = Metadata() + + result = MetadataHandler.safe_apply_metadata(file_mock, metadata) + + assert result is False + mock_log.warning.assert_called_once() + assert "Test error" in str(mock_log.warning.call_args) + + +@patch('picard.session.metadata_handler.log') +def test_safe_apply_metadata_unexpected_error(mock_log: Mock) -> None: + """Test metadata application with unexpected error.""" + file_mock = Mock(spec=File) + file_mock.metadata = Mock() + file_mock.metadata.length = None + file_mock.orig_metadata = Mock() + file_mock.orig_metadata.length = 789012 + file_mock.copy_metadata.side_effect = RuntimeError("Unexpected error") + + metadata = Metadata() + + result = MetadataHandler.safe_apply_metadata(file_mock, metadata) + + assert result is False + mock_log.error.assert_called_once() + assert "Unexpected error" in str(mock_log.error.call_args) + + +@patch('picard.session.retry_helper.RetryHelper') +def test_apply_saved_metadata_if_any_file_pending(mock_retry_helper: Mock) -> None: + """Test applying saved metadata with file in PENDING state.""" + tagger_mock = Mock() + file_mock = Mock(spec=File) + file_mock.state = File.PENDING + + tagger_mock.files.get.return_value = file_mock + + metadata_map = {Path("/test/file.mp3"): Metadata()} + + MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) + + mock_retry_helper.retry_until.assert_called_once() + + +@patch('picard.session.retry_helper.RetryHelper') +def test_apply_saved_metadata_if_any_file_not_found(mock_retry_helper: Mock) -> None: + """Test applying saved metadata when file is not found.""" + tagger_mock = Mock() + tagger_mock.files.get.return_value = None + + metadata_map = {Path("/test/file.mp3"): Metadata()} + + MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) + + 
mock_retry_helper.retry_until.assert_called_once() + + +@patch('picard.session.retry_helper.RetryHelper') +def test_apply_saved_metadata_if_any_file_ready_success(mock_retry_helper: Mock) -> None: + """Test applying saved metadata when file is ready and application succeeds.""" + tagger_mock = Mock() + file_mock = Mock(spec=File) + file_mock.state = 1 # Not PENDING (PENDING = 0) + + tagger_mock.files.get.return_value = file_mock + + metadata = Metadata() + metadata_map = {Path("/test/file.mp3"): metadata} + + with patch.object(MetadataHandler, 'safe_apply_metadata', return_value=True): + MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) + + # Should not retry if file is ready and metadata applied successfully + mock_retry_helper.retry_until.assert_not_called() + + +@patch('picard.session.retry_helper.RetryHelper') +def test_apply_saved_metadata_if_any_file_ready_failure(mock_retry_helper: Mock) -> None: + """Test applying saved metadata when file is ready but application fails.""" + tagger_mock = Mock() + file_mock = Mock(spec=File) + file_mock.state = 1 # Not PENDING (PENDING = 0) + + tagger_mock.files.get.return_value = file_mock + + metadata = Metadata() + metadata_map = {Path("/test/file.mp3"): metadata} + + with patch.object(MetadataHandler, 'safe_apply_metadata', return_value=False): + MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) + + # Should retry if metadata application failed + mock_retry_helper.retry_until.assert_called_once() + + +@patch('picard.session.retry_helper.RetryHelper') +def test_apply_saved_metadata_if_any_mixed_states(mock_retry_helper: Mock) -> None: + """Test applying saved metadata with files in different states.""" + tagger_mock = Mock() + + # File 1: ready and successful + file1_mock = Mock(spec=File) + file1_mock.state = 1 # Not PENDING (PENDING = 0) + + # File 2: pending + file2_mock = Mock(spec=File) + file2_mock.state = File.PENDING + + # File 3: ready but failed + file3_mock = 
Mock(spec=File) + file3_mock.state = 1 # Not PENDING (PENDING = 0) + + def files_getter(path): + if str(path) == "/test/file1.mp3": + return file1_mock + elif str(path) == "/test/file2.mp3": + return file2_mock + elif str(path) == "/test/file3.mp3": + return file3_mock + return None + + tagger_mock.files.get.side_effect = files_getter + + metadata_map = { + Path("/test/file1.mp3"): Metadata(), + Path("/test/file2.mp3"): Metadata(), + Path("/test/file3.mp3"): Metadata(), + } + + with patch.object(MetadataHandler, 'safe_apply_metadata', side_effect=[True, False]): + MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) + + # Should retry for file2 (pending) and file3 (failed) + mock_retry_helper.retry_until.assert_called_once() diff --git a/test/session/test_retry_helper.py b/test/session/test_retry_helper.py new file mode 100644 index 0000000000..913364565f --- /dev/null +++ b/test/session/test_retry_helper.py @@ -0,0 +1,354 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for retry helper.""" + +from unittest.mock import Mock, patch + +from picard.session.retry_helper import RetryHelper + +import pytest + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_condition_met_immediately(mock_single_shot: Mock) -> None: + """Test retry_until when condition is met immediately.""" + condition_called = False + action_called = False + + def condition_fn() -> bool: + nonlocal condition_called + condition_called = True + return True + + def action_fn() -> None: + nonlocal action_called + action_called = True + + RetryHelper.retry_until(condition_fn, action_fn) + + assert condition_called + assert action_called + mock_single_shot.assert_not_called() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_condition_not_met(mock_single_shot: Mock) -> None: + """Test retry_until when condition is not met.""" + + def condition_fn() -> bool: + return False + + def action_fn() -> None: + pass + + RetryHelper.retry_until(condition_fn, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_with_custom_delay(mock_single_shot: Mock) -> None: + """Test retry_until with custom delay.""" + + def condition_fn() -> bool: + return False + + def action_fn() -> None: + pass + + RetryHelper.retry_until(condition_fn, action_fn, delay_ms=500) + + mock_single_shot.assert_called_once_with(500, mock_single_shot.call_args[0][1]) + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_with_max_attempts(mock_single_shot: Mock) -> None: + """Test retry_until with maximum attempts limit.""" + attempt_count = 0 + + def condition_fn() -> bool: + nonlocal attempt_count + attempt_count += 1 + return False + + def action_fn() -> None: + pass + + # Mock the callback to simulate retries + def mock_callback(delay, callback): + callback() # Simulate retry + + mock_single_shot.side_effect = mock_callback + + RetryHelper.retry_until(condition_fn, action_fn, 
max_attempts=3) + + # Should schedule retry for max_attempts times + assert mock_single_shot.call_count == 3 + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_condition_becomes_true_after_retries(mock_single_shot: Mock) -> None: + """Test retry_until when condition becomes true after some retries.""" + call_count = 0 + + def condition_fn() -> bool: + nonlocal call_count + call_count += 1 + return call_count >= 3 # True after 3 calls + + def action_fn() -> None: + pass + + # Mock the callback to simulate retries + def mock_callback(delay, callback): + if call_count < 3: + callback() # Simulate retry + + mock_single_shot.side_effect = mock_callback + + RetryHelper.retry_until(condition_fn, action_fn) + + # Should have scheduled retries + assert mock_single_shot.call_count > 0 + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_file_ready_file_not_ready(mock_single_shot: Mock) -> None: + """Test retry_until_file_ready with file not ready.""" + file_mock = Mock() + file_mock.state = 0 # PENDING state + file_mock.PENDING = 0 # Add PENDING attribute + + def file_getter() -> Mock: + return file_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_file_ready(file_getter, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_file_ready_file_ready(mock_single_shot: Mock) -> None: + """Test retry_until_file_ready when file is ready.""" + file_mock = Mock() + file_mock.state = 1 # Not PENDING + file_mock.PENDING = 0 # Add PENDING attribute + + def file_getter() -> Mock: + return file_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_file_ready(file_getter, action_fn) + + mock_single_shot.assert_not_called() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_file_ready_no_file(mock_single_shot: Mock) -> None: + """Test retry_until_file_ready when file is None.""" + + def file_getter() -> None: + return None + + def action_fn() 
-> None: + pass + + RetryHelper.retry_until_file_ready(file_getter, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_file_ready_file_without_state(mock_single_shot: Mock) -> None: + """Test retry_until_file_ready when file has no state attribute.""" + file_mock = Mock() + # No state attribute + del file_mock.state + file_mock.PENDING = 0 # Add PENDING attribute + + def file_getter() -> Mock: + return file_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_file_ready(file_getter, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_file_ready_with_custom_delay(mock_single_shot: Mock) -> None: + """Test retry_until_file_ready with custom delay.""" + file_mock = Mock() + file_mock.state = 0 # PENDING state + file_mock.PENDING = 0 # Add PENDING attribute + + def file_getter() -> Mock: + return file_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_file_ready(file_getter, action_fn, delay_ms=300) + + mock_single_shot.assert_called_once_with(300, mock_single_shot.call_args[0][1]) + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_album_ready_album_not_ready(mock_single_shot: Mock) -> None: + """Test retry_until_album_ready with album not ready.""" + album_mock = Mock() + album_mock.tracks = [] # No tracks + + def album_getter() -> Mock: + return album_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_album_ready(album_getter, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_album_ready_album_ready(mock_single_shot: Mock) -> None: + """Test retry_until_album_ready when album is ready.""" + album_mock = Mock() + album_mock.tracks = [Mock()] # Has tracks + + def album_getter() -> Mock: + return album_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_album_ready(album_getter, 
action_fn) + + mock_single_shot.assert_not_called() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_album_ready_no_album(mock_single_shot: Mock) -> None: + """Test retry_until_album_ready when album is None.""" + + def album_getter() -> None: + return None + + def action_fn() -> None: + pass + + RetryHelper.retry_until_album_ready(album_getter, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_album_ready_album_without_tracks_attribute(mock_single_shot: Mock) -> None: + """Test retry_until_album_ready when album has no tracks attribute.""" + album_mock = Mock() + # Remove tracks attribute to simulate album without tracks + del album_mock.tracks + + def album_getter() -> Mock: + return album_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_album_ready(album_getter, action_fn) + + mock_single_shot.assert_called_once() + + +@patch('PyQt6.QtCore.QTimer.singleShot') +def test_retry_until_album_ready_with_custom_delay(mock_single_shot: Mock) -> None: + """Test retry_until_album_ready with custom delay.""" + album_mock = Mock() + album_mock.tracks = [] # No tracks + + def album_getter() -> Mock: + return album_mock + + def action_fn() -> None: + pass + + RetryHelper.retry_until_album_ready(album_getter, action_fn, delay_ms=400) + + mock_single_shot.assert_called_once_with(400, mock_single_shot.call_args[0][1]) + + +def test_retry_until_condition_function_exception() -> None: + """Test retry_until when condition function raises exception.""" + + def condition_fn() -> bool: + raise RuntimeError("Condition error") + + def action_fn() -> None: + pass + + with pytest.raises(RuntimeError, match="Condition error"): + RetryHelper.retry_until(condition_fn, action_fn) + + +def test_retry_until_action_function_exception() -> None: + """Test retry_until when action function raises exception.""" + + def condition_fn() -> bool: + return True + + def action_fn() -> None: + 
raise RuntimeError("Action error") + + with pytest.raises(RuntimeError, match="Action error"): + RetryHelper.retry_until(condition_fn, action_fn) + + +def test_retry_until_file_ready_file_getter_exception() -> None: + """Test retry_until_file_ready when file getter raises exception.""" + + def file_getter() -> None: + raise RuntimeError("File getter error") + + def action_fn() -> None: + pass + + with pytest.raises(RuntimeError, match="File getter error"): + RetryHelper.retry_until_file_ready(file_getter, action_fn) + + +def test_retry_until_album_ready_album_getter_exception() -> None: + """Test retry_until_album_ready when album getter raises exception.""" + + def album_getter() -> None: + raise RuntimeError("Album getter error") + + def action_fn() -> None: + pass + + with pytest.raises(RuntimeError, match="Album getter error"): + RetryHelper.retry_until_album_ready(album_getter, action_fn) diff --git a/test/session/test_session_constants.py b/test/session/test_session_constants.py new file mode 100644 index 0000000000..1c0a652cbc --- /dev/null +++ b/test/session/test_session_constants.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for session constants.""" + +from picard.session.constants import SessionConstants + +import pytest + + +def test_session_constants_values() -> None: + """Test that SessionConstants has expected values.""" + assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps" + assert SessionConstants.SESSION_FORMAT_VERSION == 1 + assert SessionConstants.DEFAULT_RETRY_DELAY_MS == 200 + assert SessionConstants.FAST_RETRY_DELAY_MS == 150 + assert SessionConstants.INTERNAL_TAG_PREFIX == "~" + assert frozenset({"length", "~length"}) == SessionConstants.EXCLUDED_OVERRIDE_TAGS + assert SessionConstants.LOCATION_UNCLUSTERED == "unclustered" + assert SessionConstants.LOCATION_TRACK == "track" + assert SessionConstants.LOCATION_ALBUM_UNMATCHED == "album_unmatched" + assert SessionConstants.LOCATION_CLUSTER == "cluster" + assert SessionConstants.LOCATION_NAT == "nat" + + +def test_session_constants_immutable() -> None: + """Test that SessionConstants values are immutable.""" + # Test that frozenset is immutable + with pytest.raises(AttributeError): + SessionConstants.EXCLUDED_OVERRIDE_TAGS.add("new_tag") + + # Test that constants are class attributes + assert hasattr(SessionConstants, 'SESSION_FILE_EXTENSION') + assert hasattr(SessionConstants, 'SESSION_FORMAT_VERSION') + assert hasattr(SessionConstants, 'DEFAULT_RETRY_DELAY_MS') + assert hasattr(SessionConstants, 'FAST_RETRY_DELAY_MS') + assert hasattr(SessionConstants, 'INTERNAL_TAG_PREFIX') + assert hasattr(SessionConstants, 'EXCLUDED_OVERRIDE_TAGS') + assert hasattr(SessionConstants, 'LOCATION_UNCLUSTERED') + assert hasattr(SessionConstants, 'LOCATION_TRACK') + assert hasattr(SessionConstants, 'LOCATION_ALBUM_UNMATCHED') + assert hasattr(SessionConstants, 'LOCATION_CLUSTER') + assert hasattr(SessionConstants, 'LOCATION_NAT') diff --git a/test/session/test_session_exporter.py b/test/session/test_session_exporter.py new file mode 100644 index 0000000000..4733d5cc98 --- /dev/null +++ b/test/session/test_session_exporter.py 
@@ -0,0 +1,452 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for session exporter.""" + +from pathlib import Path +from unittest.mock import Mock, patch + +from picard.album import Album, NatAlbum +from picard.metadata import Metadata +from picard.session.constants import SessionConstants +from picard.session.session_data import SessionItemLocation +from picard.session.session_exporter import SessionExporter + +import pytest + + +@pytest.fixture +def session_exporter() -> SessionExporter: + """Provide a SessionExporter instance.""" + return SessionExporter() + + +@pytest.fixture +def mock_tagger() -> Mock: + """Provide a mock tagger instance.""" + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {} + return tagger_mock + + +def test_session_exporter_export_session_empty(session_exporter: SessionExporter, mock_tagger: Mock) -> None: + """Test exporting an empty session.""" + config_mock = Mock() + config_mock.setting = { + "rename_files": False, + "move_files": False, + "dont_write_tags": True, + } + + with patch('picard.session.session_exporter.get_config') as mock_get_config: + mock_get_config.return_value = config_mock + + 
data = session_exporter.export_session(mock_tagger) + + assert data["version"] == SessionConstants.SESSION_FORMAT_VERSION + assert data["options"] == { + "rename_files": False, + "move_files": False, + "dont_write_tags": True, + } + assert data["items"] == [] + assert data["album_track_overrides"] == {} + assert data["album_overrides"] == {} + assert data["unmatched_albums"] == [] + assert data["expanded_albums"] == [] + + +def test_session_exporter_export_file_item_saved(session_exporter: SessionExporter, cfg_options) -> None: + """Test exporting a saved file item.""" + + file_mock = Mock() + file_mock.filename = str(Path("/test/file.mp3")) + file_mock.is_saved.return_value = True + file_mock.parent_item = None + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [file_mock] + tagger_mock.albums = {} + + with patch.object(session_exporter.location_detector, 'detect') as mock_detect: + mock_detect.return_value = SessionItemLocation(type="unclustered") + data = session_exporter.export_session(tagger_mock) + + assert len(data["items"]) == 1 + item = data["items"][0] + assert item["file_path"] == str(Path("/test/file.mp3")) + assert "metadata" not in item + + +def test_session_exporter_export_file_item_unsaved(session_exporter: SessionExporter, cfg_options) -> None: + """Test exporting an unsaved file item with metadata.""" + + file_mock = Mock() + file_mock.filename = str(Path("/test/file.mp3")) + file_mock.is_saved.return_value = False + file_mock.parent_item = None + file_mock.metadata = Metadata() + file_mock.metadata["title"] = "Test Song" + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [file_mock] + tagger_mock.albums = {} + + with ( + patch.object(file_mock.metadata, 'rawitems', return_value=[("title", ["Test Song"])]), + patch.object(session_exporter.location_detector, 'detect') as mock_detect, + ): + mock_detect.return_value = SessionItemLocation(type="unclustered") + data = session_exporter.export_session(tagger_mock) + + 
assert len(data["items"]) == 1 + item = data["items"][0] + assert item["file_path"] == str(Path("/test/file.mp3")) + assert "metadata" in item + assert item["metadata"]["tags"]["title"] == ["Test Song"] + + +def test_session_exporter_export_ui_state(session_exporter: SessionExporter, cfg_options) -> None: + """Test exporting UI expansion state.""" + + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.metadata = Metadata() + album_mock.orig_metadata = Metadata() + album_mock.tracks = [] + ui_item_mock = Mock() + ui_item_mock.isExpanded.return_value = True + album_mock.ui_item = ui_item_mock + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {"album-123": album_mock} + + # Mock the diff method to return None (no overrides) + with patch.object(album_mock.metadata, 'diff', return_value=None): + data = session_exporter.export_session(tagger_mock) + + assert data["expanded_albums"] == ["album-123"] + + +def test_session_exporter_export_ui_state_no_ui_item(session_exporter: SessionExporter, cfg_options) -> None: + """Test exporting UI state when album has no UI item.""" + + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.metadata = Metadata() + album_mock.orig_metadata = Metadata() + album_mock.tracks = [] + album_mock.ui_item = None + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {"album-123": album_mock} + + # Mock the diff method to return None (no overrides) + with patch.object(album_mock.metadata, 'diff', return_value=None): + data = session_exporter.export_session(tagger_mock) + + assert data["expanded_albums"] == [] + + +def test_session_exporter_export_metadata_overrides(session_exporter: SessionExporter, cfg_options) -> None: + """Test exporting metadata overrides.""" + + # Create album with overrides + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.metadata = Metadata() + album_mock.orig_metadata = 
Metadata() + album_mock.metadata["albumartist"] = "New Artist" + album_mock.orig_metadata["albumartist"] = "Old Artist" + + # Create track with overrides + track_mock = Mock() + track_mock.id = "track-456" + track_mock.metadata = Metadata() + track_mock.scripted_metadata = Metadata() + track_mock.metadata["title"] = "New Title" + track_mock.scripted_metadata["title"] = "Old Title" + album_mock.tracks = [track_mock] + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {"album-123": album_mock} + + # Mock the diff and rawitems methods + diff_mock = Mock() + diff_mock.rawitems.return_value = [("albumartist", ["New Artist"])] + track_diff_mock = Mock() + track_diff_mock.rawitems.return_value = [("title", ["New Title"])] + + with ( + patch.object(album_mock.metadata, 'diff', return_value=diff_mock), + patch.object(track_mock.metadata, 'diff', return_value=track_diff_mock), + ): + data = session_exporter.export_session(tagger_mock) + + assert "album-123" in data["album_overrides"] + assert data["album_overrides"]["album-123"]["albumartist"] == ["New Artist"] + assert "album-123" in data["album_track_overrides"] + assert data["album_track_overrides"]["album-123"]["track-456"]["title"] == ["New Title"] + + +def test_session_exporter_export_unmatched_albums(session_exporter: SessionExporter, cfg_options) -> None: + """Test exporting unmatched albums.""" + + # Create album with no files and no overrides + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.metadata = Metadata() + album_mock.orig_metadata = Metadata() + album_mock.tracks = [] + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {"album-123": album_mock} + + # Mock the diff method to return None (no overrides) + with patch.object(album_mock.metadata, 'diff', return_value=None): + data = session_exporter.export_session(tagger_mock) + + assert data["unmatched_albums"] == ["album-123"] + + +def 
test_session_exporter_export_skips_nat_albums(session_exporter: SessionExporter, cfg_options) -> None: + """Test that NAT albums are skipped in metadata overrides export.""" + + # Create NAT album + nat_album_mock = Mock(spec=NatAlbum) + nat_album_mock.id = "nat-album-123" + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [] + tagger_mock.albums = {"nat-album-123": nat_album_mock} + + data = session_exporter.export_session(tagger_mock) + + assert data["album_overrides"] == {} + assert data["album_track_overrides"] == {} + assert data["unmatched_albums"] == [] + + +def test_session_exporter_export_albums_with_files(session_exporter: SessionExporter, cfg_options) -> None: + """Test that albums with files are not included in unmatched_albums.""" + + # Create album + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.metadata = Metadata() + album_mock.orig_metadata = Metadata() + album_mock.tracks = [] + + # Create file with parent item pointing to album + file_mock = Mock() + file_mock.filename = "/test/file.mp3" + file_mock.is_saved.return_value = True + parent_item_mock = Mock() + parent_item_mock.album = album_mock + file_mock.parent_item = parent_item_mock + + tagger_mock = Mock() + tagger_mock.iter_all_files.return_value = [file_mock] + tagger_mock.albums = {"album-123": album_mock} + + # Mock the diff method to return None (no overrides) + with ( + patch.object(album_mock.metadata, 'diff', return_value=None), + patch.object(session_exporter.location_detector, 'detect') as mock_detect, + ): + mock_detect.return_value = SessionItemLocation(type="track", album_id="album-123") + data = session_exporter.export_session(tagger_mock) + + assert data["unmatched_albums"] == [] + + +def test_session_exporter_serialize_location() -> None: + """Test location serialization.""" + exporter = SessionExporter() + + location = SessionItemLocation( + type="track", + album_id="album-123", + recording_id="recording-456", + cluster_title=None, 
+ cluster_artist=None, + ) + + serialized = exporter._serialize_location(location) + + assert serialized == { + "type": "track", + "album_id": "album-123", + "recording_id": "recording-456", + } + + +def test_session_exporter_serialize_location_with_none_values() -> None: + """Test location serialization with None values.""" + exporter = SessionExporter() + + location = SessionItemLocation( + type="unclustered", + album_id=None, + recording_id=None, + cluster_title=None, + cluster_artist=None, + ) + + serialized = exporter._serialize_location(location) + + assert serialized == {"type": "unclustered"} + + +def test_session_exporter_serialize_location_with_cluster_info() -> None: + """Test location serialization with cluster information.""" + exporter = SessionExporter() + + location = SessionItemLocation( + type="cluster", + album_id=None, + recording_id=None, + cluster_title="Test Album", + cluster_artist="Test Artist", + ) + + serialized = exporter._serialize_location(location) + + assert serialized == { + "type": "cluster", + "cluster_title": "Test Album", + "cluster_artist": "Test Artist", + } + + +def test_session_exporter_export_options() -> None: + """Test exporting configuration options.""" + exporter = SessionExporter() + + config_mock = Mock() + config_mock.setting = { + "rename_files": True, + "move_files": False, + "dont_write_tags": True, + } + + options = exporter._export_options(config_mock) + + assert options == { + "rename_files": True, + "move_files": False, + "dont_write_tags": True, + } + + +def test_session_exporter_export_options_with_falsy_values() -> None: + """Test exporting configuration options with falsy values.""" + exporter = SessionExporter() + + config_mock = Mock() + config_mock.setting = { + "rename_files": 0, + "move_files": "", + "dont_write_tags": None, + } + + options = exporter._export_options(config_mock) + + assert options == { + "rename_files": False, + "move_files": False, + "dont_write_tags": False, + } + + +def 
test_session_exporter_export_metadata_overrides_excludes_length(
+    session_exporter: SessionExporter, cfg_options
+) -> None:
+    """Test that length tags are excluded from metadata overrides."""
+
+    # Create album with length override
+    album_mock = Mock(spec=Album)
+    album_mock.id = "album-123"
+    album_mock.metadata = Metadata()
+    album_mock.orig_metadata = Metadata()
+    album_mock.metadata["length"] = "300000"
+    album_mock.orig_metadata["length"] = "250000"
+    album_mock.tracks = []
+
+    tagger_mock = Mock()
+    tagger_mock.iter_all_files.return_value = []
+    tagger_mock.albums = {"album-123": album_mock}
+
+    # Mock the diff method to return length override
+    diff_mock = Mock()
+    diff_mock.rawitems.return_value = [("length", ["300000"])]
+
+    with patch.object(album_mock.metadata, 'diff', return_value=diff_mock):
+        data = session_exporter.export_session(tagger_mock)
+
+    # Length should not be in overrides
+    assert "album-123" not in data["album_overrides"] or "length" not in data["album_overrides"]["album-123"]
+
+
+def test_session_exporter_export_metadata_overrides_includes_internal_tags(
+    session_exporter: SessionExporter, cfg_options
+) -> None:
+    """Test that internal tags are currently included in metadata overrides (not yet excluded)."""
+
+    # Create track with internal tag override
+    track_mock = Mock()
+    track_mock.id = "track-456"
+    track_mock.metadata = Metadata()
+    track_mock.scripted_metadata = Metadata()
+    track_mock.metadata["~internal"] = "new_value"
+    track_mock.scripted_metadata["~internal"] = "old_value"
+
+    album_mock = Mock(spec=Album)
+    album_mock.id = "album-123"
+    album_mock.metadata = Metadata()
+    album_mock.orig_metadata = Metadata()
+    album_mock.tracks = [track_mock]
+
+    tagger_mock = Mock()
+    tagger_mock.iter_all_files.return_value = []
+    tagger_mock.albums = {"album-123": album_mock}
+
+    # Mock the diff methods
+    track_diff_mock = Mock()
+    track_diff_mock.rawitems.return_value = [("~internal", ["new_value"])]
+    album_diff_mock = Mock()
+    
album_diff_mock.rawitems.return_value = [] + + with ( + patch.object(track_mock.metadata, 'diff', return_value=track_diff_mock), + patch.object(album_mock.metadata, 'diff', return_value=album_diff_mock), + ): + data = session_exporter.export_session(tagger_mock) + + # Internal tag should be in overrides (current implementation includes them) + assert "album-123" in data["album_track_overrides"] + assert "track-456" in data["album_track_overrides"]["album-123"] + assert "~internal" in data["album_track_overrides"]["album-123"]["track-456"] + assert data["album_track_overrides"]["album-123"]["track-456"]["~internal"] == ["new_value"] diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py new file mode 100644 index 0000000000..9f9deb391c --- /dev/null +++ b/test/session/test_session_loader.py @@ -0,0 +1,576 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for session loader.""" + +import json +from pathlib import Path +from unittest.mock import Mock, patch + +from picard.album import Album +import picard.config as picard_config +from picard.metadata import Metadata +from picard.session.session_data import AlbumItems, GroupedItems +from picard.session.session_loader import SessionLoader + +import pytest + + +@pytest.fixture +def session_loader() -> SessionLoader: + """Provide a SessionLoader instance.""" + tagger_mock = Mock() + return SessionLoader(tagger_mock) + + +def test_session_loader_read_session_file(session_loader: SessionLoader, tmp_path: Path) -> None: + """Test reading session file.""" + session_data = {"version": 1, "items": []} + session_file = tmp_path / "test.mbps" + session_file.write_text(json.dumps(session_data), encoding="utf-8") + + data = session_loader._read_session_file(session_file) + + assert data == session_data + + +def test_session_loader_read_session_file_invalid_json(session_loader: SessionLoader, tmp_path: Path) -> None: + """Test reading invalid JSON session file.""" + session_file = tmp_path / "test.mbps" + session_file.write_text("invalid json", encoding="utf-8") + + with pytest.raises(json.JSONDecodeError): + session_loader._read_session_file(session_file) + + +def test_session_loader_read_session_file_not_found(session_loader: SessionLoader) -> None: + """Test reading non-existent session file.""" + with pytest.raises(FileNotFoundError): + session_loader._read_session_file(Path("/nonexistent/file.mbps")) + + +def test_session_loader_prepare_session(session_loader: SessionLoader, cfg_options) -> None: + """Test session preparation.""" + # Set the config value for this test + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = True + + data = {"version": 1} + session_loader._prepare_session(data) + + session_loader.tagger.clear_session.assert_called_once() + assert session_loader.tagger._restoring_session is True + + +def 
test_session_loader_prepare_session_safe_restore_disabled(session_loader: SessionLoader, cfg_options) -> None: + """Test session preparation with safe restore disabled.""" + # Set the config value for this test + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = False + + data = {"version": 1} + session_loader._prepare_session(data) + + session_loader.tagger.clear_session.assert_called_once() + # When safe restore is disabled, _restoring_session should not be set to True + # (it might exist from previous tests, but should not be True) + if hasattr(session_loader.tagger, '_restoring_session'): + assert session_loader.tagger._restoring_session is not True + + +def test_session_loader_restore_options(session_loader: SessionLoader, cfg_options) -> None: + """Test restoring configuration options.""" + # The cfg_options fixture already sets the default values + + options = { + "rename_files": True, + "move_files": True, + "dont_write_tags": True, + } + + session_loader._restore_options(options) + + cfg = picard_config.get_config() + assert cfg.setting["rename_files"] is True + assert cfg.setting["move_files"] is True + assert cfg.setting["dont_write_tags"] is True + + +@patch('picard.session.session_loader.get_config') +def test_session_loader_restore_options_with_defaults(session_loader: SessionLoader, mock_get_config) -> None: + """Test restoring configuration options with default values.""" + config_mock = Mock() + config_mock.setting = { + "rename_files": False, + "move_files": False, + "dont_write_tags": False, + } + mock_get_config.return_value = config_mock + + # Empty options should use current config values + options = {} + + session_loader._restore_options(options) + + assert config_mock.setting["rename_files"] is False + assert config_mock.setting["move_files"] is False + assert config_mock.setting["dont_write_tags"] is False + + +def test_session_loader_group_items_by_location(session_loader: SessionLoader) -> None: + """Test grouping 
items by location type.""" + items = [ + { + "file_path": "/test/unclustered.mp3", + "location": {"type": "unclustered"}, + }, + { + "file_path": "/test/cluster.mp3", + "location": {"type": "cluster", "cluster_title": "Album", "cluster_artist": "Artist"}, + }, + { + "file_path": "/test/track.mp3", + "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, + }, + { + "file_path": "/test/unmatched.mp3", + "location": {"type": "album_unmatched", "album_id": "album-789"}, + }, + { + "file_path": "/test/nat.mp3", + "location": {"type": "nat", "recording_id": "recording-999"}, + }, + ] + + grouped = session_loader._group_items_by_location(items) + + assert len(grouped.unclustered) == 1 + assert Path(grouped.unclustered[0]).name == "unclustered.mp3" + + assert len(grouped.by_cluster) == 1 + assert ("Album", "Artist") in grouped.by_cluster + + assert "album-123" in grouped.by_album + assert len(grouped.by_album["album-123"].tracks) == 1 + + assert "album-789" in grouped.by_album + assert len(grouped.by_album["album-789"].unmatched) == 1 + + assert len(grouped.nat_items) == 1 + assert grouped.nat_items[0][1] == "recording-999" + + +def test_session_loader_group_items_by_location_unknown_type(session_loader: SessionLoader) -> None: + """Test grouping items with unknown location type.""" + items = [ + { + "file_path": "/test/unknown.mp3", + "location": {"type": "unknown_type"}, + }, + ] + + grouped = session_loader._group_items_by_location(items) + + # Unknown types should be treated as unclustered + assert len(grouped.unclustered) == 1 + assert Path(grouped.unclustered[0]).name == "unknown.mp3" + + +def test_session_loader_group_items_by_location_missing_location(session_loader: SessionLoader) -> None: + """Test grouping items with missing location.""" + items = [ + { + "file_path": "/test/no_location.mp3", + }, + ] + + grouped = session_loader._group_items_by_location(items) + + # Missing location should default to unclustered + assert 
len(grouped.unclustered) == 1 + assert Path(grouped.unclustered[0]).name == "no_location.mp3" + + +def test_session_loader_extract_metadata(session_loader: SessionLoader) -> None: + """Test extracting metadata from session items.""" + items = [ + { + "file_path": "/test/file1.mp3", + "metadata": {"tags": {"title": ["Song 1"], "artist": ["Artist 1"]}}, + }, + { + "file_path": "/test/file2.mp3", + # No metadata + }, + { + "file_path": "/test/file3.mp3", + "metadata": {"tags": {"title": ["Song 3"]}}, + }, + ] + + metadata_map = session_loader._extract_metadata(items) + + assert len(metadata_map) == 2 + assert Path("/test/file1.mp3") in metadata_map + assert Path("/test/file3.mp3") in metadata_map + assert metadata_map[Path("/test/file1.mp3")]["title"] == "Song 1" + + +def test_session_loader_extract_metadata_empty_items(session_loader: SessionLoader) -> None: + """Test extracting metadata from empty items list.""" + metadata_map = session_loader._extract_metadata([]) + + assert len(metadata_map) == 0 + + +def test_session_loader_extract_metadata_no_metadata(session_loader: SessionLoader) -> None: + """Test extracting metadata when no items have metadata.""" + items = [ + {"file_path": "/test/file1.mp3"}, + {"file_path": "/test/file2.mp3"}, + ] + + metadata_map = session_loader._extract_metadata(items) + + assert len(metadata_map) == 0 + + +def test_session_loader_load_unmatched_albums(session_loader: SessionLoader) -> None: + """Test loading unmatched albums.""" + unmatched_album_ids = ["album-123", "album-456"] + + album_mock1 = Mock(spec=Album) + album_mock2 = Mock(spec=Album) + session_loader.tagger.load_album.side_effect = [album_mock1, album_mock2] + + session_loader._load_unmatched_albums(unmatched_album_ids) + + assert session_loader.loaded_albums["album-123"] == album_mock1 + assert session_loader.loaded_albums["album-456"] == album_mock2 + assert session_loader.tagger.load_album.call_count == 2 + + +def 
test_session_loader_load_unmatched_albums_empty_list(session_loader: SessionLoader) -> None: + """Test loading unmatched albums with empty list.""" + session_loader._load_unmatched_albums([]) + + assert len(session_loader.loaded_albums) == 0 + session_loader.tagger.load_album.assert_not_called() + + +def test_session_loader_load_albums(session_loader: SessionLoader) -> None: + """Test loading albums.""" + grouped_items = GroupedItems( + unclustered=[], + by_cluster={}, + by_album={"album-123": AlbumItems(unmatched=[], tracks=[]), "album-456": AlbumItems(unmatched=[], tracks=[])}, + nat_items=[], + ) + + album_mock1 = Mock(spec=Album) + album_mock2 = Mock(spec=Album) + + # Use a function to return the appropriate mock based on the album_id + def load_album_side_effect(album_id): + if album_id == "album-123": + return album_mock1 + elif album_id == "album-456": + return album_mock2 + return Mock(spec=Album) + + session_loader.tagger.load_album.side_effect = load_album_side_effect + + session_loader._load_albums(grouped_items) + + assert session_loader.loaded_albums["album-123"] == album_mock1 + assert session_loader.loaded_albums["album-456"] == album_mock2 + + +def test_session_loader_load_albums_no_albums(session_loader: SessionLoader) -> None: + """Test loading albums when no albums are needed.""" + grouped_items = GroupedItems( + unclustered=[], + by_cluster={}, + by_album={}, + nat_items=[], + ) + + session_loader._load_albums(grouped_items) + + assert len(session_loader.loaded_albums) == 0 + session_loader.tagger.load_album.assert_not_called() + + +def test_session_loader_load_album_files(session_loader: SessionLoader) -> None: + """Test loading files into albums.""" + album_mock = Mock(spec=Album) + album_mock.unmatched_files = Mock() + session_loader.loaded_albums = {"album-123": album_mock} + + by_album = { + "album-123": AlbumItems( + unmatched=[Path("/test/unmatched.mp3")], + tracks=[(Path("/test/track.mp3"), "recording-456")], + ) + } + + with 
patch.object(session_loader.track_mover, 'move_files_to_tracks') as mock_move: + session_loader._load_album_files(by_album) + + session_loader.tagger.add_files.assert_called_once() + mock_move.assert_called_once_with(album_mock, [(Path("/test/track.mp3"), "recording-456")]) + + +def test_session_loader_load_album_files_no_files(session_loader: SessionLoader) -> None: + """Test loading album files when no files are present.""" + album_mock = Mock(spec=Album) + session_loader.loaded_albums = {"album-123": album_mock} + + by_album = {"album-123": AlbumItems(unmatched=[], tracks=[])} + + session_loader._load_album_files(by_album) + + session_loader.tagger.add_files.assert_not_called() + + +def test_session_loader_apply_track_overrides(session_loader: SessionLoader) -> None: + """Test applying track-level overrides.""" + album_mock = Mock(spec=Album) + track_mock = Mock() + track_mock.id = "track-123" + track_mock.metadata = {} # Add metadata dict + album_mock.tracks = [track_mock] + + overrides = {"track-123": {"title": ["New Title"], "artist": ["New Artist"]}} + + # Mock run_when_loaded to call callback immediately + def run_callback(callback): + callback() + + album_mock.run_when_loaded.side_effect = run_callback + + session_loader._apply_track_overrides(album_mock, overrides) + + assert track_mock.metadata["title"] == ["New Title"] + assert track_mock.metadata["artist"] == ["New Artist"] + track_mock.update.assert_called_once() + + +def test_session_loader_apply_track_overrides_track_not_found(session_loader: SessionLoader) -> None: + """Test applying track overrides when track is not found.""" + album_mock = Mock(spec=Album) + track_mock = Mock() + track_mock.id = "track-123" + album_mock.tracks = [track_mock] + + overrides = {"track-999": {"title": ["New Title"]}} # Non-existent track + + # Mock run_when_loaded to call callback immediately + def run_callback(callback): + callback() + + album_mock.run_when_loaded.side_effect = run_callback + + 
session_loader._apply_track_overrides(album_mock, overrides) + + # Should not modify existing track + track_mock.update.assert_not_called() + + +def test_session_loader_apply_album_overrides(session_loader: SessionLoader) -> None: + """Test applying album-level overrides.""" + album_mock = Mock(spec=Album) + album_mock.metadata = {} # Add metadata dict + + overrides = {"albumartist": ["New Artist"], "album": ["New Album"]} + + # Mock run_when_loaded to call callback immediately + def run_callback(callback): + callback() + + album_mock.run_when_loaded.side_effect = run_callback + + session_loader._apply_album_overrides(album_mock, overrides) + + assert album_mock.metadata["albumartist"] == ["New Artist"] + assert album_mock.metadata["album"] == ["New Album"] + album_mock.update.assert_called_once_with(update_tracks=False) + + +def test_session_loader_schedule_metadata_application(session_loader: SessionLoader, mock_single_shot) -> None: + """Test scheduling metadata application.""" + metadata_map = {Path("/test/file.mp3"): Metadata()} + + with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + session_loader._schedule_metadata_application(metadata_map) + + mock_single_shot.assert_called_once() + + +def test_session_loader_schedule_metadata_application_empty_map( + session_loader: SessionLoader, mock_single_shot +) -> None: + """Test scheduling metadata application with empty map.""" + with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + session_loader._schedule_metadata_application({}) + + mock_single_shot.assert_called_once() + + +def test_session_loader_unset_restoring_flag_when_idle_safe_restore_disabled( + session_loader: SessionLoader, cfg_options +) -> None: + """Test unsetting restoring flag when safe restore is disabled.""" + # Set the config value for this test + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = False + + session_loader._unset_restoring_flag_when_idle() + + # Should not check pending files or 
web requests when safe restore is disabled + # The method should return early without checking attributes + + +def test_session_loader_unset_restoring_flag_when_idle_pending_files( + session_loader: SessionLoader, mock_single_shot, cfg_options +) -> None: + """Test unsetting restoring flag when files are still pending.""" + # Set the config value for this test + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = True + + session_loader.tagger._pending_files_count = 1 + session_loader.tagger.webservice.num_pending_web_requests = 0 + + with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + session_loader._unset_restoring_flag_when_idle() + + # Should schedule another check + mock_single_shot.assert_called_once() + + +def test_session_loader_unset_restoring_flag_when_idle_pending_requests( + session_loader: SessionLoader, mock_single_shot, cfg_options +) -> None: + """Test unsetting restoring flag when web requests are still pending.""" + # Set the config value for this test + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = True + + session_loader.tagger._pending_files_count = 0 + session_loader.tagger.webservice.num_pending_web_requests = 1 + + with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + session_loader._unset_restoring_flag_when_idle() + + # Should schedule another check + mock_single_shot.assert_called_once() + + +def test_session_loader_unset_restoring_flag_when_idle_all_done(session_loader: SessionLoader, cfg_options) -> None: + """Test unsetting restoring flag when all operations are complete.""" + # Set the config value for this test + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = True + + session_loader.tagger._pending_files_count = 0 + session_loader.tagger.webservice.num_pending_web_requests = 0 + + session_loader._unset_restoring_flag_when_idle() + + # Should unset the flag + assert session_loader.tagger._restoring_session is False + + +def 
test_session_loader_finalize_loading(session_loader: SessionLoader, mock_single_shot) -> None: + """Test finalizing the loading process.""" + with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + session_loader.finalize_loading() + + mock_single_shot.assert_called_once() + + +def test_session_loader_initialization() -> None: + """Test SessionLoader initialization.""" + tagger_mock = Mock() + loader = SessionLoader(tagger_mock) + + assert loader.tagger == tagger_mock + assert loader.loaded_albums == {} + assert loader._saved_expanded_albums is None + assert hasattr(loader, 'track_mover') + + +def test_session_loader_ensure_album_visible(session_loader: SessionLoader) -> None: + """Test ensuring album is visible and expanded.""" + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + ui_item_mock = Mock() + album_mock.ui_item = ui_item_mock + + # Mock run_when_loaded to call callback immediately + def run_callback(callback): + callback() + + album_mock.run_when_loaded.side_effect = run_callback + + session_loader._saved_expanded_albums = {"album-123"} + session_loader._ensure_album_visible(album_mock) + + album_mock.update.assert_called_once_with(update_tracks=True) + ui_item_mock.setExpanded.assert_called_once_with(True) + + +def test_session_loader_ensure_album_visible_no_saved_state(session_loader: SessionLoader) -> None: + """Test ensuring album is visible when no saved expansion state.""" + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + ui_item_mock = Mock() + album_mock.ui_item = ui_item_mock + + # Mock run_when_loaded to call callback immediately + def run_callback(callback): + callback() + + album_mock.run_when_loaded.side_effect = run_callback + + session_loader._saved_expanded_albums = None + session_loader._ensure_album_visible(album_mock) + + album_mock.update.assert_called_once_with(update_tracks=True) + ui_item_mock.setExpanded.assert_called_once_with(True) + + +def 
test_session_loader_ensure_album_visible_no_ui_item(session_loader: SessionLoader) -> None: + """Test ensuring album is visible when album has no UI item.""" + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.ui_item = None + + # Mock run_when_loaded to call callback immediately + def run_callback(callback): + callback() + + album_mock.run_when_loaded.side_effect = run_callback + + session_loader._ensure_album_visible(album_mock) + + album_mock.update.assert_called_once_with(update_tracks=True) + # Should not crash when ui_item is None diff --git a/test/session/test_session_manager.py b/test/session/test_session_manager.py new file mode 100644 index 0000000000..1a2fa59be4 --- /dev/null +++ b/test/session/test_session_manager.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for session manager.""" + +from pathlib import Path +from unittest.mock import Mock, patch + +from picard.session.constants import SessionConstants +from picard.session.session_manager import export_session, load_session_from_path, save_session_to_path + + +@patch('picard.session.session_manager.SessionExporter') +def test_export_session_function(mock_exporter_class: Mock) -> None: + """Test the export_session function.""" + mock_exporter = Mock() + mock_exporter_class.return_value = mock_exporter + mock_exporter.export_session.return_value = {"version": 1} + + tagger_mock = Mock() + result = export_session(tagger_mock) + + mock_exporter_class.assert_called_once() + mock_exporter.export_session.assert_called_once_with(tagger_mock) + assert result == {"version": 1} + + +@patch('picard.session.session_manager.export_session') +def test_save_session_to_path(mock_export_session: Mock, tmp_path: Path) -> None: + """Test saving session to path.""" + mock_export_session.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + session_file = tmp_path / "test" + + save_session_to_path(tagger_mock, session_file) + + assert session_file.with_suffix(".mbps").exists() + mock_export_session.assert_called_once_with(tagger_mock) + + +@patch('picard.session.session_manager.export_session') +def test_save_session_to_path_with_extension(mock_export_session: Mock, tmp_path: Path) -> None: + """Test saving session to path with existing extension.""" + mock_export_session.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + session_file = tmp_path / "test.mbps" + + save_session_to_path(tagger_mock, session_file) + + assert session_file.exists() + mock_export_session.assert_called_once_with(tagger_mock) + + +@patch('picard.session.session_manager.export_session') +def test_save_session_to_path_with_different_extension(mock_export_session: Mock, tmp_path: Path) -> None: + """Test saving session to path with different extension.""" + 
mock_export_session.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + session_file = tmp_path / "test.json" + + save_session_to_path(tagger_mock, session_file) + + # Should add .mbps extension + expected_file = session_file.with_suffix(".mbps") + assert expected_file.exists() + mock_export_session.assert_called_once_with(tagger_mock) + + +@patch('picard.session.session_manager.export_session') +def test_save_session_to_path_string_path(mock_export_session: Mock, tmp_path: Path) -> None: + """Test saving session to string path.""" + mock_export_session.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + session_file = tmp_path / "test" + + save_session_to_path(tagger_mock, str(session_file)) + + assert session_file.with_suffix(".mbps").exists() + mock_export_session.assert_called_once_with(tagger_mock) + + +@patch('picard.session.session_manager.export_session') +def test_save_session_to_path_creates_json_content(mock_export_session: Mock, tmp_path: Path) -> None: + """Test that saved session file contains proper JSON content.""" + session_data = { + "version": 1, + "options": {"rename_files": True}, + "items": [{"file_path": "/test/file.mp3"}], + } + mock_export_session.return_value = session_data + + tagger_mock = Mock() + session_file = tmp_path / "test" + + save_session_to_path(tagger_mock, session_file) + + saved_file = session_file.with_suffix(".mbps") + assert saved_file.exists() + + # Read and verify content + content = saved_file.read_text(encoding="utf-8") + assert '"version": 1' in content + assert '"rename_files": true' in content + assert '"/test/file.mp3"' in content + + +@patch('picard.session.session_manager.SessionLoader') +def test_load_session_from_path(mock_loader_class: Mock) -> None: + """Test loading session from path.""" + mock_loader = Mock() + mock_loader_class.return_value = mock_loader + + tagger_mock = Mock() + session_file = Path("/test/session.mbps") + + load_session_from_path(tagger_mock, 
session_file) + + mock_loader_class.assert_called_once_with(tagger_mock) + mock_loader.load_from_path.assert_called_once_with(session_file) + mock_loader.finalize_loading.assert_called_once() + + +@patch('picard.session.session_manager.SessionLoader') +def test_load_session_from_path_string_path(mock_loader_class: Mock) -> None: + """Test loading session from string path.""" + mock_loader = Mock() + mock_loader_class.return_value = mock_loader + + tagger_mock = Mock() + session_file = "/test/session.mbps" + + load_session_from_path(tagger_mock, session_file) + + mock_loader_class.assert_called_once_with(tagger_mock) + mock_loader.load_from_path.assert_called_once_with(session_file) + mock_loader.finalize_loading.assert_called_once() + + +def test_save_session_to_path_file_overwrite(tmp_path: Path) -> None: + """Test that save_session_to_path overwrites existing files.""" + existing_file = tmp_path / "test.mbps" + existing_file.write_text("old content", encoding="utf-8") + + with patch('picard.session.session_manager.export_session') as mock_export: + mock_export.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + save_session_to_path(tagger_mock, existing_file) + + # File should be overwritten + content = existing_file.read_text(encoding="utf-8") + assert content != "old content" + assert '"version": 1' in content + + +def test_save_session_to_path_creates_directory(tmp_path: Path) -> None: + """Test that save_session_to_path creates parent directories.""" + with patch('picard.session.session_manager.export_session') as mock_export: + mock_export.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + session_file = tmp_path / "subdir" / "test.mbps" + + save_session_to_path(tagger_mock, session_file) + + assert session_file.exists() + assert session_file.parent.exists() + + +def test_save_session_to_path_utf8_encoding(tmp_path: Path) -> None: + """Test that save_session_to_path uses UTF-8 encoding.""" + with 
patch('picard.session.session_manager.export_session') as mock_export: + # Session data with Unicode characters + session_data = { + "version": 1, + "items": [{"file_path": "/test/歌曲.mp3"}], + } + mock_export.return_value = session_data + + tagger_mock = Mock() + session_file = tmp_path / "test" + + save_session_to_path(tagger_mock, session_file) + + saved_file = session_file.with_suffix(".mbps") + content = saved_file.read_text(encoding="utf-8") + assert "歌曲" in content + + +def test_save_session_to_path_json_formatting(tmp_path: Path) -> None: + """Test that save_session_to_path uses proper JSON formatting.""" + with patch('picard.session.session_manager.export_session') as mock_export: + session_data = { + "version": 1, + "options": {"rename_files": True, "move_files": False}, + "items": [], + } + mock_export.return_value = session_data + + tagger_mock = Mock() + session_file = tmp_path / "test" + + save_session_to_path(tagger_mock, session_file) + + saved_file = session_file.with_suffix(".mbps") + content = saved_file.read_text(encoding="utf-8") + + # Should be properly formatted JSON with indentation + assert content.startswith("{\n") + assert " \"version\": 1" in content + assert " \"options\": {" in content + assert " \"rename_files\": true" in content + + +def test_export_session_returns_dict() -> None: + """Test that export_session returns a dictionary.""" + with patch('picard.session.session_manager.SessionExporter') as mock_exporter_class: + mock_exporter = Mock() + mock_exporter_class.return_value = mock_exporter + mock_exporter.export_session.return_value = {"version": 1, "items": []} + + tagger_mock = Mock() + result = export_session(tagger_mock) + + assert isinstance(result, dict) + assert "version" in result + assert "items" in result + + +def test_load_session_from_path_loader_initialization() -> None: + """Test that SessionLoader is properly initialized.""" + with patch('picard.session.session_manager.SessionLoader') as mock_loader_class: + 
mock_loader = Mock() + mock_loader_class.return_value = mock_loader + + tagger_mock = Mock() + session_file = Path("/test/session.mbps") + + load_session_from_path(tagger_mock, session_file) + + # Verify SessionLoader was initialized with correct tagger + mock_loader_class.assert_called_once_with(tagger_mock) + + +def test_load_session_from_path_loader_methods_called() -> None: + """Test that all required SessionLoader methods are called.""" + with patch('picard.session.session_manager.SessionLoader') as mock_loader_class: + mock_loader = Mock() + mock_loader_class.return_value = mock_loader + + tagger_mock = Mock() + session_file = Path("/test/session.mbps") + + load_session_from_path(tagger_mock, session_file) + + # Verify all required methods were called + mock_loader.load_from_path.assert_called_once_with(session_file) + mock_loader.finalize_loading.assert_called_once() + + +def test_save_session_to_path_extension_handling(tmp_path: Path) -> None: + """Test various extension handling scenarios.""" + with patch('picard.session.session_manager.export_session') as mock_export: + mock_export.return_value = {"version": 1} + + tagger_mock = Mock() + + # Test cases: (input_path, expected_suffix) + test_cases = [ + ("test", ".mbps"), + ("test.mbps", ".mbps"), + ("test.json", ".mbps"), + ("test.txt", ".mbps"), + ] + + for input_path, expected_suffix in test_cases: + session_file = tmp_path / input_path + save_session_to_path(tagger_mock, session_file) + + expected_file = session_file.with_suffix(expected_suffix) + assert expected_file.exists(), f"Failed for input: {input_path}" + + # Clean up for next test + expected_file.unlink() + + +def test_session_constants_used_correctly(tmp_path: Path) -> None: + """Test that session constants are used correctly in manager functions.""" + # This test ensures that the session manager uses the correct constants + assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps" + assert SessionConstants.SESSION_FORMAT_VERSION == 1 + + # 
Test that the extension is used in save function + with patch('picard.session.session_manager.export_session') as mock_export: + mock_export.return_value = {"version": SessionConstants.SESSION_FORMAT_VERSION} + + tagger_mock = Mock() + session_file = tmp_path / "session" + + save_session_to_path(tagger_mock, session_file) + + # Verify the exported data has correct version + mock_export.assert_called_once_with(tagger_mock) diff --git a/test/test_sessions.py b/test/session/test_sessions.py similarity index 70% rename from test/test_sessions.py rename to test/session/test_sessions.py index 28397818bc..f856e7fe59 100644 --- a/test/test_sessions.py +++ b/test/session/test_sessions.py @@ -24,77 +24,13 @@ import picard.config as picard_config from picard.metadata import Metadata -from picard.session import export_session +from picard.session.session_manager import export_session +# Import stub classes from conftest.py +from .conftest import _StubAlbum, _StubFile, _StubTagger, _StubTrack import pytest -class _StubFile: - def __init__(self, filename: str, metadata: Metadata, saved: bool, parent_item: Any = None) -> None: - self.filename = filename - self.metadata = metadata - self._saved = saved - self.parent_item = parent_item - - def is_saved(self) -> bool: - return self._saved - - -class _StubTrack: - def __init__(self, track_id: str, scripted: Metadata, current: Metadata) -> None: - self.id = track_id - self.scripted_metadata = scripted - self.metadata = current - - -class _StubAlbum: - def __init__(self, album_id: str, orig: Metadata, current: Metadata, tracks: list[_StubTrack]) -> None: - self.id = album_id - self.orig_metadata = orig - self.metadata = current - self.tracks = tracks - - -class _StubTagger: - def __init__(self, files: list[_StubFile], albums: dict[str, Any] | None = None) -> None: - self._files = files - self.albums = albums or {} - - def iter_all_files(self): - yield from self._files - - -@pytest.fixture(autouse=True) -def 
_fake_script_config(monkeypatch: pytest.MonkeyPatch) -> SimpleNamespace: - """Provide minimal config so functions accessing get_config() have settings.""" - - class _FakeSetting(dict): - def raw_value(self, name, qtype=None): - return self.get(name) - - def key(self, name): - return name - - cfg = SimpleNamespace(setting=_FakeSetting({'enabled_plugins': []}), sync=lambda: None) - import picard.config as picard_config_mod - import picard.extension_points as ext_points_mod - import picard.session as session_mod - - monkeypatch.setattr(picard_config_mod, 'get_config', lambda: cfg, raising=True) - monkeypatch.setattr(ext_points_mod, 'get_config', lambda: cfg, raising=True) - monkeypatch.setattr(session_mod, 'get_config', lambda: cfg, raising=True) - return cfg - - -@pytest.fixture() -def cfg_options() -> None: - cfg = picard_config.get_config() - # Ensure required keys exist with defaults - cfg.setting['rename_files'] = False - cfg.setting['move_files'] = False - cfg.setting['dont_write_tags'] = False - - def test_export_session_empty(tmp_path: Path) -> None: # Ensure options keys exist cfg = picard_config.get_config() @@ -208,3 +144,49 @@ def test_export_session_listifies_override_values(cfg_options: None, value: Any, tagger = _StubTagger(files=[], albums={'album-X': alb}) data = export_session(tagger) assert data['album_overrides'] == {'album-X': {'genre': expected}} + + +def test_export_session_includes_unmatched_albums(cfg_options: None) -> None: + """Test that albums with no files matched are included in unmatched_albums.""" + # Create an album with no files matched to it + album_orig = Metadata() + album_cur = Metadata() + alb = _StubAlbum('album-unmatched', orig=album_orig, current=album_cur, tracks=[]) + + # Tagger with no files but has the album loaded + tagger = _StubTagger(files=[], albums={'album-unmatched': alb}) + + data = export_session(tagger) + + # Should include the unmatched album + assert 'unmatched_albums' in data + assert data['unmatched_albums'] 
== ['album-unmatched'] + + +def test_export_session_excludes_albums_with_files_from_unmatched(cfg_options: None, tmp_path: Path) -> None: + """Test that albums with files matched are not included in unmatched_albums.""" + + # Create a mock parent item that represents a track in an album + class _StubParentItem: + def __init__(self, album_id: str) -> None: + self.album = SimpleNamespace(id=album_id) + + # Create an album + album_orig = Metadata() + album_cur = Metadata() + alb = _StubAlbum('album-with-files', orig=album_orig, current=album_cur, tracks=[]) + + # Create a file that's matched to the album + fm = Metadata() + fm['title'] = 'Song' + parent_item = _StubParentItem('album-with-files') + f = _StubFile(filename=str(tmp_path / 'song.mp3'), metadata=fm, saved=True, parent_item=parent_item) + + # Tagger with the file and album + tagger = _StubTagger(files=[f], albums={'album-with-files': alb}) + + data = export_session(tagger) + + # Should not include the album in unmatched_albums since it has files + assert 'unmatched_albums' in data + assert data['unmatched_albums'] == [] diff --git a/test/session/test_track_mover.py b/test/session/test_track_mover.py new file mode 100644 index 0000000000..7ef3038ac7 --- /dev/null +++ b/test/session/test_track_mover.py @@ -0,0 +1,351 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for track mover.""" + +from pathlib import Path +from unittest.mock import Mock, patch + +from picard.album import Album +from picard.file import File +from picard.session.constants import SessionConstants +from picard.session.track_mover import TrackMover + +import pytest + + +@pytest.fixture +def track_mover() -> TrackMover: + """Provide a TrackMover instance.""" + tagger_mock = Mock() + return TrackMover(tagger_mock) + + +@pytest.fixture +def mock_album() -> Mock: + """Provide a mock album instance.""" + album_mock = Mock(spec=Album) + album_mock.id = "album-123" + album_mock.tracks = [] + return album_mock + + +def test_track_mover_move_files_to_tracks(track_mover: TrackMover, mock_album: Mock) -> None: + """Test moving files to tracks.""" + track_specs = [(Path("/test/file1.mp3"), "recording-123"), (Path("/test/file2.mp3"), "recording-456")] + + with patch('picard.session.track_mover.RetryHelper'): + track_mover.move_files_to_tracks(mock_album, track_specs) + + mock_album.run_when_loaded.assert_called_once() + + +def test_track_mover_schedule_move(track_mover: TrackMover, mock_album: Mock) -> None: + """Test scheduling file moves.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) + + mock_retry_helper.retry_until.assert_called_once() + + +def test_track_mover_move_file_to_nat(track_mover: TrackMover) -> None: + """Test moving file to NAT.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" 
+ + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + track_mover.move_file_to_nat(fpath, recording_id) + + mock_retry_helper.retry_until.assert_called_once() + + +def test_track_mover_schedule_move_file_pending(track_mover: TrackMover, mock_album: Mock) -> None: + """Test scheduling move when file is in PENDING state.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file in PENDING state + file_mock = Mock(spec=File) + file_mock.state = File.PENDING + track_mover.tagger.files.get.return_value = file_mock + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper'): + track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) + + # Should not attempt move when file is pending + file_mock.move.assert_not_called() + + +def test_track_mover_schedule_move_file_not_found(track_mover: TrackMover, mock_album: Mock) -> None: + """Test scheduling move when file is not found.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file not found + track_mover.tagger.files.get.return_value = None + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) + + # Should not attempt move when file is not found + mock_retry_helper.retry_until.assert_called_once() + + +def test_track_mover_schedule_move_track_not_found(track_mover: TrackMover, mock_album: Mock) -> None: + """Test scheduling move when track is not found.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file ready + file_mock = Mock(spec=File) + file_mock.state = 0 # Not PENDING + 
track_mover.tagger.files.get.return_value = file_mock + + # Mock album with no matching track + mock_album.tracks = [] # No tracks + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper'): + track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) + + # Should not attempt move when track is not found + file_mock.move.assert_not_called() + + +def test_track_mover_schedule_move_success(track_mover: TrackMover, mock_album: Mock) -> None: + """Test successful file move to track.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file ready + file_mock = Mock(spec=File) + file_mock.state = 1 # Not PENDING (PENDING = 0) + track_mover.tagger.files.get.return_value = file_mock + + # Mock track + track_mock = Mock() + track_mock.id = recording_id + mock_album.tracks = [track_mock] + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + # Mock retry_until to call the action function immediately if condition is met + def mock_retry_until(condition_fn, action_fn, delay_ms): + if condition_fn(): + action_fn() + + mock_retry_helper.retry_until.side_effect = mock_retry_until + + track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) + + # Should attempt move when both file and track are ready + file_mock.move.assert_called_once_with(track_mock) + + +def test_track_mover_move_file_to_nat_file_pending(track_mover: TrackMover) -> None: + """Test moving file to NAT when file is in PENDING state.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file in PENDING state + file_mock = Mock(spec=File) + file_mock.state = File.PENDING + 
track_mover.tagger.files.get.return_value = file_mock + + with patch('picard.session.track_mover.RetryHelper'): + track_mover.move_file_to_nat(fpath, recording_id) + + # Should not attempt NAT move when file is pending + track_mover.tagger.move_file_to_nat.assert_not_called() + + +def test_track_mover_move_file_to_nat_file_not_found(track_mover: TrackMover) -> None: + """Test moving file to NAT when file is not found.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file not found + track_mover.tagger.files.get.return_value = None + + with patch('picard.session.track_mover.RetryHelper'): + track_mover.move_file_to_nat(fpath, recording_id) + + # Should not attempt NAT move when file is not found + track_mover.tagger.move_file_to_nat.assert_not_called() + + +def test_track_mover_move_file_to_nat_success(track_mover: TrackMover) -> None: + """Test successful file move to NAT.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file ready + file_mock = Mock(spec=File) + file_mock.state = 1 # Not PENDING (PENDING = 0) + track_mover.tagger.files.get.return_value = file_mock + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + # Mock retry_until to call the action function immediately if condition is met + def mock_retry_until(condition_fn, action_fn, delay_ms): + if condition_fn(): + action_fn() + + mock_retry_helper.retry_until.side_effect = mock_retry_until + + track_mover.move_file_to_nat(fpath, recording_id) + + # Should attempt NAT move when file is ready + track_mover.tagger.move_file_to_nat.assert_called_once_with(file_mock, recording_id) + + +def test_track_mover_move_files_to_tracks_empty_list(track_mover: TrackMover, mock_album: Mock) -> None: + """Test moving files to tracks with empty list.""" + track_mover.move_files_to_tracks(mock_album, []) + + mock_album.run_when_loaded.assert_called_once() + + +def test_track_mover_move_files_to_tracks_multiple_files(track_mover: 
TrackMover, mock_album: Mock) -> None: + """Test moving multiple files to tracks.""" + track_specs = [ + (Path("/test/file1.mp3"), "recording-123"), + (Path("/test/file2.mp3"), "recording-456"), + (Path("/test/file3.mp3"), "recording-789"), + ] + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + track_mover.move_files_to_tracks(mock_album, track_specs) + + # Should schedule moves for all files + assert mock_retry_helper.retry_until.call_count == 3 + + +def test_track_mover_initialization() -> None: + """Test TrackMover initialization.""" + tagger_mock = Mock() + mover = TrackMover(tagger_mock) + + assert mover.tagger == tagger_mock + + +def test_track_mover_retry_until_condition_check(track_mover: TrackMover, mock_album: Mock) -> None: + """Test that retry_until is called with correct condition function.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file ready + file_mock = Mock(spec=File) + file_mock.state = 1 # Not PENDING (PENDING = 0) + track_mover.tagger.files.get.return_value = file_mock + + # Mock track + track_mock = Mock() + track_mock.id = recording_id + mock_album.tracks = [track_mock] + + # Mock the run_when_loaded to call the callback immediately + def run_callback(callback): + callback() + + mock_album.run_when_loaded.side_effect = run_callback + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) + + # Verify retry_until was called with correct parameters + mock_retry_helper.retry_until.assert_called_once() + call_args = mock_retry_helper.retry_until.call_args + + # Check that condition function returns True when file and track are ready + condition_fn = call_args[1]['condition_fn'] + assert condition_fn() is True + + # Check that 
action function is provided + action_fn = call_args[1]['action_fn'] + assert callable(action_fn) + + # Check delay parameter + assert call_args[1]['delay_ms'] == SessionConstants.FAST_RETRY_DELAY_MS + + +def test_track_mover_retry_until_condition_check_nat(track_mover: TrackMover) -> None: + """Test that retry_until is called with correct condition function for NAT moves.""" + fpath = Path("/test/file.mp3") + recording_id = "recording-123" + + # Mock file ready + file_mock = Mock(spec=File) + file_mock.state = 1 # Not PENDING (PENDING = 0) + track_mover.tagger.files.get.return_value = file_mock + + with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + track_mover.move_file_to_nat(fpath, recording_id) + + # Verify retry_until was called with correct parameters + mock_retry_helper.retry_until.assert_called_once() + call_args = mock_retry_helper.retry_until.call_args + + # Check that condition function returns True when file is ready + condition_fn = call_args[1]['condition_fn'] + assert condition_fn() is True + + # Check that action function is provided + action_fn = call_args[1]['action_fn'] + assert callable(action_fn) + + # Check delay parameter + assert call_args[1]['delay_ms'] == SessionConstants.DEFAULT_RETRY_DELAY_MS From a596368a50b3b47901bf61394d527763e91469f0 Mon Sep 17 00:00:00 2001 From: kyle nguyen Date: Mon, 8 Sep 2025 02:33:21 -0400 Subject: [PATCH 03/30] Session files should be compressed --- picard/session/constants.py | 2 +- picard/session/metadata_handler.py | 52 ++++++++++++- picard/session/session_exporter.py | 22 +++++- picard/session/session_loader.py | 27 ++++--- picard/session/session_manager.py | 23 +++--- picard/tagger.py | 4 +- picard/ui/mainwindow/__init__.py | 9 ++- test/session/conftest.py | 11 ++- test/session/test_metadata_handler.py | 5 +- test/session/test_session_constants.py | 2 +- test/session/test_session_exporter.py | 9 +-- test/session/test_session_loader.py | 2 +- test/session/test_session_manager.py | 
100 ++++++++++++++----------- test/session/test_sessions.py | 2 + 14 files changed, 186 insertions(+), 84 deletions(-) diff --git a/picard/session/constants.py b/picard/session/constants.py index 48e2a3cb9f..e43105ea56 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -29,7 +29,7 @@ class SessionConstants: """Constants for session management operations.""" # File handling - SESSION_FILE_EXTENSION = ".mbps" + SESSION_FILE_EXTENSION = ".mbps.gz" SESSION_FORMAT_VERSION = 1 # Retry delays in milliseconds diff --git a/picard/session/metadata_handler.py b/picard/session/metadata_handler.py index 3afd658fd7..aacb9004c5 100644 --- a/picard/session/metadata_handler.py +++ b/picard/session/metadata_handler.py @@ -29,8 +29,8 @@ from pathlib import Path from typing import Any +from picard import log from picard.file import File -from picard.log import log from picard.metadata import Metadata from picard.session.constants import SessionConstants @@ -133,13 +133,14 @@ def safe_apply_metadata(file: File, metadata: Metadata) -> bool: metadata.length = file.metadata.length or file.orig_metadata.length file.copy_metadata(metadata) file.update() - return True except (AttributeError, KeyError) as e: log.warning(f"Failed to apply metadata to {file.filename}: {e}") return False - except Exception as e: - log.error(f"Unexpected error applying metadata: {e}") + except (OSError, ValueError, TypeError) as e: + log.error(f"Error applying metadata to {file.filename}: {e}") return False + else: + return True @staticmethod def apply_saved_metadata_if_any(tagger: Any, file_path_to_md: dict[Path, Metadata]) -> None: @@ -178,3 +179,46 @@ def apply_saved_metadata_if_any(tagger: Any, file_path_to_md: dict[Path, Metadat ), delay_ms=SessionConstants.DEFAULT_RETRY_DELAY_MS, ) + + @staticmethod + def apply_tag_deltas_if_any(tagger: Any, file_path_to_tags: dict[Path, dict[str, list[Any]]]) -> None: + """Apply tag deltas to files when they are ready. 
+ + Parameters + ---------- + tagger : Any + The Picard tagger instance. + file_path_to_tags : dict[Path, dict[str, list[Any]]] + Mapping of file paths to tag deltas to apply. + """ + from picard.session.retry_helper import RetryHelper + + pending: list[Path] = [] + for fpath, tags in file_path_to_tags.items(): + file = tagger.files.get(str(fpath)) + if not file or file.state == File.PENDING: + pending.append(fpath) + continue + + try: + # Merge deltas onto current metadata; preserve length + md = Metadata(file.metadata) + for key, values in tags.items(): + if key in SessionConstants.EXCLUDED_OVERRIDE_TAGS or str(key).startswith( + SessionConstants.INTERNAL_TAG_PREFIX + ): + continue + md[key] = MetadataHandler.as_list(values) + MetadataHandler.safe_apply_metadata(file, md) + except (AttributeError, KeyError, OSError, ValueError, TypeError) as e: + log.debug(f"Error applying tag deltas to {fpath}: {e}") + pending.append(fpath) + + if pending: + RetryHelper.retry_until( + condition_fn=lambda: len(pending) == 0, + action_fn=lambda: MetadataHandler.apply_tag_deltas_if_any( + tagger, {p: file_path_to_tags[p] for p in pending} + ), + delay_ms=SessionConstants.DEFAULT_RETRY_DELAY_MS, + ) diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index a5ffb5f64a..b54c2e1318 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -159,9 +159,27 @@ def _export_file_item(self, file: Any) -> dict[str, Any]: "location": self._serialize_location(loc), } - # Persist unsaved tag changes + # Persist unsaved tag changes as deltas vs base metadata if not file.is_saved(): - entry["metadata"] = {"tags": MetadataHandler.serialize_metadata_for_file(file)} + parent = getattr(file, "parent_item", None) + base_md = None + # If the file is under a track, diff against the track's scripted metadata (user-visible basis) + if parent is not None and hasattr(parent, "album"): + base_md = getattr(parent, "scripted_metadata", 
getattr(parent, "metadata", None)) + # Otherwise, diff against the file's original on-disk metadata + if base_md is None: + base_md = getattr(file, "orig_metadata", None) + + if base_md is not None: + diff = file.metadata.diff(base_md) + delta_tags = { + k: MetadataHandler.as_list(v) + for k, v in diff.rawitems() + if k not in SessionConstants.EXCLUDED_OVERRIDE_TAGS + and not str(k).startswith(SessionConstants.INTERNAL_TAG_PREFIX) + } + if delta_tags: + entry["metadata"] = {"tags": delta_tags} return entry diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index d54ebe5696..2cbc530f38 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -26,6 +26,7 @@ from __future__ import annotations +import gzip import json from pathlib import Path from typing import Any @@ -117,7 +118,13 @@ def _read_session_file(self, path: Path) -> dict[str, Any]: If the file does not exist. """ p = Path(path) - return json.loads(p.read_text(encoding="utf-8")) + # Detect gzip by magic bytes and decode accordingly + raw = p.read_bytes() + if len(raw) >= 2 and raw[0] == 0x1F and raw[1] == 0x8B: + text = gzip.decompress(raw).decode("utf-8") + return json.loads(text) + else: + return json.loads(raw.decode("utf-8")) def _prepare_session(self, data: dict[str, Any]) -> None: """Prepare the session for loading. @@ -188,7 +195,7 @@ def _group_items_by_location(self, items: list[dict[str, Any]]) -> GroupedItems: return GroupedItems(unclustered=by_unclustered, by_cluster=by_cluster, by_album=by_album, nat_items=nat_items) - def _extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, Any]: + def _extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, dict[str, list[Any]]]: """Extract metadata from session items. Parameters @@ -198,16 +205,16 @@ def _extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, Any]: Returns ------- - dict[Path, Any] - Mapping of file paths to their metadata. 
+ dict[Path, dict[str, list[Any]]] + Mapping of file paths to their metadata tag deltas. """ - metadata_by_path: dict[Path, Any] = {} + metadata_by_path: dict[Path, dict[str, list[Any]]] = {} for it in items: fpath = Path(it["file_path"]).expanduser() md = it.get("metadata", {}) if "tags" in md: tags = {k: MetadataHandler.as_list(v) for k, v in md["tags"].items()} - metadata_by_path[fpath] = MetadataHandler.deserialize_metadata(tags) + metadata_by_path[fpath] = tags return metadata_by_path def _load_items(self, grouped_items: GroupedItems) -> None: @@ -404,17 +411,17 @@ def run() -> None: album.run_when_loaded(run) - def _schedule_metadata_application(self, metadata_map: dict[Path, Any]) -> None: + def _schedule_metadata_application(self, metadata_map: dict[Path, dict[str, list[Any]]]) -> None: """Schedule metadata application after files are loaded. Parameters ---------- - metadata_map : dict[Path, Any] - Mapping of file paths to their metadata. + metadata_map : dict[Path, dict[str, list[Any]]] + Mapping of file paths to their metadata tag deltas. """ QtCore.QTimer.singleShot( SessionConstants.DEFAULT_RETRY_DELAY_MS, - lambda: MetadataHandler.apply_saved_metadata_if_any(self.tagger, metadata_map), + lambda: MetadataHandler.apply_tag_deltas_if_any(self.tagger, metadata_map), ) def _unset_restoring_flag_when_idle(self) -> None: diff --git a/picard/session/session_manager.py b/picard/session/session_manager.py index ece7e0b707..bf4aaa4894 100644 --- a/picard/session/session_manager.py +++ b/picard/session/session_manager.py @@ -38,12 +38,13 @@ Notes ----- -Session files use the .mbps extension and contain JSON data with version -information, options, file locations, and metadata overrides. +Session files use the .mbps.gz extension and contain gzip-compressed JSON data +with version information, options, file locations, and metadata overrides. 
""" from __future__ import annotations +import gzip import json from pathlib import Path from typing import Any @@ -90,20 +91,24 @@ def save_session_to_path(tagger: Any, path: str | Path) -> None: tagger : Any The Picard tagger instance to save session data from. path : str | Path - The file path to save the session to. If the extension is not .mbps, - it will be automatically added. + The file path to save the session to. If the extension does not end with + .mbps.gz, it will be automatically added. Notes ----- - The session is saved as JSON with UTF-8 encoding and 2-space indentation. - If the file already exists, it will be overwritten. + The session is saved as minified JSON (UTF-8) and gzip-compressed. If the + file already exists, it will be overwritten. """ p = Path(path) - if p.suffix.lower() != SessionConstants.SESSION_FILE_EXTENSION: - p = p.with_suffix(SessionConstants.SESSION_FILE_EXTENSION) + # Ensure multi-part extension .mbps.gz + if not str(p).lower().endswith(SessionConstants.SESSION_FILE_EXTENSION): + p = Path(str(p) + SessionConstants.SESSION_FILE_EXTENSION) data = export_session(tagger) p.parent.mkdir(parents=True, exist_ok=True) - p.write_text(json.dumps(data, ensure_ascii=False, indent=2), encoding="utf-8") + # Minify JSON and gzip-compress to reduce file size + json_text = json.dumps(data, ensure_ascii=False, separators=(",", ":")) + compressed = gzip.compress(json_text.encode("utf-8")) + p.write_bytes(compressed) def load_session_from_path(tagger: Any, path: str | Path) -> None: diff --git a/picard/tagger.py b/picard/tagger.py index 56d52cee52..aa0aa49954 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -680,7 +680,9 @@ def _autosave(): if not path: path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None if not path: - path = Path(USER_DIR) / 'autosave.mbps' + from picard.session.constants import SessionConstants + + path = Path(USER_DIR) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) 
config.persist['session_autosave_path'] = path with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 457c70ff59..08997fc095 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -90,6 +90,7 @@ get_option_title, ) from picard.script import get_file_naming_script_presets +from picard.session.constants import SessionConstants from picard.track import Track from picard.util import ( IgnoreUpdatesContext, @@ -1054,7 +1055,9 @@ def save_session(self): path, _filter = FileDialog.getSaveFileName( parent=self, dir=start_dir, - filter=_("MusicBrainz Picard Session (*.mbps);;All files (*)"), + filter=( + _("MusicBrainz Picard Session (%s);;All files (*)") % ("*" + SessionConstants.SESSION_FILE_EXTENSION) + ), ) if path: try: @@ -1075,7 +1078,9 @@ def load_session(self): path, _filter = FileDialog.getOpenFileName( parent=self, dir=start_dir, - filter=_("MusicBrainz Picard Session (*.mbps);;All files (*)"), + filter=( + _("MusicBrainz Picard Session (%s);;All files (*)") % ("*" + SessionConstants.SESSION_FILE_EXTENSION) + ), ) if path: try: diff --git a/test/session/conftest.py b/test/session/conftest.py index 31feba23d3..414183dde6 100644 --- a/test/session/conftest.py +++ b/test/session/conftest.py @@ -47,11 +47,14 @@ class _StubFile: """Stub file class for testing.""" - def __init__(self, filename: str, metadata: Metadata, saved: bool, parent_item: Any = None) -> None: + def __init__( + self, filename: str, metadata: Metadata, saved: bool, parent_item: Any = None, orig: Metadata | None = None + ) -> None: self.filename = filename self.metadata = metadata self._saved = saved self.parent_item = parent_item + self.orig_metadata = orig if orig is not None else Metadata() def is_saved(self) -> bool: return self._saved @@ -298,9 +301,11 @@ def sample_session_data() -> dict[str, Any]: # 
============================================================================= -def create_stub_file(filename: str, metadata: Metadata, saved: bool = False, parent_item: Any = None) -> _StubFile: +def create_stub_file( + filename: str, metadata: Metadata, saved: bool = False, parent_item: Any = None, orig: Metadata | None = None +) -> _StubFile: """Create a stub file for testing.""" - return _StubFile(filename, metadata, saved, parent_item) + return _StubFile(filename, metadata, saved, parent_item, orig) def create_stub_track(track_id: str, scripted: Metadata, current: Metadata) -> _StubTrack: diff --git a/test/session/test_metadata_handler.py b/test/session/test_metadata_handler.py index 56a80a6ec6..5e03bbcbf0 100644 --- a/test/session/test_metadata_handler.py +++ b/test/session/test_metadata_handler.py @@ -213,11 +213,12 @@ def test_safe_apply_metadata_key_error(mock_log: Mock) -> None: def test_safe_apply_metadata_unexpected_error(mock_log: Mock) -> None: """Test metadata application with unexpected error.""" file_mock = Mock(spec=File) + file_mock.filename = "test_file.mp3" file_mock.metadata = Mock() file_mock.metadata.length = None file_mock.orig_metadata = Mock() file_mock.orig_metadata.length = 789012 - file_mock.copy_metadata.side_effect = RuntimeError("Unexpected error") + file_mock.copy_metadata.side_effect = OSError("File system error") metadata = Metadata() @@ -225,7 +226,7 @@ def test_safe_apply_metadata_unexpected_error(mock_log: Mock) -> None: assert result is False mock_log.error.assert_called_once() - assert "Unexpected error" in str(mock_log.error.call_args) + assert "File system error" in str(mock_log.error.call_args) @patch('picard.session.retry_helper.RetryHelper') diff --git a/test/session/test_session_constants.py b/test/session/test_session_constants.py index 1c0a652cbc..4c5bba279d 100644 --- a/test/session/test_session_constants.py +++ b/test/session/test_session_constants.py @@ -27,7 +27,7 @@ def test_session_constants_values() -> None: 
"""Test that SessionConstants has expected values.""" - assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps" + assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps.gz" assert SessionConstants.SESSION_FORMAT_VERSION == 1 assert SessionConstants.DEFAULT_RETRY_DELAY_MS == 200 assert SessionConstants.FAST_RETRY_DELAY_MS == 150 diff --git a/test/session/test_session_exporter.py b/test/session/test_session_exporter.py index 4733d5cc98..2e4d3e9c70 100644 --- a/test/session/test_session_exporter.py +++ b/test/session/test_session_exporter.py @@ -97,7 +97,7 @@ def test_session_exporter_export_file_item_saved(session_exporter: SessionExport def test_session_exporter_export_file_item_unsaved(session_exporter: SessionExporter, cfg_options) -> None: - """Test exporting an unsaved file item with metadata.""" + """Test exporting an unsaved file item with metadata (delta vs orig_metadata).""" file_mock = Mock() file_mock.filename = str(Path("/test/file.mp3")) @@ -105,15 +105,14 @@ def test_session_exporter_export_file_item_unsaved(session_exporter: SessionExpo file_mock.parent_item = None file_mock.metadata = Metadata() file_mock.metadata["title"] = "Test Song" + # Provide an original metadata baseline so exporter can compute a delta + file_mock.orig_metadata = Metadata() tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [file_mock] tagger_mock.albums = {} - with ( - patch.object(file_mock.metadata, 'rawitems', return_value=[("title", ["Test Song"])]), - patch.object(session_exporter.location_detector, 'detect') as mock_detect, - ): + with patch.object(session_exporter.location_detector, 'detect') as mock_detect: mock_detect.return_value = SessionItemLocation(type="unclustered") data = session_exporter.export_session(tagger_mock) diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py index 9f9deb391c..a1319a998a 100644 --- a/test/session/test_session_loader.py +++ b/test/session/test_session_loader.py @@ -230,7 +230,7 @@ def 
test_session_loader_extract_metadata(session_loader: SessionLoader) -> None: assert len(metadata_map) == 2 assert Path("/test/file1.mp3") in metadata_map assert Path("/test/file3.mp3") in metadata_map - assert metadata_map[Path("/test/file1.mp3")]["title"] == "Song 1" + assert metadata_map[Path("/test/file1.mp3")]["title"] == ["Song 1"] def test_session_loader_extract_metadata_empty_items(session_loader: SessionLoader) -> None: diff --git a/test/session/test_session_manager.py b/test/session/test_session_manager.py index 1a2fa59be4..edb35e09a2 100644 --- a/test/session/test_session_manager.py +++ b/test/session/test_session_manager.py @@ -52,7 +52,9 @@ def test_save_session_to_path(mock_export_session: Mock, tmp_path: Path) -> None save_session_to_path(tagger_mock, session_file) - assert session_file.with_suffix(".mbps").exists() + # Expect .mbps.gz to be appended + expected_file = Path(str(session_file) + ".mbps.gz") + assert expected_file.exists() mock_export_session.assert_called_once_with(tagger_mock) @@ -62,7 +64,7 @@ def test_save_session_to_path_with_extension(mock_export_session: Mock, tmp_path mock_export_session.return_value = {"version": 1, "items": []} tagger_mock = Mock() - session_file = tmp_path / "test.mbps" + session_file = tmp_path / "test.mbps.gz" save_session_to_path(tagger_mock, session_file) @@ -80,8 +82,8 @@ def test_save_session_to_path_with_different_extension(mock_export_session: Mock save_session_to_path(tagger_mock, session_file) - # Should add .mbps extension - expected_file = session_file.with_suffix(".mbps") + # Should add .mbps.gz extension + expected_file = Path(str(session_file) + ".mbps.gz") assert expected_file.exists() mock_export_session.assert_called_once_with(tagger_mock) @@ -96,7 +98,7 @@ def test_save_session_to_path_string_path(mock_export_session: Mock, tmp_path: P save_session_to_path(tagger_mock, str(session_file)) - assert session_file.with_suffix(".mbps").exists() + assert Path(str(session_file) + ".mbps.gz").exists() 
mock_export_session.assert_called_once_with(tagger_mock) @@ -115,14 +117,18 @@ def test_save_session_to_path_creates_json_content(mock_export_session: Mock, tm save_session_to_path(tagger_mock, session_file) - saved_file = session_file.with_suffix(".mbps") + saved_file = Path(str(session_file) + ".mbps.gz") assert saved_file.exists() - # Read and verify content - content = saved_file.read_text(encoding="utf-8") - assert '"version": 1' in content - assert '"rename_files": true' in content - assert '"/test/file.mp3"' in content + # Read and verify content (gzip -> parse JSON) + import gzip + import json + + content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") + data = json.loads(content) + assert data["version"] == 1 + assert data["options"]["rename_files"] is True + assert data["items"][0]["file_path"] == "/test/file.mp3" @patch('picard.session.session_manager.SessionLoader') @@ -132,7 +138,7 @@ def test_load_session_from_path(mock_loader_class: Mock) -> None: mock_loader_class.return_value = mock_loader tagger_mock = Mock() - session_file = Path("/test/session.mbps") + session_file = Path("/test/session.mbps.gz") load_session_from_path(tagger_mock, session_file) @@ -148,7 +154,7 @@ def test_load_session_from_path_string_path(mock_loader_class: Mock) -> None: mock_loader_class.return_value = mock_loader tagger_mock = Mock() - session_file = "/test/session.mbps" + session_file = "/test/session.mbps.gz" load_session_from_path(tagger_mock, session_file) @@ -157,21 +163,24 @@ def test_load_session_from_path_string_path(mock_loader_class: Mock) -> None: mock_loader.finalize_loading.assert_called_once() -def test_save_session_to_path_file_overwrite(tmp_path: Path) -> None: +@patch('picard.session.session_manager.export_session') +def test_save_session_to_path_file_overwrite(mock_export_session: Mock, tmp_path: Path) -> None: """Test that save_session_to_path overwrites existing files.""" - existing_file = tmp_path / "test.mbps" + existing_file = tmp_path / 
"test.mbps.gz" existing_file.write_text("old content", encoding="utf-8") - with patch('picard.session.session_manager.export_session') as mock_export: - mock_export.return_value = {"version": 1, "items": []} + mock_export_session.return_value = {"version": 1, "items": []} - tagger_mock = Mock() - save_session_to_path(tagger_mock, existing_file) + tagger_mock = Mock() + save_session_to_path(tagger_mock, existing_file) - # File should be overwritten - content = existing_file.read_text(encoding="utf-8") - assert content != "old content" - assert '"version": 1' in content + # File should be overwritten + import gzip + import json + + content = gzip.decompress(existing_file.read_bytes()).decode("utf-8") + data = json.loads(content) + assert data["version"] == 1 def test_save_session_to_path_creates_directory(tmp_path: Path) -> None: @@ -180,7 +189,7 @@ def test_save_session_to_path_creates_directory(tmp_path: Path) -> None: mock_export.return_value = {"version": 1, "items": []} tagger_mock = Mock() - session_file = tmp_path / "subdir" / "test.mbps" + session_file = tmp_path / "subdir" / "test.mbps.gz" save_session_to_path(tagger_mock, session_file) @@ -203,8 +212,10 @@ def test_save_session_to_path_utf8_encoding(tmp_path: Path) -> None: save_session_to_path(tagger_mock, session_file) - saved_file = session_file.with_suffix(".mbps") - content = saved_file.read_text(encoding="utf-8") + saved_file = Path(str(session_file) + ".mbps.gz") + import gzip + + content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") assert "歌曲" in content @@ -223,14 +234,15 @@ def test_save_session_to_path_json_formatting(tmp_path: Path) -> None: save_session_to_path(tagger_mock, session_file) - saved_file = session_file.with_suffix(".mbps") - content = saved_file.read_text(encoding="utf-8") + saved_file = Path(str(session_file) + ".mbps.gz") + import gzip - # Should be properly formatted JSON with indentation - assert content.startswith("{\n") - assert " \"version\": 1" in content - 
assert " \"options\": {" in content - assert " \"rename_files\": true" in content + content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") + # Content is minified JSON + assert content.startswith("{") + assert '"version":1' in content + assert '"options":{' in content + assert '"rename_files":true' in content def test_export_session_returns_dict() -> None: @@ -255,7 +267,7 @@ def test_load_session_from_path_loader_initialization() -> None: mock_loader_class.return_value = mock_loader tagger_mock = Mock() - session_file = Path("/test/session.mbps") + session_file = Path("/test/session.mbps.gz") load_session_from_path(tagger_mock, session_file) @@ -270,7 +282,7 @@ def test_load_session_from_path_loader_methods_called() -> None: mock_loader_class.return_value = mock_loader tagger_mock = Mock() - session_file = Path("/test/session.mbps") + session_file = Path("/test/session.mbps.gz") load_session_from_path(tagger_mock, session_file) @@ -286,19 +298,21 @@ def test_save_session_to_path_extension_handling(tmp_path: Path) -> None: tagger_mock = Mock() - # Test cases: (input_path, expected_suffix) + # Test cases: (input_path) test_cases = [ - ("test", ".mbps"), - ("test.mbps", ".mbps"), - ("test.json", ".mbps"), - ("test.txt", ".mbps"), + "test", + "test.mbps.gz", + "test.json", + "test.txt", ] - for input_path, expected_suffix in test_cases: + for input_path in test_cases: session_file = tmp_path / input_path save_session_to_path(tagger_mock, session_file) - expected_file = session_file.with_suffix(expected_suffix) + expected_file = ( + session_file if str(session_file).endswith(".mbps.gz") else Path(str(session_file) + ".mbps.gz") + ) assert expected_file.exists(), f"Failed for input: {input_path}" # Clean up for next test @@ -308,7 +322,7 @@ def test_save_session_to_path_extension_handling(tmp_path: Path) -> None: def test_session_constants_used_correctly(tmp_path: Path) -> None: """Test that session constants are used correctly in manager functions.""" # 
This test ensures that the session manager uses the correct constants - assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps" + assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps.gz" assert SessionConstants.SESSION_FORMAT_VERSION == 1 # Test that the extension is used in save function diff --git a/test/session/test_sessions.py b/test/session/test_sessions.py index f856e7fe59..7e172ddbb6 100644 --- a/test/session/test_sessions.py +++ b/test/session/test_sessions.py @@ -54,6 +54,8 @@ def test_export_session_includes_items_and_metadata_tags(cfg_options: None, tmp_ m['~internal'] = 'x' m['length'] = '123456' f = _StubFile(filename=str(tmp_path / 'a.flac'), metadata=m, saved=saved, parent_item=None) + # Provide baseline so deltas can be computed + f.orig_metadata = Metadata() tagger = _StubTagger(files=[f]) data = export_session(tagger) From b44d36e3840c4ec08aee0f7c24c307d26593b2e0 Mon Sep 17 00:00:00 2001 From: kyle nguyen Date: Mon, 8 Sep 2025 03:39:09 -0400 Subject: [PATCH 04/30] Enable caching of mb data in sessions --- picard/album.py | 2 + picard/options.py | 15 +++-- picard/session/__init__.py | 21 +------ picard/session/constants.py | 16 +++++ picard/session/location_detector.py | 5 +- picard/session/retry_helper.py | 2 +- picard/session/session_exporter.py | 29 +++++++++ picard/session/session_loader.py | 85 ++++++++++++++++++++++++++ picard/ui/mainwindow/__init__.py | 2 + picard/ui/options/sessions.py | 17 ++++-- test/session/conftest.py | 19 +++--- test/session/test_location_detector.py | 3 +- test/session/test_metadata_handler.py | 6 +- test/session/test_session_exporter.py | 34 ++++------- test/session/test_sessions.py | 8 +-- 15 files changed, 190 insertions(+), 74 deletions(-) diff --git a/picard/album.py b/picard/album.py index afd184733f..d44512952a 100644 --- a/picard/album.py +++ b/picard/album.py @@ -538,6 +538,8 @@ def _load_track(node, mm, artists, extra_metadata): track.metadata['~totalalbumtracks'] = totalalbumtracks if multiartists: 
track.metadata['~multiartist'] = '1' + # Preserve release JSON for session export after load finished + self._release_node_cache = self._release_node del self._release_node del self._release_artist_nodes self._tracks_loaded = True diff --git a/picard/options.py b/picard/options.py index 144e62f503..d39e448834 100644 --- a/picard/options.py +++ b/picard/options.py @@ -69,6 +69,7 @@ DEFAULT_WIN_COMPAT_REPLACEMENTS, ) from picard.i18n import N_ +from picard.session.constants import SessionMessages from picard.ui.colors import InterfaceColors @@ -499,25 +500,31 @@ def make_default_toolbar_layout(): 'setting', 'session_safe_restore', True, - title=N_("Preserve session placement and edits when loading sessions"), + title=N_(SessionMessages.SESSION_SAFE_RESTORE_TITLE), ) BoolOption( 'setting', 'session_load_last_on_startup', False, - title=N_("Load last saved session on startup"), + title=N_(SessionMessages.SESSION_LOAD_LAST_TITLE), ) IntOption( 'setting', 'session_autosave_interval_min', 0, - title=N_("Auto-save session every N minutes (0 disables)"), + title=N_(SessionMessages.SESSION_AUTOSAVE_TITLE), ) BoolOption( 'setting', 'session_backup_on_crash', True, - title=N_("Attempt to keep a session backup on unexpected shutdown"), + title=N_(SessionMessages.SESSION_BACKUP_TITLE), +) +BoolOption( + 'setting', + 'session_include_mb_data', + False, + title=N_(SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE), ) # picard/ui/searchdialog/album.py diff --git a/picard/session/__init__.py b/picard/session/__init__.py index aca65e7370..a829b74c1e 100644 --- a/picard/session/__init__.py +++ b/picard/session/__init__.py @@ -18,23 +18,4 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. -"""Session management package for Picard. - -This package provides functionality to save and restore Picard sessions, -including file locations, metadata overrides, and configuration options. 
-""" - -from picard.session.session_data import SessionItemLocation -from picard.session.session_manager import ( - export_session, - load_session_from_path, - save_session_to_path, -) - - -__all__ = [ - 'SessionItemLocation', - 'export_session', - 'load_session_from_path', - 'save_session_to_path', -] +"""Session management package for Picard.""" diff --git a/picard/session/constants.py b/picard/session/constants.py index e43105ea56..ee7c7e1a99 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -46,3 +46,19 @@ class SessionConstants: LOCATION_ALBUM_UNMATCHED = "album_unmatched" LOCATION_CLUSTER = "cluster" LOCATION_NAT = "nat" + + +class SessionMessages: + """Centralized session-related message strings. + + Define raw, untranslated strings. Call sites should mark for translation: + - API/config titles: wrap with N_() + - UI labels: wrap with _() + """ + + # Option titles (API/config) + SESSION_SAFE_RESTORE_TITLE = "Honor local edits and placement on load (no auto-matching)" + SESSION_LOAD_LAST_TITLE = "Load last saved session on startup" + SESSION_AUTOSAVE_TITLE = "Auto-save session every N minutes (0 disables)" + SESSION_BACKUP_TITLE = "Attempt to keep a session backup on unexpected shutdown" + SESSION_INCLUDE_MB_DATA_TITLE = "Include MusicBrainz data in saved sessions (faster loads, risk of stale data)" diff --git a/picard/session/location_detector.py b/picard/session/location_detector.py index 882ff18dd8..81bcbaaa13 100644 --- a/picard/session/location_detector.py +++ b/picard/session/location_detector.py @@ -31,6 +31,7 @@ from picard.file import File from picard.session.constants import SessionConstants from picard.session.session_data import SessionItemLocation +from picard.track import Track class LocationDetector: @@ -95,12 +96,12 @@ def _is_cluster_parent(self, parent: object) -> bool: """ return isinstance(parent, Cluster) - def _detect_track_location(self, parent: object) -> SessionItemLocation: + def 
_detect_track_location(self, parent: Track) -> SessionItemLocation: """Detect location for files under a track. Parameters ---------- - parent : object + parent : Track The track parent item. Returns diff --git a/picard/session/retry_helper.py b/picard/session/retry_helper.py index 493cd3641a..49b1508966 100644 --- a/picard/session/retry_helper.py +++ b/picard/session/retry_helper.py @@ -135,6 +135,6 @@ def is_album_ready() -> bool: if not album: return False # Check if album has tracks loaded - return hasattr(album, 'tracks') and len(album.tracks) > 0 + return hasattr(album, 'tracks') and hasattr(album.tracks, '__len__') and len(album.tracks) > 0 RetryHelper.retry_until(is_album_ready, action_fn, delay_ms) diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index b54c2e1318..d38f669040 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -78,6 +78,8 @@ def export_session(self, tagger: Any) -> dict[str, Any]: "album_overrides": {}, "unmatched_albums": [], "expanded_albums": [], + # Optional: cache of MB release JSON keyed by album id + "mb_cache": {}, } # Export file items @@ -94,6 +96,12 @@ def export_session(self, tagger: Any) -> dict[str, Any]: if unmatched_albums: session_data["unmatched_albums"] = unmatched_albums + # Optionally export MB JSON cache per album + include_mb = config.setting['session_include_mb_data'] + + if include_mb: + session_data["mb_cache"] = self._export_mb_cache(tagger) + # Export UI state (expanded albums) expanded_albums = self._export_ui_state(tagger) if expanded_albums: @@ -101,6 +109,27 @@ def export_session(self, tagger: Any) -> dict[str, Any]: return session_data + def _export_mb_cache(self, tagger: Any) -> dict[str, Any]: + """Export MB release JSON for currently loaded albums. + + Parameters + ---------- + tagger : Any + The Picard tagger instance. + + Returns + ------- + dict[str, Any] + Mapping of album MBID to release JSON node. 
+ """ + cache: dict[str, Any] = {} + for album_id, album in getattr(tagger, 'albums', {}).items(): + # Prefer cached node saved after tracks were loaded; fall back to live node if still present + node = getattr(album, '_release_node_cache', None) or getattr(album, '_release_node', None) + if node: + cache[album_id] = node + return cache + def _export_ui_state(self, tagger: Any) -> list[str]: """Export UI expansion state for albums in album view. diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 2cbc530f38..02b32c1c64 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -77,6 +77,7 @@ def load_from_path(self, path: str | Path) -> None: 6. Apply metadata overrides 7. Schedule metadata application """ + self._emit_progress("read", details={"path": str(path)}) data = self._read_session_file(path) self._prepare_session(data) self._restore_options(data.get("options", {})) @@ -87,16 +88,100 @@ def load_from_path(self, path: str | Path) -> None: grouped_items = self._group_items_by_location(items) metadata_map = self._extract_metadata(items) + # If mb_cache is provided, try to pre-load albums from cached JSON + mb_cache = data.get("mb_cache", {}) + if mb_cache: + self._emit_progress("preload_cache", details={"albums": len(mb_cache)}) + self._preload_albums_from_cache(mb_cache, grouped_items) + + self._emit_progress( + "load_items", + details={ + "files": len(grouped_items.unclustered) + + sum(len(v) for v in grouped_items.by_cluster.values()) + + sum(len(g.unmatched) + len(g.tracks) for g in grouped_items.by_album.values()) + }, + ) self._load_items(grouped_items) self._load_unmatched_albums(data.get("unmatched_albums", [])) + self._emit_progress("apply_overrides") self._apply_overrides(data) if metadata_map: self._schedule_metadata_application(metadata_map) # Restore UI state (expanded albums and file view roots) + self._emit_progress("finalize") self._restore_ui_state(data) + # ---------------------- 
+ # Progress reporting API + # ---------------------- + def _emit_progress(self, stage: str, details: dict[str, Any] | None = None) -> None: + try: + # Forward to main window / status indicator if available + if hasattr(self.tagger, 'window') and hasattr(self.tagger.window, 'status_indicators'): + for indicator in self.tagger.window.status_indicators: + if hasattr(indicator, 'session_progress'): + indicator.session_progress(stage, details or {}) + # Additionally, update status bar text when possible + if hasattr(self.tagger, 'window') and hasattr(self.tagger.window, 'set_statusbar_message'): + msg = self._format_stage_message(stage, details) + if msg: + self.tagger.window.set_statusbar_message(msg) + except Exception: + # Do not let progress reporting break loading + pass + + def _format_stage_message(self, stage: str, details: dict[str, Any] | None) -> str | None: + if stage == "read": + return "Reading session…" + if stage == "preload_cache": + return f"Preloading albums from cache ({(details or {}).get('albums', 0)})…" + if stage == "load_items": + return f"Loading files and albums ({(details or {}).get('files', 0)} files)…" + if stage == "apply_overrides": + return "Applying overrides…" + if stage == "finalize": + # Mention pending web requests if any + pending = getattr(self.tagger.webservice, 'num_pending_web_requests', 0) + if pending: + return f"Waiting on network ({pending} requests)…" + return "Finalizing…" + return None + + def _preload_albums_from_cache(self, mb_cache: dict[str, Any], grouped_items: GroupedItems) -> None: + """Preload albums from embedded MB JSON cache when available. + + Parameters + ---------- + mb_cache : dict[str, Any] + Mapping of album IDs to MB release JSON nodes. + grouped_items : GroupedItems + Items grouped by location type (used to know which albums are needed). 
+ """ + needed_album_ids = set(grouped_items.by_album.keys()) | set(mb_cache.keys()) + for album_id in needed_album_ids: + node = mb_cache.get(album_id) + if not node: + continue + album = self.tagger.albums.get(album_id) + if not album: + # Create album instance via normal path but intercept to parse from JSON node + album = self.tagger.load_album(album_id) + # If album supports parsing from cached release node, do so + parse_from_json = getattr(album, '_parse_release', None) + if callable(parse_from_json): + try: + parse_from_json(node) + album._run_album_metadata_processors() + album.update(update_tracks=True) + self.loaded_albums[album_id] = album + self._ensure_album_visible(album) + except Exception: + # Fall back to normal loading path if parsing fails + continue + def _read_session_file(self, path: Path) -> dict[str, Any]: """Read and parse session file. diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 08997fc095..6529ad9b3c 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -1084,6 +1084,8 @@ def load_session(self): ) if path: try: + # Initial progress feedback before heavy load + self.set_statusbar_message(N_("Loading session from '%(path)s' …"), {'path': path}) load_session_from_path(self.tagger, path) config.persist['current_directory'] = os.path.dirname(path) config.persist['last_session_path'] = path diff --git a/picard/ui/options/sessions.py b/picard/ui/options/sessions.py index 208bf9bd90..0d8f298fca 100644 --- a/picard/ui/options/sessions.py +++ b/picard/ui/options/sessions.py @@ -23,6 +23,7 @@ from picard.config import get_config from picard.extension_points.options_pages import register_options_page from picard.i18n import N_, gettext as _ +from picard.session.constants import SessionMessages from picard.ui.options import OptionsPage @@ -39,31 +40,33 @@ class SessionsOptionsPage(OptionsPage): ('session_load_last_on_startup', ['load_last_checkbox']), 
('session_autosave_interval_min', ['autosave_spin']), ('session_backup_on_crash', ['backup_checkbox']), + ('session_include_mb_data', ['include_mb_data_checkbox']), ) def __init__(self, parent=None): super().__init__(parent) self.vbox = QtWidgets.QVBoxLayout(self) - self.safe_restore_checkbox = QtWidgets.QCheckBox( - _('Honor local edits and placement on load (no auto-matching)') - ) + self.safe_restore_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_SAFE_RESTORE_TITLE)) self.vbox.addWidget(self.safe_restore_checkbox) - self.load_last_checkbox = QtWidgets.QCheckBox(_('Load last saved session on startup')) + self.load_last_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_LOAD_LAST_TITLE)) self.vbox.addWidget(self.load_last_checkbox) autosave_layout = QtWidgets.QHBoxLayout() - self.autosave_label = QtWidgets.QLabel(_('Auto-save session every N minutes (0 disables)')) + self.autosave_label = QtWidgets.QLabel(_(SessionMessages.SESSION_AUTOSAVE_TITLE)) self.autosave_spin = QtWidgets.QSpinBox() self.autosave_spin.setRange(0, 1440) autosave_layout.addWidget(self.autosave_label) autosave_layout.addWidget(self.autosave_spin) self.vbox.addLayout(autosave_layout) - self.backup_checkbox = QtWidgets.QCheckBox(_('Attempt to keep a session backup on unexpected shutdown')) + self.backup_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_BACKUP_TITLE)) self.vbox.addWidget(self.backup_checkbox) + self.include_mb_data_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE)) + self.vbox.addWidget(self.include_mb_data_checkbox) + self.vbox.addStretch(1) def load(self): @@ -72,6 +75,7 @@ def load(self): self.load_last_checkbox.setChecked(config.setting['session_load_last_on_startup']) self.autosave_spin.setValue(config.setting['session_autosave_interval_min']) self.backup_checkbox.setChecked(config.setting['session_backup_on_crash']) + self.include_mb_data_checkbox.setChecked(config.setting['session_include_mb_data']) def save(self): config 
= get_config() @@ -79,6 +83,7 @@ def save(self): config.setting['session_load_last_on_startup'] = self.load_last_checkbox.isChecked() config.setting['session_autosave_interval_min'] = int(self.autosave_spin.value()) config.setting['session_backup_on_crash'] = self.backup_checkbox.isChecked() + config.setting['session_include_mb_data'] = self.include_mb_data_checkbox.isChecked() register_options_page(SessionsOptionsPage) diff --git a/test/session/conftest.py b/test/session/conftest.py index 414183dde6..20e0ff9f9e 100644 --- a/test/session/conftest.py +++ b/test/session/conftest.py @@ -126,7 +126,8 @@ def cfg_options() -> None: # Ensure required keys exist with defaults cfg.setting['rename_files'] = False cfg.setting['move_files'] = False - cfg.setting['dont_write_tags'] = False + cfg.setting['dont_write_tags'] = True + cfg.setting['session_include_mb_data'] = False # ============================================================================= @@ -138,7 +139,7 @@ def cfg_options() -> None: def mock_file() -> Mock: """Provide a mock file object.""" file_mock = Mock(spec=File) - file_mock.filename = "/test/file.mp3" + file_mock.filename = str(Path("/test/file.mp3")) return file_mock @@ -280,11 +281,11 @@ def sample_session_data() -> dict[str, Any]: }, "items": [ { - "file_path": "/test/file1.mp3", + "file_path": str(Path("/test/file1.mp3")), "location": {"type": "unclustered"}, }, { - "file_path": "/test/file2.mp3", + "file_path": str(Path("/test/file2.mp3")), "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, "metadata": {"tags": {"title": ["Test Song"]}}, }, @@ -379,23 +380,23 @@ def create_session_items_with_locations() -> list[dict[str, Any]]: """Create a list of session items with different location types.""" return [ { - "file_path": "/test/unclustered.mp3", + "file_path": str(Path("/test/unclustered.mp3")), "location": {"type": "unclustered"}, }, { - "file_path": "/test/cluster.mp3", + "file_path": 
str(Path("/test/cluster.mp3")), "location": {"type": "cluster", "cluster_title": "Album", "cluster_artist": "Artist"}, }, { - "file_path": "/test/track.mp3", + "file_path": str(Path("/test/track.mp3")), "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, }, { - "file_path": "/test/unmatched.mp3", + "file_path": str(Path("/test/unmatched.mp3")), "location": {"type": "album_unmatched", "album_id": "album-789"}, }, { - "file_path": "/test/nat.mp3", + "file_path": str(Path("/test/nat.mp3")), "location": {"type": "nat", "recording_id": "recording-999"}, }, ] diff --git a/test/session/test_location_detector.py b/test/session/test_location_detector.py index b33fe435a0..e81b8dbe66 100644 --- a/test/session/test_location_detector.py +++ b/test/session/test_location_detector.py @@ -20,6 +20,7 @@ """Tests for location detector.""" +from pathlib import Path from unittest.mock import Mock from picard.album import Album, NatAlbum @@ -41,7 +42,7 @@ def location_detector() -> LocationDetector: def mock_file() -> Mock: """Provide a mock file object.""" file_mock = Mock(spec=File) - file_mock.filename = "/test/file.mp3" + file_mock.filename = str(Path("/test/file.mp3")) return file_mock diff --git a/test/session/test_metadata_handler.py b/test/session/test_metadata_handler.py index 5e03bbcbf0..8b1ad69725 100644 --- a/test/session/test_metadata_handler.py +++ b/test/session/test_metadata_handler.py @@ -173,7 +173,7 @@ def test_safe_apply_metadata_success_with_none_length(mock_log: Mock) -> None: def test_safe_apply_metadata_attribute_error(mock_log: Mock) -> None: """Test metadata application with AttributeError.""" file_mock = Mock(spec=File) - file_mock.filename = "/test/file.mp3" + file_mock.filename = str(Path("/test/file.mp3")) file_mock.metadata = Mock() file_mock.metadata.length = None file_mock.orig_metadata = Mock() @@ -193,7 +193,7 @@ def test_safe_apply_metadata_attribute_error(mock_log: Mock) -> None: def 
test_safe_apply_metadata_key_error(mock_log: Mock) -> None: """Test metadata application with KeyError.""" file_mock = Mock(spec=File) - file_mock.filename = "/test/file.mp3" + file_mock.filename = str(Path("/test/file.mp3")) file_mock.metadata = Mock() file_mock.metadata.length = None file_mock.orig_metadata = Mock() @@ -213,7 +213,7 @@ def test_safe_apply_metadata_key_error(mock_log: Mock) -> None: def test_safe_apply_metadata_unexpected_error(mock_log: Mock) -> None: """Test metadata application with unexpected error.""" file_mock = Mock(spec=File) - file_mock.filename = "test_file.mp3" + file_mock.filename = str(Path("test_file.mp3")) file_mock.metadata = Mock() file_mock.metadata.length = None file_mock.orig_metadata = Mock() diff --git a/test/session/test_session_exporter.py b/test/session/test_session_exporter.py index 2e4d3e9c70..cd9db38e91 100644 --- a/test/session/test_session_exporter.py +++ b/test/session/test_session_exporter.py @@ -47,31 +47,23 @@ def mock_tagger() -> Mock: return tagger_mock -def test_session_exporter_export_session_empty(session_exporter: SessionExporter, mock_tagger: Mock) -> None: +def test_session_exporter_export_session_empty( + session_exporter: SessionExporter, mock_tagger: Mock, cfg_options +) -> None: """Test exporting an empty session.""" - config_mock = Mock() - config_mock.setting = { + data = session_exporter.export_session(mock_tagger) + + assert data["version"] == SessionConstants.SESSION_FORMAT_VERSION + assert data["options"] == { "rename_files": False, "move_files": False, "dont_write_tags": True, } - - with patch('picard.session.session_exporter.get_config') as mock_get_config: - mock_get_config.return_value = config_mock - - data = session_exporter.export_session(mock_tagger) - - assert data["version"] == SessionConstants.SESSION_FORMAT_VERSION - assert data["options"] == { - "rename_files": False, - "move_files": False, - "dont_write_tags": True, - } - assert data["items"] == [] - assert 
data["album_track_overrides"] == {} - assert data["album_overrides"] == {} - assert data["unmatched_albums"] == [] - assert data["expanded_albums"] == [] + assert data["items"] == [] + assert data["album_track_overrides"] == {} + assert data["album_overrides"] == {} + assert data["unmatched_albums"] == [] + assert data["expanded_albums"] == [] def test_session_exporter_export_file_item_saved(session_exporter: SessionExporter, cfg_options) -> None: @@ -260,7 +252,7 @@ def test_session_exporter_export_albums_with_files(session_exporter: SessionExpo # Create file with parent item pointing to album file_mock = Mock() - file_mock.filename = "/test/file.mp3" + file_mock.filename = str(Path("/test/file.mp3")) file_mock.is_saved.return_value = True parent_item_mock = Mock() parent_item_mock.album = album_mock diff --git a/test/session/test_sessions.py b/test/session/test_sessions.py index 7e172ddbb6..21200ace85 100644 --- a/test/session/test_sessions.py +++ b/test/session/test_sessions.py @@ -31,13 +31,7 @@ import pytest -def test_export_session_empty(tmp_path: Path) -> None: - # Ensure options keys exist - cfg = picard_config.get_config() - cfg.setting['rename_files'] = False - cfg.setting['move_files'] = False - cfg.setting['dont_write_tags'] = True - +def test_export_session_empty(tmp_path: Path, cfg_options) -> None: data = export_session(_StubTagger(files=[], albums={})) assert isinstance(data, dict) assert data['version'] == 1 From 8383a97bee16922e6c47ce4d671fe9d475ee6ff6 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Mon, 8 Sep 2025 07:02:03 -0400 Subject: [PATCH 05/30] Do not catch blind exceptions --- picard/session/session_loader.py | 49 ++++++++++++++++++-------------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 02b32c1c64..a997c77ee2 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -26,6 +26,7 @@ from __future__ import 
annotations +from contextlib import suppress import gzip import json from pathlib import Path @@ -35,6 +36,7 @@ from picard.album import Album from picard.config import get_config +from picard.i18n import gettext as _ from picard.session.constants import SessionConstants from picard.session.metadata_handler import MetadataHandler from picard.session.session_data import AlbumItems, GroupedItems @@ -118,7 +120,8 @@ def load_from_path(self, path: str | Path) -> None: # Progress reporting API # ---------------------- def _emit_progress(self, stage: str, details: dict[str, Any] | None = None) -> None: - try: + # Do not let progress reporting break loading + with suppress(AttributeError, RuntimeError, TypeError): # Forward to main window / status indicator if available if hasattr(self.tagger, 'window') and hasattr(self.tagger.window, 'status_indicators'): for indicator in self.tagger.window.status_indicators: @@ -129,26 +132,32 @@ def _emit_progress(self, stage: str, details: dict[str, Any] | None = None) -> N msg = self._format_stage_message(stage, details) if msg: self.tagger.window.set_statusbar_message(msg) - except Exception: - # Do not let progress reporting break loading - pass def _format_stage_message(self, stage: str, details: dict[str, Any] | None) -> str | None: - if stage == "read": - return "Reading session…" - if stage == "preload_cache": - return f"Preloading albums from cache ({(details or {}).get('albums', 0)})…" - if stage == "load_items": - return f"Loading files and albums ({(details or {}).get('files', 0)} files)…" - if stage == "apply_overrides": - return "Applying overrides…" - if stage == "finalize": - # Mention pending web requests if any + def msg_preload(d: dict[str, Any] | None) -> str: + return _("Preloading albums from cache ({albums})…").format(albums=(d or {}).get('albums', 0)) + + def msg_load_items(d: dict[str, Any] | None) -> str: + return _("Loading files and albums ({files} files)…").format(files=(d or {}).get('files', 0)) + + def 
msg_finalize(_d: dict[str, Any] | None) -> str: pending = getattr(self.tagger.webservice, 'num_pending_web_requests', 0) if pending: - return f"Waiting on network ({pending} requests)…" - return "Finalizing…" - return None + return _("Waiting on network ({requests} requests)…").format(requests=pending) + return _("Finalizing…") + + dispatch: dict[str, Any] = { + "read": _("Reading session…"), + "apply_overrides": _("Applying overrides…"), + "preload_cache": msg_preload, + "load_items": msg_load_items, + "finalize": msg_finalize, + } + + entry = dispatch.get(stage) + if entry is None: + return None + return entry(details) if callable(entry) else entry def _preload_albums_from_cache(self, mb_cache: dict[str, Any], grouped_items: GroupedItems) -> None: """Preload albums from embedded MB JSON cache when available. @@ -172,15 +181,13 @@ def _preload_albums_from_cache(self, mb_cache: dict[str, Any], grouped_items: Gr # If album supports parsing from cached release node, do so parse_from_json = getattr(album, '_parse_release', None) if callable(parse_from_json): - try: + # Fall back to normal loading path if parsing fails + with suppress(KeyError, TypeError, ValueError): parse_from_json(node) album._run_album_metadata_processors() album.update(update_tracks=True) self.loaded_albums[album_id] = album self._ensure_album_visible(album) - except Exception: - # Fall back to normal loading path if parsing fails - continue def _read_session_file(self, path: Path) -> dict[str, Any]: """Read and parse session file. 
From 1251e9c355a14f23200277d346053d273ef78eff Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Mon, 8 Sep 2025 07:16:06 -0400 Subject: [PATCH 06/30] Apply single quote on dict/attr keys --- picard/session/location_detector.py | 8 +- picard/session/session_exporter.py | 60 ++++----- picard/session/session_loader.py | 60 ++++----- test/session/conftest.py | 82 ++++++------- test/session/test_data.py | 16 +-- test/session/test_location_detector.py | 6 +- test/session/test_metadata_handler.py | 58 ++++----- test/session/test_retry_helper.py | 30 ++--- test/session/test_session_exporter.py | 162 ++++++++++++------------- test/session/test_session_loader.py | 122 +++++++++---------- test/session/test_session_manager.py | 80 ++++++------ test/session/test_sessions.py | 76 ++++++------ test/session/test_track_mover.py | 26 ++-- 13 files changed, 393 insertions(+), 393 deletions(-) diff --git a/picard/session/location_detector.py b/picard/session/location_detector.py index 81bcbaaa13..d9c82ae376 100644 --- a/picard/session/location_detector.py +++ b/picard/session/location_detector.py @@ -79,7 +79,7 @@ def _is_track_parent(self, parent: object) -> bool: bool True if parent is a track. """ - return hasattr(parent, "album") and isinstance(parent.album, Album) + return hasattr(parent, 'album') and isinstance(parent.album, Album) def _is_cluster_parent(self, parent: object) -> bool: """Check if parent is a cluster. 
@@ -114,7 +114,7 @@ def _detect_track_location(self, parent: Track) -> SessionItemLocation: return SessionItemLocation(type=SessionConstants.LOCATION_NAT, recording_id=parent.id) # Track placement - if hasattr(parent, "id") and parent.id: + if hasattr(parent, 'id') and parent.id: return SessionItemLocation( type=SessionConstants.LOCATION_TRACK, album_id=parent.album.id, recording_id=parent.id ) @@ -145,8 +145,8 @@ def _detect_cluster_location(self, parent: Cluster) -> SessionItemLocation: return SessionItemLocation( type=SessionConstants.LOCATION_CLUSTER, - cluster_title=str(parent.metadata["album"]), - cluster_artist=str(parent.metadata["albumartist"]), + cluster_title=str(parent.metadata['album']), + cluster_artist=str(parent.metadata['albumartist']), ) def _unclustered_location(self) -> SessionItemLocation: diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index d38f669040..536e7e5aff 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -71,41 +71,41 @@ def export_session(self, tagger: Any) -> dict[str, Any]: """ config = get_config() session_data = { - "version": SessionConstants.SESSION_FORMAT_VERSION, - "options": self._export_options(config), - "items": [], - "album_track_overrides": {}, - "album_overrides": {}, - "unmatched_albums": [], - "expanded_albums": [], + 'version': SessionConstants.SESSION_FORMAT_VERSION, + 'options': self._export_options(config), + 'items': [], + 'album_track_overrides': {}, + 'album_overrides': {}, + 'unmatched_albums': [], + 'expanded_albums': [], # Optional: cache of MB release JSON keyed by album id - "mb_cache": {}, + 'mb_cache': {}, } # Export file items for file in tagger.iter_all_files(): item = self._export_file_item(file) - session_data["items"].append(item) + session_data['items'].append(item) # Export metadata overrides and unmatched albums album_overrides, album_meta_overrides, unmatched_albums = self._export_metadata_overrides(tagger) if 
album_overrides: - session_data["album_track_overrides"] = album_overrides + session_data['album_track_overrides'] = album_overrides if album_meta_overrides: - session_data["album_overrides"] = album_meta_overrides + session_data['album_overrides'] = album_meta_overrides if unmatched_albums: - session_data["unmatched_albums"] = unmatched_albums + session_data['unmatched_albums'] = unmatched_albums # Optionally export MB JSON cache per album include_mb = config.setting['session_include_mb_data'] if include_mb: - session_data["mb_cache"] = self._export_mb_cache(tagger) + session_data['mb_cache'] = self._export_mb_cache(tagger) # Export UI state (expanded albums) expanded_albums = self._export_ui_state(tagger) if expanded_albums: - session_data["expanded_albums"] = expanded_albums + session_data['expanded_albums'] = expanded_albums return session_data @@ -145,7 +145,7 @@ def _export_ui_state(self, tagger: Any) -> list[str]: """ expanded: list[str] = [] for album in tagger.albums.values(): - ui_item = getattr(album, "ui_item", None) + ui_item = getattr(album, 'ui_item', None) if ui_item is not None and ui_item.isExpanded(): expanded.append(album.id) return expanded @@ -164,9 +164,9 @@ def _export_options(self, config: Any) -> dict[str, bool]: Dictionary containing the relevant configuration options. 
""" return { - "rename_files": bool(config.setting["rename_files"]), - "move_files": bool(config.setting["move_files"]), - "dont_write_tags": bool(config.setting["dont_write_tags"]), + 'rename_files': bool(config.setting['rename_files']), + 'move_files': bool(config.setting['move_files']), + 'dont_write_tags': bool(config.setting['dont_write_tags']), } def _export_file_item(self, file: Any) -> dict[str, Any]: @@ -184,20 +184,20 @@ def _export_file_item(self, file: Any) -> dict[str, Any]: """ loc = self.location_detector.detect(file) entry = { - "file_path": str(Path(file.filename)), - "location": self._serialize_location(loc), + 'file_path': str(Path(file.filename)), + 'location': self._serialize_location(loc), } # Persist unsaved tag changes as deltas vs base metadata if not file.is_saved(): - parent = getattr(file, "parent_item", None) + parent = getattr(file, 'parent_item', None) base_md = None # If the file is under a track, diff against the track's scripted metadata (user-visible basis) - if parent is not None and hasattr(parent, "album"): - base_md = getattr(parent, "scripted_metadata", getattr(parent, "metadata", None)) + if parent is not None and hasattr(parent, 'album'): + base_md = getattr(parent, 'scripted_metadata', getattr(parent, 'metadata', None)) # Otherwise, diff against the file's original on-disk metadata if base_md is None: - base_md = getattr(file, "orig_metadata", None) + base_md = getattr(file, 'orig_metadata', None) if base_md is not None: diff = file.metadata.diff(base_md) @@ -208,7 +208,7 @@ def _export_file_item(self, file: Any) -> dict[str, Any]: and not str(k).startswith(SessionConstants.INTERNAL_TAG_PREFIX) } if delta_tags: - entry["metadata"] = {"tags": delta_tags} + entry['metadata'] = {'tags': delta_tags} return entry @@ -228,11 +228,11 @@ def _serialize_location(self, location: SessionItemLocation) -> dict[str, Any]: return { k: v for k, v in { - "type": location.type, - "album_id": location.album_id, - "recording_id": 
location.recording_id, - "cluster_title": location.cluster_title, - "cluster_artist": location.cluster_artist, + 'type': location.type, + 'album_id': location.album_id, + 'recording_id': location.recording_id, + 'cluster_title': location.cluster_title, + 'cluster_artist': location.cluster_artist, }.items() if v is not None } diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index a997c77ee2..06794ed6f3 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -79,33 +79,33 @@ def load_from_path(self, path: str | Path) -> None: 6. Apply metadata overrides 7. Schedule metadata application """ - self._emit_progress("read", details={"path": str(path)}) + self._emit_progress("read", details={'path': str(path)}) data = self._read_session_file(path) self._prepare_session(data) - self._restore_options(data.get("options", {})) + self._restore_options(data.get('options', {})) # Cache saved UI expansion state for later album updates - self._saved_expanded_albums = set(data.get("expanded_albums", [])) if "expanded_albums" in data else None + self._saved_expanded_albums = set(data.get('expanded_albums', [])) if "expanded_albums" in data else None - items = data.get("items", []) + items = data.get('items', []) grouped_items = self._group_items_by_location(items) metadata_map = self._extract_metadata(items) # If mb_cache is provided, try to pre-load albums from cached JSON - mb_cache = data.get("mb_cache", {}) + mb_cache = data.get('mb_cache', {}) if mb_cache: - self._emit_progress("preload_cache", details={"albums": len(mb_cache)}) + self._emit_progress("preload_cache", details={'albums': len(mb_cache)}) self._preload_albums_from_cache(mb_cache, grouped_items) self._emit_progress( "load_items", details={ - "files": len(grouped_items.unclustered) + 'files': len(grouped_items.unclustered) + sum(len(v) for v in grouped_items.by_cluster.values()) + sum(len(g.unmatched) + len(g.tracks) for g in 
grouped_items.by_album.values()) }, ) self._load_items(grouped_items) - self._load_unmatched_albums(data.get("unmatched_albums", [])) + self._load_unmatched_albums(data.get('unmatched_albums', [])) self._emit_progress("apply_overrides") self._apply_overrides(data) @@ -147,11 +147,11 @@ def msg_finalize(_d: dict[str, Any] | None) -> str: return _("Finalizing…") dispatch: dict[str, Any] = { - "read": _("Reading session…"), - "apply_overrides": _("Applying overrides…"), - "preload_cache": msg_preload, - "load_items": msg_load_items, - "finalize": msg_finalize, + 'read': _("Reading session…"), + 'apply_overrides': _("Applying overrides…"), + 'preload_cache': msg_preload, + 'load_items': msg_load_items, + 'finalize': msg_finalize, } entry = dispatch.get(stage) @@ -241,9 +241,9 @@ def _restore_options(self, options: dict[str, Any]) -> None: The options to restore. """ config = get_config() - config.setting["rename_files"] = bool(options.get("rename_files", config.setting["rename_files"])) - config.setting["move_files"] = bool(options.get("move_files", config.setting["move_files"])) - config.setting["dont_write_tags"] = bool(options.get("dont_write_tags", config.setting["dont_write_tags"])) + config.setting['rename_files'] = bool(options.get('rename_files', config.setting['rename_files'])) + config.setting['move_files'] = bool(options.get('move_files', config.setting['move_files'])) + config.setting['dont_write_tags'] = bool(options.get('dont_write_tags', config.setting['dont_write_tags'])) def _group_items_by_location(self, items: list[dict[str, Any]]) -> GroupedItems: """Group items by their target location. 
@@ -264,24 +264,24 @@ def _group_items_by_location(self, items: list[dict[str, Any]]) -> GroupedItems: nat_items: list[tuple[Path, str]] = [] for it in items: - fpath = Path(it["file_path"]).expanduser() - loc = it.get("location", {}) - ltype = str(loc.get("type", SessionConstants.LOCATION_UNCLUSTERED)) + fpath = Path(it['file_path']).expanduser() + loc = it.get('location', {}) + ltype = str(loc.get('type', SessionConstants.LOCATION_UNCLUSTERED)) if ltype == SessionConstants.LOCATION_UNCLUSTERED: by_unclustered.append(fpath) elif ltype == SessionConstants.LOCATION_CLUSTER: - key = (str(loc.get("cluster_title", "")), str(loc.get("cluster_artist", ""))) + key = (str(loc.get('cluster_title', "")), str(loc.get('cluster_artist', ""))) by_cluster.setdefault(key, []).append(fpath) elif ltype in {SessionConstants.LOCATION_ALBUM_UNMATCHED, SessionConstants.LOCATION_TRACK}: - album_id = str(loc.get("album_id")) + album_id = str(loc.get('album_id')) entry = by_album.setdefault(album_id, AlbumItems(unmatched=[], tracks=[])) if ltype == SessionConstants.LOCATION_ALBUM_UNMATCHED: entry.unmatched.append(fpath) else: - entry.tracks.append((fpath, str(loc.get("recording_id")))) + entry.tracks.append((fpath, str(loc.get('recording_id')))) elif ltype == SessionConstants.LOCATION_NAT: - nat_items.append((fpath, str(loc.get("recording_id")))) + nat_items.append((fpath, str(loc.get('recording_id')))) else: by_unclustered.append(fpath) @@ -302,10 +302,10 @@ def _extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, dict[str, """ metadata_by_path: dict[Path, dict[str, list[Any]]] = {} for it in items: - fpath = Path(it["file_path"]).expanduser() - md = it.get("metadata", {}) + fpath = Path(it['file_path']).expanduser() + md = it.get('metadata', {}) if "tags" in md: - tags = {k: MetadataHandler.as_list(v) for k, v in md["tags"].items()} + tags = {k: MetadataHandler.as_list(v) for k, v in md['tags'].items()} metadata_by_path[fpath] = tags return metadata_by_path @@ -411,12 
+411,12 @@ def _restore_ui_state(self, data: dict[str, Any]) -> None: data : dict[str, Any] The session data. """ - expanded_albums = set(data.get("expanded_albums", [])) + expanded_albums = set(data.get('expanded_albums', [])) def set_expansions() -> None: # Album view: set expansion for albums we have for album_id, album in self.tagger.albums.items(): - ui_item = getattr(album, "ui_item", None) + ui_item = getattr(album, 'ui_item', None) if ui_item is None: continue ui_item.setExpanded(album_id in expanded_albums) @@ -435,8 +435,8 @@ def _apply_overrides(self, data: dict[str, Any]) -> None: data : dict[str, Any] The session data containing overrides. """ - track_overrides_by_album = data.get("album_track_overrides", {}) - album_meta_overrides = data.get("album_overrides", {}) + track_overrides_by_album = data.get('album_track_overrides', {}) + album_meta_overrides = data.get('album_overrides', {}) # Ensure albums referenced by overrides are loaded and visible referenced_album_ids = set(track_overrides_by_album.keys()) | set(album_meta_overrides.keys()) diff --git a/test/session/conftest.py b/test/session/conftest.py index 20e0ff9f9e..1f16046b1a 100644 --- a/test/session/conftest.py +++ b/test/session/conftest.py @@ -148,10 +148,10 @@ def mock_file_with_metadata() -> Mock: """Provide a mock file with metadata.""" file_mock = Mock(spec=File) metadata = Metadata() - metadata["title"] = "Test Song" - metadata["artist"] = "Test Artist" - metadata["~internal"] = "internal_value" - metadata["length"] = "123456" + metadata['title'] = "Test Song" + metadata['artist'] = "Test Artist" + metadata['~internal'] = "internal_value" + metadata['length'] = "123456" file_mock.metadata = metadata return file_mock @@ -187,7 +187,7 @@ def mock_cluster() -> Mock: """Provide a mock cluster instance.""" cluster_mock = Mock(spec=Cluster) cluster_mock.related_album = None - cluster_mock.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + cluster_mock.metadata = {'album': 
"Test Album", 'albumartist': "Test Artist"} return cluster_mock @@ -249,11 +249,11 @@ def track_mover() -> TrackMover: def sample_metadata() -> Metadata: """Provide sample metadata for testing.""" metadata = Metadata() - metadata["title"] = "Test Song" - metadata["artist"] = "Test Artist" - metadata["album"] = "Test Album" - metadata["~internal"] = "internal_value" - metadata["length"] = "123456" + metadata['title'] = "Test Song" + metadata['artist'] = "Test Artist" + metadata['album'] = "Test Album" + metadata['~internal'] = "internal_value" + metadata['length'] = "123456" return metadata @@ -273,27 +273,27 @@ def sample_album_items() -> AlbumItems: def sample_session_data() -> dict[str, Any]: """Provide sample session data for testing.""" return { - "version": 1, - "options": { - "rename_files": True, - "move_files": False, - "dont_write_tags": True, + 'version': 1, + 'options': { + 'rename_files': True, + 'move_files': False, + 'dont_write_tags': True, }, - "items": [ + 'items': [ { - "file_path": str(Path("/test/file1.mp3")), - "location": {"type": "unclustered"}, + 'file_path': str(Path("/test/file1.mp3")), + 'location': {'type': "unclustered"}, }, { - "file_path": str(Path("/test/file2.mp3")), - "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, - "metadata": {"tags": {"title": ["Test Song"]}}, + 'file_path': str(Path("/test/file2.mp3")), + 'location': {'type': "track", 'album_id': "album-123", 'recording_id': "recording-456"}, + 'metadata': {'tags': {'title': ["Test Song"]}}, }, ], - "album_track_overrides": {"album-123": {"track-456": {"title": ["New Title"]}}}, - "album_overrides": {"album-123": {"albumartist": ["New Artist"]}}, - "unmatched_albums": ["album-789"], - "expanded_albums": ["album-123"], + 'album_track_overrides': {'album-123': {'track-456': {'title': ["New Title"]}}}, + 'album_overrides': {'album-123': {'albumartist': ["New Artist"]}}, + 'unmatched_albums': ["album-789"], + 'expanded_albums': 
["album-123"], } @@ -360,7 +360,7 @@ def create_mock_file_with_parent(filename: str, parent_type: str = "track", albu elif parent_type == "cluster": mock_cluster = Mock(spec=Cluster) mock_cluster.related_album = None - mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + mock_cluster.metadata = {'album': "Test Album", 'albumartist': "Test Artist"} file_mock.parent_item = mock_cluster elif parent_type == "nat": mock_album = Mock(spec=NatAlbum) @@ -380,24 +380,24 @@ def create_session_items_with_locations() -> list[dict[str, Any]]: """Create a list of session items with different location types.""" return [ { - "file_path": str(Path("/test/unclustered.mp3")), - "location": {"type": "unclustered"}, + 'file_path': str(Path("/test/unclustered.mp3")), + 'location': {'type': "unclustered"}, }, { - "file_path": str(Path("/test/cluster.mp3")), - "location": {"type": "cluster", "cluster_title": "Album", "cluster_artist": "Artist"}, + 'file_path': str(Path("/test/cluster.mp3")), + 'location': {'type': "cluster", 'cluster_title': "Album", 'cluster_artist': "Artist"}, }, { - "file_path": str(Path("/test/track.mp3")), - "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, + 'file_path': str(Path("/test/track.mp3")), + 'location': {'type': "track", 'album_id': "album-123", 'recording_id': "recording-456"}, }, { - "file_path": str(Path("/test/unmatched.mp3")), - "location": {"type": "album_unmatched", "album_id": "album-789"}, + 'file_path': str(Path("/test/unmatched.mp3")), + 'location': {'type': "album_unmatched", 'album_id': "album-789"}, }, { - "file_path": str(Path("/test/nat.mp3")), - "location": {"type": "nat", "recording_id": "recording-999"}, + 'file_path': str(Path("/test/nat.mp3")), + 'location': {'type': "nat", 'recording_id': "recording-999"}, }, ] @@ -428,10 +428,10 @@ def patch_get_config(monkeypatch: pytest.MonkeyPatch, **settings) -> Mock: """Patch get_config with specified settings.""" config_mock = 
Mock() config_mock.setting = { - "rename_files": False, - "move_files": False, - "dont_write_tags": False, - "session_safe_restore": True, + 'rename_files': False, + 'move_files': False, + 'dont_write_tags': False, + 'session_safe_restore': True, **settings, } @@ -447,5 +447,5 @@ def patch_get_config(monkeypatch: pytest.MonkeyPatch, **settings) -> Mock: def patch_qtimer_singleshot(monkeypatch: pytest.MonkeyPatch) -> Mock: """Patch QtCore.QTimer.singleShot for testing.""" mock_single_shot = Mock() - monkeypatch.setattr('PyQt6.QtCore.QTimer.singleShot', mock_single_shot) + monkeypatch.setattr("PyQt6.QtCore.QTimer.singleShot", mock_single_shot) return mock_single_shot diff --git a/test/session/test_data.py b/test/session/test_data.py index 54e94d8a8f..d08ff4a329 100644 --- a/test/session/test_data.py +++ b/test/session/test_data.py @@ -120,7 +120,7 @@ def test_session_item_creation() -> None: file_path = Path("/test/file.mp3") location = SessionItemLocation(type="track", album_id="album-123", recording_id="recording-456") metadata = Metadata() - metadata["title"] = "Test Song" + metadata['title'] = "Test Song" item = SessionItem(file_path=file_path, location=location, metadata=metadata) @@ -156,16 +156,16 @@ def test_session_data_creation() -> None: version=1, options=options, items=[item], - album_track_overrides={"album-123": {"track-456": {"title": ["New Title"]}}}, - album_overrides={"album-123": {"albumartist": ["New Artist"]}}, + album_track_overrides={'album-123': {'track-456': {'title': ["New Title"]}}}, + album_overrides={'album-123': {'albumartist': ["New Artist"]}}, unmatched_albums=["album-789"], ) assert data.version == 1 assert data.options == options assert data.items == [item] - assert data.album_track_overrides == {"album-123": {"track-456": {"title": ["New Title"]}}} - assert data.album_overrides == {"album-123": {"albumartist": ["New Artist"]}} + assert data.album_track_overrides == {'album-123': {'track-456': {'title': ["New Title"]}}} + assert 
data.album_overrides == {'album-123': {'albumartist': ["New Artist"]}} assert data.unmatched_albums == ["album-789"] @@ -179,7 +179,7 @@ def test_grouped_items_creation() -> None: unclustered = [Path("/test/unclustered.mp3")] by_cluster = {("Album", "Artist"): [Path("/test/cluster.mp3")]} by_album = { - "album-123": AlbumItems( + 'album-123': AlbumItems( unmatched=[Path("/test/unmatched.mp3")], tracks=[(Path("/test/track.mp3"), "recording-456")] ) } @@ -221,7 +221,7 @@ def test_album_items_creation() -> None: def test_track_overrides_creation() -> None: """Test TrackOverrides creation.""" - overrides = {"title": ["New Title"], "artist": ["New Artist"]} + overrides = {'title': ["New Title"], 'artist': ["New Artist"]} track_overrides = TrackOverrides(track_id="recording-123", overrides=overrides) @@ -236,7 +236,7 @@ def test_track_overrides_creation() -> None: def test_album_overrides_creation() -> None: """Test AlbumOverrides creation.""" - overrides = {"albumartist": ["New Artist"], "album": ["New Album"]} + overrides = {'albumartist': ["New Artist"], 'album': ["New Album"]} album_overrides = AlbumOverrides(album_id="album-123", overrides=overrides) diff --git a/test/session/test_location_detector.py b/test/session/test_location_detector.py index e81b8dbe66..4f54b4ee6b 100644 --- a/test/session/test_location_detector.py +++ b/test/session/test_location_detector.py @@ -98,7 +98,7 @@ def test_location_detector_cluster_file(location_detector: LocationDetector, moc mock_cluster = Mock(spec=Cluster) mock_cluster.related_album = mock_album - mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + mock_cluster.metadata = {'album': "Test Album", 'albumartist': "Test Artist"} mock_file.parent_item = mock_cluster location = location_detector.detect(mock_file) @@ -122,7 +122,7 @@ def test_location_detector_regular_cluster(location_detector: LocationDetector, """Test location detection for regular clusters.""" mock_cluster = Mock(spec=Cluster) 
mock_cluster.related_album = None - mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + mock_cluster.metadata = {'album': "Test Album", 'albumartist': "Test Artist"} mock_file.parent_item = mock_cluster location = location_detector.detect(mock_file) @@ -261,7 +261,7 @@ def test_location_detector_detect_cluster_location_regular_cluster(location_dete """Test _detect_cluster_location with regular cluster.""" mock_cluster = Mock(spec=Cluster) mock_cluster.related_album = None - mock_cluster.metadata = {"album": "Test Album", "albumartist": "Test Artist"} + mock_cluster.metadata = {'album': "Test Album", 'albumartist': "Test Artist"} location = location_detector._detect_cluster_location(mock_cluster) diff --git a/test/session/test_metadata_handler.py b/test/session/test_metadata_handler.py index 8b1ad69725..dd15dbf6d6 100644 --- a/test/session/test_metadata_handler.py +++ b/test/session/test_metadata_handler.py @@ -36,10 +36,10 @@ def mock_file_with_metadata() -> Mock: """Provide a mock file with metadata.""" file_mock = Mock(spec=File) metadata = Metadata() - metadata["title"] = "Test Song" - metadata["artist"] = "Test Artist" - metadata["~internal"] = "internal_value" - metadata["length"] = "123456" + metadata['title'] = "Test Song" + metadata['artist'] = "Test Artist" + metadata['~internal'] = "internal_value" + metadata['length'] = "123456" file_mock.metadata = metadata return file_mock @@ -52,8 +52,8 @@ def test_serialize_metadata_for_file(mock_file_with_metadata: Mock) -> None: assert "artist" in tags assert "~internal" not in tags assert "length" not in tags - assert tags["title"] == ["Test Song"] - assert tags["artist"] == ["Test Artist"] + assert tags['title'] == ["Test Song"] + assert tags['artist'] == ["Test Artist"] def test_serialize_metadata_empty_file() -> None: @@ -80,18 +80,18 @@ def test_serialize_metadata_with_multiple_values() -> None: tags = MetadataHandler.serialize_metadata_for_file(file_mock) - assert tags["genre"] == 
["Rock", "Pop"] - assert tags["artist"] == ["Single Artist"] + assert tags['genre'] == ["Rock", "Pop"] + assert tags['artist'] == ["Single Artist"] def test_deserialize_metadata() -> None: """Test metadata deserialization.""" - tags = {"title": ["Test Song"], "artist": ["Test Artist"]} + tags = {'title': ["Test Song"], 'artist': ["Test Artist"]} metadata = MetadataHandler.deserialize_metadata(tags) - assert metadata["title"] == "Test Song" - assert metadata["artist"] == "Test Artist" + assert metadata['title'] == "Test Song" + assert metadata['artist'] == "Test Artist" def test_deserialize_metadata_empty() -> None: @@ -103,12 +103,12 @@ def test_deserialize_metadata_empty() -> None: def test_deserialize_metadata_with_multiple_values() -> None: """Test metadata deserialization with multiple values per tag.""" - tags = {"genre": ["Rock", "Pop"], "artist": ["Artist 1", "Artist 2"]} + tags = {'genre': ["Rock", "Pop"], 'artist': ["Artist 1", "Artist 2"]} metadata = MetadataHandler.deserialize_metadata(tags) - assert metadata["genre"] == "Rock; Pop" - assert metadata["artist"] == "Artist 1; Artist 2" + assert metadata['genre'] == "Rock; Pop" + assert metadata['artist'] == "Artist 1; Artist 2" @pytest.mark.parametrize( @@ -129,7 +129,7 @@ def test_as_list(values: Any, expected: list[Any]) -> None: assert result == expected -@patch('picard.log.log') +@patch("picard.log.log") def test_safe_apply_metadata_success(mock_log: Mock) -> None: """Test successful metadata application.""" file_mock = Mock(spec=File) @@ -139,7 +139,7 @@ def test_safe_apply_metadata_success(mock_log: Mock) -> None: file_mock.orig_metadata.length = 789012 metadata = Metadata() - metadata["title"] = "New Title" + metadata['title'] = "New Title" result = MetadataHandler.safe_apply_metadata(file_mock, metadata) @@ -149,7 +149,7 @@ def test_safe_apply_metadata_success(mock_log: Mock) -> None: assert metadata.length == 123456 -@patch('picard.log.log') +@patch("picard.log.log") def 
test_safe_apply_metadata_success_with_none_length(mock_log: Mock) -> None: """Test successful metadata application with None length.""" file_mock = Mock(spec=File) @@ -159,7 +159,7 @@ def test_safe_apply_metadata_success_with_none_length(mock_log: Mock) -> None: file_mock.orig_metadata.length = 789012 metadata = Metadata() - metadata["title"] = "New Title" + metadata['title'] = "New Title" result = MetadataHandler.safe_apply_metadata(file_mock, metadata) @@ -169,7 +169,7 @@ def test_safe_apply_metadata_success_with_none_length(mock_log: Mock) -> None: assert metadata.length == 789012 -@patch('picard.session.metadata_handler.log') +@patch("picard.session.metadata_handler.log") def test_safe_apply_metadata_attribute_error(mock_log: Mock) -> None: """Test metadata application with AttributeError.""" file_mock = Mock(spec=File) @@ -189,7 +189,7 @@ def test_safe_apply_metadata_attribute_error(mock_log: Mock) -> None: assert "Test error" in str(mock_log.warning.call_args) -@patch('picard.session.metadata_handler.log') +@patch("picard.session.metadata_handler.log") def test_safe_apply_metadata_key_error(mock_log: Mock) -> None: """Test metadata application with KeyError.""" file_mock = Mock(spec=File) @@ -209,7 +209,7 @@ def test_safe_apply_metadata_key_error(mock_log: Mock) -> None: assert "Test error" in str(mock_log.warning.call_args) -@patch('picard.session.metadata_handler.log') +@patch("picard.session.metadata_handler.log") def test_safe_apply_metadata_unexpected_error(mock_log: Mock) -> None: """Test metadata application with unexpected error.""" file_mock = Mock(spec=File) @@ -229,7 +229,7 @@ def test_safe_apply_metadata_unexpected_error(mock_log: Mock) -> None: assert "File system error" in str(mock_log.error.call_args) -@patch('picard.session.retry_helper.RetryHelper') +@patch("picard.session.retry_helper.RetryHelper") def test_apply_saved_metadata_if_any_file_pending(mock_retry_helper: Mock) -> None: """Test applying saved metadata with file in PENDING 
state.""" tagger_mock = Mock() @@ -245,7 +245,7 @@ def test_apply_saved_metadata_if_any_file_pending(mock_retry_helper: Mock) -> No mock_retry_helper.retry_until.assert_called_once() -@patch('picard.session.retry_helper.RetryHelper') +@patch("picard.session.retry_helper.RetryHelper") def test_apply_saved_metadata_if_any_file_not_found(mock_retry_helper: Mock) -> None: """Test applying saved metadata when file is not found.""" tagger_mock = Mock() @@ -258,7 +258,7 @@ def test_apply_saved_metadata_if_any_file_not_found(mock_retry_helper: Mock) -> mock_retry_helper.retry_until.assert_called_once() -@patch('picard.session.retry_helper.RetryHelper') +@patch("picard.session.retry_helper.RetryHelper") def test_apply_saved_metadata_if_any_file_ready_success(mock_retry_helper: Mock) -> None: """Test applying saved metadata when file is ready and application succeeds.""" tagger_mock = Mock() @@ -270,14 +270,14 @@ def test_apply_saved_metadata_if_any_file_ready_success(mock_retry_helper: Mock) metadata = Metadata() metadata_map = {Path("/test/file.mp3"): metadata} - with patch.object(MetadataHandler, 'safe_apply_metadata', return_value=True): + with patch.object(MetadataHandler, "safe_apply_metadata", return_value=True): MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) # Should not retry if file is ready and metadata applied successfully mock_retry_helper.retry_until.assert_not_called() -@patch('picard.session.retry_helper.RetryHelper') +@patch("picard.session.retry_helper.RetryHelper") def test_apply_saved_metadata_if_any_file_ready_failure(mock_retry_helper: Mock) -> None: """Test applying saved metadata when file is ready but application fails.""" tagger_mock = Mock() @@ -289,14 +289,14 @@ def test_apply_saved_metadata_if_any_file_ready_failure(mock_retry_helper: Mock) metadata = Metadata() metadata_map = {Path("/test/file.mp3"): metadata} - with patch.object(MetadataHandler, 'safe_apply_metadata', return_value=False): + with 
patch.object(MetadataHandler, "safe_apply_metadata", return_value=False): MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) # Should retry if metadata application failed mock_retry_helper.retry_until.assert_called_once() -@patch('picard.session.retry_helper.RetryHelper') +@patch("picard.session.retry_helper.RetryHelper") def test_apply_saved_metadata_if_any_mixed_states(mock_retry_helper: Mock) -> None: """Test applying saved metadata with files in different states.""" tagger_mock = Mock() @@ -330,7 +330,7 @@ def files_getter(path): Path("/test/file3.mp3"): Metadata(), } - with patch.object(MetadataHandler, 'safe_apply_metadata', side_effect=[True, False]): + with patch.object(MetadataHandler, "safe_apply_metadata", side_effect=[True, False]): MetadataHandler.apply_saved_metadata_if_any(tagger_mock, metadata_map) # Should retry for file2 (pending) and file3 (failed) diff --git a/test/session/test_retry_helper.py b/test/session/test_retry_helper.py index 913364565f..6f7e1726e5 100644 --- a/test/session/test_retry_helper.py +++ b/test/session/test_retry_helper.py @@ -27,7 +27,7 @@ import pytest -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_condition_met_immediately(mock_single_shot: Mock) -> None: """Test retry_until when condition is met immediately.""" condition_called = False @@ -49,7 +49,7 @@ def action_fn() -> None: mock_single_shot.assert_not_called() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_condition_not_met(mock_single_shot: Mock) -> None: """Test retry_until when condition is not met.""" @@ -64,7 +64,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_with_custom_delay(mock_single_shot: Mock) -> None: """Test retry_until with custom delay.""" @@ -79,7 +79,7 @@ def action_fn() -> None: 
mock_single_shot.assert_called_once_with(500, mock_single_shot.call_args[0][1]) -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_with_max_attempts(mock_single_shot: Mock) -> None: """Test retry_until with maximum attempts limit.""" attempt_count = 0 @@ -104,7 +104,7 @@ def mock_callback(delay, callback): assert mock_single_shot.call_count == 3 -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_condition_becomes_true_after_retries(mock_single_shot: Mock) -> None: """Test retry_until when condition becomes true after some retries.""" call_count = 0 @@ -130,7 +130,7 @@ def mock_callback(delay, callback): assert mock_single_shot.call_count > 0 -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_file_ready_file_not_ready(mock_single_shot: Mock) -> None: """Test retry_until_file_ready with file not ready.""" file_mock = Mock() @@ -148,7 +148,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_file_ready_file_ready(mock_single_shot: Mock) -> None: """Test retry_until_file_ready when file is ready.""" file_mock = Mock() @@ -166,7 +166,7 @@ def action_fn() -> None: mock_single_shot.assert_not_called() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_file_ready_no_file(mock_single_shot: Mock) -> None: """Test retry_until_file_ready when file is None.""" @@ -181,7 +181,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_file_ready_file_without_state(mock_single_shot: Mock) -> None: """Test retry_until_file_ready when file has no state attribute.""" file_mock = Mock() @@ -200,7 +200,7 @@ def action_fn() -> None: 
mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_file_ready_with_custom_delay(mock_single_shot: Mock) -> None: """Test retry_until_file_ready with custom delay.""" file_mock = Mock() @@ -218,7 +218,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once_with(300, mock_single_shot.call_args[0][1]) -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_album_ready_album_not_ready(mock_single_shot: Mock) -> None: """Test retry_until_album_ready with album not ready.""" album_mock = Mock() @@ -235,7 +235,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_album_ready_album_ready(mock_single_shot: Mock) -> None: """Test retry_until_album_ready when album is ready.""" album_mock = Mock() @@ -252,7 +252,7 @@ def action_fn() -> None: mock_single_shot.assert_not_called() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_album_ready_no_album(mock_single_shot: Mock) -> None: """Test retry_until_album_ready when album is None.""" @@ -267,7 +267,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_album_ready_album_without_tracks_attribute(mock_single_shot: Mock) -> None: """Test retry_until_album_ready when album has no tracks attribute.""" album_mock = Mock() @@ -285,7 +285,7 @@ def action_fn() -> None: mock_single_shot.assert_called_once() -@patch('PyQt6.QtCore.QTimer.singleShot') +@patch("PyQt6.QtCore.QTimer.singleShot") def test_retry_until_album_ready_with_custom_delay(mock_single_shot: Mock) -> None: """Test retry_until_album_ready with custom delay.""" album_mock = Mock() diff --git a/test/session/test_session_exporter.py 
b/test/session/test_session_exporter.py index cd9db38e91..fed154c74b 100644 --- a/test/session/test_session_exporter.py +++ b/test/session/test_session_exporter.py @@ -53,17 +53,17 @@ def test_session_exporter_export_session_empty( """Test exporting an empty session.""" data = session_exporter.export_session(mock_tagger) - assert data["version"] == SessionConstants.SESSION_FORMAT_VERSION - assert data["options"] == { - "rename_files": False, - "move_files": False, - "dont_write_tags": True, + assert data['version'] == SessionConstants.SESSION_FORMAT_VERSION + assert data['options'] == { + 'rename_files': False, + 'move_files': False, + 'dont_write_tags': True, } - assert data["items"] == [] - assert data["album_track_overrides"] == {} - assert data["album_overrides"] == {} - assert data["unmatched_albums"] == [] - assert data["expanded_albums"] == [] + assert data['items'] == [] + assert data['album_track_overrides'] == {} + assert data['album_overrides'] == {} + assert data['unmatched_albums'] == [] + assert data['expanded_albums'] == [] def test_session_exporter_export_file_item_saved(session_exporter: SessionExporter, cfg_options) -> None: @@ -78,13 +78,13 @@ def test_session_exporter_export_file_item_saved(session_exporter: SessionExport tagger_mock.iter_all_files.return_value = [file_mock] tagger_mock.albums = {} - with patch.object(session_exporter.location_detector, 'detect') as mock_detect: + with patch.object(session_exporter.location_detector, "detect") as mock_detect: mock_detect.return_value = SessionItemLocation(type="unclustered") data = session_exporter.export_session(tagger_mock) - assert len(data["items"]) == 1 - item = data["items"][0] - assert item["file_path"] == str(Path("/test/file.mp3")) + assert len(data['items']) == 1 + item = data['items'][0] + assert item['file_path'] == str(Path("/test/file.mp3")) assert "metadata" not in item @@ -96,7 +96,7 @@ def test_session_exporter_export_file_item_unsaved(session_exporter: SessionExpo 
file_mock.is_saved.return_value = False file_mock.parent_item = None file_mock.metadata = Metadata() - file_mock.metadata["title"] = "Test Song" + file_mock.metadata['title'] = "Test Song" # Provide an original metadata baseline so exporter can compute a delta file_mock.orig_metadata = Metadata() @@ -104,15 +104,15 @@ def test_session_exporter_export_file_item_unsaved(session_exporter: SessionExpo tagger_mock.iter_all_files.return_value = [file_mock] tagger_mock.albums = {} - with patch.object(session_exporter.location_detector, 'detect') as mock_detect: + with patch.object(session_exporter.location_detector, "detect") as mock_detect: mock_detect.return_value = SessionItemLocation(type="unclustered") data = session_exporter.export_session(tagger_mock) - assert len(data["items"]) == 1 - item = data["items"][0] - assert item["file_path"] == str(Path("/test/file.mp3")) + assert len(data['items']) == 1 + item = data['items'][0] + assert item['file_path'] == str(Path("/test/file.mp3")) assert "metadata" in item - assert item["metadata"]["tags"]["title"] == ["Test Song"] + assert item['metadata']['tags']['title'] == ["Test Song"] def test_session_exporter_export_ui_state(session_exporter: SessionExporter, cfg_options) -> None: @@ -129,13 +129,13 @@ def test_session_exporter_export_ui_state(session_exporter: SessionExporter, cfg tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff method to return None (no overrides) - with patch.object(album_mock.metadata, 'diff', return_value=None): + with patch.object(album_mock.metadata, "diff", return_value=None): data = session_exporter.export_session(tagger_mock) - assert data["expanded_albums"] == ["album-123"] + assert data['expanded_albums'] == ["album-123"] def test_session_exporter_export_ui_state_no_ui_item(session_exporter: SessionExporter, cfg_options) -> None: @@ -150,13 +150,13 @@ def 
test_session_exporter_export_ui_state_no_ui_item(session_exporter: SessionEx tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff method to return None (no overrides) - with patch.object(album_mock.metadata, 'diff', return_value=None): + with patch.object(album_mock.metadata, "diff", return_value=None): data = session_exporter.export_session(tagger_mock) - assert data["expanded_albums"] == [] + assert data['expanded_albums'] == [] def test_session_exporter_export_metadata_overrides(session_exporter: SessionExporter, cfg_options) -> None: @@ -167,21 +167,21 @@ def test_session_exporter_export_metadata_overrides(session_exporter: SessionExp album_mock.id = "album-123" album_mock.metadata = Metadata() album_mock.orig_metadata = Metadata() - album_mock.metadata["albumartist"] = "New Artist" - album_mock.orig_metadata["albumartist"] = "Old Artist" + album_mock.metadata['albumartist'] = "New Artist" + album_mock.orig_metadata['albumartist'] = "Old Artist" # Create track with overrides track_mock = Mock() track_mock.id = "track-456" track_mock.metadata = Metadata() track_mock.scripted_metadata = Metadata() - track_mock.metadata["title"] = "New Title" - track_mock.scripted_metadata["title"] = "Old Title" + track_mock.metadata['title'] = "New Title" + track_mock.scripted_metadata['title'] = "Old Title" album_mock.tracks = [track_mock] tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff and rawitems methods diff_mock = Mock() @@ -190,15 +190,15 @@ def test_session_exporter_export_metadata_overrides(session_exporter: SessionExp track_diff_mock.rawitems.return_value = [("title", ["New Title"])] with ( - patch.object(album_mock.metadata, 'diff', return_value=diff_mock), - patch.object(track_mock.metadata, 'diff', 
return_value=track_diff_mock), + patch.object(album_mock.metadata, "diff", return_value=diff_mock), + patch.object(track_mock.metadata, "diff", return_value=track_diff_mock), ): data = session_exporter.export_session(tagger_mock) - assert "album-123" in data["album_overrides"] - assert data["album_overrides"]["album-123"]["albumartist"] == ["New Artist"] - assert "album-123" in data["album_track_overrides"] - assert data["album_track_overrides"]["album-123"]["track-456"]["title"] == ["New Title"] + assert "album-123" in data['album_overrides'] + assert data['album_overrides']['album-123']['albumartist'] == ["New Artist"] + assert "album-123" in data['album_track_overrides'] + assert data['album_track_overrides']['album-123']['track-456']['title'] == ["New Title"] def test_session_exporter_export_unmatched_albums(session_exporter: SessionExporter, cfg_options) -> None: @@ -213,13 +213,13 @@ def test_session_exporter_export_unmatched_albums(session_exporter: SessionExpor tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff method to return None (no overrides) - with patch.object(album_mock.metadata, 'diff', return_value=None): + with patch.object(album_mock.metadata, "diff", return_value=None): data = session_exporter.export_session(tagger_mock) - assert data["unmatched_albums"] == ["album-123"] + assert data['unmatched_albums'] == ["album-123"] def test_session_exporter_export_skips_nat_albums(session_exporter: SessionExporter, cfg_options) -> None: @@ -231,13 +231,13 @@ def test_session_exporter_export_skips_nat_albums(session_exporter: SessionExpor tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"nat-album-123": nat_album_mock} + tagger_mock.albums = {'nat-album-123': nat_album_mock} data = session_exporter.export_session(tagger_mock) - assert data["album_overrides"] == {} - assert 
data["album_track_overrides"] == {} - assert data["unmatched_albums"] == [] + assert data['album_overrides'] == {} + assert data['album_track_overrides'] == {} + assert data['unmatched_albums'] == [] def test_session_exporter_export_albums_with_files(session_exporter: SessionExporter, cfg_options) -> None: @@ -260,17 +260,17 @@ def test_session_exporter_export_albums_with_files(session_exporter: SessionExpo tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [file_mock] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff method to return None (no overrides) with ( - patch.object(album_mock.metadata, 'diff', return_value=None), - patch.object(session_exporter.location_detector, 'detect') as mock_detect, + patch.object(album_mock.metadata, "diff", return_value=None), + patch.object(session_exporter.location_detector, "detect") as mock_detect, ): mock_detect.return_value = SessionItemLocation(type="track", album_id="album-123") data = session_exporter.export_session(tagger_mock) - assert data["unmatched_albums"] == [] + assert data['unmatched_albums'] == [] def test_session_exporter_serialize_location() -> None: @@ -288,9 +288,9 @@ def test_session_exporter_serialize_location() -> None: serialized = exporter._serialize_location(location) assert serialized == { - "type": "track", - "album_id": "album-123", - "recording_id": "recording-456", + 'type': "track", + 'album_id': "album-123", + 'recording_id': "recording-456", } @@ -308,7 +308,7 @@ def test_session_exporter_serialize_location_with_none_values() -> None: serialized = exporter._serialize_location(location) - assert serialized == {"type": "unclustered"} + assert serialized == {'type': "unclustered"} def test_session_exporter_serialize_location_with_cluster_info() -> None: @@ -326,9 +326,9 @@ def test_session_exporter_serialize_location_with_cluster_info() -> None: serialized = exporter._serialize_location(location) assert serialized == { - 
"type": "cluster", - "cluster_title": "Test Album", - "cluster_artist": "Test Artist", + 'type': "cluster", + 'cluster_title': "Test Album", + 'cluster_artist': "Test Artist", } @@ -338,17 +338,17 @@ def test_session_exporter_export_options() -> None: config_mock = Mock() config_mock.setting = { - "rename_files": True, - "move_files": False, - "dont_write_tags": True, + 'rename_files': True, + 'move_files': False, + 'dont_write_tags': True, } options = exporter._export_options(config_mock) assert options == { - "rename_files": True, - "move_files": False, - "dont_write_tags": True, + 'rename_files': True, + 'move_files': False, + 'dont_write_tags': True, } @@ -358,17 +358,17 @@ def test_session_exporter_export_options_with_falsy_values() -> None: config_mock = Mock() config_mock.setting = { - "rename_files": 0, - "move_files": "", - "dont_write_tags": None, + 'rename_files': 0, + 'move_files': "", + 'dont_write_tags': None, } options = exporter._export_options(config_mock) assert options == { - "rename_files": False, - "move_files": False, - "dont_write_tags": False, + 'rename_files': False, + 'move_files': False, + 'dont_write_tags': False, } @@ -382,23 +382,23 @@ def test_session_exporter_export_metadata_overrides_excludes_length( album_mock.id = "album-123" album_mock.metadata = Metadata() album_mock.orig_metadata = Metadata() - album_mock.metadata["length"] = "300000" - album_mock.orig_metadata["length"] = "250000" + album_mock.metadata['length'] = "300000" + album_mock.orig_metadata['length'] = "250000" album_mock.tracks = [] tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff method to return length override diff_mock = Mock() diff_mock.rawitems.return_value = [("length", ["300000"])] - with patch.object(album_mock.metadata, 'diff', return_value=diff_mock): + with patch.object(album_mock.metadata, "diff", return_value=diff_mock): 
data = session_exporter.export_session(tagger_mock) # Length should not be in overrides - assert "album-123" not in data["album_overrides"] or "length" not in data["album_overrides"]["album-123"] + assert "album-123" not in data['album_overrides'] or "length" not in data['album_overrides']['album-123'] def test_session_exporter_export_metadata_overrides_excludes_internal_tags( @@ -411,8 +411,8 @@ def test_session_exporter_export_metadata_overrides_excludes_internal_tags( track_mock.id = "track-456" track_mock.metadata = Metadata() track_mock.scripted_metadata = Metadata() - track_mock.metadata["~internal"] = "new_value" - track_mock.scripted_metadata["~internal"] = "old_value" + track_mock.metadata['~internal'] = "new_value" + track_mock.scripted_metadata['~internal'] = "old_value" album_mock = Mock(spec=Album) album_mock.id = "album-123" @@ -422,7 +422,7 @@ def test_session_exporter_export_metadata_overrides_excludes_internal_tags( tagger_mock = Mock() tagger_mock.iter_all_files.return_value = [] - tagger_mock.albums = {"album-123": album_mock} + tagger_mock.albums = {'album-123': album_mock} # Mock the diff methods track_diff_mock = Mock() @@ -431,13 +431,13 @@ def test_session_exporter_export_metadata_overrides_excludes_internal_tags( album_diff_mock.rawitems.return_value = [] with ( - patch.object(track_mock.metadata, 'diff', return_value=track_diff_mock), - patch.object(album_mock.metadata, 'diff', return_value=album_diff_mock), + patch.object(track_mock.metadata, "diff", return_value=track_diff_mock), + patch.object(album_mock.metadata, "diff", return_value=album_diff_mock), ): data = session_exporter.export_session(tagger_mock) # Internal tag should be in overrides (current implementation includes them) - assert "album-123" in data["album_track_overrides"] - assert "track-456" in data["album_track_overrides"]["album-123"] - assert "~internal" in data["album_track_overrides"]["album-123"]["track-456"] - assert 
data["album_track_overrides"]["album-123"]["track-456"]["~internal"] == ["new_value"] + assert "album-123" in data['album_track_overrides'] + assert "track-456" in data['album_track_overrides']['album-123'] + assert "~internal" in data['album_track_overrides']['album-123']['track-456'] + assert data['album_track_overrides']['album-123']['track-456']['~internal'] == ["new_value"] diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py index a1319a998a..bee9aa55ad 100644 --- a/test/session/test_session_loader.py +++ b/test/session/test_session_loader.py @@ -42,7 +42,7 @@ def session_loader() -> SessionLoader: def test_session_loader_read_session_file(session_loader: SessionLoader, tmp_path: Path) -> None: """Test reading session file.""" - session_data = {"version": 1, "items": []} + session_data = {'version': 1, 'items': []} session_file = tmp_path / "test.mbps" session_file.write_text(json.dumps(session_data), encoding="utf-8") @@ -72,7 +72,7 @@ def test_session_loader_prepare_session(session_loader: SessionLoader, cfg_optio cfg = picard_config.get_config() cfg.setting['session_safe_restore'] = True - data = {"version": 1} + data = {'version': 1} session_loader._prepare_session(data) session_loader.tagger.clear_session.assert_called_once() @@ -85,7 +85,7 @@ def test_session_loader_prepare_session_safe_restore_disabled(session_loader: Se cfg = picard_config.get_config() cfg.setting['session_safe_restore'] = False - data = {"version": 1} + data = {'version': 1} session_loader._prepare_session(data) session_loader.tagger.clear_session.assert_called_once() @@ -100,27 +100,27 @@ def test_session_loader_restore_options(session_loader: SessionLoader, cfg_optio # The cfg_options fixture already sets the default values options = { - "rename_files": True, - "move_files": True, - "dont_write_tags": True, + 'rename_files': True, + 'move_files': True, + 'dont_write_tags': True, } session_loader._restore_options(options) cfg = 
picard_config.get_config() - assert cfg.setting["rename_files"] is True - assert cfg.setting["move_files"] is True - assert cfg.setting["dont_write_tags"] is True + assert cfg.setting['rename_files'] is True + assert cfg.setting['move_files'] is True + assert cfg.setting['dont_write_tags'] is True -@patch('picard.session.session_loader.get_config') +@patch("picard.session.session_loader.get_config") def test_session_loader_restore_options_with_defaults(session_loader: SessionLoader, mock_get_config) -> None: """Test restoring configuration options with default values.""" config_mock = Mock() config_mock.setting = { - "rename_files": False, - "move_files": False, - "dont_write_tags": False, + 'rename_files': False, + 'move_files': False, + 'dont_write_tags': False, } mock_get_config.return_value = config_mock @@ -129,33 +129,33 @@ def test_session_loader_restore_options_with_defaults(session_loader: SessionLoa session_loader._restore_options(options) - assert config_mock.setting["rename_files"] is False - assert config_mock.setting["move_files"] is False - assert config_mock.setting["dont_write_tags"] is False + assert config_mock.setting['rename_files'] is False + assert config_mock.setting['move_files'] is False + assert config_mock.setting['dont_write_tags'] is False def test_session_loader_group_items_by_location(session_loader: SessionLoader) -> None: """Test grouping items by location type.""" items = [ { - "file_path": "/test/unclustered.mp3", - "location": {"type": "unclustered"}, + 'file_path': "/test/unclustered.mp3", + 'location': {'type': "unclustered"}, }, { - "file_path": "/test/cluster.mp3", - "location": {"type": "cluster", "cluster_title": "Album", "cluster_artist": "Artist"}, + 'file_path': "/test/cluster.mp3", + 'location': {'type': "cluster", 'cluster_title': "Album", 'cluster_artist': "Artist"}, }, { - "file_path": "/test/track.mp3", - "location": {"type": "track", "album_id": "album-123", "recording_id": "recording-456"}, + 'file_path': 
"/test/track.mp3", + 'location': {'type': "track", 'album_id': "album-123", 'recording_id': "recording-456"}, }, { - "file_path": "/test/unmatched.mp3", - "location": {"type": "album_unmatched", "album_id": "album-789"}, + 'file_path': "/test/unmatched.mp3", + 'location': {'type': "album_unmatched", 'album_id': "album-789"}, }, { - "file_path": "/test/nat.mp3", - "location": {"type": "nat", "recording_id": "recording-999"}, + 'file_path': "/test/nat.mp3", + 'location': {'type': "nat", 'recording_id': "recording-999"}, }, ] @@ -168,10 +168,10 @@ def test_session_loader_group_items_by_location(session_loader: SessionLoader) - assert ("Album", "Artist") in grouped.by_cluster assert "album-123" in grouped.by_album - assert len(grouped.by_album["album-123"].tracks) == 1 + assert len(grouped.by_album['album-123'].tracks) == 1 assert "album-789" in grouped.by_album - assert len(grouped.by_album["album-789"].unmatched) == 1 + assert len(grouped.by_album['album-789'].unmatched) == 1 assert len(grouped.nat_items) == 1 assert grouped.nat_items[0][1] == "recording-999" @@ -181,8 +181,8 @@ def test_session_loader_group_items_by_location_unknown_type(session_loader: Ses """Test grouping items with unknown location type.""" items = [ { - "file_path": "/test/unknown.mp3", - "location": {"type": "unknown_type"}, + 'file_path': "/test/unknown.mp3", + 'location': {'type': "unknown_type"}, }, ] @@ -197,7 +197,7 @@ def test_session_loader_group_items_by_location_missing_location(session_loader: """Test grouping items with missing location.""" items = [ { - "file_path": "/test/no_location.mp3", + 'file_path': "/test/no_location.mp3", }, ] @@ -212,16 +212,16 @@ def test_session_loader_extract_metadata(session_loader: SessionLoader) -> None: """Test extracting metadata from session items.""" items = [ { - "file_path": "/test/file1.mp3", - "metadata": {"tags": {"title": ["Song 1"], "artist": ["Artist 1"]}}, + 'file_path': "/test/file1.mp3", + 'metadata': {'tags': {'title': ["Song 1"], 
'artist': ["Artist 1"]}}, }, { - "file_path": "/test/file2.mp3", + 'file_path': "/test/file2.mp3", # No metadata }, { - "file_path": "/test/file3.mp3", - "metadata": {"tags": {"title": ["Song 3"]}}, + 'file_path': "/test/file3.mp3", + 'metadata': {'tags': {'title': ["Song 3"]}}, }, ] @@ -230,7 +230,7 @@ def test_session_loader_extract_metadata(session_loader: SessionLoader) -> None: assert len(metadata_map) == 2 assert Path("/test/file1.mp3") in metadata_map assert Path("/test/file3.mp3") in metadata_map - assert metadata_map[Path("/test/file1.mp3")]["title"] == ["Song 1"] + assert metadata_map[Path("/test/file1.mp3")]['title'] == ["Song 1"] def test_session_loader_extract_metadata_empty_items(session_loader: SessionLoader) -> None: @@ -243,8 +243,8 @@ def test_session_loader_extract_metadata_empty_items(session_loader: SessionLoad def test_session_loader_extract_metadata_no_metadata(session_loader: SessionLoader) -> None: """Test extracting metadata when no items have metadata.""" items = [ - {"file_path": "/test/file1.mp3"}, - {"file_path": "/test/file2.mp3"}, + {'file_path': "/test/file1.mp3"}, + {'file_path': "/test/file2.mp3"}, ] metadata_map = session_loader._extract_metadata(items) @@ -262,8 +262,8 @@ def test_session_loader_load_unmatched_albums(session_loader: SessionLoader) -> session_loader._load_unmatched_albums(unmatched_album_ids) - assert session_loader.loaded_albums["album-123"] == album_mock1 - assert session_loader.loaded_albums["album-456"] == album_mock2 + assert session_loader.loaded_albums['album-123'] == album_mock1 + assert session_loader.loaded_albums['album-456'] == album_mock2 assert session_loader.tagger.load_album.call_count == 2 @@ -280,7 +280,7 @@ def test_session_loader_load_albums(session_loader: SessionLoader) -> None: grouped_items = GroupedItems( unclustered=[], by_cluster={}, - by_album={"album-123": AlbumItems(unmatched=[], tracks=[]), "album-456": AlbumItems(unmatched=[], tracks=[])}, + by_album={'album-123': 
AlbumItems(unmatched=[], tracks=[]), 'album-456': AlbumItems(unmatched=[], tracks=[])}, nat_items=[], ) @@ -299,8 +299,8 @@ def load_album_side_effect(album_id): session_loader._load_albums(grouped_items) - assert session_loader.loaded_albums["album-123"] == album_mock1 - assert session_loader.loaded_albums["album-456"] == album_mock2 + assert session_loader.loaded_albums['album-123'] == album_mock1 + assert session_loader.loaded_albums['album-456'] == album_mock2 def test_session_loader_load_albums_no_albums(session_loader: SessionLoader) -> None: @@ -322,16 +322,16 @@ def test_session_loader_load_album_files(session_loader: SessionLoader) -> None: """Test loading files into albums.""" album_mock = Mock(spec=Album) album_mock.unmatched_files = Mock() - session_loader.loaded_albums = {"album-123": album_mock} + session_loader.loaded_albums = {'album-123': album_mock} by_album = { - "album-123": AlbumItems( + 'album-123': AlbumItems( unmatched=[Path("/test/unmatched.mp3")], tracks=[(Path("/test/track.mp3"), "recording-456")], ) } - with patch.object(session_loader.track_mover, 'move_files_to_tracks') as mock_move: + with patch.object(session_loader.track_mover, "move_files_to_tracks") as mock_move: session_loader._load_album_files(by_album) session_loader.tagger.add_files.assert_called_once() @@ -341,9 +341,9 @@ def test_session_loader_load_album_files(session_loader: SessionLoader) -> None: def test_session_loader_load_album_files_no_files(session_loader: SessionLoader) -> None: """Test loading album files when no files are present.""" album_mock = Mock(spec=Album) - session_loader.loaded_albums = {"album-123": album_mock} + session_loader.loaded_albums = {'album-123': album_mock} - by_album = {"album-123": AlbumItems(unmatched=[], tracks=[])} + by_album = {'album-123': AlbumItems(unmatched=[], tracks=[])} session_loader._load_album_files(by_album) @@ -358,7 +358,7 @@ def test_session_loader_apply_track_overrides(session_loader: SessionLoader) -> 
track_mock.metadata = {} # Add metadata dict album_mock.tracks = [track_mock] - overrides = {"track-123": {"title": ["New Title"], "artist": ["New Artist"]}} + overrides = {'track-123': {'title': ["New Title"], 'artist': ["New Artist"]}} # Mock run_when_loaded to call callback immediately def run_callback(callback): @@ -368,8 +368,8 @@ def run_callback(callback): session_loader._apply_track_overrides(album_mock, overrides) - assert track_mock.metadata["title"] == ["New Title"] - assert track_mock.metadata["artist"] == ["New Artist"] + assert track_mock.metadata['title'] == ["New Title"] + assert track_mock.metadata['artist'] == ["New Artist"] track_mock.update.assert_called_once() @@ -380,7 +380,7 @@ def test_session_loader_apply_track_overrides_track_not_found(session_loader: Se track_mock.id = "track-123" album_mock.tracks = [track_mock] - overrides = {"track-999": {"title": ["New Title"]}} # Non-existent track + overrides = {'track-999': {'title': ["New Title"]}} # Non-existent track # Mock run_when_loaded to call callback immediately def run_callback(callback): @@ -399,7 +399,7 @@ def test_session_loader_apply_album_overrides(session_loader: SessionLoader) -> album_mock = Mock(spec=Album) album_mock.metadata = {} # Add metadata dict - overrides = {"albumartist": ["New Artist"], "album": ["New Album"]} + overrides = {'albumartist': ["New Artist"], 'album': ["New Album"]} # Mock run_when_loaded to call callback immediately def run_callback(callback): @@ -409,8 +409,8 @@ def run_callback(callback): session_loader._apply_album_overrides(album_mock, overrides) - assert album_mock.metadata["albumartist"] == ["New Artist"] - assert album_mock.metadata["album"] == ["New Album"] + assert album_mock.metadata['albumartist'] == ["New Artist"] + assert album_mock.metadata['album'] == ["New Album"] album_mock.update.assert_called_once_with(update_tracks=False) @@ -418,7 +418,7 @@ def test_session_loader_schedule_metadata_application(session_loader: SessionLoa """Test 
scheduling metadata application.""" metadata_map = {Path("/test/file.mp3"): Metadata()} - with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + with patch("PyQt6.QtCore.QTimer.singleShot", mock_single_shot): session_loader._schedule_metadata_application(metadata_map) mock_single_shot.assert_called_once() @@ -428,7 +428,7 @@ def test_session_loader_schedule_metadata_application_empty_map( session_loader: SessionLoader, mock_single_shot ) -> None: """Test scheduling metadata application with empty map.""" - with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + with patch("PyQt6.QtCore.QTimer.singleShot", mock_single_shot): session_loader._schedule_metadata_application({}) mock_single_shot.assert_called_once() @@ -459,7 +459,7 @@ def test_session_loader_unset_restoring_flag_when_idle_pending_files( session_loader.tagger._pending_files_count = 1 session_loader.tagger.webservice.num_pending_web_requests = 0 - with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + with patch("PyQt6.QtCore.QTimer.singleShot", mock_single_shot): session_loader._unset_restoring_flag_when_idle() # Should schedule another check @@ -477,7 +477,7 @@ def test_session_loader_unset_restoring_flag_when_idle_pending_requests( session_loader.tagger._pending_files_count = 0 session_loader.tagger.webservice.num_pending_web_requests = 1 - with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + with patch("PyQt6.QtCore.QTimer.singleShot", mock_single_shot): session_loader._unset_restoring_flag_when_idle() # Should schedule another check @@ -501,7 +501,7 @@ def test_session_loader_unset_restoring_flag_when_idle_all_done(session_loader: def test_session_loader_finalize_loading(session_loader: SessionLoader, mock_single_shot) -> None: """Test finalizing the loading process.""" - with patch('PyQt6.QtCore.QTimer.singleShot', mock_single_shot): + with patch("PyQt6.QtCore.QTimer.singleShot", mock_single_shot): session_loader.finalize_loading() 
mock_single_shot.assert_called_once() diff --git a/test/session/test_session_manager.py b/test/session/test_session_manager.py index edb35e09a2..4b29a1edfa 100644 --- a/test/session/test_session_manager.py +++ b/test/session/test_session_manager.py @@ -27,25 +27,25 @@ from picard.session.session_manager import export_session, load_session_from_path, save_session_to_path -@patch('picard.session.session_manager.SessionExporter') +@patch("picard.session.session_manager.SessionExporter") def test_export_session_function(mock_exporter_class: Mock) -> None: """Test the export_session function.""" mock_exporter = Mock() mock_exporter_class.return_value = mock_exporter - mock_exporter.export_session.return_value = {"version": 1} + mock_exporter.export_session.return_value = {'version': 1} tagger_mock = Mock() result = export_session(tagger_mock) mock_exporter_class.assert_called_once() mock_exporter.export_session.assert_called_once_with(tagger_mock) - assert result == {"version": 1} + assert result == {'version': 1} -@patch('picard.session.session_manager.export_session') +@patch("picard.session.session_manager.export_session") def test_save_session_to_path(mock_export_session: Mock, tmp_path: Path) -> None: """Test saving session to path.""" - mock_export_session.return_value = {"version": 1, "items": []} + mock_export_session.return_value = {'version': 1, 'items': []} tagger_mock = Mock() session_file = tmp_path / "test" @@ -58,10 +58,10 @@ def test_save_session_to_path(mock_export_session: Mock, tmp_path: Path) -> None mock_export_session.assert_called_once_with(tagger_mock) -@patch('picard.session.session_manager.export_session') +@patch("picard.session.session_manager.export_session") def test_save_session_to_path_with_extension(mock_export_session: Mock, tmp_path: Path) -> None: """Test saving session to path with existing extension.""" - mock_export_session.return_value = {"version": 1, "items": []} + mock_export_session.return_value = {'version': 1, 'items': []} 
tagger_mock = Mock() session_file = tmp_path / "test.mbps.gz" @@ -72,10 +72,10 @@ def test_save_session_to_path_with_extension(mock_export_session: Mock, tmp_path mock_export_session.assert_called_once_with(tagger_mock) -@patch('picard.session.session_manager.export_session') +@patch("picard.session.session_manager.export_session") def test_save_session_to_path_with_different_extension(mock_export_session: Mock, tmp_path: Path) -> None: """Test saving session to path with different extension.""" - mock_export_session.return_value = {"version": 1, "items": []} + mock_export_session.return_value = {'version': 1, 'items': []} tagger_mock = Mock() session_file = tmp_path / "test.json" @@ -88,10 +88,10 @@ def test_save_session_to_path_with_different_extension(mock_export_session: Mock mock_export_session.assert_called_once_with(tagger_mock) -@patch('picard.session.session_manager.export_session') +@patch("picard.session.session_manager.export_session") def test_save_session_to_path_string_path(mock_export_session: Mock, tmp_path: Path) -> None: """Test saving session to string path.""" - mock_export_session.return_value = {"version": 1, "items": []} + mock_export_session.return_value = {'version': 1, 'items': []} tagger_mock = Mock() session_file = tmp_path / "test" @@ -102,13 +102,13 @@ def test_save_session_to_path_string_path(mock_export_session: Mock, tmp_path: P mock_export_session.assert_called_once_with(tagger_mock) -@patch('picard.session.session_manager.export_session') +@patch("picard.session.session_manager.export_session") def test_save_session_to_path_creates_json_content(mock_export_session: Mock, tmp_path: Path) -> None: """Test that saved session file contains proper JSON content.""" session_data = { - "version": 1, - "options": {"rename_files": True}, - "items": [{"file_path": "/test/file.mp3"}], + 'version': 1, + 'options': {'rename_files': True}, + 'items': [{'file_path': "/test/file.mp3"}], } mock_export_session.return_value = session_data @@ -126,12 
+126,12 @@ def test_save_session_to_path_creates_json_content(mock_export_session: Mock, tm content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") data = json.loads(content) - assert data["version"] == 1 - assert data["options"]["rename_files"] is True - assert data["items"][0]["file_path"] == "/test/file.mp3" + assert data['version'] == 1 + assert data['options']['rename_files'] is True + assert data['items'][0]['file_path'] == "/test/file.mp3" -@patch('picard.session.session_manager.SessionLoader') +@patch("picard.session.session_manager.SessionLoader") def test_load_session_from_path(mock_loader_class: Mock) -> None: """Test loading session from path.""" mock_loader = Mock() @@ -147,7 +147,7 @@ def test_load_session_from_path(mock_loader_class: Mock) -> None: mock_loader.finalize_loading.assert_called_once() -@patch('picard.session.session_manager.SessionLoader') +@patch("picard.session.session_manager.SessionLoader") def test_load_session_from_path_string_path(mock_loader_class: Mock) -> None: """Test loading session from string path.""" mock_loader = Mock() @@ -163,13 +163,13 @@ def test_load_session_from_path_string_path(mock_loader_class: Mock) -> None: mock_loader.finalize_loading.assert_called_once() -@patch('picard.session.session_manager.export_session') +@patch("picard.session.session_manager.export_session") def test_save_session_to_path_file_overwrite(mock_export_session: Mock, tmp_path: Path) -> None: """Test that save_session_to_path overwrites existing files.""" existing_file = tmp_path / "test.mbps.gz" existing_file.write_text("old content", encoding="utf-8") - mock_export_session.return_value = {"version": 1, "items": []} + mock_export_session.return_value = {'version': 1, 'items': []} tagger_mock = Mock() save_session_to_path(tagger_mock, existing_file) @@ -180,13 +180,13 @@ def test_save_session_to_path_file_overwrite(mock_export_session: Mock, tmp_path content = gzip.decompress(existing_file.read_bytes()).decode("utf-8") data = 
json.loads(content) - assert data["version"] == 1 + assert data['version'] == 1 def test_save_session_to_path_creates_directory(tmp_path: Path) -> None: """Test that save_session_to_path creates parent directories.""" - with patch('picard.session.session_manager.export_session') as mock_export: - mock_export.return_value = {"version": 1, "items": []} + with patch("picard.session.session_manager.export_session") as mock_export: + mock_export.return_value = {'version': 1, 'items': []} tagger_mock = Mock() session_file = tmp_path / "subdir" / "test.mbps.gz" @@ -199,11 +199,11 @@ def test_save_session_to_path_creates_directory(tmp_path: Path) -> None: def test_save_session_to_path_utf8_encoding(tmp_path: Path) -> None: """Test that save_session_to_path uses UTF-8 encoding.""" - with patch('picard.session.session_manager.export_session') as mock_export: + with patch("picard.session.session_manager.export_session") as mock_export: # Session data with Unicode characters session_data = { - "version": 1, - "items": [{"file_path": "/test/歌曲.mp3"}], + 'version': 1, + 'items': [{'file_path': "/test/歌曲.mp3"}], } mock_export.return_value = session_data @@ -221,11 +221,11 @@ def test_save_session_to_path_utf8_encoding(tmp_path: Path) -> None: def test_save_session_to_path_json_formatting(tmp_path: Path) -> None: """Test that save_session_to_path uses proper JSON formatting.""" - with patch('picard.session.session_manager.export_session') as mock_export: + with patch("picard.session.session_manager.export_session") as mock_export: session_data = { - "version": 1, - "options": {"rename_files": True, "move_files": False}, - "items": [], + 'version': 1, + 'options': {'rename_files': True, 'move_files': False}, + 'items': [], } mock_export.return_value = session_data @@ -247,10 +247,10 @@ def test_save_session_to_path_json_formatting(tmp_path: Path) -> None: def test_export_session_returns_dict() -> None: """Test that export_session returns a dictionary.""" - with 
patch('picard.session.session_manager.SessionExporter') as mock_exporter_class: + with patch("picard.session.session_manager.SessionExporter") as mock_exporter_class: mock_exporter = Mock() mock_exporter_class.return_value = mock_exporter - mock_exporter.export_session.return_value = {"version": 1, "items": []} + mock_exporter.export_session.return_value = {'version': 1, 'items': []} tagger_mock = Mock() result = export_session(tagger_mock) @@ -262,7 +262,7 @@ def test_export_session_returns_dict() -> None: def test_load_session_from_path_loader_initialization() -> None: """Test that SessionLoader is properly initialized.""" - with patch('picard.session.session_manager.SessionLoader') as mock_loader_class: + with patch("picard.session.session_manager.SessionLoader") as mock_loader_class: mock_loader = Mock() mock_loader_class.return_value = mock_loader @@ -277,7 +277,7 @@ def test_load_session_from_path_loader_initialization() -> None: def test_load_session_from_path_loader_methods_called() -> None: """Test that all required SessionLoader methods are called.""" - with patch('picard.session.session_manager.SessionLoader') as mock_loader_class: + with patch("picard.session.session_manager.SessionLoader") as mock_loader_class: mock_loader = Mock() mock_loader_class.return_value = mock_loader @@ -293,8 +293,8 @@ def test_load_session_from_path_loader_methods_called() -> None: def test_save_session_to_path_extension_handling(tmp_path: Path) -> None: """Test various extension handling scenarios.""" - with patch('picard.session.session_manager.export_session') as mock_export: - mock_export.return_value = {"version": 1} + with patch("picard.session.session_manager.export_session") as mock_export: + mock_export.return_value = {'version': 1} tagger_mock = Mock() @@ -326,8 +326,8 @@ def test_session_constants_used_correctly(tmp_path: Path) -> None: assert SessionConstants.SESSION_FORMAT_VERSION == 1 # Test that the extension is used in save function - with 
patch('picard.session.session_manager.export_session') as mock_export: - mock_export.return_value = {"version": SessionConstants.SESSION_FORMAT_VERSION} + with patch("picard.session.session_manager.export_session") as mock_export: + mock_export.return_value = {'version': SessionConstants.SESSION_FORMAT_VERSION} tagger_mock = Mock() session_file = tmp_path / "session" diff --git a/test/session/test_sessions.py b/test/session/test_sessions.py index 21200ace85..488bc11e74 100644 --- a/test/session/test_sessions.py +++ b/test/session/test_sessions.py @@ -35,19 +35,19 @@ def test_export_session_empty(tmp_path: Path, cfg_options) -> None: data = export_session(_StubTagger(files=[], albums={})) assert isinstance(data, dict) assert data['version'] == 1 - assert set(data['options'].keys()) == {'rename_files', 'move_files', 'dont_write_tags'} + assert set(data['options'].keys()) == {"rename_files", "move_files", "dont_write_tags"} assert data['options']['dont_write_tags'] is True assert data['items'] == [] -@pytest.mark.parametrize('saved', [True, False]) +@pytest.mark.parametrize("saved", [True, False]) def test_export_session_includes_items_and_metadata_tags(cfg_options: None, tmp_path: Path, saved: bool) -> None: m = Metadata() - m['title'] = 'Song' - m['artist'] = 'Artist' - m['~internal'] = 'x' - m['length'] = '123456' - f = _StubFile(filename=str(tmp_path / 'a.flac'), metadata=m, saved=saved, parent_item=None) + m['title'] = "Song" + m['artist'] = "Artist" + m['~internal'] = "x" + m['length'] = "123456" + f = _StubFile(filename=str(tmp_path / "a.flac"), metadata=m, saved=saved, parent_item=None) # Provide baseline so deltas can be computed f.orig_metadata = Metadata() tagger = _StubTagger(files=[f]) @@ -56,19 +56,19 @@ def test_export_session_includes_items_and_metadata_tags(cfg_options: None, tmp_ assert isinstance(data['items'], list) and len(data['items']) == 1 item = data['items'][0] - assert Path(item['file_path']).name == 'a.flac' + assert 
Path(item['file_path']).name == "a.flac" loc = item['location'] - assert loc['type'] == 'unclustered' - assert 'album_id' not in loc and 'recording_id' not in loc + assert loc['type'] == "unclustered" + assert "album_id" not in loc and "recording_id" not in loc if saved: - assert 'metadata' not in item + assert "metadata" not in item else: # Only user-visible tags; internal and length excluded; values are lists tags = item['metadata']['tags'] - assert set(tags.keys()) == {'title', 'artist'} - assert isinstance(tags['title'], list) and tags['title'] == ['Song'] + assert set(tags.keys()) == {"title", "artist"} + assert isinstance(tags['title'], list) and tags['title'] == ["Song"] def test_export_session_options_reflect_config_flags(cfg_options: None) -> None: @@ -89,53 +89,53 @@ def test_export_session_options_reflect_config_flags(cfg_options: None) -> None: def test_export_session_captures_album_and_track_overrides(cfg_options: None, tmp_path: Path) -> None: # File present to ensure items list not empty, but focus is on overrides capture fm = Metadata() - fm['title'] = 'Song' - f = _StubFile(filename=str(tmp_path / 'b.mp3'), metadata=fm, saved=True, parent_item=None) + fm['title'] = "Song" + f = _StubFile(filename=str(tmp_path / "b.mp3"), metadata=fm, saved=True, parent_item=None) # Album-level override (albumartist changed) album_orig = Metadata() - album_orig['albumartist'] = 'Orig Artist' + album_orig['albumartist'] = "Orig Artist" album_cur = Metadata() - album_cur['albumartist'] = 'New Artist' + album_cur['albumartist'] = "New Artist" # Track-level override vs scripted_metadata; exclude length scripted = Metadata() - scripted['title'] = 'Old Title' - scripted['length'] = '1000' + scripted['title'] = "Old Title" + scripted['length'] = "1000" track_cur = Metadata() - track_cur['title'] = 'New Title' - track_cur['length'] = '2000' # must be excluded + track_cur['title'] = "New Title" + track_cur['length'] = "2000" # must be excluded - tr = _StubTrack('track-1', 
scripted=scripted, current=track_cur) - alb = _StubAlbum('album-1', orig=album_orig, current=album_cur, tracks=[tr]) + tr = _StubTrack("track-1", scripted=scripted, current=track_cur) + alb = _StubAlbum("album-1", orig=album_orig, current=album_cur, tracks=[tr]) tagger = _StubTagger(files=[f], albums={'album-1': alb}) data = export_session(tagger) # Track-level overrides captured and listified atr = data['album_track_overrides'] - assert 'album-1' in atr and 'track-1' in atr['album-1'] - assert atr['album-1']['track-1'] == {'title': ['New Title']} + assert "album-1" in atr and "track-1" in atr['album-1'] + assert atr['album-1']['track-1'] == {'title': ["New Title"]} # Album-level overrides captured and listified aor = data['album_overrides'] - assert aor == {'album-1': {'albumartist': ['New Artist']}} + assert aor == {'album-1': {'albumartist': ["New Artist"]}} @pytest.mark.parametrize( ("value", "expected"), [ - ('Rock', ['Rock']), - (['Rock', 'Pop'], ['Rock', 'Pop']), + ("Rock", ["Rock"]), + (["Rock", "Pop"], ["Rock", "Pop"]), ], ) def test_export_session_listifies_override_values(cfg_options: None, value: Any, expected: list[str]) -> None: # Construct album with scalar/list diffs album_orig = Metadata() - album_orig['genre'] = '' + album_orig['genre'] = "" album_cur = Metadata() album_cur['genre'] = value - alb = _StubAlbum('album-X', orig=album_orig, current=album_cur, tracks=[]) + alb = _StubAlbum("album-X", orig=album_orig, current=album_cur, tracks=[]) tagger = _StubTagger(files=[], albums={'album-X': alb}) data = export_session(tagger) @@ -147,7 +147,7 @@ def test_export_session_includes_unmatched_albums(cfg_options: None) -> None: # Create an album with no files matched to it album_orig = Metadata() album_cur = Metadata() - alb = _StubAlbum('album-unmatched', orig=album_orig, current=album_cur, tracks=[]) + alb = _StubAlbum("album-unmatched", orig=album_orig, current=album_cur, tracks=[]) # Tagger with no files but has the album loaded tagger = 
_StubTagger(files=[], albums={'album-unmatched': alb}) @@ -155,8 +155,8 @@ def test_export_session_includes_unmatched_albums(cfg_options: None) -> None: data = export_session(tagger) # Should include the unmatched album - assert 'unmatched_albums' in data - assert data['unmatched_albums'] == ['album-unmatched'] + assert "unmatched_albums" in data + assert data['unmatched_albums'] == ["album-unmatched"] def test_export_session_excludes_albums_with_files_from_unmatched(cfg_options: None, tmp_path: Path) -> None: @@ -170,13 +170,13 @@ def __init__(self, album_id: str) -> None: # Create an album album_orig = Metadata() album_cur = Metadata() - alb = _StubAlbum('album-with-files', orig=album_orig, current=album_cur, tracks=[]) + alb = _StubAlbum("album-with-files", orig=album_orig, current=album_cur, tracks=[]) # Create a file that's matched to the album fm = Metadata() - fm['title'] = 'Song' - parent_item = _StubParentItem('album-with-files') - f = _StubFile(filename=str(tmp_path / 'song.mp3'), metadata=fm, saved=True, parent_item=parent_item) + fm['title'] = "Song" + parent_item = _StubParentItem("album-with-files") + f = _StubFile(filename=str(tmp_path / "song.mp3"), metadata=fm, saved=True, parent_item=parent_item) # Tagger with the file and album tagger = _StubTagger(files=[f], albums={'album-with-files': alb}) @@ -184,5 +184,5 @@ def __init__(self, album_id: str) -> None: data = export_session(tagger) # Should not include the album in unmatched_albums since it has files - assert 'unmatched_albums' in data + assert "unmatched_albums" in data assert data['unmatched_albums'] == [] diff --git a/test/session/test_track_mover.py b/test/session/test_track_mover.py index 7ef3038ac7..58b1a76293 100644 --- a/test/session/test_track_mover.py +++ b/test/session/test_track_mover.py @@ -51,7 +51,7 @@ def test_track_mover_move_files_to_tracks(track_mover: TrackMover, mock_album: M """Test moving files to tracks.""" track_specs = [(Path("/test/file1.mp3"), "recording-123"), 
(Path("/test/file2.mp3"), "recording-456")] - with patch('picard.session.track_mover.RetryHelper'): + with patch("picard.session.track_mover.RetryHelper"): track_mover.move_files_to_tracks(mock_album, track_specs) mock_album.run_when_loaded.assert_called_once() @@ -68,7 +68,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) mock_retry_helper.retry_until.assert_called_once() @@ -79,7 +79,7 @@ def test_track_mover_move_file_to_nat(track_mover: TrackMover) -> None: fpath = Path("/test/file.mp3") recording_id = "recording-123" - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: track_mover.move_file_to_nat(fpath, recording_id) mock_retry_helper.retry_until.assert_called_once() @@ -101,7 +101,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper'): + with patch("picard.session.track_mover.RetryHelper"): track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) # Should not attempt move when file is pending @@ -122,7 +122,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) # Should not attempt move when file is not found @@ -148,7 +148,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper'): + with patch("picard.session.track_mover.RetryHelper"): 
track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) # Should not attempt move when track is not found @@ -176,7 +176,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: # Mock retry_until to call the action function immediately if condition is met def mock_retry_until(condition_fn, action_fn, delay_ms): if condition_fn(): @@ -200,7 +200,7 @@ def test_track_mover_move_file_to_nat_file_pending(track_mover: TrackMover) -> N file_mock.state = File.PENDING track_mover.tagger.files.get.return_value = file_mock - with patch('picard.session.track_mover.RetryHelper'): + with patch("picard.session.track_mover.RetryHelper"): track_mover.move_file_to_nat(fpath, recording_id) # Should not attempt NAT move when file is pending @@ -215,7 +215,7 @@ def test_track_mover_move_file_to_nat_file_not_found(track_mover: TrackMover) -> # Mock file not found track_mover.tagger.files.get.return_value = None - with patch('picard.session.track_mover.RetryHelper'): + with patch("picard.session.track_mover.RetryHelper"): track_mover.move_file_to_nat(fpath, recording_id) # Should not attempt NAT move when file is not found @@ -232,7 +232,7 @@ def test_track_mover_move_file_to_nat_success(track_mover: TrackMover) -> None: file_mock.state = 1 # Not PENDING (PENDING = 0) track_mover.tagger.files.get.return_value = file_mock - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: # Mock retry_until to call the action function immediately if condition is met def mock_retry_until(condition_fn, action_fn, delay_ms): if condition_fn(): @@ -267,7 +267,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper') as 
mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: track_mover.move_files_to_tracks(mock_album, track_specs) # Should schedule moves for all files @@ -303,7 +303,7 @@ def run_callback(callback): mock_album.run_when_loaded.side_effect = run_callback - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: track_mover.move_files_to_tracks(mock_album, [(fpath, recording_id)]) # Verify retry_until was called with correct parameters @@ -332,7 +332,7 @@ def test_track_mover_retry_until_condition_check_nat(track_mover: TrackMover) -> file_mock.state = 1 # Not PENDING (PENDING = 0) track_mover.tagger.files.get.return_value = file_mock - with patch('picard.session.track_mover.RetryHelper') as mock_retry_helper: + with patch("picard.session.track_mover.RetryHelper") as mock_retry_helper: track_mover.move_file_to_nat(fpath, recording_id) # Verify retry_until was called with correct parameters From b8c0b42bf8fd66eef8b1360bad3afec0f0d85bda Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Mon, 8 Sep 2025 21:23:37 -0400 Subject: [PATCH 07/30] Zas code review 20250908; own close session confirm box --- picard/const/defaults.py | 10 +++ picard/options.py | 10 +-- picard/session/constants.py | 94 +++++++++++++++++++++++--- picard/session/metadata_handler.py | 11 ++- picard/session/session_exporter.py | 12 ++-- picard/session/session_loader.py | 3 +- picard/ui/mainwindow/__init__.py | 76 ++++++++++++++++++++- test/session/test_session_constants.py | 60 ---------------- 8 files changed, 183 insertions(+), 93 deletions(-) delete mode 100644 test/session/test_session_constants.py diff --git a/picard/const/defaults.py b/picard/const/defaults.py index 8958de1af0..3fa37a9b56 100644 --- a/picard/const/defaults.py +++ b/picard/const/defaults.py @@ -169,3 +169,13 @@ DEFAULT_COVER_CONVERTING_FORMAT = 'JPEG' DEFAULT_QUICK_MENU_ITEMS = 
['save_images_to_tags', 'save_images_to_files'] + +# Metadata handling +# Prefix for internal/non-user-facing tags; filtered from exports and overrides. +INTERNAL_TAG_PREFIX = "~" + +# Tags that must never be overridden from sessions. Include values that are +# computed or come from file info and must reflect the current file (e.g. duration). +# 'length' is audio duration; '~length' is its display alias. Add more if we expose +# additional non-internal computed fields that should not be user-overridable. +EXCLUDED_OVERRIDE_TAGS = frozenset({"length", "~length"}) diff --git a/picard/options.py b/picard/options.py index d39e448834..cf429a1a47 100644 --- a/picard/options.py +++ b/picard/options.py @@ -500,31 +500,31 @@ def make_default_toolbar_layout(): 'setting', 'session_safe_restore', True, - title=N_(SessionMessages.SESSION_SAFE_RESTORE_TITLE), + title=SessionMessages.SESSION_SAFE_RESTORE_TITLE, ) BoolOption( 'setting', 'session_load_last_on_startup', False, - title=N_(SessionMessages.SESSION_LOAD_LAST_TITLE), + title=SessionMessages.SESSION_LOAD_LAST_TITLE, ) IntOption( 'setting', 'session_autosave_interval_min', 0, - title=N_(SessionMessages.SESSION_AUTOSAVE_TITLE), + title=SessionMessages.SESSION_AUTOSAVE_TITLE, ) BoolOption( 'setting', 'session_backup_on_crash', True, - title=N_(SessionMessages.SESSION_BACKUP_TITLE), + title=SessionMessages.SESSION_BACKUP_TITLE, ) BoolOption( 'setting', 'session_include_mb_data', False, - title=N_(SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE), + title=SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE, ) # picard/ui/searchdialog/album.py diff --git a/picard/session/constants.py b/picard/session/constants.py index ee7c7e1a99..eeb23fb295 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -24,21 +24,95 @@ including retry delays, file extensions, and excluded tags. 
""" +from picard.i18n import N_ + class SessionConstants: - """Constants for session management operations.""" + """Constants for session management operations. + + Retry delays + ------------ + These delays govern how often we re-check readiness during session + load/restore using Qt timers. They coordinate operations across + asynchronous components (file scanning, network lookups, album/track + population, UI creation) without requiring deep refactors. + + Attributes + ---------- + DEFAULT_RETRY_DELAY_MS : int + General-purpose delay (milliseconds) for deferred actions that need + other subsystems to settle first. Used for: + - Applying saved metadata / tag deltas once files are loaded + (see `MetadataHandler.apply_saved_metadata_if_any`, + `MetadataHandler.apply_tag_deltas_if_any`). + - Restoring UI state (expanding albums) once UI items exist + (see `SessionLoader._restore_ui_state`). + - Finalizing the restoring flag when network/disk operations are idle + (see `SessionLoader._unset_restoring_flag_when_idle`). + + Trade-offs + ---------- + - Too short: Excess CPU wake-ups, risk of race-condition flapping, + unnecessary network/UI churn. + - Too long: Noticeable lag for metadata application and UI finalize. + + Tuning + ------ + - Shorten for tests, small sessions, fast machines (snappier UI). + - Lengthen for very large sessions, slow I/O/network (reduce churn). + + FAST_RETRY_DELAY_MS : int + Lower-latency delay (milliseconds) for local readiness checks where + objects stabilize quickly (e.g., file/album becomes ready) and we want + prompt feedback. Used for: + - Moving files to tracks once file/album are ready + (see `TrackMover.move_files_to_tracks`). + - Specialized helpers like `RetryHelper.retry_until_file_ready` and + `RetryHelper.retry_until_album_ready`. + + Trade-offs + ---------- + - Too short: High-frequency polling of local state, potential CPU + spikes on large batches. + - Too long: Sluggish track moves and perceived restore latency. 
+ + Notes + ----- + What is being retried + Readiness checks and deferred execution (polling until conditions are + true), not re-execution of failed logic. + + Why retries are needed + In an event-driven Qt architecture not all components emit precise + "ready" signals, and many operations require multiple conditions to be + true simultaneously (e.g., file loaded AND album tracks available AND + UI node created). Timed re-checks are a pragmatic coordination + mechanism. + + Alternative (fully async/signals) + We could replace polls with explicit signals/awaitables + (e.g., file_ready, album_tracks_loaded, ui_item_created, webservice_idle), + but this requires cross-cutting changes across `File`, `Album`, UI, + WebService, and `Tagger`. Incremental migration is possible; until then + these delays balance responsiveness and load. + """ # File handling SESSION_FILE_EXTENSION = ".mbps.gz" SESSION_FORMAT_VERSION = 1 # Retry delays in milliseconds + # Used by Qt timers for retry/poll loops during session load/restore. + # Balance responsiveness with CPU/network load: shorter feels snappier + # but risks busy-looping and churn; longer reduces load but adds visible lag. DEFAULT_RETRY_DELAY_MS = 200 - FAST_RETRY_DELAY_MS = 150 - # Metadata handling - INTERNAL_TAG_PREFIX = "~" - EXCLUDED_OVERRIDE_TAGS = frozenset({"length", "~length"}) + # General retries (e.g. metadata application, UI finalize). + # Adjust up for huge sessions/slow I/O; down for tests/small sessions/fast + # machines. + FAST_RETRY_DELAY_MS = 150 + # Local readiness checks (files/albums becoming ready, track moves). + # Too short ⇒ high CPU/race flapping; too long ⇒ sluggish moves/restore. 
# Location types LOCATION_UNCLUSTERED = "unclustered" @@ -57,8 +131,8 @@ class SessionMessages: """ # Option titles (API/config) - SESSION_SAFE_RESTORE_TITLE = "Honor local edits and placement on load (no auto-matching)" - SESSION_LOAD_LAST_TITLE = "Load last saved session on startup" - SESSION_AUTOSAVE_TITLE = "Auto-save session every N minutes (0 disables)" - SESSION_BACKUP_TITLE = "Attempt to keep a session backup on unexpected shutdown" - SESSION_INCLUDE_MB_DATA_TITLE = "Include MusicBrainz data in saved sessions (faster loads, risk of stale data)" + SESSION_SAFE_RESTORE_TITLE = N_("Honor local edits and placement on load (no auto-matching)") + SESSION_LOAD_LAST_TITLE = N_("Load last saved session on startup") + SESSION_AUTOSAVE_TITLE = N_("Auto-save session every N minutes (0 disables)") + SESSION_BACKUP_TITLE = N_("Attempt to keep a session backup on unexpected shutdown") + SESSION_INCLUDE_MB_DATA_TITLE = N_("Include MusicBrainz data in saved sessions (faster loads, risk of stale data)") diff --git a/picard/session/metadata_handler.py b/picard/session/metadata_handler.py index aacb9004c5..2fa0d77c56 100644 --- a/picard/session/metadata_handler.py +++ b/picard/session/metadata_handler.py @@ -30,6 +30,7 @@ from typing import Any from picard import log +from picard.const.defaults import EXCLUDED_OVERRIDE_TAGS, INTERNAL_TAG_PREFIX from picard.file import File from picard.metadata import Metadata from picard.session.constants import SessionConstants @@ -54,12 +55,12 @@ def serialize_metadata_for_file(file: File) -> dict[str, list[Any]]: Notes ----- - Only user-visible tags are serialized, internal tags (starting with ~) - and length are excluded. + Only user-visible tags are serialized. Internal tags (starting with ~) + and tags in the excluded override set are not included. 
""" tags: dict[str, list[Any]] = {} for key, values in file.metadata.rawitems(): - if key.startswith(SessionConstants.INTERNAL_TAG_PREFIX) or key == "length": + if key.startswith(INTERNAL_TAG_PREFIX) or key in EXCLUDED_OVERRIDE_TAGS: continue # Copy as list to be JSON serializable tags[key] = list(values) @@ -204,9 +205,7 @@ def apply_tag_deltas_if_any(tagger: Any, file_path_to_tags: dict[Path, dict[str, # Merge deltas onto current metadata; preserve length md = Metadata(file.metadata) for key, values in tags.items(): - if key in SessionConstants.EXCLUDED_OVERRIDE_TAGS or str(key).startswith( - SessionConstants.INTERNAL_TAG_PREFIX - ): + if key in EXCLUDED_OVERRIDE_TAGS or str(key).startswith(INTERNAL_TAG_PREFIX): continue md[key] = MetadataHandler.as_list(values) MetadataHandler.safe_apply_metadata(file, md) diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index 536e7e5aff..bf92c66a0f 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -31,6 +31,7 @@ from picard.album import NatAlbum from picard.config import get_config +from picard.const.defaults import EXCLUDED_OVERRIDE_TAGS, INTERNAL_TAG_PREFIX from picard.session.constants import SessionConstants from picard.session.location_detector import LocationDetector from picard.session.metadata_handler import MetadataHandler @@ -204,8 +205,7 @@ def _export_file_item(self, file: Any) -> dict[str, Any]: delta_tags = { k: MetadataHandler.as_list(v) for k, v in diff.rawitems() - if k not in SessionConstants.EXCLUDED_OVERRIDE_TAGS - and not str(k).startswith(SessionConstants.INTERNAL_TAG_PREFIX) + if k not in EXCLUDED_OVERRIDE_TAGS and not str(k).startswith(INTERNAL_TAG_PREFIX) } if delta_tags: entry['metadata'] = {'tags': delta_tags} @@ -274,9 +274,7 @@ def _export_metadata_overrides( album_diff = album.metadata.diff(album.orig_metadata) if album_diff: album_meta_overrides[album.id] = { - k: MetadataHandler.as_list(v) - for k, v in 
album_diff.rawitems() - if k not in SessionConstants.EXCLUDED_OVERRIDE_TAGS + k: MetadataHandler.as_list(v) for k, v in album_diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS } # Track-level overrides @@ -286,9 +284,7 @@ def _export_metadata_overrides( diff = track.metadata.diff(track.scripted_metadata) if diff: overrides_for_album[track.id] = { - k: MetadataHandler.as_list(v) - for k, v in diff.rawitems() - if k not in SessionConstants.EXCLUDED_OVERRIDE_TAGS + k: MetadataHandler.as_list(v) for k, v in diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS } if overrides_for_album: diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 06794ed6f3..5fcf054e79 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -36,6 +36,7 @@ from picard.album import Album from picard.config import get_config +from picard.const.defaults import EXCLUDED_OVERRIDE_TAGS from picard.i18n import gettext as _ from picard.session.constants import SessionConstants from picard.session.metadata_handler import MetadataHandler @@ -478,7 +479,7 @@ def run() -> None: # Apply overrides to track metadata so columns reflect user edits for tag, values in tags.items(): # Never override computed lengths - if tag in SessionConstants.EXCLUDED_OVERRIDE_TAGS: + if tag in EXCLUDED_OVERRIDE_TAGS: continue tr.metadata[tag] = MetadataHandler.as_list(values) tr.update() diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 6529ad9b3c..e4d7bccad8 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -46,6 +46,7 @@ from collections import namedtuple +from contextlib import suppress from copy import deepcopy import datetime from functools import partial @@ -405,6 +406,72 @@ def show_quit_confirmation(self): return True + def _has_session_content(self): + """Return True if there is session content to save/close.""" + if self.tagger.files or self.tagger.albums: + return True + + with 
suppress(AttributeError, TypeError): + return bool(self.tagger.clusters and len(self.tagger.clusters) > 0) + return False + + def show_close_session_confirmation(self): + """Ask the user whether to save the session before closing. + + Returns + ------- + bool + True if closing should proceed, False to cancel. + """ + # If there is nothing to save, proceed without asking + if not self._has_session_content(): + return True + + QMessageBox = QtWidgets.QMessageBox + msg = QMessageBox(self) + msg.setIcon(QMessageBox.Icon.Question) + msg.setWindowModality(QtCore.Qt.WindowModality.WindowModal) + msg.setWindowTitle(_("Close Session")) + msg.setText(_("Do you want to save the current session before closing?")) + msg.setInformativeText(_("Closing the session will clear all files, clusters and albums from the view.")) + cancel_btn = msg.addButton(QMessageBox.StandardButton.Cancel) + save_btn = msg.addButton(_("&Save Session"), QMessageBox.ButtonRole.YesRole) + msg.addButton(_("Do&n't Save"), QMessageBox.ButtonRole.NoRole) + msg.setDefaultButton(save_btn) + msg.exec() + + clicked = msg.clickedButton() + if clicked == cancel_btn: + return False + if clicked == save_btn: + # If saving fails or is cancelled, abort closing + return self._save_session_to_known_path_or_prompt() + # Don't Save + return True + + def _save_session_to_known_path_or_prompt(self) -> bool: + """Save session to last known session path if available; otherwise prompt. + + Returns + ------- + bool + True if saved successfully, False otherwise. 
+ """ + from picard.session.session_manager import save_session_to_path + + config = get_config() + path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None + if path: + try: + save_session_to_path(self.tagger, path) + self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + return True + except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: + QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) + return False + # Fallback to prompting for a path + return bool(self.save_session()) + def saveWindowState(self): config = get_config() config.persist['window_state'] = self.saveState() @@ -1065,8 +1132,11 @@ def save_session(self): config.persist['current_directory'] = os.path.dirname(path) config.persist['last_session_path'] = path self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) - except Exception as e: + return True + except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) + return False + return False def load_session(self): from picard.session.session_manager import load_session_from_path @@ -1094,8 +1164,8 @@ def load_session(self): QtWidgets.QMessageBox.critical(self, _("Failed to load session"), str(e)) def close_session(self): - # Ask to save if unsaved files - if not self.show_quit_confirmation(): + # Use dedicated confirmation for closing sessions (save / don't save / cancel) + if not self.show_close_session_confirmation(): return # Clear current state self.tagger.clear_session() diff --git a/test/session/test_session_constants.py b/test/session/test_session_constants.py deleted file mode 100644 index 4c5bba279d..0000000000 --- a/test/session/test_session_constants.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Picard, the next-generation MusicBrainz tagger -# -# Copyright (C) 2025 The MusicBrainz 
Team -# -# This program is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License -# as published by the Free Software Foundation; either version 2 -# of the License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - -"""Tests for session constants.""" - -from picard.session.constants import SessionConstants - -import pytest - - -def test_session_constants_values() -> None: - """Test that SessionConstants has expected values.""" - assert SessionConstants.SESSION_FILE_EXTENSION == ".mbps.gz" - assert SessionConstants.SESSION_FORMAT_VERSION == 1 - assert SessionConstants.DEFAULT_RETRY_DELAY_MS == 200 - assert SessionConstants.FAST_RETRY_DELAY_MS == 150 - assert SessionConstants.INTERNAL_TAG_PREFIX == "~" - assert frozenset({"length", "~length"}) == SessionConstants.EXCLUDED_OVERRIDE_TAGS - assert SessionConstants.LOCATION_UNCLUSTERED == "unclustered" - assert SessionConstants.LOCATION_TRACK == "track" - assert SessionConstants.LOCATION_ALBUM_UNMATCHED == "album_unmatched" - assert SessionConstants.LOCATION_CLUSTER == "cluster" - assert SessionConstants.LOCATION_NAT == "nat" - - -def test_session_constants_immutable() -> None: - """Test that SessionConstants values are immutable.""" - # Test that frozenset is immutable - with pytest.raises(AttributeError): - SessionConstants.EXCLUDED_OVERRIDE_TAGS.add("new_tag") - - # Test that constants are class attributes - assert hasattr(SessionConstants, 'SESSION_FILE_EXTENSION') - assert hasattr(SessionConstants, 
'SESSION_FORMAT_VERSION') - assert hasattr(SessionConstants, 'DEFAULT_RETRY_DELAY_MS') - assert hasattr(SessionConstants, 'FAST_RETRY_DELAY_MS') - assert hasattr(SessionConstants, 'INTERNAL_TAG_PREFIX') - assert hasattr(SessionConstants, 'EXCLUDED_OVERRIDE_TAGS') - assert hasattr(SessionConstants, 'LOCATION_UNCLUSTERED') - assert hasattr(SessionConstants, 'LOCATION_TRACK') - assert hasattr(SessionConstants, 'LOCATION_ALBUM_UNMATCHED') - assert hasattr(SessionConstants, 'LOCATION_CLUSTER') - assert hasattr(SessionConstants, 'LOCATION_NAT') From 7bff0c42df40bd5c83f5b74d2d3ec66797944b60 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Mon, 8 Sep 2025 22:16:21 -0400 Subject: [PATCH 08/30] Use try...except...else pattern --- picard/ui/mainwindow/__init__.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index e4d7bccad8..bb4ba8c235 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -464,11 +464,13 @@ def _save_session_to_known_path_or_prompt(self) -> bool: if path: try: save_session_to_path(self.tagger, path) - self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) - return True except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) return False + else: + self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + return True + # Fallback to prompting for a path return bool(self.save_session()) @@ -1131,11 +1133,12 @@ def save_session(self): save_session_to_path(self.tagger, path) config.persist['current_directory'] = os.path.dirname(path) config.persist['last_session_path'] = path - self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) - return True except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: 
QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) return False + else: + self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + return True return False def load_session(self): From 86eef7aa31ad667110adce66855364639f7b3b7c Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 06:27:09 -0400 Subject: [PATCH 09/30] Add flyout menu for recent sessions --- picard/options.py | 1 + picard/session/constants.py | 4 + picard/ui/mainwindow/__init__.py | 122 ++++++++++++++++++++++++++++--- 3 files changed, 116 insertions(+), 11 deletions(-) diff --git a/picard/options.py b/picard/options.py index cf429a1a47..f6406e7e83 100644 --- a/picard/options.py +++ b/picard/options.py @@ -148,6 +148,7 @@ ListOption('persist', 'filters_FileTreeView', None) ListOption('persist', 'filters_AlbumTreeView', None) TextOption('persist', 'last_session_path', '') +ListOption('persist', 'recent_sessions', []) TextOption('persist', 'session_autosave_path', '') # picard/ui/metadatabox.py diff --git a/picard/session/constants.py b/picard/session/constants.py index eeb23fb295..1772a9aa25 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -101,6 +101,10 @@ class SessionConstants: SESSION_FILE_EXTENSION = ".mbps.gz" SESSION_FORMAT_VERSION = 1 + # Recent sessions + # Number of recent session entries shown in the UI flyout menu. + RECENT_SESSIONS_MAX = 5 + # Retry delays in milliseconds # Used by Qt timers for retry/poll loops during session load/restore. 
# Balance responsiveness with CPU/network load: shorter feels snappier diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index bb4ba8c235..0e6d707b8c 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -432,7 +432,7 @@ def show_close_session_confirmation(self): msg.setIcon(QMessageBox.Icon.Question) msg.setWindowModality(QtCore.Qt.WindowModality.WindowModal) msg.setWindowTitle(_("Close Session")) - msg.setText(_("Do you want to save the current session before closing?")) + msg.setText(_("Do you want to save the current session before continuing?")) msg.setInformativeText(_("Closing the session will clear all files, clusters and albums from the view.")) cancel_btn = msg.addButton(QMessageBox.StandardButton.Cancel) save_btn = msg.addButton(_("&Save Session"), QMessageBox.ButtonRole.YesRole) @@ -460,7 +460,7 @@ def _save_session_to_known_path_or_prompt(self) -> bool: from picard.session.session_manager import save_session_to_path config = get_config() - path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None + path = config.persist['last_session_path'] or '' if path: try: save_session_to_path(self.tagger, path) @@ -469,6 +469,7 @@ def _save_session_to_known_path_or_prompt(self) -> bool: return False else: self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + self._add_to_recent_sessions(path) return True # Fallback to prompting for a path @@ -623,6 +624,67 @@ def _create_cd_lookup_menu(self): self.cd_lookup_menu = menu self._init_cd_lookup_menu() + def _create_recent_sessions_menu(self): + """Create and return the "Recent Sessions" submenu. + + The menu content is populated from the persisted recent sessions list. 
+ """ + self.recent_sessions_menu = QtWidgets.QMenu(_("Recent Sessions")) + self.recent_sessions_menu.setIcon(icontheme.lookup('document-open-recent')) + self._populate_recent_sessions_menu() + return self.recent_sessions_menu + + def _get_recent_sessions(self): + """Return the list of recent session paths from persistent config.""" + config = get_config() + value = config.persist['recent_sessions'] + if isinstance(value, list): + return [str(p) for p in value] + return [] + + def _set_recent_sessions(self, paths): + """Persist the given list of recent session paths and refresh the menu.""" + config = get_config() + config.persist['recent_sessions'] = list(paths) + if hasattr(self, 'recent_sessions_menu') and isinstance(self.recent_sessions_menu, QtWidgets.QMenu): + self._populate_recent_sessions_menu() + + def _add_to_recent_sessions(self, path): + """Insert a path at the front of the recent sessions list, de-duplicated and capped.""" + if not path: + return + paths = self._get_recent_sessions() + # De-duplicate while preserving order by removing existing entry first + try: + paths.remove(path) + except ValueError: + pass + paths.insert(0, path) + # Cap to configured maximum + pruned = paths[: SessionConstants.RECENT_SESSIONS_MAX] + self._set_recent_sessions(pruned) + + def _populate_recent_sessions_menu(self): + """Populate the recent sessions submenu based on persisted list.""" + menu = self.recent_sessions_menu + if not menu: + return + menu.clear() + paths = self._get_recent_sessions() + if not paths: + empty = menu.addAction(_("Empty")) + empty.setEnabled(False) + menu.setEnabled(False) + return + menu.setEnabled(True) + for index, path in enumerate(paths, start=1): + label = f"{index}. 
{os.path.basename(path) or path}" + action = menu.addAction(label) + action.setData(path) + action.setToolTip(path) + action.setStatusTip(path) + action.triggered.connect(partial(self._load_session_from_recent, path)) + def _init_cd_lookup_menu(self): if discid is None: log.warning("CDROM: discid library not found - Lookup CD functionality disabled") @@ -736,6 +798,8 @@ def add_menu(menu_title, *args): MainAction.SUBMIT_ACOUSTID, '-', MainAction.LOAD_SESSION, + # Recent Sessions submenu + self._create_recent_sessions_menu(), MainAction.SAVE_SESSION, MainAction.CLOSE_SESSION, '-', @@ -1120,6 +1184,20 @@ def save_session(self): from picard.ui.util import FileDialog config = get_config() + # If a last session path is known, save silently to it + known_path = config.persist['last_session_path'] or '' + if known_path: + try: + save_session_to_path(self.tagger, known_path) + except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: + QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) + return False + else: + self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': known_path}) + self._add_to_recent_sessions(known_path) + return True + + # Otherwise, prompt for a new path start_dir = config.persist['current_directory'] or os.path.expanduser('~') path, _filter = FileDialog.getSaveFileName( parent=self, @@ -1138,6 +1216,7 @@ def save_session(self): return False else: self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + self._add_to_recent_sessions(path) return True return False @@ -1146,6 +1225,10 @@ def load_session(self): from picard.ui.util import FileDialog + # Ask whether to save/close current session before loading a new one + if not self.show_close_session_confirmation(): + return + config = get_config() start_dir = config.persist['current_directory'] or os.path.expanduser('~') path, _filter = FileDialog.getOpenFileName( @@ -1156,15 +1239,29 @@ def load_session(self): ), ) if 
path: - try: - # Initial progress feedback before heavy load - self.set_statusbar_message(N_("Loading session from '%(path)s' …"), {'path': path}) - load_session_from_path(self.tagger, path) - config.persist['current_directory'] = os.path.dirname(path) - config.persist['last_session_path'] = path - self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) - except Exception as e: - QtWidgets.QMessageBox.critical(self, _("Failed to load session"), str(e)) + # Initial progress feedback before heavy load + self.set_statusbar_message(N_("Loading session from '%(path)s' …"), {'path': path}) + load_session_from_path(self.tagger, path) + config.persist['current_directory'] = os.path.dirname(path) + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) + # Track in recent sessions + self._add_to_recent_sessions(path) + + def _load_session_from_recent(self, path): + from picard.session.session_manager import load_session_from_path + + # Ask whether to save/close current session before loading a new one + if not self.show_close_session_confirmation(): + return + + self.set_statusbar_message(N_("Loading session from '%(path)s' …"), {'path': path}) + load_session_from_path(self.tagger, path) + config = get_config() + config.persist['current_directory'] = os.path.dirname(path) + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) + self._add_to_recent_sessions(path) def close_session(self): # Use dedicated confirmation for closing sessions (save / don't save / cancel) @@ -1172,6 +1269,9 @@ def close_session(self): return # Clear current state self.tagger.clear_session() + # Reset last_session_path so subsequent saves prompt for a new path + config = get_config() + config.persist['last_session_path'] = '' def remove_selected_objects(self): """Tell the tagger to remove the selected objects.""" From 
1c14bc4f59237f70d5f3118fdd4c0c78ae82962e Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 07:35:28 -0400 Subject: [PATCH 10/30] Make session_folder configurable and default --- picard/const/appdirs.py | 22 +++++++++++++++++++++- picard/options.py | 6 ++++++ picard/session/constants.py | 1 + picard/tagger.py | 8 +++----- picard/ui/mainwindow/__init__.py | 18 ++++++------------ picard/ui/options/sessions.py | 32 ++++++++++++++++++++++++++++++++ test/session/test_sessions.py | 30 ++++++++++++++++++++++++++++++ 7 files changed, 99 insertions(+), 18 deletions(-) diff --git a/picard/const/appdirs.py b/picard/const/appdirs.py index bde45a13e5..e9b08c3057 100644 --- a/picard/const/appdirs.py +++ b/picard/const/appdirs.py @@ -21,7 +21,7 @@ import os -import os.path +from pathlib import Path from PyQt6.QtCore import ( QCoreApplication, @@ -59,3 +59,23 @@ def plugin_folder(): # FIXME: This really should be in QStandardPaths.StandardLocation.AppDataLocation instead, # but this is a breaking change that requires data migration return os.path.normpath(os.environ.get('PICARD_PLUGIN_DIR', os.path.join(config_folder(), 'plugins'))) + + +def sessions_folder(): + """Get the sessions folder path. + + Returns + ------- + str + The path to the sessions folder. If a custom path is configured, + returns that path. Otherwise, returns the default path + /sessions. 
+ """ + from picard.config import get_config + + config = get_config() + custom_path = config.setting['session_folder_path'] + if custom_path: + return str(Path(custom_path).resolve()) + else: + return str(Path(config_folder()) / 'sessions') diff --git a/picard/options.py b/picard/options.py index f6406e7e83..efcc8a3f77 100644 --- a/picard/options.py +++ b/picard/options.py @@ -527,6 +527,12 @@ def make_default_toolbar_layout(): False, title=SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE, ) +TextOption( + 'setting', + 'session_folder_path', + '', + title=SessionMessages.SESSION_FOLDER_PATH_TITLE, +) # picard/ui/searchdialog/album.py # diff --git a/picard/session/constants.py b/picard/session/constants.py index 1772a9aa25..17bacbc53b 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -140,3 +140,4 @@ class SessionMessages: SESSION_AUTOSAVE_TITLE = N_("Auto-save session every N minutes (0 disables)") SESSION_BACKUP_TITLE = N_("Attempt to keep a session backup on unexpected shutdown") SESSION_INCLUDE_MB_DATA_TITLE = N_("Include MusicBrainz data in saved sessions (faster loads, risk of stale data)") + SESSION_FOLDER_PATH_TITLE = N_("Sessions folder path (leave empty for default)") diff --git a/picard/tagger.py b/picard/tagger.py index aa0aa49954..dc92efc694 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -130,6 +130,7 @@ ) from picard.releasegroup import ReleaseGroup from picard.remotecommands import RemoteCommands +from picard.session.session_manager import save_session_to_path from picard.track import ( NonAlbumTrack, Track, @@ -640,8 +641,6 @@ def exit(self): with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): config = get_config() if config.setting['session_backup_on_crash']: - from picard.session.session_manager import save_session_to_path - path = config.persist['session_autosave_path'] or config.persist['last_session_path'] if path: save_session_to_path(self, path) @@ -670,8 +669,6 
@@ def run(self): config = get_config() interval_min = int(config.setting['session_autosave_interval_min']) if interval_min > 0: - from picard.session.session_manager import save_session_to_path - self._session_autosave_timer = QtCore.QTimer(self) self._session_autosave_timer.setInterval(max(1, interval_min) * 60 * 1000) @@ -680,9 +677,10 @@ def _autosave(): if not path: path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None if not path: + from picard.const.appdirs import sessions_folder from picard.session.constants import SessionConstants - path = Path(USER_DIR) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) + path = Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) config.persist['session_autosave_path'] = path with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 0e6d707b8c..8e6c477808 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -74,6 +74,7 @@ get_config, ) from picard.const import PROGRAM_UPDATE_LEVELS +from picard.const.appdirs import sessions_folder from picard.const.sys import ( IS_MACOS, IS_WIN, @@ -92,6 +93,7 @@ ) from picard.script import get_file_naming_script_presets from picard.session.constants import SessionConstants +from picard.session.session_manager import load_session_from_path, save_session_to_path from picard.track import Track from picard.util import ( IgnoreUpdatesContext, @@ -457,8 +459,6 @@ def _save_session_to_known_path_or_prompt(self) -> bool: bool True if saved successfully, False otherwise. 
""" - from picard.session.session_manager import save_session_to_path - config = get_config() path = config.persist['last_session_path'] or '' if path: @@ -1179,10 +1179,6 @@ def save(self): self.tagger.save(self.selected_objects) def save_session(self): - from picard.session.session_manager import save_session_to_path - - from picard.ui.util import FileDialog - config = get_config() # If a last session path is known, save silently to it known_path = config.persist['last_session_path'] or '' @@ -1198,7 +1194,7 @@ def save_session(self): return True # Otherwise, prompt for a new path - start_dir = config.persist['current_directory'] or os.path.expanduser('~') + start_dir = config.persist['current_directory'] or sessions_folder() path, _filter = FileDialog.getSaveFileName( parent=self, dir=start_dir, @@ -1221,16 +1217,14 @@ def save_session(self): return False def load_session(self): - from picard.session.session_manager import load_session_from_path - - from picard.ui.util import FileDialog - # Ask whether to save/close current session before loading a new one if not self.show_close_session_confirmation(): return config = get_config() - start_dir = config.persist['current_directory'] or os.path.expanduser('~') + from picard.const.appdirs import sessions_folder + + start_dir = config.persist['current_directory'] or sessions_folder() path, _filter = FileDialog.getOpenFileName( parent=self, dir=start_dir, diff --git a/picard/ui/options/sessions.py b/picard/ui/options/sessions.py index 0d8f298fca..1d68c0f2cb 100644 --- a/picard/ui/options/sessions.py +++ b/picard/ui/options/sessions.py @@ -21,6 +21,7 @@ from PyQt6 import QtWidgets from picard.config import get_config +from picard.const.appdirs import sessions_folder from picard.extension_points.options_pages import register_options_page from picard.i18n import N_, gettext as _ from picard.session.constants import SessionMessages @@ -41,12 +42,27 @@ class SessionsOptionsPage(OptionsPage): 
('session_autosave_interval_min', ['autosave_spin']), ('session_backup_on_crash', ['backup_checkbox']), ('session_include_mb_data', ['include_mb_data_checkbox']), + ('session_folder_path', ['folder_path_edit']), ) def __init__(self, parent=None): super().__init__(parent) self.vbox = QtWidgets.QVBoxLayout(self) + # Sessions folder path + folder_layout = QtWidgets.QHBoxLayout() + self.folder_label = QtWidgets.QLabel(_(SessionMessages.SESSION_FOLDER_PATH_TITLE)) + self.folder_path_edit = QtWidgets.QLineEdit() + # Set placeholder text showing the default path + default_path = sessions_folder() + self.folder_path_edit.setPlaceholderText(default_path) + self.folder_browse_button = QtWidgets.QPushButton(_("Browse...")) + self.folder_browse_button.clicked.connect(self._browse_sessions_folder) + folder_layout.addWidget(self.folder_label) + folder_layout.addWidget(self.folder_path_edit) + folder_layout.addWidget(self.folder_browse_button) + self.vbox.addLayout(folder_layout) + self.safe_restore_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_SAFE_RESTORE_TITLE)) self.vbox.addWidget(self.safe_restore_checkbox) @@ -76,6 +92,7 @@ def load(self): self.autosave_spin.setValue(config.setting['session_autosave_interval_min']) self.backup_checkbox.setChecked(config.setting['session_backup_on_crash']) self.include_mb_data_checkbox.setChecked(config.setting['session_include_mb_data']) + self.folder_path_edit.setText(config.setting['session_folder_path']) def save(self): config = get_config() @@ -84,6 +101,21 @@ def save(self): config.setting['session_autosave_interval_min'] = int(self.autosave_spin.value()) config.setting['session_backup_on_crash'] = self.backup_checkbox.isChecked() config.setting['session_include_mb_data'] = self.include_mb_data_checkbox.isChecked() + config.setting['session_folder_path'] = self.folder_path_edit.text().strip() + + def _browse_sessions_folder(self): + """Open a folder selection dialog for the sessions folder.""" + from picard.ui.util import 
FileDialog + + current_path = self.folder_path_edit.text().strip() + if not current_path: + from picard.const.appdirs import config_folder + + current_path = config_folder() + + folder = FileDialog.getExistingDirectory(parent=self, dir=current_path, caption=_("Select Sessions Folder")) + if folder: + self.folder_path_edit.setText(folder) register_options_page(SessionsOptionsPage) diff --git a/test/session/test_sessions.py b/test/session/test_sessions.py index 488bc11e74..19efee0813 100644 --- a/test/session/test_sessions.py +++ b/test/session/test_sessions.py @@ -23,6 +23,7 @@ from typing import Any import picard.config as picard_config +from picard.const.appdirs import config_folder, sessions_folder from picard.metadata import Metadata from picard.session.session_manager import export_session @@ -186,3 +187,32 @@ def __init__(self, album_id: str) -> None: # Should not include the album in unmatched_albums since it has files assert "unmatched_albums" in data assert data['unmatched_albums'] == [] + + +def test_sessions_folder_default_path(cfg_options: None) -> None: + """Test that sessions_folder returns the default path when no custom path is set.""" + config = picard_config.get_config() + config.setting['session_folder_path'] = '' + + expected_path = Path(config_folder()) / 'sessions' + assert sessions_folder() == str(expected_path) + + +def test_sessions_folder_custom_path(cfg_options: None, tmp_path: Path) -> None: + """Test that sessions_folder returns the custom path when configured.""" + config = picard_config.get_config() + custom_path = str(tmp_path / 'custom_sessions') + config.setting['session_folder_path'] = custom_path + + assert sessions_folder() == custom_path + + +@pytest.mark.parametrize("custom_path", ["", "/some/custom/path", "relative/path"]) +def test_sessions_folder_path_normalization(cfg_options: None, custom_path: str) -> None: + """Test that sessions_folder normalizes paths correctly.""" + config = picard_config.get_config() + 
config.setting['session_folder_path'] = custom_path + + result = sessions_folder() + assert isinstance(result, str) + assert result == Path(result).as_posix() # Should be normalized From 69ca2122c2c2a2fbd4e12bb18b0a2ddd1aa6fd65 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 22:22:51 -0400 Subject: [PATCH 11/30] Clean up menus; add default filenames --- picard/tagger.py | 29 +++--- picard/ui/enums.py | 6 +- picard/ui/mainwindow/__init__.py | 152 ++++++++++++++++++++++++------- picard/ui/mainwindow/actions.py | 28 ++++-- picard/ui/options/sessions.py | 7 +- picard/ui/util.py | 17 ++++ 6 files changed, 180 insertions(+), 59 deletions(-) diff --git a/picard/tagger.py b/picard/tagger.py index dc92efc694..132906b3cc 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -101,6 +101,7 @@ BROWSER_INTEGRATION_LOCALHOST, USER_DIR, ) +from picard.const.appdirs import sessions_folder from picard.const.sys import ( FROZEN_TEMP_PATH, IS_FROZEN, @@ -130,7 +131,12 @@ ) from picard.releasegroup import ReleaseGroup from picard.remotecommands import RemoteCommands -from picard.session.session_manager import save_session_to_path +from picard.session.constants import SessionConstants +from picard.session.session_manager import ( + export_session as _export_session, + load_session_from_path, + save_session_to_path, +) from picard.track import ( NonAlbumTrack, Track, @@ -167,7 +173,7 @@ from picard.ui.searchdialog.album import AlbumSearchDialog from picard.ui.searchdialog.artist import ArtistSearchDialog from picard.ui.searchdialog.track import TrackSearchDialog -from picard.ui.util import FileDialog +from picard.ui.util import FileDialog, show_session_not_found_dialog # A "fix" for https://bugs.python.org/issue1438480 @@ -500,7 +506,6 @@ def iter_all_files(self): # ============================== def export_session(self) -> dict: from picard import config as _cfg - from picard.session.session_manager import export_session as _export_session # Expose config on self for 
session helpers self.config = _cfg # type: ignore[attr-defined] @@ -641,9 +646,8 @@ def exit(self): with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): config = get_config() if config.setting['session_backup_on_crash']: - path = config.persist['session_autosave_path'] or config.persist['last_session_path'] - if path: - save_session_to_path(self, path) + path = Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) + save_session_to_path(self, path) log.debug("Picard stopping") self.run_cleanup() @@ -655,10 +659,14 @@ def _run_init(self): if config.setting['session_load_last_on_startup']: last_path = config.persist['last_session_path'] if last_path: - with contextlib.suppress(OSError, PermissionError, FileNotFoundError, json.JSONDecodeError, KeyError): - from picard.session.session_manager import load_session_from_path - + try: load_session_from_path(self, last_path) + except FileNotFoundError: + show_session_not_found_dialog(self.window, last_path) + except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: + # Keep previous best-effort behavior for other errors + log.debug(f"Error loading session from {last_path}: {e}") + pass if self._to_load: self.load_to_picard(self._to_load) @@ -677,9 +685,6 @@ def _autosave(): if not path: path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None if not path: - from picard.const.appdirs import sessions_folder - from picard.session.constants import SessionConstants - path = Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) config.persist['session_autosave_path'] = path diff --git a/picard/ui/enums.py b/picard/ui/enums.py index 888034d771..c0539e7dbb 100644 --- a/picard/ui/enums.py +++ b/picard/ui/enums.py @@ -107,6 +107,8 @@ class MainAction(str, Enum): VIEW_INFO = 'view_info_action' VIEW_LOG = 'view_log_action' # Session management - SAVE_SESSION = 'save_session_action' + 
SAVE_SESSION_AS = 'save_session_action' + SAVE_SESSION = 'quick_save_session_action' LOAD_SESSION = 'load_session_action' - CLOSE_SESSION = 'close_session_action' + NEW_SESSION = 'close_session_action' + CLEAR_RECENT_SESSIONS = 'clear_recent_sessions_action' diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 8e6c477808..13f1da0d3c 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -52,6 +52,7 @@ from functools import partial import itertools import os.path +from pathlib import Path from PyQt6 import ( QtCore, @@ -103,6 +104,7 @@ open_local_path, reconnect, restore_method, + sanitize_filename, thread, throttle, webbrowser2, @@ -157,6 +159,7 @@ FileDialog, find_starting_directory, menu_builder, + show_session_not_found_dialog, ) @@ -473,7 +476,7 @@ def _save_session_to_known_path_or_prompt(self) -> bool: return True # Fallback to prompting for a path - return bool(self.save_session()) + return bool(self.save_session_as()) def saveWindowState(self): config = get_config() @@ -664,6 +667,20 @@ def _add_to_recent_sessions(self, path): pruned = paths[: SessionConstants.RECENT_SESSIONS_MAX] self._set_recent_sessions(pruned) + def clear_recent_sessions(self): + """Clear all recent session entries from the persistent config.""" + self._set_recent_sessions([]) + self.set_statusbar_message(_("Recent sessions cleared")) + + def _remove_from_recent_sessions(self, path): + """Remove a specific path from the recent sessions list.""" + if not path: + return + paths = self._get_recent_sessions() + with suppress(ValueError): + paths.remove(path) + self._set_recent_sessions(paths) + def _populate_recent_sessions_menu(self): """Populate the recent sessions submenu based on persisted list.""" menu = self.recent_sessions_menu @@ -678,13 +695,19 @@ def _populate_recent_sessions_menu(self): return menu.setEnabled(True) for index, path in enumerate(paths, start=1): - label = f"{index}. 
{os.path.basename(path) or path}" + path_obj = Path(path) + label = f"{index}. {path_obj.name or path}" + action = menu.addAction(label) + action.setData(path) + action.setToolTip(path) + action.setStatusTip(path) + action.triggered.connect(partial(self._load_session_from_recent, path)) + + # Add separator and clear action at the bottom + menu.addSeparator() + clear_action = menu.addAction(_("Clear Recent Sessions")) + clear_action.triggered.connect(self.clear_recent_sessions) + def _init_cd_lookup_menu(self): if discid is None: log.warning("CDROM: discid library not found - Lookup CD functionality disabled") @@ -801,7 +824,8 @@ def add_menu(menu_title, *args): # Recent Sessions submenu self._create_recent_sessions_menu(), MainAction.SAVE_SESSION, - MainAction.CLOSE_SESSION, + MainAction.SAVE_SESSION_AS, + MainAction.NEW_SESSION, '-', MainAction.EXIT, ) @@ -1178,26 +1202,78 @@ def save(self): if proceed_with_save: self.tagger.save(self.selected_objects) - def save_session(self): + def _get_default_session_filename_from_metadata(self) -> str | None: + """Get default session filename based on first track's artist information. + + Returns + ------- + str | None + Sanitized artist name to use as default filename, or None if no artist found. + """ + artist_tags = ['artist', 'albumartist', 'artists', 'albumartists'] + + # Scan files once; for each file pick first non-empty artist tag + for file in self.tagger.iter_all_files(): + metadata = file.metadata + artist_value = next( + (value for tag in artist_tags if (value := metadata.get(tag)) and str(value).strip()), + None, + ) + if artist_value: + artist_name = str(artist_value).split(',')[0].strip() + if artist_name: + return sanitize_filename(artist_name, repl="_", win_compat=True) + return None + + def _get_timestamped_session_filename(self) -> str: + """Generate a timestamped session filename. + + Returns + ------- + str + Session filename with format 'session_yyyyMMddHHmmss'. 
+ """ + timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S") + return f"session_{timestamp}" + + def quick_save_session(self) -> bool: + """Save to known path or delegate to Save As if unknown. + + Returns + ------- + bool + True on success, False otherwise. + """ + return self._save_session_to_known_path_or_prompt() + + def save_session_as(self) -> bool: + """Always prompt for a session file path and save there. + + When there is no known last session path, suggest a default filename + based on current content. + + Returns + ------- + bool + True on success, False otherwise. + """ config = get_config() - # If a last session path is known, save silently to it + + # Use known path's parent directory if available, otherwise fall back to sessions folder known_path = config.persist['last_session_path'] or '' if known_path: - try: - save_session_to_path(self.tagger, known_path) - except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: - QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) - return False - else: - self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': known_path}) - self._add_to_recent_sessions(known_path) - return True + start_dir = Path(known_path).parent + else: + start_dir = sessions_folder() + + # Use default filename only when path is not known + default_name = self._get_default_session_filename_from_metadata() or self._get_timestamped_session_filename() + default_filename = f"{default_name}{SessionConstants.SESSION_FILE_EXTENSION}" + start_dir = Path(start_dir) / default_filename - # Otherwise, prompt for a new path - start_dir = config.persist['current_directory'] or sessions_folder() path, _filter = FileDialog.getSaveFileName( parent=self, - dir=start_dir, + dir=str(start_dir), filter=( _("MusicBrainz Picard Session (%s);;All files (*)") % ("*" + SessionConstants.SESSION_FILE_EXTENSION) ), @@ -1205,7 +1281,6 @@ def save_session(self): if path: try: 
save_session_to_path(self.tagger, path) - config.persist['current_directory'] = os.path.dirname(path) config.persist['last_session_path'] = path except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) @@ -1222,9 +1297,8 @@ def load_session(self): return config = get_config() - from picard.const.appdirs import sessions_folder - start_dir = config.persist['current_directory'] or sessions_folder() + start_dir = sessions_folder() or config.persist['current_directory'] path, _filter = FileDialog.getOpenFileName( parent=self, dir=start_dir, @@ -1235,27 +1309,37 @@ def load_session(self): if path: # Initial progress feedback before heavy load self.set_statusbar_message(N_("Loading session from '%(path)s' …"), {'path': path}) - load_session_from_path(self.tagger, path) - config.persist['current_directory'] = os.path.dirname(path) - config.persist['last_session_path'] = path - self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) - # Track in recent sessions - self._add_to_recent_sessions(path) + try: + load_session_from_path(self.tagger, path) + except FileNotFoundError: + show_session_not_found_dialog(self, path) + return + except (OSError, PermissionError) as e: + log.debug(f"Error loading session from {path}: {e}") + return + else: + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) + # Track in recent sessions + self._add_to_recent_sessions(path) def _load_session_from_recent(self, path): - from picard.session.session_manager import load_session_from_path - # Ask whether to save/close current session before loading a new one if not self.show_close_session_confirmation(): return self.set_statusbar_message(N_("Loading session from '%(path)s' …"), {'path': path}) - load_session_from_path(self.tagger, path) - config = get_config() - config.persist['current_directory'] = 
os.path.dirname(path) - config.persist['last_session_path'] = path - self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) - self._add_to_recent_sessions(path) + try: + load_session_from_path(self.tagger, path) + except FileNotFoundError: + show_session_not_found_dialog(self, path) + self._remove_from_recent_sessions(path) + return + else: + config = get_config() + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session loaded from '%(path)s'"), {'path': path}) + self._add_to_recent_sessions(path) def close_session(self): # Use dedicated confirmation for closing sessions (save / don't save / cancel) diff --git a/picard/ui/mainwindow/actions.py b/picard/ui/mainwindow/actions.py index bf3b0c9f6f..92c3a7d86b 100644 --- a/picard/ui/mainwindow/actions.py +++ b/picard/ui/mainwindow/actions.py @@ -545,11 +545,19 @@ def _create_check_update_action(parent): return action -@add_action(MainAction.SAVE_SESSION) +@add_action(MainAction.SAVE_SESSION_AS) def _create_save_session_action(parent): - action = QtGui.QAction(icontheme.lookup('document-save'), _("Save Ses&sion…"), parent) - action.setStatusTip(_("Save the current session to a file")) - action.triggered.connect(parent.save_session) + action = QtGui.QAction(icontheme.lookup('document-save'), _("Save Session &As…"), parent) + action.setStatusTip(_("Save the current session to a new file")) + action.triggered.connect(parent.save_session_as) + return action + + +@add_action(MainAction.SAVE_SESSION) +def _create_quick_save_session_action(parent): + action = QtGui.QAction(icontheme.lookup('document-save'), _("&Save Session"), parent) + action.setStatusTip(_("Save the current session to the last used file")) + action.triggered.connect(parent.quick_save_session) return action @@ -561,9 +569,17 @@ def _create_load_session_action(parent): return action -@add_action(MainAction.CLOSE_SESSION) +@add_action(MainAction.NEW_SESSION) def _create_close_session_action(parent): - action 
= QtGui.QAction(_("&Close Session"), parent) + action = QtGui.QAction(_("&New Session"), parent) action.setStatusTip(_("Close the current session")) action.triggered.connect(parent.close_session) return action + + +@add_action(MainAction.CLEAR_RECENT_SESSIONS) +def _create_clear_recent_sessions_action(parent): + action = QtGui.QAction(_("Clear Recent Sessions"), parent) + action.setStatusTip(_("Clear all recent session entries")) + action.triggered.connect(parent.clear_recent_sessions) + return action diff --git a/picard/ui/options/sessions.py b/picard/ui/options/sessions.py index 1d68c0f2cb..3136550ab4 100644 --- a/picard/ui/options/sessions.py +++ b/picard/ui/options/sessions.py @@ -27,6 +27,7 @@ from picard.session.constants import SessionMessages from picard.ui.options import OptionsPage +from picard.ui.util import FileDialog class SessionsOptionsPage(OptionsPage): @@ -105,13 +106,9 @@ def save(self): def _browse_sessions_folder(self): """Open a folder selection dialog for the sessions folder.""" - from picard.ui.util import FileDialog - current_path = self.folder_path_edit.text().strip() if not current_path: - from picard.const.appdirs import config_folder - - current_path = config_folder() + current_path = sessions_folder() folder = FileDialog.getExistingDirectory(parent=self, dir=current_path, caption=_("Select Sessions Folder")) if folder: diff --git a/picard/ui/util.py b/picard/ui/util.py index 6a0f916f54..1c4469c336 100644 --- a/picard/ui/util.py +++ b/picard/ui/util.py @@ -225,6 +225,23 @@ def changes_require_restart_warning(parent, warnings=None, notes=None): QtWidgets.QMessageBox.warning(parent, _("Changes only applied on restart"), text) +def show_session_not_found_dialog(parent, path: str) -> None: + """Show a friendly dialog for a missing session file. + + Parameters + ---------- + parent : QWidget + Parent widget for the dialog. + path : str + Full path of the missing session file to show to the user. 
+ """ + QtWidgets.QMessageBox.warning( + parent, + _("Load Session"), + _("The session file '%(path)s' was not found.") % {'path': path}, + ) + + def menu_builder(menu, main_actions, *args): """Adds each argument to menu, depending on their type""" for arg in args: From 7b7506cb0baf3e9a6a38e3be23676088348f302e Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 22:34:36 -0400 Subject: [PATCH 12/30] `load_session` should default to the last_session_path --- picard/ui/mainwindow/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 13f1da0d3c..4135be2f13 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -1298,7 +1298,11 @@ def load_session(self): config = get_config() - start_dir = sessions_folder() or config.persist['current_directory'] + last_session_path = config.persist['last_session_path'] + if last_session_path and isinstance(last_session_path, str): + start_dir = Path(last_session_path).parent + else: + start_dir = sessions_folder() path, _filter = FileDialog.getOpenFileName( parent=self, dir=start_dir, From 11de9130b95915c6f04f2ed67e78f7dba626fe46 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 22:51:12 -0400 Subject: [PATCH 13/30] Fix failing Windows tests --- test/session/test_sessions.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/test/session/test_sessions.py b/test/session/test_sessions.py index 19efee0813..e867c391f7 100644 --- a/test/session/test_sessions.py +++ b/test/session/test_sessions.py @@ -195,7 +195,8 @@ def test_sessions_folder_default_path(cfg_options: None) -> None: config.setting['session_folder_path'] = '' expected_path = Path(config_folder()) / 'sessions' - assert sessions_folder() == str(expected_path) + result = sessions_folder() + assert result.lower().endswith(str(expected_path).lower()) def 
test_sessions_folder_custom_path(cfg_options: None, tmp_path: Path) -> None: @@ -204,7 +205,10 @@ def test_sessions_folder_custom_path(cfg_options: None, tmp_path: Path) -> None: custom_path = str(tmp_path / 'custom_sessions') config.setting['session_folder_path'] = custom_path - assert sessions_folder() == custom_path + # sessions_folder resolves custom paths; compare using endswith on strings + expected = Path(custom_path).resolve() + result = sessions_folder() + assert result.lower().endswith(str(expected).lower()) @pytest.mark.parametrize("custom_path", ["", "/some/custom/path", "relative/path"]) @@ -215,4 +219,8 @@ def test_sessions_folder_path_normalization(cfg_options: None, custom_path: str) result = sessions_folder() assert isinstance(result, str) - assert result == Path(result).as_posix() # Should be normalized + if custom_path: + expected = Path(custom_path).resolve() + else: + expected = Path(config_folder()) / 'sessions' + assert result.lower().endswith(str(expected).lower()) From 722b3fcf836e53f30fab709eaeebde50f86f72da Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 23:16:37 -0400 Subject: [PATCH 14/30] Add atomic writes; fix crash during load session --- picard/session/session_manager.py | 22 ++++++++- picard/tagger.py | 6 ++- picard/ui/mainwindow/__init__.py | 2 +- test/session/test_session_manager.py | 70 ++++++++++++++++++++++++++++ 4 files changed, 96 insertions(+), 4 deletions(-) diff --git a/picard/session/session_manager.py b/picard/session/session_manager.py index bf4aaa4894..2293340bf8 100644 --- a/picard/session/session_manager.py +++ b/picard/session/session_manager.py @@ -47,6 +47,7 @@ import gzip import json from pathlib import Path +import tempfile from typing import Any from picard.session.constants import SessionConstants @@ -97,18 +98,35 @@ def save_session_to_path(tagger: Any, path: str | Path) -> None: Notes ----- The session is saved as minified JSON (UTF-8) and gzip-compressed. 
If the - file already exists, it will be overwritten. + file already exists, it will be overwritten. The write operation is atomic + to prevent file corruption in case of crashes. """ p = Path(path) # Ensure multi-part extension .mbps.gz if not str(p).lower().endswith(SessionConstants.SESSION_FILE_EXTENSION): p = Path(str(p) + SessionConstants.SESSION_FILE_EXTENSION) + data = export_session(tagger) p.parent.mkdir(parents=True, exist_ok=True) + # Minify JSON and gzip-compress to reduce file size json_text = json.dumps(data, ensure_ascii=False, separators=(",", ":")) compressed = gzip.compress(json_text.encode("utf-8")) - p.write_bytes(compressed) + + # Atomic write: write to temporary file first, then rename + temp_path = None + try: + with tempfile.NamedTemporaryFile(dir=p.parent, prefix=p.stem + "_", suffix=p.suffix, delete=False) as temp_file: + temp_path = Path(temp_file.name) + temp_path.write_bytes(compressed) + + # Atomic rename to final destination + temp_path.replace(p) + except (OSError, IOError, PermissionError): + # Clean up temporary file if it exists and rename failed + if temp_path and temp_path.exists(): + temp_path.unlink() + raise # Caller will handle the exception def load_session_from_path(tagger: Any, path: str | Path) -> None: diff --git a/picard/tagger.py b/picard/tagger.py index 132906b3cc..8bbd062ee7 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -643,12 +643,16 @@ def exit(self): self.stopping = True # Best-effort crash/exit backup if enabled + config = get_config() with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): - config = get_config() if config.setting['session_backup_on_crash']: path = Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) save_session_to_path(self, path) + # set to blank otherwise `Save Session` will save to previous path + # which is probably not what the user wants + config.persist['last_session_path'] = '' + log.debug("Picard 
stopping") self.run_cleanup() QtCore.QCoreApplication.processEvents() diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 4135be2f13..36904e72fe 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -1305,7 +1305,7 @@ def load_session(self): start_dir = sessions_folder() path, _filter = FileDialog.getOpenFileName( parent=self, - dir=start_dir, + dir=str(start_dir), filter=( _("MusicBrainz Picard Session (%s);;All files (*)") % ("*" + SessionConstants.SESSION_FILE_EXTENSION) ), diff --git a/test/session/test_session_manager.py b/test/session/test_session_manager.py index 4b29a1edfa..715b623461 100644 --- a/test/session/test_session_manager.py +++ b/test/session/test_session_manager.py @@ -26,6 +26,8 @@ from picard.session.constants import SessionConstants from picard.session.session_manager import export_session, load_session_from_path, save_session_to_path +import pytest + @patch("picard.session.session_manager.SessionExporter") def test_export_session_function(mock_exporter_class: Mock) -> None: @@ -336,3 +338,71 @@ def test_session_constants_used_correctly(tmp_path: Path) -> None: # Verify the exported data has correct version mock_export.assert_called_once_with(tagger_mock) + + +@patch("picard.session.session_manager.export_session") +def test_save_session_to_path_atomic_write(mock_export_session: Mock, tmp_path: Path) -> None: + """Test that save_session_to_path uses atomic write to prevent corruption.""" + mock_export_session.return_value = {'version': 1, 'items': []} + + tagger_mock = Mock() + session_file = tmp_path / "test_session" + expected_file = session_file.with_suffix('.mbps.gz') + + # Create a file that should be overwritten + expected_file.write_text("old content") + + save_session_to_path(tagger_mock, session_file) + + # Verify the file was written atomically (old content replaced) + assert expected_file.exists() + content = expected_file.read_bytes() + # Should be gzip-compressed 
JSON, not the old text content + assert content != b"old content" + assert len(content) > 0 # Should have actual compressed content + + mock_export_session.assert_called_once_with(tagger_mock) + + +@patch("picard.session.session_manager.export_session") +def test_save_session_to_path_atomic_write_cleanup_on_error(mock_export_session: Mock, tmp_path: Path) -> None: + """Test that temporary files are cleaned up when atomic write fails.""" + mock_export_session.return_value = {'version': 1, 'items': []} + + tagger_mock = Mock() + session_file = tmp_path / "test_session" + expected_file = session_file.with_suffix('.mbps.gz') + + # Mock tempfile.NamedTemporaryFile to raise an exception during creation + with patch("tempfile.NamedTemporaryFile", side_effect=OSError("Permission denied")): + with pytest.raises(OSError, match="Permission denied"): + save_session_to_path(tagger_mock, session_file) + + # Verify no temporary files were left behind + temp_files = list(tmp_path.glob("*_*.mbps.gz")) + assert len(temp_files) == 0, f"Temporary files found: {temp_files}" + + # Verify the final file doesn't exist (since write failed) + assert not expected_file.exists() + + +@patch("picard.session.session_manager.export_session") +def test_save_session_to_path_atomic_write_rename_failure_cleanup(mock_export_session: Mock, tmp_path: Path) -> None: + """Test that temporary files are cleaned up when rename fails.""" + mock_export_session.return_value = {'version': 1, 'items': []} + + tagger_mock = Mock() + session_file = tmp_path / "test_session" + expected_file = session_file.with_suffix('.mbps.gz') + + # Mock Path.replace to raise an exception during rename + with patch("pathlib.Path.replace", side_effect=OSError("Rename failed")): + with pytest.raises(OSError, match="Rename failed"): + save_session_to_path(tagger_mock, session_file) + + # Verify no temporary files were left behind + temp_files = list(tmp_path.glob("*_*.mbps.gz")) + assert len(temp_files) == 0, f"Temporary files found: 
{temp_files}" + + # Verify the final file doesn't exist (since rename failed) + assert not expected_file.exists() From a644edbc38337b4e7f92185fd1a2d9580ceabf3a Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 23:25:36 -0400 Subject: [PATCH 15/30] Refactor ; remove redundant code --- picard/session/track_mover.py | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/picard/session/track_mover.py b/picard/session/track_mover.py index 92acec48bd..3145ed704f 100644 --- a/picard/session/track_mover.py +++ b/picard/session/track_mover.py @@ -83,29 +83,27 @@ def _schedule_move(self, fpath: Path, recording_id: str, album: Album) -> None: The album containing the track. """ - def attempt_move() -> None: + def _get_file_and_track() -> tuple[File | None, Any | None]: file = self.tagger.files.get(str(fpath)) if not file or file.state == File.PENDING: - return + return None, None rec_to_track = {t.id: t for t in album.tracks} track = rec_to_track.get(recording_id) - if track is None: - return + return file, track + def _attempt_move() -> None: + file, track = _get_file_and_track() + if file is None or track is None: + return file.move(track) - def is_ready() -> bool: - file = self.tagger.files.get(str(fpath)) - if not file or file.state == File.PENDING: - return False - - rec_to_track = {t.id: t for t in album.tracks} - track = rec_to_track.get(recording_id) - return track is not None + def _is_ready() -> bool: + file, track = _get_file_and_track() + return file is not None and track is not None RetryHelper.retry_until( - condition_fn=is_ready, action_fn=attempt_move, delay_ms=SessionConstants.FAST_RETRY_DELAY_MS + condition_fn=_is_ready, action_fn=_attempt_move, delay_ms=SessionConstants.FAST_RETRY_DELAY_MS ) def move_file_to_nat(self, fpath: Path, recording_id: str) -> None: From 6da5c74e663f727b39ccf2aa0faf84c5a048a87e Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Tue, 9 Sep 2025 23:38:22 -0400 Subject: [PATCH 16/30] 
Switch from json to yaml for session files --- picard/session/metadata_handler.py | 2 +- picard/session/session_exporter.py | 8 +++--- picard/session/session_loader.py | 19 +++++++------- picard/session/session_manager.py | 13 +++++----- test/session/test_session_loader.py | 13 +++++----- test/session/test_session_manager.py | 39 ++++++++++++---------------- 6 files changed, 45 insertions(+), 49 deletions(-) diff --git a/picard/session/metadata_handler.py b/picard/session/metadata_handler.py index 2fa0d77c56..5bc0ba1029 100644 --- a/picard/session/metadata_handler.py +++ b/picard/session/metadata_handler.py @@ -62,7 +62,7 @@ def serialize_metadata_for_file(file: File) -> dict[str, list[Any]]: for key, values in file.metadata.rawitems(): if key.startswith(INTERNAL_TAG_PREFIX) or key in EXCLUDED_OVERRIDE_TAGS: continue - # Copy as list to be JSON serializable + # Copy as list to be YAML serializable tags[key] = list(values) return tags diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index bf92c66a0f..4c5a3c4bbb 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -79,7 +79,7 @@ def export_session(self, tagger: Any) -> dict[str, Any]: 'album_overrides': {}, 'unmatched_albums': [], 'expanded_albums': [], - # Optional: cache of MB release JSON keyed by album id + # Optional: cache of MB release data keyed by album id 'mb_cache': {}, } @@ -97,7 +97,7 @@ def export_session(self, tagger: Any) -> dict[str, Any]: if unmatched_albums: session_data['unmatched_albums'] = unmatched_albums - # Optionally export MB JSON cache per album + # Optionally export MB data cache per album include_mb = config.setting['session_include_mb_data'] if include_mb: @@ -111,7 +111,7 @@ def export_session(self, tagger: Any) -> dict[str, Any]: return session_data def _export_mb_cache(self, tagger: Any) -> dict[str, Any]: - """Export MB release JSON for currently loaded albums. 
+ """Export MB release data for currently loaded albums. Parameters ---------- @@ -121,7 +121,7 @@ def _export_mb_cache(self, tagger: Any) -> dict[str, Any]: Returns ------- dict[str, Any] - Mapping of album MBID to release JSON node. + Mapping of album MBID to release data node. """ cache: dict[str, Any] = {} for album_id, album in getattr(tagger, 'albums', {}).items(): diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 5fcf054e79..e78463d6d3 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -28,10 +28,11 @@ from contextlib import suppress import gzip -import json from pathlib import Path from typing import Any +import yaml + from PyQt6 import QtCore from picard.album import Album @@ -91,7 +92,7 @@ def load_from_path(self, path: str | Path) -> None: grouped_items = self._group_items_by_location(items) metadata_map = self._extract_metadata(items) - # If mb_cache is provided, try to pre-load albums from cached JSON + # If mb_cache is provided, try to pre-load albums from cached MB data mb_cache = data.get('mb_cache', {}) if mb_cache: self._emit_progress("preload_cache", details={'albums': len(mb_cache)}) @@ -161,12 +162,12 @@ def msg_finalize(_d: dict[str, Any] | None) -> str: return entry(details) if callable(entry) else entry def _preload_albums_from_cache(self, mb_cache: dict[str, Any], grouped_items: GroupedItems) -> None: - """Preload albums from embedded MB JSON cache when available. + """Preload albums from embedded MB data cache when available. Parameters ---------- mb_cache : dict[str, Any] - Mapping of album IDs to MB release JSON nodes. + Mapping of album IDs to MB release data nodes. grouped_items : GroupedItems Items grouped by location type (used to know which albums are needed). 
""" @@ -177,7 +178,7 @@ def _preload_albums_from_cache(self, mb_cache: dict[str, Any], grouped_items: Gr continue album = self.tagger.albums.get(album_id) if not album: - # Create album instance via normal path but intercept to parse from JSON node + # Create album instance via normal path but intercept to parse from MB data node album = self.tagger.load_album(album_id) # If album supports parsing from cached release node, do so parse_from_json = getattr(album, '_parse_release', None) @@ -205,8 +206,8 @@ def _read_session_file(self, path: Path) -> dict[str, Any]: Raises ------ - json.JSONDecodeError - If the file contains invalid JSON. + yaml.YAMLError + If the file contains invalid YAML. FileNotFoundError If the file does not exist. """ @@ -215,9 +216,9 @@ def _read_session_file(self, path: Path) -> dict[str, Any]: raw = p.read_bytes() if len(raw) >= 2 and raw[0] == 0x1F and raw[1] == 0x8B: text = gzip.decompress(raw).decode("utf-8") - return json.loads(text) + return yaml.safe_load(text) else: - return json.loads(raw.decode("utf-8")) + return yaml.safe_load(raw.decode("utf-8")) def _prepare_session(self, data: dict[str, Any]) -> None: """Prepare the session for loading. diff --git a/picard/session/session_manager.py b/picard/session/session_manager.py index 2293340bf8..64563efca8 100644 --- a/picard/session/session_manager.py +++ b/picard/session/session_manager.py @@ -38,18 +38,19 @@ Notes ----- -Session files use the .mbps.gz extension and contain gzip-compressed JSON data +Session files use the .mbps.gz extension and contain gzip-compressed YAML data with version information, options, file locations, and metadata overrides. 
""" from __future__ import annotations import gzip -import json from pathlib import Path import tempfile from typing import Any +import yaml + from picard.session.constants import SessionConstants from picard.session.session_exporter import SessionExporter from picard.session.session_loader import SessionLoader @@ -97,7 +98,7 @@ def save_session_to_path(tagger: Any, path: str | Path) -> None: Notes ----- - The session is saved as minified JSON (UTF-8) and gzip-compressed. If the + The session is saved as YAML (UTF-8) and gzip-compressed. If the file already exists, it will be overwritten. The write operation is atomic to prevent file corruption in case of crashes. """ @@ -109,9 +110,9 @@ def save_session_to_path(tagger: Any, path: str | Path) -> None: data = export_session(tagger) p.parent.mkdir(parents=True, exist_ok=True) - # Minify JSON and gzip-compress to reduce file size - json_text = json.dumps(data, ensure_ascii=False, separators=(",", ":")) - compressed = gzip.compress(json_text.encode("utf-8")) + # Convert to YAML and gzip-compress to reduce file size + yaml_text = yaml.dump(data, default_flow_style=False, allow_unicode=True, sort_keys=False) + compressed = gzip.compress(yaml_text.encode("utf-8")) # Atomic write: write to temporary file first, then rename temp_path = None diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py index bee9aa55ad..f46b726be9 100644 --- a/test/session/test_session_loader.py +++ b/test/session/test_session_loader.py @@ -20,10 +20,11 @@ """Tests for session loader.""" -import json from pathlib import Path from unittest.mock import Mock, patch +import yaml + from picard.album import Album import picard.config as picard_config from picard.metadata import Metadata @@ -44,19 +45,19 @@ def test_session_loader_read_session_file(session_loader: SessionLoader, tmp_pat """Test reading session file.""" session_data = {'version': 1, 'items': []} session_file = tmp_path / "test.mbps" - 
session_file.write_text(json.dumps(session_data), encoding="utf-8") + session_file.write_text(yaml.dump(session_data, default_flow_style=False), encoding="utf-8") data = session_loader._read_session_file(session_file) assert data == session_data -def test_session_loader_read_session_file_invalid_json(session_loader: SessionLoader, tmp_path: Path) -> None: - """Test reading invalid JSON session file.""" +def test_session_loader_read_session_file_invalid_yaml(session_loader: SessionLoader, tmp_path: Path) -> None: + """Test reading invalid YAML session file.""" session_file = tmp_path / "test.mbps" - session_file.write_text("invalid json", encoding="utf-8") + session_file.write_text("invalid yaml: [", encoding="utf-8") - with pytest.raises(json.JSONDecodeError): + with pytest.raises(yaml.YAMLError): session_loader._read_session_file(session_file) diff --git a/test/session/test_session_manager.py b/test/session/test_session_manager.py index 715b623461..9b1cc7f2e0 100644 --- a/test/session/test_session_manager.py +++ b/test/session/test_session_manager.py @@ -20,9 +20,12 @@ """Tests for session manager.""" +import gzip from pathlib import Path from unittest.mock import Mock, patch +import yaml + from picard.session.constants import SessionConstants from picard.session.session_manager import export_session, load_session_from_path, save_session_to_path @@ -105,8 +108,8 @@ def test_save_session_to_path_string_path(mock_export_session: Mock, tmp_path: P @patch("picard.session.session_manager.export_session") -def test_save_session_to_path_creates_json_content(mock_export_session: Mock, tmp_path: Path) -> None: - """Test that saved session file contains proper JSON content.""" +def test_save_session_to_path_creates_yaml_content(mock_export_session: Mock, tmp_path: Path) -> None: + """Test that saved session file contains proper YAML content.""" session_data = { 'version': 1, 'options': {'rename_files': True}, @@ -122,12 +125,9 @@ def 
test_save_session_to_path_creates_json_content(mock_export_session: Mock, tm saved_file = Path(str(session_file) + ".mbps.gz") assert saved_file.exists() - # Read and verify content (gzip -> parse JSON) - import gzip - import json - + # Read and verify content (gzip -> parse YAML) content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") - data = json.loads(content) + data = yaml.safe_load(content) assert data['version'] == 1 assert data['options']['rename_files'] is True assert data['items'][0]['file_path'] == "/test/file.mp3" @@ -177,11 +177,8 @@ def test_save_session_to_path_file_overwrite(mock_export_session: Mock, tmp_path save_session_to_path(tagger_mock, existing_file) # File should be overwritten - import gzip - import json - content = gzip.decompress(existing_file.read_bytes()).decode("utf-8") - data = json.loads(content) + data = yaml.safe_load(content) assert data['version'] == 1 @@ -215,14 +212,12 @@ def test_save_session_to_path_utf8_encoding(tmp_path: Path) -> None: save_session_to_path(tagger_mock, session_file) saved_file = Path(str(session_file) + ".mbps.gz") - import gzip - content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") assert "歌曲" in content -def test_save_session_to_path_json_formatting(tmp_path: Path) -> None: - """Test that save_session_to_path uses proper JSON formatting.""" +def test_save_session_to_path_yaml_formatting(tmp_path: Path) -> None: + """Test that save_session_to_path uses proper YAML formatting.""" with patch("picard.session.session_manager.export_session") as mock_export: session_data = { 'version': 1, @@ -237,14 +232,12 @@ def test_save_session_to_path_json_formatting(tmp_path: Path) -> None: save_session_to_path(tagger_mock, session_file) saved_file = Path(str(session_file) + ".mbps.gz") - import gzip - content = gzip.decompress(saved_file.read_bytes()).decode("utf-8") - # Content is minified JSON - assert content.startswith("{") - assert '"version":1' in content - assert '"options":{' in content 
- assert '"rename_files":true' in content + # Content is YAML format + assert "version: 1" in content + assert "options:" in content + assert "rename_files: true" in content + assert "move_files: false" in content def test_export_session_returns_dict() -> None: @@ -357,7 +350,7 @@ def test_save_session_to_path_atomic_write(mock_export_session: Mock, tmp_path: # Verify the file was written atomically (old content replaced) assert expected_file.exists() content = expected_file.read_bytes() - # Should be gzip-compressed JSON, not the old text content + # Should be gzip-compressed YAML, not the old text content assert content != b"old content" assert len(content) > 0 # Should have actual compressed content From f18eb91214cac11e744a3aa66a000139a47d4896 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 10 Sep 2025 01:02:43 -0400 Subject: [PATCH 17/30] Refactor session loader for dry/srp --- picard/options.py | 8 +- picard/session/constants.py | 5 +- picard/session/session_loader.py | 589 ++++++++++++--------- picard/ui/options/sessions.py | 21 +- test/session/test_album_manager.py | 155 ++++++ test/session/test_configuration_manager.py | 83 +++ test/session/test_item_grouper.py | 151 ++++++ test/session/test_override_applicator.py | 124 +++++ test/session/test_session_file_reader.py | 74 +++ test/session/test_session_loader.py | 512 ++++-------------- test/session/test_ui_state_manager.py | 108 ++++ 11 files changed, 1159 insertions(+), 671 deletions(-) create mode 100644 test/session/test_album_manager.py create mode 100644 test/session/test_configuration_manager.py create mode 100644 test/session/test_item_grouper.py create mode 100644 test/session/test_override_applicator.py create mode 100644 test/session/test_session_file_reader.py create mode 100644 test/session/test_ui_state_manager.py diff --git a/picard/options.py b/picard/options.py index efcc8a3f77..cf43713152 100644 --- a/picard/options.py +++ b/picard/options.py @@ -524,9 +524,15 @@ def 
make_default_toolbar_layout(): BoolOption( 'setting', 'session_include_mb_data', - False, + True, title=SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE, ) +BoolOption( + 'setting', + 'session_no_mb_requests_on_load', + True, + title=SessionMessages.SESSION_NO_MB_REQUESTS_ON_LOAD, +) TextOption( 'setting', 'session_folder_path', diff --git a/picard/session/constants.py b/picard/session/constants.py index 17bacbc53b..c1d65c057a 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -139,5 +139,8 @@ class SessionMessages: SESSION_LOAD_LAST_TITLE = N_("Load last saved session on startup") SESSION_AUTOSAVE_TITLE = N_("Auto-save session every N minutes (0 disables)") SESSION_BACKUP_TITLE = N_("Attempt to keep a session backup on unexpected shutdown") - SESSION_INCLUDE_MB_DATA_TITLE = N_("Include MusicBrainz data in saved sessions (faster loads, risk of stale data)") + SESSION_INCLUDE_MB_DATA_TITLE = N_("Include MusicBrainz data in saved sessions (warm cache)") + SESSION_NO_MB_REQUESTS_ON_LOAD = N_( + "Do not make MusicBrainz requests on restore (faster loads, risk of stale data)" + ) SESSION_FOLDER_PATH_TITLE = N_("Sessions folder path (leave empty for default)") diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index e78463d6d3..1416876342 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -29,7 +29,7 @@ from contextlib import suppress import gzip from pathlib import Path -from typing import Any +from typing import Any, Protocol import yaml @@ -39,101 +39,49 @@ from picard.config import get_config from picard.const.defaults import EXCLUDED_OVERRIDE_TAGS from picard.i18n import gettext as _ +from picard.metadata import Metadata from picard.session.constants import SessionConstants from picard.session.metadata_handler import MetadataHandler from picard.session.session_data import AlbumItems, GroupedItems from picard.session.track_mover import TrackMover -class SessionLoader: - 
"""Handles loading and restoring Picard sessions.""" +class ProgressReporter(Protocol): + """Protocol for emitting session loading progress updates.""" - def __init__(self, tagger: Any) -> None: - """Initialize the session loader. + def emit(self, stage: str, details: dict[str, Any] | None = None) -> None: # pragma: no cover - interface + """Emit a progress update for a given stage. Parameters ---------- - tagger : Any - The Picard tagger instance. + stage : str + Identifier of the current stage. + details : dict[str, Any] | None, optional + Additional details for the stage, by default None. """ - self.tagger = tagger - self.track_mover = TrackMover(tagger) - self.loaded_albums: dict[str, Album] = {} - # Saved UI expansion state from session (None = not provided) - self._saved_expanded_albums: set[str] | None = None + ... - def load_from_path(self, path: str | Path) -> None: - """Main entry point for loading a session. - Parameters - ---------- - path : str | Path - The file path to load the session from. - - Notes - ----- - This method orchestrates the entire session loading process: - 1. Read and parse the session file - 2. Prepare the session (clear current, set flags) - 3. Restore configuration options - 4. Group items by location type - 5. Load items to their proper locations - 6. Apply metadata overrides - 7. 
Schedule metadata application - """ - self._emit_progress("read", details={'path': str(path)}) - data = self._read_session_file(path) - self._prepare_session(data) - self._restore_options(data.get('options', {})) - # Cache saved UI expansion state for later album updates - self._saved_expanded_albums = set(data.get('expanded_albums', [])) if "expanded_albums" in data else None - - items = data.get('items', []) - grouped_items = self._group_items_by_location(items) - metadata_map = self._extract_metadata(items) - - # If mb_cache is provided, try to pre-load albums from cached MB data - mb_cache = data.get('mb_cache', {}) - if mb_cache: - self._emit_progress("preload_cache", details={'albums': len(mb_cache)}) - self._preload_albums_from_cache(mb_cache, grouped_items) - - self._emit_progress( - "load_items", - details={ - 'files': len(grouped_items.unclustered) - + sum(len(v) for v in grouped_items.by_cluster.values()) - + sum(len(g.unmatched) + len(g.tracks) for g in grouped_items.by_album.values()) - }, - ) - self._load_items(grouped_items) - self._load_unmatched_albums(data.get('unmatched_albums', [])) - self._emit_progress("apply_overrides") - self._apply_overrides(data) - - if metadata_map: - self._schedule_metadata_application(metadata_map) +class TaggerProgressReporter: + """Progress reporter that routes updates to the Picard UI when available.""" - # Restore UI state (expanded albums and file view roots) - self._emit_progress("finalize") - self._restore_ui_state(data) + def __init__(self, tagger: Any) -> None: + self._tagger = tagger - # ---------------------- - # Progress reporting API - # ---------------------- - def _emit_progress(self, stage: str, details: dict[str, Any] | None = None) -> None: - # Do not let progress reporting break loading + def emit(self, stage: str, details: dict[str, Any] | None = None) -> None: + # Avoid letting UI progress errors break loading flow with suppress(AttributeError, RuntimeError, TypeError): - # Forward to main window / 
status indicator if available - if hasattr(self.tagger, 'window') and hasattr(self.tagger.window, 'status_indicators'): - for indicator in self.tagger.window.status_indicators: + # Forward to status indicators if present + if hasattr(self._tagger, 'window') and hasattr(self._tagger.window, 'status_indicators'): + for indicator in self._tagger.window.status_indicators: if hasattr(indicator, 'session_progress'): indicator.session_progress(stage, details or {}) - # Additionally, update status bar text when possible - if hasattr(self.tagger, 'window') and hasattr(self.tagger.window, 'set_statusbar_message'): + + # Additionally update statusbar text + if hasattr(self._tagger, 'window') and hasattr(self._tagger.window, 'set_statusbar_message'): msg = self._format_stage_message(stage, details) if msg: - self.tagger.window.set_statusbar_message(msg) + self._tagger.window.set_statusbar_message(msg) def _format_stage_message(self, stage: str, details: dict[str, Any] | None) -> str | None: def msg_preload(d: dict[str, Any] | None) -> str: @@ -142,8 +90,8 @@ def msg_preload(d: dict[str, Any] | None) -> str: def msg_load_items(d: dict[str, Any] | None) -> str: return _("Loading files and albums ({files} files)…").format(files=(d or {}).get('files', 0)) - def msg_finalize(_d: dict[str, Any] | None) -> str: - pending = getattr(self.tagger.webservice, 'num_pending_web_requests', 0) + def msg_finalize(details_unused: dict[str, Any] | None) -> str: + pending = getattr(self._tagger.webservice, 'num_pending_web_requests', 0) if pending: return _("Waiting on network ({requests} requests)…").format(requests=pending) return _("Finalizing…") @@ -161,99 +109,77 @@ def msg_finalize(_d: dict[str, Any] | None) -> str: return None return entry(details) if callable(entry) else entry - def _preload_albums_from_cache(self, mb_cache: dict[str, Any], grouped_items: GroupedItems) -> None: - """Preload albums from embedded MB data cache when available. 
- Parameters - ---------- - mb_cache : dict[str, Any] - Mapping of album IDs to MB release data nodes. - grouped_items : GroupedItems - Items grouped by location type (used to know which albums are needed). - """ - needed_album_ids = set(grouped_items.by_album.keys()) | set(mb_cache.keys()) - for album_id in needed_album_ids: - node = mb_cache.get(album_id) - if not node: - continue - album = self.tagger.albums.get(album_id) - if not album: - # Create album instance via normal path but intercept to parse from MB data node - album = self.tagger.load_album(album_id) - # If album supports parsing from cached release node, do so - parse_from_json = getattr(album, '_parse_release', None) - if callable(parse_from_json): - # Fall back to normal loading path if parsing fails - with suppress(KeyError, TypeError, ValueError): - parse_from_json(node) - album._run_album_metadata_processors() - album.update(update_tracks=True) - self.loaded_albums[album_id] = album - self._ensure_album_visible(album) - - def _read_session_file(self, path: Path) -> dict[str, Any]: - """Read and parse session file. +class SessionFileReader: + """Read and parse session files (YAML or gzipped YAML).""" + + def read(self, path: str | Path) -> dict[str, Any]: + """Read and parse a session file. Parameters ---------- - path : Path - The file path to read. + path : str | Path + Path to the session file. Returns ------- dict[str, Any] - The parsed session data. + Parsed session data. Raises ------ - yaml.YAMLError - If the file contains invalid YAML. FileNotFoundError - If the file does not exist. + If the path does not exist. + yaml.YAMLError + If the file cannot be parsed as YAML. 
""" p = Path(path) - # Detect gzip by magic bytes and decode accordingly raw = p.read_bytes() if len(raw) >= 2 and raw[0] == 0x1F and raw[1] == 0x8B: text = gzip.decompress(raw).decode("utf-8") return yaml.safe_load(text) - else: - return yaml.safe_load(raw.decode("utf-8")) + return yaml.safe_load(raw.decode("utf-8")) - def _prepare_session(self, data: dict[str, Any]) -> None: - """Prepare the session for loading. + +class ConfigurationManager: + """Restore configuration and manage safe-restore lifecycle flags.""" + + def prepare_session(self, tagger: Any) -> None: + """Clear current session and set restoring flag when configured. Parameters ---------- - data : dict[str, Any] - The session data. + tagger : Any + The Picard tagger instance. """ - # Close current session - self.tagger.clear_session() - # Respect user setting for safe restore (defaults enabled) + tagger.clear_session() if get_config().setting['session_safe_restore']: - self.tagger._restoring_session = True + tagger._restoring_session = True - def _restore_options(self, options: dict[str, Any]) -> None: - """Restore configuration options. + def restore_options(self, options: dict[str, Any]) -> None: + """Restore core configuration options from the session payload. Parameters ---------- options : dict[str, Any] - The options to restore. + Options mapping from the session file. """ config = get_config() config.setting['rename_files'] = bool(options.get('rename_files', config.setting['rename_files'])) config.setting['move_files'] = bool(options.get('move_files', config.setting['move_files'])) config.setting['dont_write_tags'] = bool(options.get('dont_write_tags', config.setting['dont_write_tags'])) - def _group_items_by_location(self, items: list[dict[str, Any]]) -> GroupedItems: - """Group items by their target location. 
+ +class ItemGrouper: + """Group raw session items and extract metadata deltas.""" + + def group(self, items: list[dict[str, Any]]) -> GroupedItems: + """Group items by their destination (unclustered, clusters, albums, NAT). Parameters ---------- items : list[dict[str, Any]] - List of session items. + Raw item entries from the session payload. Returns ------- @@ -289,197 +215,251 @@ def _group_items_by_location(self, items: list[dict[str, Any]]) -> GroupedItems: return GroupedItems(unclustered=by_unclustered, by_cluster=by_cluster, by_album=by_album, nat_items=nat_items) - def _extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, dict[str, list[Any]]]: - """Extract metadata from session items. + def extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, dict[str, list[Any]]]: + """Extract per-path metadata deltas from item entries. Parameters ---------- items : list[dict[str, Any]] - List of session items. + Raw item entries from the session payload. Returns ------- dict[Path, dict[str, list[Any]]] - Mapping of file paths to their metadata tag deltas. + Mapping of file path to tag delta list. """ metadata_by_path: dict[Path, dict[str, list[Any]]] = {} for it in items: fpath = Path(it['file_path']).expanduser() md = it.get('metadata', {}) - if "tags" in md: + if 'tags' in md: tags = {k: MetadataHandler.as_list(v) for k, v in md['tags'].items()} metadata_by_path[fpath] = tags return metadata_by_path - def _load_items(self, grouped_items: GroupedItems) -> None: - """Load items to their proper locations. + +class UIStateManager: + """Manage UI-related state such as album expansion and delayed updates.""" + + def __init__(self, tagger: Any, default_delay_ms: int) -> None: + self._tagger = tagger + self._delay_ms = default_delay_ms + + def ensure_album_visible(self, album: Album, saved_expanded: set[str] | None) -> None: + """Ensure an album node is visible and expanded according to saved state. 
Parameters ---------- - grouped_items : GroupedItems - Items grouped by location type. + album : Album + Album whose UI node should be expanded/updated. + saved_expanded : set[str] | None + Saved expanded album IDs; if None, expand by default. """ - # Load albums upfront - self._load_albums(grouped_items) - - # Add unclustered files - if grouped_items.unclustered: - self.tagger.add_files([str(p) for p in grouped_items.unclustered], target=self.tagger.unclustered_files) - # Add cluster files - for (title, artist), paths in grouped_items.by_cluster.items(): - cluster = self.tagger.load_cluster(title, artist) - self.tagger.add_files([str(p) for p in paths], target=cluster) - - # Add album files - self._load_album_files(grouped_items.by_album) + def run() -> None: + album.update(update_tracks=True) + if album.ui_item: + if saved_expanded is not None: + album.ui_item.setExpanded(album.id in saved_expanded) + else: + album.ui_item.setExpanded(True) - # Handle NAT items - for fpath, rid in grouped_items.nat_items: - self.track_mover.move_file_to_nat(fpath, rid) + album.run_when_loaded(run) - def _load_unmatched_albums(self, unmatched_album_ids: list[str]) -> None: - """Load albums that have no files matched to them. + def apply_expansions_later(self, expanded_albums: set[str]) -> None: + """Apply album expansion states after a short delay to avoid flicker. Parameters ---------- - unmatched_album_ids : list[str] - List of album IDs to load. + expanded_albums : set[str] + Album IDs that should be expanded. """ - for album_id in unmatched_album_ids: - if album_id not in self.loaded_albums: - album = self.tagger.load_album(album_id) - self.loaded_albums[album_id] = album - # Ensure album becomes visible and expanded once loaded - self._ensure_album_visible(album) - def _load_albums(self, grouped_items: GroupedItems) -> None: - """Load albums that will be needed. 
+ def set_expansions() -> None: + for album_id, album in self._tagger.albums.items(): + ui_item = getattr(album, 'ui_item', None) + if ui_item is None: + continue + ui_item.setExpanded(album_id in expanded_albums) + + QtCore.QTimer.singleShot(self._delay_ms, set_expansions) - Parameters - ---------- - grouped_items : GroupedItems - Items grouped by location type. - """ - album_ids = set(grouped_items.by_album.keys()) - for album_id in album_ids: - self.loaded_albums[album_id] = self.tagger.load_album(album_id) - def _load_album_files(self, by_album: dict[str, AlbumItems]) -> None: - """Load files into albums and move them to tracks. +class AlbumManager: + """Manage album loading and caching strategies.""" + + def __init__(self, tagger: Any, ui_state: UIStateManager) -> None: + self._tagger = tagger + self._ui_state = ui_state + self.loaded_albums: dict[str, Album] = {} + self._suppress_network: bool = False + self._saved_expanded_albums: set[str] | None = None + + def configure(self, suppress_network: bool, saved_expanded_albums: set[str] | None) -> None: + """Set runtime behavior flags for album loading. Parameters ---------- - by_album : dict[str, AlbumItems] - Files grouped by album ID. + suppress_network : bool + Whether to suppress MB web requests. + saved_expanded_albums : set[str] | None + Saved UI expansion state from session. 
""" + self._suppress_network = suppress_network + self._saved_expanded_albums = saved_expanded_albums + + def preload_from_cache(self, mb_cache: dict[str, Any], grouped_items: GroupedItems) -> None: + """Preload albums using embedded MB cache data if available.""" + needed_album_ids = set(grouped_items.by_album.keys()) | set(mb_cache.keys()) + for album_id in needed_album_ids: + node = mb_cache.get(album_id) + if not node: + continue + album = self._tagger.albums.get(album_id) + if not album: + album = self._build_from_cache(album_id, node) + self.loaded_albums[album_id] = album + self._ui_state.ensure_album_visible(album, self._saved_expanded_albums) + if not self._suppress_network: + album.load() + + def load_needed_albums(self, grouped_items: GroupedItems, mb_cache: dict[str, Any]) -> None: + """Ensure albums referenced by grouped items are available.""" + for album_id in set(grouped_items.by_album.keys()): + if album_id in self.loaded_albums: + continue + node = mb_cache.get(album_id) + album = self.load_album_with_strategy(album_id, node) + if album: + self.loaded_albums[album_id] = album + + def load_unmatched_albums(self, album_ids: list[str], mb_cache: dict[str, Any]) -> None: + """Load albums that have no files matched to them.""" + for album_id in album_ids: + if album_id in self.loaded_albums: + continue + node = mb_cache.get(album_id) + album = self.load_album_with_strategy(album_id, node) + if album: + self.loaded_albums[album_id] = album + + def load_album_files(self, by_album: dict[str, AlbumItems], track_mover: TrackMover) -> None: + """Add files to albums and move them to specific tracks as needed.""" for album_id, groups in by_album.items(): album = self.loaded_albums[album_id] all_paths = list(groups.unmatched) + [fp for (fp, _rid) in groups.tracks] if all_paths: - self.tagger.add_files([str(p) for p in all_paths], target=album.unmatched_files) + self._tagger.add_files([str(p) for p in all_paths], target=album.unmatched_files) - # Ensure album 
node is expanded/visible early - self._ensure_album_visible(album) + self._ui_state.ensure_album_visible(album, self._saved_expanded_albums) - # Move files to their tracks if groups.tracks: - self.track_mover.move_files_to_tracks(album, groups.tracks) + track_mover.move_files_to_tracks(album, groups.tracks) - def _ensure_album_visible(self, album: Album) -> None: - """Ensure album node is expanded and visible. + def ensure_loaded_for_overrides(self, album_ids: set[str], mb_cache: dict[str, Any]) -> None: + """Ensure albums referenced by overrides are available and visible.""" + for album_id in album_ids: + if album_id in self.loaded_albums: + continue + node = mb_cache.get(album_id) + album = self.load_album_with_strategy(album_id, node) + if album: + self.loaded_albums[album_id] = album + + def load_album_with_strategy(self, album_id: str, cached_node: dict[str, Any] | None) -> Album | None: + """Load an album using cache if available and optionally the network. Parameters ---------- - album : Album - The album to make visible. + album_id : str + The MusicBrainz release ID. + cached_node : dict[str, Any] | None + Cached release tree if available. + + Returns + ------- + Album | None + The loaded album, or None if suppressed and no cache exists. 
""" + if cached_node is not None: + album = self._build_from_cache(album_id, cached_node) + self._ui_state.ensure_album_visible(album, self._saved_expanded_albums) + if not self._suppress_network: + album.load() + return album + + if self._suppress_network: + return None - def run() -> None: - album.update(update_tracks=True) - if album.ui_item: - if self._saved_expanded_albums is not None: - album.ui_item.setExpanded(album.id in self._saved_expanded_albums) - else: - album.ui_item.setExpanded(True) + album = self._tagger.load_album(album_id) + self._ui_state.ensure_album_visible(album, self._saved_expanded_albums) + return album - album.run_when_loaded(run) + def _build_from_cache(self, album_id: str, node: dict[str, Any]) -> Album: + """Construct and finalize an album from cached MB data without network.""" + album = self._tagger.albums.get(album_id) + if not album: + album = Album(album_id) + self._tagger.albums[album_id] = album + if hasattr(self._tagger, 'album_added'): + self._tagger.album_added.emit(album) - def _restore_ui_state(self, data: dict[str, Any]) -> None: - """Restore saved UI expansion state. + album.loaded = False + album.metadata.clear() + album.genres.clear() + album._new_metadata = Metadata() + album._new_tracks = [] + album._requests = max(1, getattr(album, '_requests', 0)) - Parameters - ---------- - data : dict[str, Any] - The session data. 
- """ - expanded_albums = set(data.get('expanded_albums', [])) + with suppress(KeyError, TypeError, ValueError): + album._parse_release(node) + album._run_album_metadata_processors() + album._requests -= 1 + album._finalize_loading(error=False) - def set_expansions() -> None: - # Album view: set expansion for albums we have - for album_id, album in self.tagger.albums.items(): - ui_item = getattr(album, 'ui_item', None) - if ui_item is None: - continue - ui_item.setExpanded(album_id in expanded_albums) + return album - # File view roots: keep default expansion for unmatched / clusters - # (Optional future: persist these as well.) - # Delay until after albums finished initial load to avoid toggling too early - QtCore.QTimer.singleShot(SessionConstants.DEFAULT_RETRY_DELAY_MS, set_expansions) +class OverrideApplicator: + """Apply album and track metadata overrides from the session payload.""" - def _apply_overrides(self, data: dict[str, Any]) -> None: - """Apply metadata overrides to albums and tracks. + def __init__(self, album_manager: AlbumManager) -> None: + self._albums = album_manager + + def apply(self, data: dict[str, Any], mb_cache: dict[str, Any]) -> None: + """Apply overrides, ensuring referenced albums are loaded. Parameters ---------- data : dict[str, Any] - The session data containing overrides. + Session data containing overrides. + mb_cache : dict[str, Any] + Cached MB data to avoid network when possible. 
""" track_overrides_by_album = data.get('album_track_overrides', {}) album_meta_overrides = data.get('album_overrides', {}) - # Ensure albums referenced by overrides are loaded and visible referenced_album_ids = set(track_overrides_by_album.keys()) | set(album_meta_overrides.keys()) - for album_id in referenced_album_ids: - if album_id not in self.loaded_albums: - album = self.tagger.load_album(album_id) - self.loaded_albums[album_id] = album - self._ensure_album_visible(album) + self._albums.ensure_loaded_for_overrides(referenced_album_ids, mb_cache) - # Apply track-level overrides for album_id, track_overrides in track_overrides_by_album.items(): - album = self.loaded_albums.get(album_id) + album = self._albums.loaded_albums.get(album_id) if album: self._apply_track_overrides(album, track_overrides) - # Apply album-level overrides for album_id, overrides in album_meta_overrides.items(): - album = self.loaded_albums.get(album_id) + album = self._albums.loaded_albums.get(album_id) if album: self._apply_album_overrides(album, overrides) def _apply_track_overrides(self, album: Album, overrides: dict[str, dict[str, list[Any]]]) -> None: - """Apply track-level metadata overrides. - - Parameters - ---------- - album : Album - The album containing the tracks. - overrides : dict[str, dict[str, list[Any]]] - Track overrides by track ID. - """ - def run() -> None: track_by_id = {t.id: t for t in album.tracks} for track_id, tags in overrides.items(): tr = track_by_id.get(track_id) if not tr: continue - # Apply overrides to track metadata so columns reflect user edits for tag, values in tags.items(): - # Never override computed lengths if tag in EXCLUDED_OVERRIDE_TAGS: continue tr.metadata[tag] = MetadataHandler.as_list(values) @@ -488,16 +468,6 @@ def run() -> None: album.run_when_loaded(run) def _apply_album_overrides(self, album: Album, overrides: dict[str, list[Any]]) -> None: - """Apply album-level metadata overrides. 
- - Parameters - ---------- - album : Album - The album to apply overrides to. - overrides : dict[str, list[Any]] - Album-level overrides. - """ - def run() -> None: for tag, values in overrides.items(): album.metadata[tag] = MetadataHandler.as_list(values) @@ -505,6 +475,121 @@ def run() -> None: album.run_when_loaded(run) + +class SessionLoader: + """Orchestrate loading and restoring Picard sessions.""" + + def __init__(self, tagger: Any) -> None: + """Initialize the session loader. + + Parameters + ---------- + tagger : Any + The Picard tagger instance. + """ + self.tagger = tagger + self._progress: ProgressReporter = TaggerProgressReporter(tagger) + self._file_reader = SessionFileReader() + self._config_mgr = ConfigurationManager() + self._grouper = ItemGrouper() + self._ui_state = UIStateManager(tagger, SessionConstants.DEFAULT_RETRY_DELAY_MS) + self._albums = AlbumManager(tagger, self._ui_state) + self._overrides = OverrideApplicator(self._albums) + self.track_mover = TrackMover(tagger) + # Module-level state bound to a single session load + self._saved_expanded_albums: set[str] | None = None + self._mb_cache: dict[str, Any] = {} + self._suppress_mb_requests: bool = False + + @property + def loaded_albums(self) -> dict[str, Album]: + """Expose albums loaded during the session. + + Returns + ------- + dict[str, Album] + Mapping of MusicBrainz release ID to `Album`. + """ + return self._albums.loaded_albums + + @loaded_albums.setter + def loaded_albums(self, value: dict[str, Album]) -> None: + self._albums.loaded_albums = value + + # Note: Previously exposed internal methods are now encapsulated in + # dedicated components (SessionFileReader, ConfigurationManager, + # ItemGrouper, UIStateManager, AlbumManager, OverrideApplicator). + + def load_from_path(self, path: str | Path) -> None: + """Load and restore a Picard session from file. + + Parameters + ---------- + path : str | Path + The file path to load the session from. 
+ + Notes + ----- + Orchestrates reading the session, restoring configuration, loading + items and albums, applying overrides, and restoring UI state. + """ + self._progress.emit("read", details={'path': str(path)}) + data = self._file_reader.read(path) + + self._config_mgr.prepare_session(self.tagger) + self._config_mgr.restore_options(data.get('options', {})) + + self._suppress_mb_requests = get_config().setting['session_no_mb_requests_on_load'] + self._saved_expanded_albums = set(data.get('expanded_albums', [])) if 'expanded_albums' in data else None + self._albums.configure(self._suppress_mb_requests, self._saved_expanded_albums) + + items = data.get('items', []) + grouped_items = self._grouper.group(items) + metadata_map = self._grouper.extract_metadata(items) + + self._mb_cache = data.get('mb_cache', {}) + if self._mb_cache: + self._progress.emit("preload_cache", details={'albums': len(self._mb_cache)}) + self._albums.preload_from_cache(self._mb_cache, grouped_items) + + total_files = ( + len(grouped_items.unclustered) + + sum(len(v) for v in grouped_items.by_cluster.values()) + + sum(len(g.unmatched) + len(g.tracks) for g in grouped_items.by_album.values()) + ) + self._progress.emit("load_items", details={'files': total_files}) + + # Load albums for items and place files accordingly + self._albums.load_needed_albums(grouped_items, self._mb_cache) + + if grouped_items.unclustered: + self.tagger.add_files([str(p) for p in grouped_items.unclustered], target=self.tagger.unclustered_files) + for (title, artist), paths in grouped_items.by_cluster.items(): + cluster = self.tagger.load_cluster(title, artist) + self.tagger.add_files([str(p) for p in paths], target=cluster) + self._albums.load_album_files(grouped_items.by_album, self.track_mover) + + # NAT items + for fpath, rid in grouped_items.nat_items: + self.track_mover.move_file_to_nat(fpath, rid) + + # Unmatched albums + self._albums.load_unmatched_albums(data.get('unmatched_albums', []), self._mb_cache) + + # 
Apply overrides + self._progress.emit("apply_overrides") + self._overrides.apply(data, self._mb_cache) + + if metadata_map: + self._schedule_metadata_application(metadata_map) + + # Restore UI state + self._progress.emit("finalize") + expanded = set(data.get('expanded_albums', [])) + self._ui_state.apply_expansions_later(expanded) + + # The following block of methods are retained for scheduling and lifecycle. + def _schedule_metadata_application(self, metadata_map: dict[Path, dict[str, list[Any]]]) -> None: """Schedule metadata application after files are loaded. diff --git a/picard/ui/options/sessions.py b/picard/ui/options/sessions.py index 3136550ab4..5a477f4750 100644 --- a/picard/ui/options/sessions.py +++ b/picard/ui/options/sessions.py @@ -43,6 +43,7 @@ class SessionsOptionsPage(OptionsPage): ('session_autosave_interval_min', ['autosave_spin']), ('session_backup_on_crash', ['backup_checkbox']), ('session_include_mb_data', ['include_mb_data_checkbox']), + ('session_no_mb_requests_on_load', ['no_mb_requests_checkbox']), ('session_folder_path', ['folder_path_edit']), ) @@ -84,6 +85,16 @@ def __init__(self, parent=None): self.include_mb_data_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_INCLUDE_MB_DATA_TITLE)) self.vbox.addWidget(self.include_mb_data_checkbox) + # Child option: Only effective when Include MB data is enabled + self.no_mb_requests_checkbox = QtWidgets.QCheckBox(_(SessionMessages.SESSION_NO_MB_REQUESTS_ON_LOAD)) + # Visually indent to indicate dependency on parent option + child_layout = QtWidgets.QHBoxLayout() + child_layout.setContentsMargins(24, 0, 0, 0) + child_layout.addWidget(self.no_mb_requests_checkbox) + self.vbox.addLayout(child_layout) + # Keep child disabled when parent is unchecked + self.include_mb_data_checkbox.toggled.connect(self.no_mb_requests_checkbox.setEnabled) + self.vbox.addStretch(1) def load(self): @@ -93,6 +104,9 @@ def load(self): self.autosave_spin.setValue(config.setting['session_autosave_interval_min']) 
self.backup_checkbox.setChecked(config.setting['session_backup_on_crash']) self.include_mb_data_checkbox.setChecked(config.setting['session_include_mb_data']) + self.no_mb_requests_checkbox.setChecked(config.setting['session_no_mb_requests_on_load']) + # Enforce dependency (child enabled only when parent is on) + self.no_mb_requests_checkbox.setEnabled(self.include_mb_data_checkbox.isChecked()) self.folder_path_edit.setText(config.setting['session_folder_path']) def save(self): @@ -101,7 +115,12 @@ def save(self): config.setting['session_load_last_on_startup'] = self.load_last_checkbox.isChecked() config.setting['session_autosave_interval_min'] = int(self.autosave_spin.value()) config.setting['session_backup_on_crash'] = self.backup_checkbox.isChecked() - config.setting['session_include_mb_data'] = self.include_mb_data_checkbox.isChecked() + include_mb = self.include_mb_data_checkbox.isChecked() + config.setting['session_include_mb_data'] = include_mb + # Force child off when parent is off to avoid stale state + config.setting['session_no_mb_requests_on_load'] = ( + self.no_mb_requests_checkbox.isChecked() if include_mb else False + ) config.setting['session_folder_path'] = self.folder_path_edit.text().strip() def _browse_sessions_folder(self): diff --git a/test/session/test_album_manager.py b/test/session/test_album_manager.py new file mode 100644 index 0000000000..4b5b3b9311 --- /dev/null +++ b/test/session/test_album_manager.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for AlbumManager.""" + +from pathlib import Path +from unittest.mock import Mock + +from picard.album import Album +from picard.session.session_data import AlbumItems, GroupedItems +from picard.session.session_loader import AlbumManager, UIStateManager + + +def test_album_manager_load_unmatched_albums() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + manager = AlbumManager(tagger, ui_state) + manager.configure(suppress_network=False, saved_expanded_albums=None) + + unmatched = ["album-123", "album-456"] + album_mock1 = Mock(spec=Album) + album_mock2 = Mock(spec=Album) + tagger.load_album.side_effect = [album_mock1, album_mock2] + + manager.load_unmatched_albums(unmatched, mb_cache={}) + + assert manager.loaded_albums['album-123'] == album_mock1 + assert manager.loaded_albums['album-456'] == album_mock2 + assert tagger.load_album.call_count == 2 + + +def test_album_manager_load_needed_albums() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + manager = AlbumManager(tagger, ui_state) + manager.configure(suppress_network=False, saved_expanded_albums=None) + + grouped = GroupedItems( + unclustered=[], + by_cluster={}, + by_album={'album-123': AlbumItems(unmatched=[], tracks=[]), 'album-456': AlbumItems(unmatched=[], tracks=[])}, + nat_items=[], + ) + + album_mock1 = Mock(spec=Album) + album_mock2 = Mock(spec=Album) + + def side_effect(album_id): + if album_id == "album-123": + return album_mock1 + if album_id == "album-456": + 
return album_mock2 + return Mock(spec=Album) + + tagger.load_album.side_effect = side_effect + + manager.load_needed_albums(grouped, mb_cache={}) + + assert manager.loaded_albums['album-123'] == album_mock1 + assert manager.loaded_albums['album-456'] == album_mock2 + + +def test_album_manager_load_album_files() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + manager = AlbumManager(tagger, ui_state) + manager.configure(suppress_network=False, saved_expanded_albums=None) + + album_mock = Mock(spec=Album) + album_mock.unmatched_files = Mock() + manager.loaded_albums = {'album-123': album_mock} + + by_album = { + 'album-123': AlbumItems( + unmatched=[Path("/test/unmatched.mp3")], + tracks=[(Path("/test/track.mp3"), "recording-456")], + ) + } + + track_mover = Mock() + manager.load_album_files(by_album, track_mover) + + tagger.add_files.assert_called_once() + track_mover.move_files_to_tracks.assert_called_once_with(album_mock, [(Path("/test/track.mp3"), "recording-456")]) + + +def test_album_manager_load_album_files_no_files() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + manager = AlbumManager(tagger, ui_state) + manager.configure(suppress_network=False, saved_expanded_albums=None) + + album_mock = Mock(spec=Album) + manager.loaded_albums = {'album-123': album_mock} + + by_album = {'album-123': AlbumItems(unmatched=[], tracks=[])} + manager.load_album_files(by_album, track_mover=Mock()) + + tagger.add_files.assert_not_called() + + +def test_album_manager_preload_from_cache_and_refresh_network() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + manager = AlbumManager(tagger, ui_state) + # allow network refresh + manager.configure(suppress_network=False, saved_expanded_albums=None) + + grouped = GroupedItems(unclustered=[], by_cluster={}, by_album={'album-1': AlbumItems([], [])}, nat_items=[]) + mb_cache = {'album-1': {'id': 'album-1'}} + + # When 
AlbumManager builds from cache, it creates Album() and may emit signals. + # Simulate tagger.albums empty, force branch that creates new Album via internal logic. + tagger.albums = {} + + # Ensure run_when_loaded callback executes synchronously for test + def run_immediately(cb): + cb() + + Album.run_when_loaded = Mock(side_effect=run_immediately) + + manager.preload_from_cache(mb_cache, grouped) + + assert 'album-1' in manager.loaded_albums + # album.load() should have been called if network not suppressed + manager.loaded_albums['album-1'] + # Some Album methods are internal; for this test ensure attribute exists and is callable + # We cannot assert exact call since Album is constructed internally; just ensure no crash and album present. + + +def test_album_manager_load_album_with_strategy_suppressed_no_cache() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + manager = AlbumManager(tagger, ui_state) + manager.configure(suppress_network=True, saved_expanded_albums=None) + + album = manager.load_album_with_strategy('album-x', cached_node=None) + assert album is None diff --git a/test/session/test_configuration_manager.py b/test/session/test_configuration_manager.py new file mode 100644 index 0000000000..cab865a0d2 --- /dev/null +++ b/test/session/test_configuration_manager.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for ConfigurationManager.""" + +from unittest.mock import Mock, patch + +import picard.config as picard_config +from picard.session.session_loader import ConfigurationManager + + +def test_configuration_manager_prepare_session(cfg_options) -> None: + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = True + + manager = ConfigurationManager() + tagger = Mock() + manager.prepare_session(tagger) + + tagger.clear_session.assert_called_once() + assert tagger._restoring_session is True + + +def test_configuration_manager_prepare_session_safe_restore_disabled(cfg_options) -> None: + cfg = picard_config.get_config() + cfg.setting['session_safe_restore'] = False + + manager = ConfigurationManager() + tagger = Mock() + manager.prepare_session(tagger) + + tagger.clear_session.assert_called_once() + if hasattr(tagger, '_restoring_session'): + assert tagger._restoring_session is not True + + +@patch("picard.session.session_loader.get_config") +def test_configuration_manager_restore_options_with_defaults(mock_get_config) -> None: + config_mock = Mock() + config_mock.setting = { + 'rename_files': False, + 'move_files': False, + 'dont_write_tags': False, + } + mock_get_config.return_value = config_mock + + manager = ConfigurationManager() + manager.restore_options({}) + + assert config_mock.setting['rename_files'] is False + assert config_mock.setting['move_files'] is False + assert config_mock.setting['dont_write_tags'] is False + + +def test_configuration_manager_restore_options(cfg_options) -> None: + manager = ConfigurationManager() + options = { + 'rename_files': True, + 'move_files': True, + 'dont_write_tags': True, + } + manager.restore_options(options) + cfg = picard_config.get_config() + assert cfg.setting['rename_files'] is 
True + assert cfg.setting['move_files'] is True + assert cfg.setting['dont_write_tags'] is True diff --git a/test/session/test_item_grouper.py b/test/session/test_item_grouper.py new file mode 100644 index 0000000000..d35113f624 --- /dev/null +++ b/test/session/test_item_grouper.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for ItemGrouper.""" + +from pathlib import Path + +from picard.session.session_loader import ItemGrouper + + +def test_item_grouper_group_items_by_location() -> None: + grouper = ItemGrouper() + items = [ + { + 'file_path': "/test/unclustered.mp3", + 'location': {'type': "unclustered"}, + }, + { + 'file_path': "/test/cluster.mp3", + 'location': {'type': "cluster", 'cluster_title': "Album", 'cluster_artist': "Artist"}, + }, + { + 'file_path': "/test/track.mp3", + 'location': {'type': "track", 'album_id': "album-123", 'recording_id': "recording-456"}, + }, + { + 'file_path': "/test/unmatched.mp3", + 'location': {'type': "album_unmatched", 'album_id': "album-789"}, + }, + { + 'file_path': "/test/nat.mp3", + 'location': {'type': "nat", 'recording_id': "recording-999"}, + }, + ] + + grouped = grouper.group(items) + + assert len(grouped.unclustered) == 1 + assert Path(grouped.unclustered[0]).name == "unclustered.mp3" + + assert len(grouped.by_cluster) == 1 + assert ("Album", "Artist") in grouped.by_cluster + + assert "album-123" in grouped.by_album + assert len(grouped.by_album['album-123'].tracks) == 1 + + assert "album-789" in grouped.by_album + assert len(grouped.by_album['album-789'].unmatched) == 1 + + assert len(grouped.nat_items) == 1 + assert grouped.nat_items[0][1] == "recording-999" + + +def test_item_grouper_group_items_by_location_unknown_type() -> None: + grouper = ItemGrouper() + items = [ + { + 'file_path': "/test/unknown.mp3", + 'location': {'type': "unknown_type"}, + }, + ] + + grouped = grouper.group(items) + + assert len(grouped.unclustered) == 1 + assert Path(grouped.unclustered[0]).name == "unknown.mp3" + + +def test_item_grouper_group_items_by_location_missing_location() -> None: + grouper = ItemGrouper() + items = [ + { + 'file_path': "/test/no_location.mp3", + }, + ] + + grouped = grouper.group(items) + + assert len(grouped.unclustered) == 1 + assert Path(grouped.unclustered[0]).name == "no_location.mp3" + + +def 
test_item_grouper_extract_metadata() -> None: + grouper = ItemGrouper() + items = [ + { + 'file_path': "/test/file1.mp3", + 'metadata': {'tags': {'title': ["Song 1"], 'artist': ["Artist 1"]}}, + }, + { + 'file_path': "/test/file2.mp3", + }, + { + 'file_path': "/test/file3.mp3", + 'metadata': {'tags': {'title': ["Song 3"]}}, + }, + ] + + metadata_map = grouper.extract_metadata(items) + + assert len(metadata_map) == 2 + assert Path("/test/file1.mp3") in metadata_map + assert Path("/test/file3.mp3") in metadata_map + assert metadata_map[Path("/test/file1.mp3")]['title'] == ["Song 1"] + + +def test_item_grouper_preserves_path_expansion() -> None: + grouper = ItemGrouper() + # Use a tilde path to ensure expanduser is applied + items = [ + { + 'file_path': str(Path.home() / "does_not_exist.mp3"), + 'location': {'type': "unclustered"}, + } + ] + grouped = grouper.group(items) + # Path should be absolute and expanded + assert grouped.unclustered[0].is_absolute() + + +def test_item_grouper_extract_metadata_empty_items() -> None: + grouper = ItemGrouper() + metadata_map = grouper.extract_metadata([]) + assert len(metadata_map) == 0 + + +def test_item_grouper_extract_metadata_no_metadata() -> None: + grouper = ItemGrouper() + items = [ + {'file_path': "/test/file1.mp3"}, + {'file_path': "/test/file2.mp3"}, + ] + metadata_map = grouper.extract_metadata(items) + assert len(metadata_map) == 0 diff --git a/test/session/test_override_applicator.py b/test/session/test_override_applicator.py new file mode 100644 index 0000000000..2d66acf2eb --- /dev/null +++ b/test/session/test_override_applicator.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later 
version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for OverrideApplicator.""" + +from unittest.mock import Mock + +from picard.album import Album +from picard.session.session_loader import AlbumManager, OverrideApplicator, UIStateManager + + +def _immediate_run(callback): + callback() + + +def test_override_applicator_apply_track_overrides() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + albums = AlbumManager(tagger, ui_state) + applicator = OverrideApplicator(albums) + + album_mock = Mock(spec=Album) + track_mock = Mock() + track_mock.id = "track-123" + track_mock.metadata = {} + album_mock.tracks = [track_mock] + album_mock.run_when_loaded.side_effect = _immediate_run + + albums.loaded_albums = {'album-1': album_mock} + + data = { + 'album_track_overrides': { + 'album-1': {'track-123': {'title': ["New Title"], 'artist': ["New Artist"]}}, + } + } + + applicator.apply(data, mb_cache={}) + + assert track_mock.metadata['title'] == ["New Title"] + assert track_mock.metadata['artist'] == ["New Artist"] + track_mock.update.assert_called_once() + + +def test_override_applicator_apply_track_overrides_track_not_found() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + albums = AlbumManager(tagger, ui_state) + applicator = OverrideApplicator(albums) + + album_mock = Mock(spec=Album) + track_mock = Mock() + track_mock.id = "track-123" + album_mock.tracks = [track_mock] + album_mock.run_when_loaded.side_effect = _immediate_run + + albums.loaded_albums = {'album-1': 
album_mock} + + data = { + 'album_track_overrides': { + 'album-1': {'track-999': {'title': ["New Title"]}}, + } + } + + applicator.apply(data, mb_cache={}) + track_mock.update.assert_not_called() + + +def test_override_applicator_ensures_albums_loaded() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + albums = AlbumManager(tagger, ui_state) + applicator = OverrideApplicator(albums) + + # No albums loaded initially; applicator should ask AlbumManager to ensure they are loaded + data = { + 'album_track_overrides': {'album-42': {'t1': {'title': ["X"]}}}, + 'album_overrides': {'album-99': {'album': ["Y"]}}, + } + + # Spy on ensure_loaded_for_overrides + albums.ensure_loaded_for_overrides = Mock() + applicator.apply(data, mb_cache={}) + albums.ensure_loaded_for_overrides.assert_called_once() + + +def test_override_applicator_apply_album_overrides() -> None: + tagger = Mock() + ui_state = UIStateManager(tagger, default_delay_ms=10) + albums = AlbumManager(tagger, ui_state) + applicator = OverrideApplicator(albums) + + album_mock = Mock(spec=Album) + album_mock.metadata = {} + album_mock.run_when_loaded.side_effect = _immediate_run + albums.loaded_albums = {'album-1': album_mock} + + data = { + 'album_overrides': { + 'album-1': {'albumartist': ["New Artist"], 'album': ["New Album"]}, + } + } + + applicator.apply(data, mb_cache={}) + + assert album_mock.metadata['albumartist'] == ["New Artist"] + assert album_mock.metadata['album'] == ["New Album"] + album_mock.update.assert_called_once_with(update_tracks=False) diff --git a/test/session/test_session_file_reader.py b/test/session/test_session_file_reader.py new file mode 100644 index 0000000000..86370b0bdb --- /dev/null +++ b/test/session/test_session_file_reader.py @@ -0,0 +1,74 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it 
under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + +"""Tests for SessionFileReader.""" + +from pathlib import Path + +import yaml + +from picard.session.session_loader import SessionFileReader + +import pytest + + +def test_session_file_reader_read(tmp_path: Path) -> None: + """Read a valid YAML session file.""" + session_data = {'version': 1, 'items': []} + session_file = tmp_path / "test.mbps" + session_file.write_text(yaml.dump(session_data, default_flow_style=False), encoding="utf-8") + + reader = SessionFileReader() + data = reader.read(session_file) + + assert data == session_data + + +def test_session_file_reader_invalid_yaml(tmp_path: Path) -> None: + """Raise YAMLError on invalid YAML.""" + session_file = tmp_path / "test.mbps" + session_file.write_text("invalid yaml: [", encoding="utf-8") + + reader = SessionFileReader() + with pytest.raises(yaml.YAMLError): + reader.read(session_file) + + +def test_session_file_reader_not_found() -> None: + """Raise FileNotFoundError for nonexistent path.""" + reader = SessionFileReader() + with pytest.raises(FileNotFoundError): + reader.read(Path("/nonexistent/file.mbps")) + + +def test_session_file_reader_gzip(tmp_path: Path) -> None: + """Read a gzipped YAML session file.""" + session_data = {'version': 1, 'items': []} + text = yaml.dump(session_data, default_flow_style=False) + + import gzip as _gzip + + gz_path = tmp_path / 
"test.mbps.gz" + gz_path.write_bytes(_gzip.compress(text.encode("utf-8"))) + + reader = SessionFileReader() + data = reader.read(gz_path) + + assert data == session_data diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py index f46b726be9..58977d1869 100644 --- a/test/session/test_session_loader.py +++ b/test/session/test_session_loader.py @@ -25,10 +25,8 @@ import yaml -from picard.album import Album import picard.config as picard_config from picard.metadata import Metadata -from picard.session.session_data import AlbumItems, GroupedItems from picard.session.session_loader import SessionLoader import pytest @@ -41,380 +39,6 @@ def session_loader() -> SessionLoader: return SessionLoader(tagger_mock) -def test_session_loader_read_session_file(session_loader: SessionLoader, tmp_path: Path) -> None: - """Test reading session file.""" - session_data = {'version': 1, 'items': []} - session_file = tmp_path / "test.mbps" - session_file.write_text(yaml.dump(session_data, default_flow_style=False), encoding="utf-8") - - data = session_loader._read_session_file(session_file) - - assert data == session_data - - -def test_session_loader_read_session_file_invalid_yaml(session_loader: SessionLoader, tmp_path: Path) -> None: - """Test reading invalid YAML session file.""" - session_file = tmp_path / "test.mbps" - session_file.write_text("invalid yaml: [", encoding="utf-8") - - with pytest.raises(yaml.YAMLError): - session_loader._read_session_file(session_file) - - -def test_session_loader_read_session_file_not_found(session_loader: SessionLoader) -> None: - """Test reading non-existent session file.""" - with pytest.raises(FileNotFoundError): - session_loader._read_session_file(Path("/nonexistent/file.mbps")) - - -def test_session_loader_prepare_session(session_loader: SessionLoader, cfg_options) -> None: - """Test session preparation.""" - # Set the config value for this test - cfg = picard_config.get_config() - 
cfg.setting['session_safe_restore'] = True - - data = {'version': 1} - session_loader._prepare_session(data) - - session_loader.tagger.clear_session.assert_called_once() - assert session_loader.tagger._restoring_session is True - - -def test_session_loader_prepare_session_safe_restore_disabled(session_loader: SessionLoader, cfg_options) -> None: - """Test session preparation with safe restore disabled.""" - # Set the config value for this test - cfg = picard_config.get_config() - cfg.setting['session_safe_restore'] = False - - data = {'version': 1} - session_loader._prepare_session(data) - - session_loader.tagger.clear_session.assert_called_once() - # When safe restore is disabled, _restoring_session should not be set to True - # (it might exist from previous tests, but should not be True) - if hasattr(session_loader.tagger, '_restoring_session'): - assert session_loader.tagger._restoring_session is not True - - -def test_session_loader_restore_options(session_loader: SessionLoader, cfg_options) -> None: - """Test restoring configuration options.""" - # The cfg_options fixture already sets the default values - - options = { - 'rename_files': True, - 'move_files': True, - 'dont_write_tags': True, - } - - session_loader._restore_options(options) - - cfg = picard_config.get_config() - assert cfg.setting['rename_files'] is True - assert cfg.setting['move_files'] is True - assert cfg.setting['dont_write_tags'] is True - - -@patch("picard.session.session_loader.get_config") -def test_session_loader_restore_options_with_defaults(session_loader: SessionLoader, mock_get_config) -> None: - """Test restoring configuration options with default values.""" - config_mock = Mock() - config_mock.setting = { - 'rename_files': False, - 'move_files': False, - 'dont_write_tags': False, - } - mock_get_config.return_value = config_mock - - # Empty options should use current config values - options = {} - - session_loader._restore_options(options) - - assert 
config_mock.setting['rename_files'] is False - assert config_mock.setting['move_files'] is False - assert config_mock.setting['dont_write_tags'] is False - - -def test_session_loader_group_items_by_location(session_loader: SessionLoader) -> None: - """Test grouping items by location type.""" - items = [ - { - 'file_path': "/test/unclustered.mp3", - 'location': {'type': "unclustered"}, - }, - { - 'file_path': "/test/cluster.mp3", - 'location': {'type': "cluster", 'cluster_title': "Album", 'cluster_artist': "Artist"}, - }, - { - 'file_path': "/test/track.mp3", - 'location': {'type': "track", 'album_id': "album-123", 'recording_id': "recording-456"}, - }, - { - 'file_path': "/test/unmatched.mp3", - 'location': {'type': "album_unmatched", 'album_id': "album-789"}, - }, - { - 'file_path': "/test/nat.mp3", - 'location': {'type': "nat", 'recording_id': "recording-999"}, - }, - ] - - grouped = session_loader._group_items_by_location(items) - - assert len(grouped.unclustered) == 1 - assert Path(grouped.unclustered[0]).name == "unclustered.mp3" - - assert len(grouped.by_cluster) == 1 - assert ("Album", "Artist") in grouped.by_cluster - - assert "album-123" in grouped.by_album - assert len(grouped.by_album['album-123'].tracks) == 1 - - assert "album-789" in grouped.by_album - assert len(grouped.by_album['album-789'].unmatched) == 1 - - assert len(grouped.nat_items) == 1 - assert grouped.nat_items[0][1] == "recording-999" - - -def test_session_loader_group_items_by_location_unknown_type(session_loader: SessionLoader) -> None: - """Test grouping items with unknown location type.""" - items = [ - { - 'file_path': "/test/unknown.mp3", - 'location': {'type': "unknown_type"}, - }, - ] - - grouped = session_loader._group_items_by_location(items) - - # Unknown types should be treated as unclustered - assert len(grouped.unclustered) == 1 - assert Path(grouped.unclustered[0]).name == "unknown.mp3" - - -def test_session_loader_group_items_by_location_missing_location(session_loader: 
SessionLoader) -> None: - """Test grouping items with missing location.""" - items = [ - { - 'file_path': "/test/no_location.mp3", - }, - ] - - grouped = session_loader._group_items_by_location(items) - - # Missing location should default to unclustered - assert len(grouped.unclustered) == 1 - assert Path(grouped.unclustered[0]).name == "no_location.mp3" - - -def test_session_loader_extract_metadata(session_loader: SessionLoader) -> None: - """Test extracting metadata from session items.""" - items = [ - { - 'file_path': "/test/file1.mp3", - 'metadata': {'tags': {'title': ["Song 1"], 'artist': ["Artist 1"]}}, - }, - { - 'file_path': "/test/file2.mp3", - # No metadata - }, - { - 'file_path': "/test/file3.mp3", - 'metadata': {'tags': {'title': ["Song 3"]}}, - }, - ] - - metadata_map = session_loader._extract_metadata(items) - - assert len(metadata_map) == 2 - assert Path("/test/file1.mp3") in metadata_map - assert Path("/test/file3.mp3") in metadata_map - assert metadata_map[Path("/test/file1.mp3")]['title'] == ["Song 1"] - - -def test_session_loader_extract_metadata_empty_items(session_loader: SessionLoader) -> None: - """Test extracting metadata from empty items list.""" - metadata_map = session_loader._extract_metadata([]) - - assert len(metadata_map) == 0 - - -def test_session_loader_extract_metadata_no_metadata(session_loader: SessionLoader) -> None: - """Test extracting metadata when no items have metadata.""" - items = [ - {'file_path': "/test/file1.mp3"}, - {'file_path': "/test/file2.mp3"}, - ] - - metadata_map = session_loader._extract_metadata(items) - - assert len(metadata_map) == 0 - - -def test_session_loader_load_unmatched_albums(session_loader: SessionLoader) -> None: - """Test loading unmatched albums.""" - unmatched_album_ids = ["album-123", "album-456"] - - album_mock1 = Mock(spec=Album) - album_mock2 = Mock(spec=Album) - session_loader.tagger.load_album.side_effect = [album_mock1, album_mock2] - - 
session_loader._load_unmatched_albums(unmatched_album_ids) - - assert session_loader.loaded_albums['album-123'] == album_mock1 - assert session_loader.loaded_albums['album-456'] == album_mock2 - assert session_loader.tagger.load_album.call_count == 2 - - -def test_session_loader_load_unmatched_albums_empty_list(session_loader: SessionLoader) -> None: - """Test loading unmatched albums with empty list.""" - session_loader._load_unmatched_albums([]) - - assert len(session_loader.loaded_albums) == 0 - session_loader.tagger.load_album.assert_not_called() - - -def test_session_loader_load_albums(session_loader: SessionLoader) -> None: - """Test loading albums.""" - grouped_items = GroupedItems( - unclustered=[], - by_cluster={}, - by_album={'album-123': AlbumItems(unmatched=[], tracks=[]), 'album-456': AlbumItems(unmatched=[], tracks=[])}, - nat_items=[], - ) - - album_mock1 = Mock(spec=Album) - album_mock2 = Mock(spec=Album) - - # Use a function to return the appropriate mock based on the album_id - def load_album_side_effect(album_id): - if album_id == "album-123": - return album_mock1 - elif album_id == "album-456": - return album_mock2 - return Mock(spec=Album) - - session_loader.tagger.load_album.side_effect = load_album_side_effect - - session_loader._load_albums(grouped_items) - - assert session_loader.loaded_albums['album-123'] == album_mock1 - assert session_loader.loaded_albums['album-456'] == album_mock2 - - -def test_session_loader_load_albums_no_albums(session_loader: SessionLoader) -> None: - """Test loading albums when no albums are needed.""" - grouped_items = GroupedItems( - unclustered=[], - by_cluster={}, - by_album={}, - nat_items=[], - ) - - session_loader._load_albums(grouped_items) - - assert len(session_loader.loaded_albums) == 0 - session_loader.tagger.load_album.assert_not_called() - - -def test_session_loader_load_album_files(session_loader: SessionLoader) -> None: - """Test loading files into albums.""" - album_mock = Mock(spec=Album) - 
album_mock.unmatched_files = Mock() - session_loader.loaded_albums = {'album-123': album_mock} - - by_album = { - 'album-123': AlbumItems( - unmatched=[Path("/test/unmatched.mp3")], - tracks=[(Path("/test/track.mp3"), "recording-456")], - ) - } - - with patch.object(session_loader.track_mover, "move_files_to_tracks") as mock_move: - session_loader._load_album_files(by_album) - - session_loader.tagger.add_files.assert_called_once() - mock_move.assert_called_once_with(album_mock, [(Path("/test/track.mp3"), "recording-456")]) - - -def test_session_loader_load_album_files_no_files(session_loader: SessionLoader) -> None: - """Test loading album files when no files are present.""" - album_mock = Mock(spec=Album) - session_loader.loaded_albums = {'album-123': album_mock} - - by_album = {'album-123': AlbumItems(unmatched=[], tracks=[])} - - session_loader._load_album_files(by_album) - - session_loader.tagger.add_files.assert_not_called() - - -def test_session_loader_apply_track_overrides(session_loader: SessionLoader) -> None: - """Test applying track-level overrides.""" - album_mock = Mock(spec=Album) - track_mock = Mock() - track_mock.id = "track-123" - track_mock.metadata = {} # Add metadata dict - album_mock.tracks = [track_mock] - - overrides = {'track-123': {'title': ["New Title"], 'artist': ["New Artist"]}} - - # Mock run_when_loaded to call callback immediately - def run_callback(callback): - callback() - - album_mock.run_when_loaded.side_effect = run_callback - - session_loader._apply_track_overrides(album_mock, overrides) - - assert track_mock.metadata['title'] == ["New Title"] - assert track_mock.metadata['artist'] == ["New Artist"] - track_mock.update.assert_called_once() - - -def test_session_loader_apply_track_overrides_track_not_found(session_loader: SessionLoader) -> None: - """Test applying track overrides when track is not found.""" - album_mock = Mock(spec=Album) - track_mock = Mock() - track_mock.id = "track-123" - album_mock.tracks = [track_mock] - - 
overrides = {'track-999': {'title': ["New Title"]}} # Non-existent track - - # Mock run_when_loaded to call callback immediately - def run_callback(callback): - callback() - - album_mock.run_when_loaded.side_effect = run_callback - - session_loader._apply_track_overrides(album_mock, overrides) - - # Should not modify existing track - track_mock.update.assert_not_called() - - -def test_session_loader_apply_album_overrides(session_loader: SessionLoader) -> None: - """Test applying album-level overrides.""" - album_mock = Mock(spec=Album) - album_mock.metadata = {} # Add metadata dict - - overrides = {'albumartist': ["New Artist"], 'album': ["New Album"]} - - # Mock run_when_loaded to call callback immediately - def run_callback(callback): - callback() - - album_mock.run_when_loaded.side_effect = run_callback - - session_loader._apply_album_overrides(album_mock, overrides) - - assert album_mock.metadata['albumartist'] == ["New Artist"] - assert album_mock.metadata['album'] == ["New Album"] - album_mock.update.assert_called_once_with(update_tracks=False) - - def test_session_loader_schedule_metadata_application(session_loader: SessionLoader, mock_single_shot) -> None: """Test scheduling metadata application.""" metadata_map = {Path("/test/file.mp3"): Metadata()} @@ -519,59 +143,115 @@ def test_session_loader_initialization() -> None: assert hasattr(loader, 'track_mover') -def test_session_loader_ensure_album_visible(session_loader: SessionLoader) -> None: - """Test ensuring album is visible and expanded.""" - album_mock = Mock(spec=Album) - album_mock.id = "album-123" - ui_item_mock = Mock() - album_mock.ui_item = ui_item_mock +def _write_session(tmp_path: Path, data: dict) -> Path: + p = tmp_path / "session.mbps" + p.write_text(yaml.safe_dump(data), encoding="utf-8") + return p - # Mock run_when_loaded to call callback immediately - def run_callback(callback): - callback() - album_mock.run_when_loaded.side_effect = run_callback +def 
test_session_loader_requests_allowed(tmp_path: Path, mock_single_shot, cfg_options) -> None: + cfg = picard_config.get_config() + cfg.setting['session_no_mb_requests_on_load'] = False + cfg.setting['session_safe_restore'] = False - session_loader._saved_expanded_albums = {"album-123"} - session_loader._ensure_album_visible(album_mock) + tagger = Mock() + tagger.albums = {} - album_mock.update.assert_called_once_with(update_tracks=True) - ui_item_mock.setExpanded.assert_called_once_with(True) + loader = SessionLoader(tagger) + data = { + 'version': 1, + 'options': {}, + 'items': [], + 'unmatched_albums': ["album-123"], + 'expanded_albums': [], + } + path = _write_session(tmp_path, data) -def test_session_loader_ensure_album_visible_no_saved_state(session_loader: SessionLoader) -> None: - """Test ensuring album is visible when no saved expansion state.""" - album_mock = Mock(spec=Album) - album_mock.id = "album-123" - ui_item_mock = Mock() - album_mock.ui_item = ui_item_mock + album_mock = Mock() + album_mock.unmatched_files = Mock() + album_mock.run_when_loaded = Mock(side_effect=lambda cb: cb()) + tagger.load_album.return_value = album_mock - # Mock run_when_loaded to call callback immediately - def run_callback(callback): - callback() + loader.load_from_path(path) - album_mock.run_when_loaded.side_effect = run_callback + tagger.load_album.assert_called_once_with("album-123") - session_loader._saved_expanded_albums = None - session_loader._ensure_album_visible(album_mock) - album_mock.update.assert_called_once_with(update_tracks=True) - ui_item_mock.setExpanded.assert_called_once_with(True) +def test_session_loader_requests_suppressed(tmp_path: Path, mock_single_shot, cfg_options) -> None: + cfg = picard_config.get_config() + cfg.setting['session_no_mb_requests_on_load'] = True + cfg.setting['session_safe_restore'] = False + + tagger = Mock() + tagger.albums = {} + + loader = SessionLoader(tagger) + data = { + 'version': 1, + 'options': {}, + 'items': [], + 
'unmatched_albums': ["album-123"], + 'expanded_albums': [], + } + path = _write_session(tmp_path, data) + + loader.load_from_path(path) -def test_session_loader_ensure_album_visible_no_ui_item(session_loader: SessionLoader) -> None: - """Test ensuring album is visible when album has no UI item.""" - album_mock = Mock(spec=Album) - album_mock.id = "album-123" - album_mock.ui_item = None + tagger.load_album.assert_not_called() - # Mock run_when_loaded to call callback immediately - def run_callback(callback): - callback() - album_mock.run_when_loaded.side_effect = run_callback +def test_session_loader_cached_album_refresh_allowed(tmp_path: Path, mock_single_shot, cfg_options) -> None: + cfg = picard_config.get_config() + cfg.setting['session_no_mb_requests_on_load'] = False + cfg.setting['session_safe_restore'] = False + + tagger = Mock() + album_mock = Mock() + album_mock.unmatched_files = Mock() + album_mock.run_when_loaded = Mock(side_effect=lambda cb: cb()) + tagger.albums = {"album-123": album_mock} + + loader = SessionLoader(tagger) + + data = { + 'version': 1, + 'options': {}, + 'mb_cache': {"album-123": {"id": "album-123"}}, + 'items': [], + 'expanded_albums': ["album-123"], + } + path = _write_session(tmp_path, data) + + loader.load_from_path(path) + + # With network allowed and cache present, album.load() should be scheduled + assert album_mock.load.called + + +def test_session_loader_cached_album_no_refresh_when_suppressed(tmp_path: Path, mock_single_shot, cfg_options) -> None: + cfg = picard_config.get_config() + cfg.setting['session_no_mb_requests_on_load'] = True + cfg.setting['session_safe_restore'] = False + + tagger = Mock() + album_mock = Mock() + album_mock.unmatched_files = Mock() + album_mock.run_when_loaded = Mock(side_effect=lambda cb: cb()) + tagger.albums = {"album-123": album_mock} + + loader = SessionLoader(tagger) + + data = { + 'version': 1, + 'options': {}, + 'mb_cache': {"album-123": {"id": "album-123"}}, + 'items': [], + 
'expanded_albums': ["album-123"], + } + path = _write_session(tmp_path, data) - session_loader._ensure_album_visible(album_mock) + loader.load_from_path(path) - album_mock.update.assert_called_once_with(update_tracks=True) - # Should not crash when ui_item is None + assert not album_mock.load.called diff --git a/test/session/test_ui_state_manager.py b/test/session/test_ui_state_manager.py new file mode 100644 index 0000000000..d56af5f4ea --- /dev/null +++ b/test/session/test_ui_state_manager.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for UIStateManager.""" + +from unittest.mock import Mock + +from picard.album import Album +from picard.session.session_loader import UIStateManager + + +def _immediate_run(callback): + callback() + + +def test_ui_state_manager_ensure_album_visible_with_saved_state() -> None: + tagger = Mock() + ui = UIStateManager(tagger, default_delay_ms=0) + + album = Mock(spec=Album) + album.id = "album-123" + album.ui_item = Mock() + album.run_when_loaded.side_effect = _immediate_run + + ui.ensure_album_visible(album, saved_expanded={"album-123"}) + + album.update.assert_called_once_with(update_tracks=True) + album.ui_item.setExpanded.assert_called_once_with(True) + + +def test_ui_state_manager_ensure_album_visible_no_saved_state() -> None: + tagger = Mock() + ui = UIStateManager(tagger, default_delay_ms=0) + + album = Mock(spec=Album) + album.id = "album-123" + album.ui_item = Mock() + album.run_when_loaded.side_effect = _immediate_run + + ui.ensure_album_visible(album, saved_expanded=None) + + album.update.assert_called_once_with(update_tracks=True) + album.ui_item.setExpanded.assert_called_once_with(True) + + +def test_ui_state_manager_ensure_album_visible_no_ui_item() -> None: + tagger = Mock() + ui = UIStateManager(tagger, default_delay_ms=0) + + album = Mock(spec=Album) + album.id = "album-123" + album.ui_item = None + album.run_when_loaded.side_effect = _immediate_run + + ui.ensure_album_visible(album, saved_expanded={"album-123"}) + + album.update.assert_called_once_with(update_tracks=True) + + +def test_ui_state_manager_apply_expansions_later(monkeypatch) -> None: + tagger = Mock() + ui = UIStateManager(tagger, default_delay_ms=10) + + album = Mock(spec=Album) + album.id = "album-123" + album.ui_item = Mock() + tagger.albums = {"album-123": album} + + mock_single_shot = Mock() + monkeypatch.setattr("PyQt6.QtCore.QTimer.singleShot", mock_single_shot) + + ui.apply_expansions_later({"album-123"}) + + mock_single_shot.assert_called_once() + + +def 
test_ui_state_manager_apply_expansions_later_handles_missing_ui(monkeypatch) -> None: + tagger = Mock() + ui = UIStateManager(tagger, default_delay_ms=10) + + album = Mock(spec=Album) + album.id = "album-123" + album.ui_item = None + tagger.albums = {"album-123": album} + + mock_single_shot = Mock() + monkeypatch.setattr("PyQt6.QtCore.QTimer.singleShot", mock_single_shot) + + ui.apply_expansions_later({"album-123"}) + + mock_single_shot.assert_called_once() From f437291f67b2f9fc5ad3c975f6c9e17e69eaaa87 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 10 Sep 2025 01:13:39 -0400 Subject: [PATCH 18/30] Fix bug with blank albums (no cache, no web) --- picard/session/session_loader.py | 10 ++++++---- test/session/test_session_loader.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 1416876342..8559776ee6 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -346,7 +346,10 @@ def load_unmatched_albums(self, album_ids: list[str], mb_cache: dict[str, Any]) def load_album_files(self, by_album: dict[str, AlbumItems], track_mover: TrackMover) -> None: """Add files to albums and move them to specific tracks as needed.""" for album_id, groups in by_album.items(): - album = self.loaded_albums[album_id] + album = self.loaded_albums.get(album_id) + if album is None: + # Album not available (e.g., network suppressed and no cache). Skip gracefully. 
+ continue all_paths = list(groups.unmatched) + [fp for (fp, _rid) in groups.tracks] if all_paths: self._tagger.add_files([str(p) for p in all_paths], target=album.unmatched_files) @@ -497,7 +500,6 @@ def __init__(self, tagger: Any) -> None: self._overrides = OverrideApplicator(self._albums) self.track_mover = TrackMover(tagger) # Module-level state bound to a single session load - self._saved_expanded_albums: set[str] | None = None self._mb_cache: dict[str, Any] = {} self._suppress_mb_requests: bool = False @@ -540,8 +542,8 @@ def load_from_path(self, path: str | Path) -> None: self._config_mgr.restore_options(data.get('options', {})) self._suppress_mb_requests = get_config().setting['session_no_mb_requests_on_load'] - self._saved_expanded_albums = set(data.get('expanded_albums', [])) if 'expanded_albums' in data else None - self._albums.configure(self._suppress_mb_requests, self._saved_expanded_albums) + saved_expanded_albums = set(data.get('expanded_albums', [])) if 'expanded_albums' in data else None + self._albums.configure(self._suppress_mb_requests, saved_expanded_albums) items = data.get('items', []) grouped_items = self._grouper.group(items) diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py index 58977d1869..e133db6b54 100644 --- a/test/session/test_session_loader.py +++ b/test/session/test_session_loader.py @@ -139,7 +139,7 @@ def test_session_loader_initialization() -> None: assert loader.tagger == tagger_mock assert loader.loaded_albums == {} - assert loader._saved_expanded_albums is None + # Saved expansion state is managed by AlbumManager/UIStateManager now assert hasattr(loader, 'track_mover') From c3ca44e43742a3dd8b67322b2000123a0398ece7 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 10 Sep 2025 01:30:44 -0400 Subject: [PATCH 19/30] Fix bug with web requests suppression --- picard/session/session_loader.py | 7 ++- test/session/test_session_loader.py | 83 +++++++++++++++++++++++++++++ 2 files changed, 89 
insertions(+), 1 deletion(-) diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 8559776ee6..e374abbc26 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -541,7 +541,12 @@ def load_from_path(self, path: str | Path) -> None: self._config_mgr.prepare_session(self.tagger) self._config_mgr.restore_options(data.get('options', {})) - self._suppress_mb_requests = get_config().setting['session_no_mb_requests_on_load'] + # Only make web requests if MB data is included and the user has not disabled them + config = get_config() + self._suppress_mb_requests = ( + config.setting['session_include_mb_data'] and config.setting['session_no_mb_requests_on_load'] + ) + saved_expanded_albums = set(data.get('expanded_albums', [])) if 'expanded_albums' in data else None self._albums.configure(self._suppress_mb_requests, saved_expanded_albums) diff --git a/test/session/test_session_loader.py b/test/session/test_session_loader.py index e133db6b54..030dd32765 100644 --- a/test/session/test_session_loader.py +++ b/test/session/test_session_loader.py @@ -180,6 +180,7 @@ def test_session_loader_requests_allowed(tmp_path: Path, mock_single_shot, cfg_o def test_session_loader_requests_suppressed(tmp_path: Path, mock_single_shot, cfg_options) -> None: cfg = picard_config.get_config() + cfg.setting['session_include_mb_data'] = True cfg.setting['session_no_mb_requests_on_load'] = True cfg.setting['session_safe_restore'] = False @@ -232,6 +233,7 @@ def test_session_loader_cached_album_refresh_allowed(tmp_path: Path, mock_single def test_session_loader_cached_album_no_refresh_when_suppressed(tmp_path: Path, mock_single_shot, cfg_options) -> None: cfg = picard_config.get_config() + cfg.setting['session_include_mb_data'] = True cfg.setting['session_no_mb_requests_on_load'] = True cfg.setting['session_safe_restore'] = False @@ -255,3 +257,84 @@ def test_session_loader_cached_album_no_refresh_when_suppressed(tmp_path: Path, 
loader.load_from_path(path) assert not album_mock.load.called + + +@pytest.mark.parametrize( + ("include_mb_data", "no_requests", "expected_suppressed"), + [ + (False, False, False), + (False, True, False), + (True, False, False), + (True, True, True), + ], +) +def test_session_loader_request_suppression_matrix_unmatched( + tmp_path: Path, mock_single_shot, cfg_options, include_mb_data: bool, no_requests: bool, expected_suppressed: bool +) -> None: + cfg = picard_config.get_config() + cfg.setting['session_include_mb_data'] = include_mb_data + cfg.setting['session_no_mb_requests_on_load'] = no_requests + cfg.setting['session_safe_restore'] = False + + tagger = Mock() + tagger.albums = {} + + loader = SessionLoader(tagger) + + data = { + 'version': 1, + 'options': {}, + 'items': [], + 'unmatched_albums': ["album-xyz"], + 'expanded_albums': [], + } + path = _write_session(tmp_path, data) + + album_mock = Mock() + album_mock.unmatched_files = Mock() + album_mock.run_when_loaded = Mock(side_effect=lambda cb: cb()) + tagger.load_album.return_value = album_mock + + loader.load_from_path(path) + + assert tagger.load_album.called == (not expected_suppressed) + + +@pytest.mark.parametrize( + ("include_mb_data", "no_requests", "expected_suppressed"), + [ + (False, False, False), + (False, True, False), + (True, False, False), + (True, True, True), + ], +) +def test_session_loader_request_suppression_matrix_cached( + tmp_path: Path, mock_single_shot, cfg_options, include_mb_data: bool, no_requests: bool, expected_suppressed: bool +) -> None: + cfg = picard_config.get_config() + cfg.setting['session_include_mb_data'] = include_mb_data + cfg.setting['session_no_mb_requests_on_load'] = no_requests + cfg.setting['session_safe_restore'] = False + + tagger = Mock() + album_mock = Mock() + album_mock.unmatched_files = Mock() + album_mock.run_when_loaded = Mock(side_effect=lambda cb: cb()) + tagger.albums = {"album-xyz": album_mock} + + loader = SessionLoader(tagger) + + data = { + 
'version': 1, + 'options': {}, + 'mb_cache': {"album-xyz": {"id": "album-xyz"}}, + 'items': [], + 'expanded_albums': ["album-xyz"], + } + path = _write_session(tmp_path, data) + + loader.load_from_path(path) + + # With cache, suppression controls whether album.load() is called + assert album_mock.load.called == (not expected_suppressed) From 325bbc4c2105a108feef7f0b1ccf704aa5c69502 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 10 Sep 2025 06:35:43 -0400 Subject: [PATCH 20/30] Fix some inconsistencies with `last_session_path` --- picard/tagger.py | 19 ++++++++++--------- picard/ui/mainwindow/__init__.py | 29 +++++++++++++++++++++++------ 2 files changed, 33 insertions(+), 15 deletions(-) diff --git a/picard/tagger.py b/picard/tagger.py index 8bbd062ee7..1bf94fdc31 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -649,10 +649,6 @@ def exit(self): path = Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) save_session_to_path(self, path) - # set to blank otherwise `Save Session` will save to previous path - # which is probably not what the user wants - config.persist['last_session_path'] = '' - log.debug("Picard stopping") self.run_cleanup() QtCore.QCoreApplication.processEvents() @@ -668,9 +664,14 @@ def _run_init(self): except FileNotFoundError: show_session_not_found_dialog(self.window, last_path) except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: - # Keep previous best-effort behavior for other errors + # Surface startup load errors to user similar to interactive load log.debug(f"Error loading session from {last_path}: {e}") - pass + QtWidgets.QMessageBox.critical( + self.window, + _("Failed to load session"), + _("Could not load session from %(path)s:\n\n%(error)s") + % {"path": str(last_path), "error": str(e)}, + ) if self._to_load: self.load_to_picard(self._to_load) @@ -685,11 +686,11 @@ def run(self): self._session_autosave_timer.setInterval(max(1, interval_min) * 60 * 1000) def _autosave(): - 
path = config.persist['session_autosave_path'] if 'session_autosave_path' in config.persist else None + path = config.persist['session_autosave_path'] or None if not path: - path = config.persist['last_session_path'] if 'last_session_path' in config.persist else None + path = config.persist['last_session_path'] or None if not path: - path = Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION) + path = str(Path(sessions_folder()) / ("autosave" + SessionConstants.SESSION_FILE_EXTENSION)) config.persist['session_autosave_path'] = path with contextlib.suppress(OSError, PermissionError, FileNotFoundError, ValueError, OverflowError): diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 36904e72fe..be049664fe 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -51,6 +51,7 @@ import datetime from functools import partial import itertools +import json import os.path from pathlib import Path @@ -467,12 +468,15 @@ def _save_session_to_known_path_or_prompt(self) -> bool: if path: try: save_session_to_path(self.tagger, path) + + # Ensure the known path remains persisted explicitly + config.persist['last_session_path'] = path + self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) + self._add_to_recent_sessions(path) except (OSError, PermissionError, FileNotFoundError, ValueError, OverflowError) as e: QtWidgets.QMessageBox.critical(self, _("Failed to save session"), str(e)) return False else: - self.set_statusbar_message(N_("Session saved to '%(path)s'"), {'path': path}) - self._add_to_recent_sessions(path) return True # Fallback to prompting for a path @@ -1298,9 +1302,9 @@ def load_session(self): config = get_config() - last_session_path = config.persist['last_session_path'] - if last_session_path and isinstance(last_session_path, str): - start_dir = Path(last_session_path).parent + last_session_path = config.persist['last_session_path'] or '' + if 
last_session_path: + start_dir = Path(str(last_session_path)).parent else: start_dir = sessions_folder() path, _filter = FileDialog.getOpenFileName( @@ -1318,8 +1322,13 @@ def load_session(self): except FileNotFoundError: show_session_not_found_dialog(self, path) return - except (OSError, PermissionError) as e: + except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: log.debug(f"Error loading session from {path}: {e}") + QtWidgets.QMessageBox.critical( + self, + _("Failed to load session"), + _("Could not load session from %(path)s:\n\n%(error)s") % {"path": str(path), "error": str(e)}, + ) return else: config.persist['last_session_path'] = path @@ -1339,6 +1348,14 @@ def _load_session_from_recent(self, path): show_session_not_found_dialog(self, path) self._remove_from_recent_sessions(path) return + except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: + log.debug(f"Error loading session from {path}: {e}") + QtWidgets.QMessageBox.critical( + self, + _("Failed to load session"), + _("Could not load session from %(path)s:\n\n%(error)s") % {"path": str(path), "error": str(e)}, + ) + return else: config = get_config() config.persist['last_session_path'] = path From 847bd335962d0eb0052cd476e5529e4d522bf720 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 10 Sep 2025 08:33:53 -0400 Subject: [PATCH 21/30] Apply zas code review recs 20250910 --- picard/session/session_exporter.py | 55 ++++++++++++++++++++++-------- picard/session/session_loader.py | 4 +-- picard/tagger.py | 5 +-- picard/ui/mainwindow/__init__.py | 7 ++-- 4 files changed, 49 insertions(+), 22 deletions(-) diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index 4c5a3c4bbb..ea35147a52 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -26,6 +26,7 @@ from __future__ import annotations +from dataclasses import dataclass from pathlib import Path from typing import Any @@ -38,6 +39,28 @@ from 
picard.session.session_data import SessionItemLocation +@dataclass +class MetadataOverridesResult: + """Result of metadata overrides export operation. + + Contains track-level and album-level metadata overrides, as well as + information about unmatched albums. + + Attributes + ---------- + album_track_overrides : dict[str, dict[str, dict[str, list[Any]]]] + Track-level metadata overrides per album, keyed by album ID. + album_overrides : dict[str, dict[str, list[Any]]] + Album-level metadata overrides, keyed by album ID. + unmatched_albums : list[str] + List of album IDs that are loaded but have no files matched to them. + """ + + album_track_overrides: dict[str, dict[str, dict[str, list[Any]]]] + album_overrides: dict[str, dict[str, list[Any]]] + unmatched_albums: list[str] + + class SessionExporter: """Handles exporting session data from the current Picard state.""" @@ -89,13 +112,13 @@ def export_session(self, tagger: Any) -> dict[str, Any]: session_data['items'].append(item) # Export metadata overrides and unmatched albums - album_overrides, album_meta_overrides, unmatched_albums = self._export_metadata_overrides(tagger) - if album_overrides: - session_data['album_track_overrides'] = album_overrides - if album_meta_overrides: - session_data['album_overrides'] = album_meta_overrides - if unmatched_albums: - session_data['unmatched_albums'] = unmatched_albums + metadata_result = self._export_metadata_overrides(tagger) + if metadata_result.album_track_overrides: + session_data['album_track_overrides'] = metadata_result.album_track_overrides + if metadata_result.album_overrides: + session_data['album_overrides'] = metadata_result.album_overrides + if metadata_result.unmatched_albums: + session_data['unmatched_albums'] = metadata_result.unmatched_albums # Optionally export MB data cache per album include_mb = config.setting['session_include_mb_data'] @@ -104,7 +127,7 @@ def export_session(self, tagger: Any) -> dict[str, Any]: session_data['mb_cache'] = 
self._export_mb_cache(tagger) # Export UI state (expanded albums) - expanded_albums = self._export_ui_state(tagger) + expanded_albums = self._export_expanded_albums(tagger) if expanded_albums: session_data['expanded_albums'] = expanded_albums @@ -131,7 +154,7 @@ def _export_mb_cache(self, tagger: Any) -> dict[str, Any]: cache[album_id] = node return cache - def _export_ui_state(self, tagger: Any) -> list[str]: + def _export_expanded_albums(self, tagger: Any) -> list[str]: """Export UI expansion state for albums in album view. Parameters @@ -237,9 +260,7 @@ def _serialize_location(self, location: SessionItemLocation) -> dict[str, Any]: if v is not None } - def _export_metadata_overrides( - self, tagger: Any - ) -> tuple[dict[str, dict[str, dict[str, list[Any]]]], dict[str, dict[str, list[Any]]], list[str]]: + def _export_metadata_overrides(self, tagger: Any) -> MetadataOverridesResult: """Export metadata overrides for albums and tracks. Parameters @@ -249,8 +270,8 @@ def _export_metadata_overrides( Returns ------- - tuple[dict, dict, list] - Tuple containing (album_track_overrides, album_overrides, unmatched_albums). + MetadataOverridesResult + Result containing album track overrides, album overrides, and unmatched albums. 
""" album_overrides: dict[str, dict[str, dict[str, list[Any]]]] = {} album_meta_overrides: dict[str, dict[str, list[Any]]] = {} @@ -294,4 +315,8 @@ def _export_metadata_overrides( if not has_files and not album_diff and not overrides_for_album: unmatched_albums.append(album.id) - return album_overrides, album_meta_overrides, unmatched_albums + return MetadataOverridesResult( + album_track_overrides=album_overrides, + album_overrides=album_meta_overrides, + unmatched_albums=unmatched_albums, + ) diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index e374abbc26..4d2a10a702 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -136,8 +136,8 @@ def read(self, path: str | Path) -> dict[str, Any]: p = Path(path) raw = p.read_bytes() if len(raw) >= 2 and raw[0] == 0x1F and raw[1] == 0x8B: - text = gzip.decompress(raw).decode("utf-8") - return yaml.safe_load(text) + decompressed_raw = gzip.decompress(raw) + return yaml.safe_load(decompressed_raw.decode("utf-8")) return yaml.safe_load(raw.decode("utf-8")) diff --git a/picard/tagger.py b/picard/tagger.py index 1bf94fdc31..8ce8eb179b 100644 --- a/picard/tagger.py +++ b/picard/tagger.py @@ -50,7 +50,6 @@ import contextlib from functools import partial from hashlib import blake2b -import json import logging import os from pathlib import Path @@ -63,6 +62,8 @@ from urllib.parse import urlparse from uuid import uuid4 +import yaml + from PyQt6 import ( QtCore, QtGui, @@ -663,7 +664,7 @@ def _run_init(self): load_session_from_path(self, last_path) except FileNotFoundError: show_session_not_found_dialog(self.window, last_path) - except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: + except (OSError, PermissionError, yaml.YAMLError, KeyError) as e: # Surface startup load errors to user similar to interactive load log.debug(f"Error loading session from {last_path}: {e}") QtWidgets.QMessageBox.critical( diff --git a/picard/ui/mainwindow/__init__.py 
b/picard/ui/mainwindow/__init__.py index be049664fe..72928fa5c2 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -51,10 +51,11 @@ import datetime from functools import partial import itertools -import json import os.path from pathlib import Path +import yaml + from PyQt6 import ( QtCore, QtGui, @@ -1322,7 +1323,7 @@ def load_session(self): except FileNotFoundError: show_session_not_found_dialog(self, path) return - except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: + except (OSError, PermissionError, yaml.YAMLError, KeyError) as e: log.debug(f"Error loading session from {path}: {e}") QtWidgets.QMessageBox.critical( self, @@ -1348,7 +1349,7 @@ def _load_session_from_recent(self, path): show_session_not_found_dialog(self, path) self._remove_from_recent_sessions(path) return - except (OSError, PermissionError, json.JSONDecodeError, KeyError) as e: + except (OSError, PermissionError, yaml.YAMLError, KeyError) as e: log.debug(f"Error loading session from {path}: {e}") QtWidgets.QMessageBox.critical( self, From 14e952baf81627243b2163d6718e38e733c2c46b Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 24 Sep 2025 20:02:10 -0400 Subject: [PATCH 22/30] Apply zas code review 20250924 --- picard/session/metadata_handler.py | 5 +---- picard/session/retry_helper.py | 7 ++++--- picard/session/session_exporter.py | 24 ++++++++++++++++++------ picard/session/session_loader.py | 7 +++---- 4 files changed, 26 insertions(+), 17 deletions(-) diff --git a/picard/session/metadata_handler.py b/picard/session/metadata_handler.py index 5bc0ba1029..9b076cfd87 100644 --- a/picard/session/metadata_handler.py +++ b/picard/session/metadata_handler.py @@ -80,10 +80,7 @@ def deserialize_metadata(tags: dict[str, list[Any]]) -> Metadata: Metadata The deserialized metadata object. 
""" - md = Metadata() - for key, values in tags.items(): - md[key] = values - return md + return Metadata(tags) @staticmethod def as_list(values: Any) -> list[Any]: diff --git a/picard/session/retry_helper.py b/picard/session/retry_helper.py index 49b1508966..b600350b30 100644 --- a/picard/session/retry_helper.py +++ b/picard/session/retry_helper.py @@ -61,11 +61,12 @@ def retry_until( This replaces the scattered QtCore.QTimer.singleShot patterns throughout the session management code with a centralized retry mechanism. """ - attempts = [0] + attempts = 0 def attempt() -> None: - attempts[0] += 1 - if max_attempts and attempts[0] > max_attempts: + nonlocal attempts + attempts += 1 + if max_attempts and attempts > max_attempts: return if condition_fn(): diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index ea35147a52..7df36152cc 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -260,6 +260,22 @@ def _serialize_location(self, location: SessionItemLocation) -> dict[str, Any]: if v is not None } + @staticmethod + def _extract_metadata_overrides(diff: Any) -> dict[str, list[Any]]: + """Extract metadata overrides from a diff object. + + Parameters + ---------- + diff : Any + The metadata diff object. + + Returns + ------- + dict[str, list[Any]] + Dictionary of metadata overrides with values converted to lists. + """ + return {k: MetadataHandler.as_list(v) for k, v in diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS} + def _export_metadata_overrides(self, tagger: Any) -> MetadataOverridesResult: """Export metadata overrides for albums and tracks. 
@@ -294,9 +310,7 @@ def _export_metadata_overrides(self, tagger: Any) -> MetadataOverridesResult: # Album-level diffs vs orig_metadata album_diff = album.metadata.diff(album.orig_metadata) if album_diff: - album_meta_overrides[album.id] = { - k: MetadataHandler.as_list(v) for k, v in album_diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS - } + album_meta_overrides[album.id] = SessionExporter._extract_metadata_overrides(album_diff) # Track-level overrides overrides_for_album: dict[str, dict[str, list[Any]]] = {} @@ -304,9 +318,7 @@ def _export_metadata_overrides(self, tagger: Any) -> MetadataOverridesResult: # The difference to scripted_metadata are user edits made in UI diff = track.metadata.diff(track.scripted_metadata) if diff: - overrides_for_album[track.id] = { - k: MetadataHandler.as_list(v) for k, v in diff.rawitems() if k not in EXCLUDED_OVERRIDE_TAGS - } + overrides_for_album[track.id] = SessionExporter._extract_metadata_overrides(diff) if overrides_for_album: album_overrides[album.id] = overrides_for_album diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 4d2a10a702..376b04e9b0 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -135,10 +135,9 @@ def read(self, path: str | Path) -> dict[str, Any]: """ p = Path(path) raw = p.read_bytes() - if len(raw) >= 2 and raw[0] == 0x1F and raw[1] == 0x8B: - decompressed_raw = gzip.decompress(raw) - return yaml.safe_load(decompressed_raw.decode("utf-8")) - return yaml.safe_load(raw.decode("utf-8")) + is_gzip = len(raw) >= 2 and raw[0] == 0x1F and raw[1] == 0x8B + payload = gzip.decompress(raw) if is_gzip else raw + return yaml.safe_load(payload.decode("utf-8")) class ConfigurationManager: From 31d7bc39da24e0213b32e8a7d04dc63fa8bf7702 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Wed, 24 Sep 2025 20:27:40 -0400 Subject: [PATCH 23/30] Rename `dont_write_tags` -> `enable_tag_saving` --- picard/session/session_data.py | 6 +++--- 
picard/session/session_exporter.py | 14 ++++++++------ picard/session/session_loader.py | 5 ++++- picard/session/session_manager.py | 2 +- test/session/conftest.py | 6 +++--- test/session/test_configuration_manager.py | 8 ++++---- test/session/test_data.py | 10 +++++----- test/session/test_session_exporter.py | 14 +++++++------- test/session/test_sessions.py | 8 ++++---- 9 files changed, 39 insertions(+), 34 deletions(-) diff --git a/picard/session/session_data.py b/picard/session/session_data.py index 98855b06ee..7390f6d31a 100644 --- a/picard/session/session_data.py +++ b/picard/session/session_data.py @@ -68,13 +68,13 @@ class SessionOptions: Whether to rename files during processing. move_files : bool Whether to move files during processing. - dont_write_tags : bool - Whether to skip writing tags to files. + enable_tag_saving : bool + Whether to save tags to files. """ rename_files: bool move_files: bool - dont_write_tags: bool + enable_tag_saving: bool @dataclass diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index 7df36152cc..23c989d2e2 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -81,7 +81,7 @@ def export_session(self, tagger: Any) -> dict[str, Any]: dict[str, Any] Dictionary containing session data with the following keys: - version: Session format version (currently 1) - - options: Configuration options (rename_files, move_files, dont_write_tags) + - options: Configuration options (rename_files, move_files, enable_tag_saving) - items: List of file items with paths and locations - album_track_overrides: Track-level metadata overrides per album - album_overrides: Album-level metadata overrides @@ -184,13 +184,15 @@ def _export_options(self, config: Any) -> dict[str, bool]: Returns ------- - dict[str, bool] - Dictionary containing the relevant configuration options. + dict[str, Any] + Dictionary containing the relevant configuration options. 
Values + preserve their original types (not forced to bool). """ return { - 'rename_files': bool(config.setting['rename_files']), - 'move_files': bool(config.setting['move_files']), - 'dont_write_tags': bool(config.setting['dont_write_tags']), + 'rename_files': config.setting['rename_files'], + 'move_files': config.setting['move_files'], + # PICARD-3123: renamed from dont_write_tags, semantics reversed in upgrade + 'enable_tag_saving': config.setting['enable_tag_saving'], } def _export_file_item(self, file: Any) -> dict[str, Any]: diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 376b04e9b0..a615aa92bb 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -166,7 +166,10 @@ def restore_options(self, options: dict[str, Any]) -> None: config = get_config() config.setting['rename_files'] = bool(options.get('rename_files', config.setting['rename_files'])) config.setting['move_files'] = bool(options.get('move_files', config.setting['move_files'])) - config.setting['dont_write_tags'] = bool(options.get('dont_write_tags', config.setting['dont_write_tags'])) + # Only support new key moving forward + config.setting['enable_tag_saving'] = bool( + options.get('enable_tag_saving', config.setting['enable_tag_saving']) + ) class ItemGrouper: diff --git a/picard/session/session_manager.py b/picard/session/session_manager.py index 64563efca8..3118c7a9c7 100644 --- a/picard/session/session_manager.py +++ b/picard/session/session_manager.py @@ -69,7 +69,7 @@ def export_session(tagger: Any) -> dict[str, Any]: dict[str, Any] Dictionary containing session data with the following keys: - version: Session format version (currently 1) - - options: Configuration options (rename_files, move_files, dont_write_tags) + - options: Configuration options (rename_files, move_files, enable_tag_saving) - items: List of file items with paths and locations - album_track_overrides: Track-level metadata overrides per album - 
album_overrides: Album-level metadata overrides diff --git a/test/session/conftest.py b/test/session/conftest.py index 1f16046b1a..3c898296af 100644 --- a/test/session/conftest.py +++ b/test/session/conftest.py @@ -126,7 +126,7 @@ def cfg_options() -> None: # Ensure required keys exist with defaults cfg.setting['rename_files'] = False cfg.setting['move_files'] = False - cfg.setting['dont_write_tags'] = True + cfg.setting['enable_tag_saving'] = False cfg.setting['session_include_mb_data'] = False @@ -277,7 +277,7 @@ def sample_session_data() -> dict[str, Any]: 'options': { 'rename_files': True, 'move_files': False, - 'dont_write_tags': True, + 'enable_tag_saving': True, }, 'items': [ { @@ -430,7 +430,7 @@ def patch_get_config(monkeypatch: pytest.MonkeyPatch, **settings) -> Mock: config_mock.setting = { 'rename_files': False, 'move_files': False, - 'dont_write_tags': False, + 'enable_tag_saving': False, 'session_safe_restore': True, **settings, } diff --git a/test/session/test_configuration_manager.py b/test/session/test_configuration_manager.py index cab865a0d2..6223bef9d6 100644 --- a/test/session/test_configuration_manager.py +++ b/test/session/test_configuration_manager.py @@ -57,7 +57,7 @@ def test_configuration_manager_restore_options_with_defaults(mock_get_config) -> config_mock.setting = { 'rename_files': False, 'move_files': False, - 'dont_write_tags': False, + 'enable_tag_saving': False, } mock_get_config.return_value = config_mock @@ -66,7 +66,7 @@ def test_configuration_manager_restore_options_with_defaults(mock_get_config) -> assert config_mock.setting['rename_files'] is False assert config_mock.setting['move_files'] is False - assert config_mock.setting['dont_write_tags'] is False + assert config_mock.setting['enable_tag_saving'] is False def test_configuration_manager_restore_options(cfg_options) -> None: @@ -74,10 +74,10 @@ def test_configuration_manager_restore_options(cfg_options) -> None: options = { 'rename_files': True, 'move_files': True, - 
'dont_write_tags': True, + 'enable_tag_saving': True, } manager.restore_options(options) cfg = picard_config.get_config() assert cfg.setting['rename_files'] is True assert cfg.setting['move_files'] is True - assert cfg.setting['dont_write_tags'] is True + assert cfg.setting['enable_tag_saving'] is True diff --git a/test/session/test_data.py b/test/session/test_data.py index d08ff4a329..4e1b298b4b 100644 --- a/test/session/test_data.py +++ b/test/session/test_data.py @@ -89,7 +89,7 @@ def test_session_item_location_immutable() -> None: @pytest.mark.parametrize( - ("rename_files", "move_files", "dont_write_tags"), + ("rename_files", "move_files", "enable_tag_saving"), [ (True, True, True), (False, False, False), @@ -97,17 +97,17 @@ def test_session_item_location_immutable() -> None: (False, True, False), ], ) -def test_session_options_creation(rename_files: bool, move_files: bool, dont_write_tags: bool) -> None: +def test_session_options_creation(rename_files: bool, move_files: bool, enable_tag_saving: bool) -> None: """Test SessionOptions creation with various boolean combinations.""" options = SessionOptions( rename_files=rename_files, move_files=move_files, - dont_write_tags=dont_write_tags, + enable_tag_saving=enable_tag_saving, ) assert options.rename_files == rename_files assert options.move_files == move_files - assert options.dont_write_tags == dont_write_tags + assert options.enable_tag_saving == enable_tag_saving # ============================================================================= @@ -148,7 +148,7 @@ def test_session_item_creation_without_metadata() -> None: def test_session_data_creation() -> None: """Test SessionData creation with all components.""" - options = SessionOptions(rename_files=True, move_files=False, dont_write_tags=True) + options = SessionOptions(rename_files=True, move_files=False, enable_tag_saving=True) location = SessionItemLocation(type="track", album_id="album-123") item = SessionItem(file_path=Path("/test/file.mp3"), 
location=location) diff --git a/test/session/test_session_exporter.py b/test/session/test_session_exporter.py index fed154c74b..568cf8a0d1 100644 --- a/test/session/test_session_exporter.py +++ b/test/session/test_session_exporter.py @@ -57,7 +57,7 @@ def test_session_exporter_export_session_empty( assert data['options'] == { 'rename_files': False, 'move_files': False, - 'dont_write_tags': True, + 'enable_tag_saving': False, } assert data['items'] == [] assert data['album_track_overrides'] == {} @@ -340,7 +340,7 @@ def test_session_exporter_export_options() -> None: config_mock.setting = { 'rename_files': True, 'move_files': False, - 'dont_write_tags': True, + 'enable_tag_saving': True, } options = exporter._export_options(config_mock) @@ -348,7 +348,7 @@ def test_session_exporter_export_options() -> None: assert options == { 'rename_files': True, 'move_files': False, - 'dont_write_tags': True, + 'enable_tag_saving': True, } @@ -360,15 +360,15 @@ def test_session_exporter_export_options_with_falsy_values() -> None: config_mock.setting = { 'rename_files': 0, 'move_files': "", - 'dont_write_tags': None, + 'enable_tag_saving': None, } options = exporter._export_options(config_mock) assert options == { - 'rename_files': False, - 'move_files': False, - 'dont_write_tags': False, + 'rename_files': 0, + 'move_files': "", + 'enable_tag_saving': None, } diff --git a/test/session/test_sessions.py b/test/session/test_sessions.py index e867c391f7..135a49f950 100644 --- a/test/session/test_sessions.py +++ b/test/session/test_sessions.py @@ -36,8 +36,8 @@ def test_export_session_empty(tmp_path: Path, cfg_options) -> None: data = export_session(_StubTagger(files=[], albums={})) assert isinstance(data, dict) assert data['version'] == 1 - assert set(data['options'].keys()) == {"rename_files", "move_files", "dont_write_tags"} - assert data['options']['dont_write_tags'] is True + assert set(data['options'].keys()) == {"rename_files", "move_files", "enable_tag_saving"} + assert 
data['options']['enable_tag_saving'] is False assert data['items'] == [] @@ -76,14 +76,14 @@ def test_export_session_options_reflect_config_flags(cfg_options: None) -> None: cfg = picard_config.get_config() cfg.setting['rename_files'] = True cfg.setting['move_files'] = True - cfg.setting['dont_write_tags'] = True + cfg.setting['enable_tag_saving'] = True tagger = _StubTagger(files=[]) data = export_session(tagger) assert data['options'] == { 'rename_files': True, 'move_files': True, - 'dont_write_tags': True, + 'enable_tag_saving': True, } From 499c05223ad0b4492d230041c1f2ebb5afabd324 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Thu, 25 Sep 2025 19:04:07 -0400 Subject: [PATCH 24/30] Make `_atomic_write` its own utility --- picard/session/session_manager.py | 48 +++++++++++++++++++------------ 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/picard/session/session_manager.py b/picard/session/session_manager.py index 3118c7a9c7..b6af38617d 100644 --- a/picard/session/session_manager.py +++ b/picard/session/session_manager.py @@ -44,18 +44,41 @@ from __future__ import annotations +import contextlib import gzip from pathlib import Path import tempfile from typing import Any -import yaml - from picard.session.constants import SessionConstants from picard.session.session_exporter import SessionExporter from picard.session.session_loader import SessionLoader +def _atomic_write(path: Path, data: bytes) -> None: + """Write bytes atomically to the given path. + + The function writes to a temporary file in the destination directory and + replaces the target file to ensure atomicity. On failure, it attempts to + clean up the temporary file and re-raises the exception. 
+ """ + p = Path(path) + p.parent.mkdir(parents=True, exist_ok=True) + + temp_path: Path | None = None + try: + with tempfile.NamedTemporaryFile(dir=p.parent, prefix=p.stem + "_", suffix=p.suffix, delete=False) as temp_file: + temp_path = Path(temp_file.name) + temp_path.write_bytes(data) + + temp_path.replace(p) + except (OSError, IOError, PermissionError): + if temp_path and temp_path.exists(): + with contextlib.suppress(OSError, PermissionError): + temp_path.unlink() + raise # caller should handle the exception + + def export_session(tagger: Any) -> dict[str, Any]: """Export current session data to a dictionary. @@ -103,31 +126,18 @@ def save_session_to_path(tagger: Any, path: str | Path) -> None: to prevent file corruption in case of crashes. """ p = Path(path) - # Ensure multi-part extension .mbps.gz if not str(p).lower().endswith(SessionConstants.SESSION_FILE_EXTENSION): p = Path(str(p) + SessionConstants.SESSION_FILE_EXTENSION) + # Local import to avoid module-level dependency during static analysis + import yaml # type: ignore[import-not-found] + data = export_session(tagger) - p.parent.mkdir(parents=True, exist_ok=True) - # Convert to YAML and gzip-compress to reduce file size yaml_text = yaml.dump(data, default_flow_style=False, allow_unicode=True, sort_keys=False) compressed = gzip.compress(yaml_text.encode("utf-8")) - # Atomic write: write to temporary file first, then rename - temp_path = None - try: - with tempfile.NamedTemporaryFile(dir=p.parent, prefix=p.stem + "_", suffix=p.suffix, delete=False) as temp_file: - temp_path = Path(temp_file.name) - temp_path.write_bytes(compressed) - - # Atomic rename to final destination - temp_path.replace(p) - except (OSError, IOError, PermissionError): - # Clean up temporary file if it exists and rename failed - if temp_path and temp_path.exists(): - temp_path.unlink() - raise # Caller will handle the exception + _atomic_write(p, compressed) def load_session_from_path(tagger: Any, path: str | Path) -> None: From 
8eef8469fa7161a95bfdc18dd2245807fe5dc625 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Thu, 25 Sep 2025 19:19:42 -0400 Subject: [PATCH 25/30] Apply zas code review 20250925 --- picard/session/session_exporter.py | 21 +- picard/ui/mainwindow/__init__.py | 29 ++- test/test_mainwindow_session_filename.py | 267 +++++++++++++++++++++++ 3 files changed, 293 insertions(+), 24 deletions(-) create mode 100644 test/test_mainwindow_session_filename.py diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index 23c989d2e2..295ce65fe2 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -174,7 +174,7 @@ def _export_expanded_albums(self, tagger: Any) -> list[str]: expanded.append(album.id) return expanded - def _export_options(self, config: Any) -> dict[str, bool]: + def _export_options(self, config: Any) -> dict[str, Any]: """Export configuration options. Parameters @@ -250,17 +250,14 @@ def _serialize_location(self, location: SessionItemLocation) -> dict[str, Any]: dict[str, Any] Dictionary containing the location data. 
""" - return { - k: v - for k, v in { - 'type': location.type, - 'album_id': location.album_id, - 'recording_id': location.recording_id, - 'cluster_title': location.cluster_title, - 'cluster_artist': location.cluster_artist, - }.items() - if v is not None - } + location_data = { + 'type': location.type, + 'album_id': location.album_id, + 'recording_id': location.recording_id, + 'cluster_title': location.cluster_title, + 'cluster_artist': location.cluster_artist, + }.items() + return {k: v for k, v in location_data if v is not None} @staticmethod def _extract_metadata_overrides(diff: Any) -> dict[str, list[Any]]: diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 85afbcc319..8f681deabd 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -1206,26 +1206,31 @@ def save(self): self.tagger.save(self.selected_objects) def _get_default_session_filename_from_metadata(self) -> str | None: - """Get default session filename based on first track's artist information. + """Gets a default session filename based on the first track's artist metadata. Returns ------- str | None - Sanitized artist name to use as default filename, or None if no artist found. + A sanitized artist name to use as a default filename, or None if no artist is found. 
""" - artist_tags = ['artist', 'albumartist', 'artists', 'albumartists'] + artist_tags = ( + 'artist', + 'albumartist', + 'artists', + 'albumartists', + ) - # Scan files once; for each file pick first non-empty artist tag for file in self.tagger.iter_all_files(): metadata = file.metadata - artist_value = next( - (value for tag in artist_tags if (value := metadata.get(tag)) and str(value).strip()), - None, - ) - if artist_value: - artist_name = str(artist_value).split(',')[0].strip() - if artist_name: - return sanitize_filename(artist_name, repl="_", win_compat=True) + for tag in artist_tags: + artist_value = metadata.get(tag) + + if artist_value and str(artist_value).strip(): + artist_name = str(artist_value).split(',')[0].strip() + + if artist_name: + return sanitize_filename(artist_name, repl="_", win_compat=True) + return None def _get_timestamped_session_filename(self) -> str: diff --git a/test/test_mainwindow_session_filename.py b/test/test_mainwindow_session_filename.py new file mode 100644 index 0000000000..bfd635f6d3 --- /dev/null +++ b/test/test_mainwindow_session_filename.py @@ -0,0 +1,267 @@ +# -*- coding: utf-8 -*- +# +# Picard, the next-generation MusicBrainz tagger +# +# Copyright (C) 2025 The MusicBrainz Team +# +# This program is free software; you can redistribute it and/or +# modify it under the terms of the GNU General Public License +# as published by the Free Software Foundation; either version 2 +# of the License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. 
+ +"""Tests for mainwindow session filename functionality.""" + +from typing import Any +from unittest.mock import Mock + +from picard.metadata import Metadata + +import pytest + +from picard.ui.mainwindow import MainWindow + + +@pytest.fixture +def mock_tagger() -> Mock: + """Provide a mock tagger instance.""" + tagger = Mock() + tagger.iter_all_files.return_value = [] + return tagger + + +@pytest.fixture +def mock_mainwindow(mock_tagger: Mock) -> Mock: + """Provide a mock MainWindow instance with tagger.""" + mainwindow = Mock(spec=MainWindow) + mainwindow.tagger = mock_tagger + # Bind the actual method to the mock + mainwindow._get_default_session_filename_from_metadata = ( + MainWindow._get_default_session_filename_from_metadata.__get__(mainwindow, MainWindow) + ) + return mainwindow + + +@pytest.fixture +def mock_file_with_metadata() -> Mock: + """Provide a mock file with metadata.""" + file_mock = Mock() + file_mock.metadata = Metadata() + return file_mock + + +@pytest.fixture +def artist_metadata_cases() -> list[dict[str, Any]]: + """Provide test cases for different artist metadata scenarios.""" + return [ + # Case: artist tag present + {'metadata': {'artist': 'The Beatles'}, 'expected': 'The Beatles', 'description': 'artist tag present'}, + # Case: albumartist tag present (should be used when artist is empty) + {'metadata': {'albumartist': 'Pink Floyd'}, 'expected': 'Pink Floyd', 'description': 'albumartist tag present'}, + # Case: artists tag present (should be used when artist and albumartist are empty) + {'metadata': {'artists': 'Led Zeppelin'}, 'expected': 'Led Zeppelin', 'description': 'artists tag present'}, + # Case: albumartists tag present (should be used when others are empty) + {'metadata': {'albumartists': 'Queen'}, 'expected': 'Queen', 'description': 'albumartists tag present'}, + # Case: multiple artists with comma (should take first) + { + 'metadata': {'artist': 'Artist1, Artist2, Artist3'}, + 'expected': 'Artist1', + 'description': 'multiple 
artists with comma', + }, + # Case: artist with path separators (should be replaced) + {'metadata': {'artist': 'AC/DC'}, 'expected': 'AC_DC', 'description': 'artist with path separators'}, + # Case: artist with spaces (should remain unchanged) + { + 'metadata': {'artist': 'The Rolling Stones'}, + 'expected': 'The Rolling Stones', + 'description': 'artist with spaces', + }, + # Case: artist with unicode characters + {'metadata': {'artist': 'Björk'}, 'expected': 'Björk', 'description': 'artist with unicode characters'}, + ] + + +@pytest.fixture +def empty_metadata_cases() -> list[dict[str, Any]]: + """Provide test cases for empty or invalid metadata scenarios.""" + return [ + # Case: empty metadata + {'metadata': {}, 'expected': None, 'description': 'empty metadata'}, + # Case: empty string values + {'metadata': {'artist': '', 'albumartist': ''}, 'expected': None, 'description': 'empty string values'}, + # Case: whitespace-only values + { + 'metadata': {'artist': ' ', 'albumartist': '\t\n'}, + 'expected': None, + 'description': 'whitespace-only values', + }, + # Case: None values + {'metadata': {'artist': None, 'albumartist': None}, 'expected': None, 'description': 'None values'}, + ] + + +@pytest.fixture +def priority_test_cases() -> list[dict[str, Any]]: + """Provide test cases for tag priority (artist > albumartist > artists > albumartists).""" + return [ + # Case: artist should take priority over albumartist + { + 'metadata': {'artist': 'Artist1', 'albumartist': 'AlbumArtist1'}, + 'expected': 'Artist1', + 'description': 'artist takes priority over albumartist', + }, + # Case: albumartist should take priority over artists + { + 'metadata': {'albumartist': 'AlbumArtist1', 'artists': 'Artists1'}, + 'expected': 'AlbumArtist1', + 'description': 'albumartist takes priority over artists', + }, + # Case: artists should take priority over albumartists + { + 'metadata': {'artists': 'Artists1', 'albumartists': 'AlbumArtists1'}, + 'expected': 'Artists1', + 'description': 
'artists takes priority over albumartists', + }, + ] + + +def test_get_default_session_filename_with_artist_metadata( + mock_mainwindow: Mock, mock_file_with_metadata: Mock, artist_metadata_cases: list[dict[str, Any]] +) -> None: + """Test session filename generation with various artist metadata scenarios.""" + for case in artist_metadata_cases: + # Set up metadata + metadata = Metadata() + for tag, value in case['metadata'].items(): + metadata[tag] = value + + mock_file_with_metadata.metadata = metadata + mock_mainwindow.tagger.iter_all_files.return_value = [mock_file_with_metadata] + + # Test the method + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result == case['expected'], f"Failed for case: {case['description']}" + + +def test_get_default_session_filename_with_empty_metadata( + mock_mainwindow: Mock, mock_file_with_metadata: Mock, empty_metadata_cases: list[dict[str, Any]] +) -> None: + """Test session filename generation with empty or invalid metadata.""" + for case in empty_metadata_cases: + # Set up metadata + metadata = Metadata() + for tag, value in case['metadata'].items(): + if value is not None: + metadata[tag] = value + + mock_file_with_metadata.metadata = metadata + mock_mainwindow.tagger.iter_all_files.return_value = [mock_file_with_metadata] + + # Test the method + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result == case['expected'], f"Failed for case: {case['description']}" + + +def test_get_default_session_filename_tag_priority( + mock_mainwindow: Mock, mock_file_with_metadata: Mock, priority_test_cases: list[dict[str, Any]] +) -> None: + """Test that artist tags are checked in the correct priority order.""" + for case in priority_test_cases: + # Set up metadata + metadata = Metadata() + for tag, value in case['metadata'].items(): + metadata[tag] = value + + mock_file_with_metadata.metadata = metadata + mock_mainwindow.tagger.iter_all_files.return_value = 
[mock_file_with_metadata] + + # Test the method + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result == case['expected'], f"Failed for case: {case['description']}" + + +def test_get_default_session_filename_no_files(mock_mainwindow: Mock) -> None: + """Test session filename generation when no files are present.""" + mock_mainwindow.tagger.iter_all_files.return_value = [] + + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result is None + + +def test_get_default_session_filename_multiple_files_uses_first( + mock_mainwindow: Mock, mock_file_with_metadata: Mock +) -> None: + """Test that the method returns the first valid artist found across multiple files.""" + # Create multiple files with different artists + file1 = Mock() + file1.metadata = Metadata() + file1.metadata['artist'] = 'First Artist' + + file2 = Mock() + file2.metadata = Metadata() + file2.metadata['artist'] = 'Second Artist' + + mock_mainwindow.tagger.iter_all_files.return_value = [file1, file2] + + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result == 'First Artist' + + +@pytest.mark.parametrize( + "artist_value,expected", + [ + ("Artist Name", "Artist Name"), # Spaces are not sanitized + ("Artist/Name", "Artist_Name"), # Forward slash is sanitized + ("Artist\\Name", "Artist_Name"), # Backslash is sanitized + ("Artist:Name", "Artist:Name"), # Colon is not sanitized + ("Artist*Name", "Artist*Name"), # Asterisk is not sanitized + ("Artist?Name", "Artist?Name"), # Question mark is not sanitized + ("ArtistName", "Artist>Name"), # Greater than is not sanitized + ("Artist|Name", "Artist|Name"), # Pipe is not sanitized + ("Artist\"Name", "Artist\"Name"), # Quote is not sanitized + ], +) +def test_get_default_session_filename_sanitization( + mock_mainwindow: Mock, mock_file_with_metadata: Mock, artist_value: str, expected: str +) -> None: + """Test that artist names are properly sanitized for filename 
use.""" + mock_file_with_metadata.metadata = Metadata() + mock_file_with_metadata.metadata['artist'] = artist_value + mock_mainwindow.tagger.iter_all_files.return_value = [mock_file_with_metadata] + + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result == expected + + +def test_get_default_session_filename_whitespace_handling(mock_mainwindow: Mock, mock_file_with_metadata: Mock) -> None: + """Test that whitespace is properly handled in artist names.""" + test_cases = [ + (" Artist Name ", "Artist Name"), # Spaces are preserved + ("Artist\tName", "Artist\tName"), # Tabs are preserved + ("Artist\nName", "Artist\nName"), # Newlines are preserved + ("Artist\rName", "Artist\rName"), # Carriage returns are preserved + ] + + for artist_value, expected in test_cases: + mock_file_with_metadata.metadata = Metadata() + mock_file_with_metadata.metadata['artist'] = artist_value + mock_mainwindow.tagger.iter_all_files.return_value = [mock_file_with_metadata] + + result = mock_mainwindow._get_default_session_filename_from_metadata() + + assert result == expected, f"Failed for artist value: {repr(artist_value)}" From cd61cb8c0960d55a327b8862760cb56fe30370e0 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Thu, 25 Sep 2025 21:06:29 -0400 Subject: [PATCH 26/30] Improve type hints --- picard/session/session_exporter.py | 27 +++++++++++++++++---------- picard/session/types.py | 25 +++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 10 deletions(-) create mode 100644 picard/session/types.py diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index 295ce65fe2..7a167a9a6c 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -37,6 +37,13 @@ from picard.session.location_detector import LocationDetector from picard.session.metadata_handler import MetadataHandler from picard.session.session_data import SessionItemLocation +from picard.session.types import ( + AlbumOverrides, 
+ AlbumTrackOverrides, + MbReleaseCache, + TagOverrideMap, + UnmatchedAlbums, +) @dataclass @@ -56,9 +63,9 @@ class MetadataOverridesResult: List of album IDs that are loaded but have no files matched to them. """ - album_track_overrides: dict[str, dict[str, dict[str, list[Any]]]] - album_overrides: dict[str, dict[str, list[Any]]] - unmatched_albums: list[str] + album_track_overrides: AlbumTrackOverrides + album_overrides: AlbumOverrides + unmatched_albums: UnmatchedAlbums class SessionExporter: @@ -133,7 +140,7 @@ def export_session(self, tagger: Any) -> dict[str, Any]: return session_data - def _export_mb_cache(self, tagger: Any) -> dict[str, Any]: + def _export_mb_cache(self, tagger: Any) -> MbReleaseCache: """Export MB release data for currently loaded albums. Parameters @@ -146,7 +153,7 @@ def _export_mb_cache(self, tagger: Any) -> dict[str, Any]: dict[str, Any] Mapping of album MBID to release data node. """ - cache: dict[str, Any] = {} + cache: MbReleaseCache = {} for album_id, album in getattr(tagger, 'albums', {}).items(): # Prefer cached node saved after tracks were loaded; fall back to live node if still present node = getattr(album, '_release_node_cache', None) or getattr(album, '_release_node', None) @@ -260,7 +267,7 @@ def _serialize_location(self, location: SessionItemLocation) -> dict[str, Any]: return {k: v for k, v in location_data if v is not None} @staticmethod - def _extract_metadata_overrides(diff: Any) -> dict[str, list[Any]]: + def _extract_metadata_overrides(diff: Any) -> TagOverrideMap: """Extract metadata overrides from a diff object. Parameters @@ -288,9 +295,9 @@ def _export_metadata_overrides(self, tagger: Any) -> MetadataOverridesResult: MetadataOverridesResult Result containing album track overrides, album overrides, and unmatched albums. 
""" - album_overrides: dict[str, dict[str, dict[str, list[Any]]]] = {} - album_meta_overrides: dict[str, dict[str, list[Any]]] = {} - unmatched_albums: list[str] = [] + album_overrides: AlbumTrackOverrides = {} + album_meta_overrides: AlbumOverrides = {} + unmatched_albums: UnmatchedAlbums = [] # Get all album IDs that have files matched to them albums_with_files = set() @@ -312,7 +319,7 @@ def _export_metadata_overrides(self, tagger: Any) -> MetadataOverridesResult: album_meta_overrides[album.id] = SessionExporter._extract_metadata_overrides(album_diff) # Track-level overrides - overrides_for_album: dict[str, dict[str, list[Any]]] = {} + overrides_for_album: dict[str, TagOverrideMap] = {} for track in album.tracks: # The difference to scripted_metadata are user edits made in UI diff = track.metadata.diff(track.scripted_metadata) diff --git a/picard/session/types.py b/picard/session/types.py new file mode 100644 index 0000000000..9f98095ce5 --- /dev/null +++ b/picard/session/types.py @@ -0,0 +1,25 @@ +"""Shared session type aliases. + +These aliases centralize nested mapping structures used across session +export/import to improve readability and maintainability. 
+""" + +from __future__ import annotations + +from typing import Any, TypeAlias + + +# Base aliases +TagValues: TypeAlias = list[Any] + +# Tag overrides per entity +TagOverrideMap: TypeAlias = dict[str, TagValues] # tag -> values +TrackOverrideMap: TypeAlias = dict[str, TagOverrideMap] # track_id -> tags + +# Aggregated overrides +AlbumTrackOverrides: TypeAlias = dict[str, TrackOverrideMap] # album_id -> tracks +AlbumOverrides: TypeAlias = dict[str, TagOverrideMap] # album_id -> tags + +# Misc session types +UnmatchedAlbums: TypeAlias = list[str] +MbReleaseCache: TypeAlias = dict[str, Any] # album_id -> release node From 9479f395d128f93d6880f69d6123fb489c61fd1e Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Sun, 28 Sep 2025 06:55:39 -0400 Subject: [PATCH 27/30] Make `restore_options` more generic --- picard/session/session_loader.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index a615aa92bb..6613416ffd 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -46,6 +46,10 @@ from picard.session.track_mover import TrackMover +# Configuration keys that can be restored from session files +RESTORABLE_CONFIG_KEYS = ['rename_files', 'move_files', 'enable_tag_saving'] + + class ProgressReporter(Protocol): """Protocol for emitting session loading progress updates.""" @@ -164,12 +168,8 @@ def restore_options(self, options: dict[str, Any]) -> None: Options mapping from the session file. 
""" config = get_config() - config.setting['rename_files'] = bool(options.get('rename_files', config.setting['rename_files'])) - config.setting['move_files'] = bool(options.get('move_files', config.setting['move_files'])) - # Only support new key moving forward - config.setting['enable_tag_saving'] = bool( - options.get('enable_tag_saving', config.setting['enable_tag_saving']) - ) + for key in RESTORABLE_CONFIG_KEYS: + config.setting[key] = options.get(key, config.setting[key]) class ItemGrouper: From 7e7654b4a2d2d2166f99b8fb2a1499313603b637 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Sun, 28 Sep 2025 07:18:46 -0400 Subject: [PATCH 28/30] Fix: `dir` -> `directory` --- picard/script/serializer.py | 2 +- picard/ui/mainwindow/__init__.py | 4 ++-- picard/ui/options/sessions.py | 4 +++- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/picard/script/serializer.py b/picard/script/serializer.py index 401c69401f..29aced803e 100644 --- a/picard/script/serializer.py +++ b/picard/script/serializer.py @@ -226,7 +226,7 @@ def export_script(self, parent=None): filename, file_type = FileDialog.getSaveFileName( parent=parent, caption=dialog_title, - dir=default_path, + directory=default_path, filter=dialog_file_types, ) if not filename: diff --git a/picard/ui/mainwindow/__init__.py b/picard/ui/mainwindow/__init__.py index 8f681deabd..08656556a9 100644 --- a/picard/ui/mainwindow/__init__.py +++ b/picard/ui/mainwindow/__init__.py @@ -1281,7 +1281,7 @@ def save_session_as(self) -> bool: path, _filter = FileDialog.getSaveFileName( parent=self, - dir=str(start_dir), + directory=str(start_dir), filter=( _("MusicBrainz Picard Session (%s);;All files (*)") % ("*" + SessionConstants.SESSION_FILE_EXTENSION) ), @@ -1313,7 +1313,7 @@ def load_session(self): start_dir = sessions_folder() path, _filter = FileDialog.getOpenFileName( parent=self, - dir=str(start_dir), + directory=str(start_dir), filter=( _("MusicBrainz Picard Session (%s);;All files (*)") % ("*" + 
SessionConstants.SESSION_FILE_EXTENSION) ), diff --git a/picard/ui/options/sessions.py b/picard/ui/options/sessions.py index 5a477f4750..856fc1a224 100644 --- a/picard/ui/options/sessions.py +++ b/picard/ui/options/sessions.py @@ -129,7 +129,9 @@ def _browse_sessions_folder(self): if not current_path: current_path = sessions_folder() - folder = FileDialog.getExistingDirectory(parent=self, dir=current_path, caption=_("Select Sessions Folder")) + folder = FileDialog.getExistingDirectory( + parent=self, directory=current_path, caption=_("Select Sessions Folder") + ) if folder: self.folder_path_edit.setText(folder) From 3ebd96ba8edddf593036e8d6f7c5586f05f7714f Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Sun, 28 Sep 2025 07:19:14 -0400 Subject: [PATCH 29/30] refactor: `session_loader` more readable --- picard/session/session_loader.py | 235 ++++++++++++++++++++++++------- 1 file changed, 187 insertions(+), 48 deletions(-) diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index 6613416ffd..ce7832b39f 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -27,6 +27,7 @@ from __future__ import annotations from contextlib import suppress +from dataclasses import dataclass import gzip from pathlib import Path from typing import Any, Protocol @@ -193,29 +194,29 @@ def group(self, items: list[dict[str, Any]]) -> GroupedItems: by_album: dict[str, AlbumItems] = {} nat_items: list[tuple[Path, str]] = [] - for it in items: - fpath = Path(it['file_path']).expanduser() - loc = it.get('location', {}) - ltype = str(loc.get('type', SessionConstants.LOCATION_UNCLUSTERED)) - - if ltype == SessionConstants.LOCATION_UNCLUSTERED: - by_unclustered.append(fpath) - elif ltype == SessionConstants.LOCATION_CLUSTER: - key = (str(loc.get('cluster_title', "")), str(loc.get('cluster_artist', ""))) - by_cluster.setdefault(key, []).append(fpath) - elif ltype in {SessionConstants.LOCATION_ALBUM_UNMATCHED, 
SessionConstants.LOCATION_TRACK}: - album_id = str(loc.get('album_id')) - entry = by_album.setdefault(album_id, AlbumItems(unmatched=[], tracks=[])) - if ltype == SessionConstants.LOCATION_ALBUM_UNMATCHED: - entry.unmatched.append(fpath) - else: - entry.tracks.append((fpath, str(loc.get('recording_id')))) - elif ltype == SessionConstants.LOCATION_NAT: - nat_items.append((fpath, str(loc.get('recording_id')))) - else: - by_unclustered.append(fpath) + acc = _GroupAccumulators( + unclustered=by_unclustered, + by_cluster=by_cluster, + by_album=by_album, + nat_items=nat_items, + ) + + dispatch = self._build_dispatch() - return GroupedItems(unclustered=by_unclustered, by_cluster=by_cluster, by_album=by_album, nat_items=nat_items) + for item in items: + file_path = Path(item['file_path']).expanduser() + location = item.get('location', {}) + location_type = str(location.get('type', SessionConstants.LOCATION_UNCLUSTERED)) + + handler = dispatch.get(location_type, self._handle_default) + handler(file_path, location, acc) + + return GroupedItems( + unclustered=by_unclustered, + by_cluster=by_cluster, + by_album=by_album, + nat_items=nat_items, + ) def extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, dict[str, list[Any]]]: """Extract per-path metadata deltas from item entries. @@ -239,6 +240,72 @@ def extract_metadata(self, items: list[dict[str, Any]]) -> dict[Path, dict[str, metadata_by_path[fpath] = tags return metadata_by_path + # --- Internal strategy handlers ------------------------------------------------- + + def _build_dispatch(self): + """Build the dispatch table for location handlers. + + Returns + ------- + dict[str, Any] + Mapping of location type to handler. 
+ """ + return { + SessionConstants.LOCATION_UNCLUSTERED: self._handle_unclustered, + SessionConstants.LOCATION_CLUSTER: self._handle_cluster, + SessionConstants.LOCATION_ALBUM_UNMATCHED: self._handle_album_unmatched, + SessionConstants.LOCATION_TRACK: self._handle_track, + SessionConstants.LOCATION_NAT: self._handle_nat, + } + + def _handle_unclustered(self, file_path: Path, location: dict[str, Any], acc: "_GroupAccumulators") -> None: + acc.unclustered.append(file_path) + + def _handle_cluster(self, file_path: Path, location: dict[str, Any], acc: "_GroupAccumulators") -> None: + title = str(location.get('cluster_title', "")) + artist = str(location.get('cluster_artist', "")) + acc.by_cluster.setdefault((title, artist), []).append(file_path) + + def _handle_album_unmatched(self, file_path: Path, location: dict[str, Any], acc: "_GroupAccumulators") -> None: + album_id = str(location.get('album_id')) + entry = acc.by_album.setdefault(album_id, AlbumItems(unmatched=[], tracks=[])) + entry.unmatched.append(file_path) + + def _handle_track(self, file_path: Path, location: dict[str, Any], acc: "_GroupAccumulators") -> None: + album_id = str(location.get('album_id')) + recording_id = str(location.get('recording_id')) + entry = acc.by_album.setdefault(album_id, AlbumItems(unmatched=[], tracks=[])) + entry.tracks.append((file_path, recording_id)) + + def _handle_nat(self, file_path: Path, location: dict[str, Any], acc: "_GroupAccumulators") -> None: + recording_id = str(location.get('recording_id')) + acc.nat_items.append((file_path, recording_id)) + + def _handle_default(self, file_path: Path, location: dict[str, Any], acc: "_GroupAccumulators") -> None: + acc.unclustered.append(file_path) + + +@dataclass +class _GroupAccumulators: + """Mutable accumulators passed between grouping handlers. + + Attributes + ---------- + unclustered : list[Path] + Paths destined for the unclustered section. 
+ by_cluster : dict[tuple[str, str], list[Path]] + Mapping from (cluster title, cluster artist) to file paths. + by_album : dict[str, AlbumItems] + Mapping from album ID to album grouping (unmatched and track-bound items). + nat_items : list[tuple[Path, str]] + List of (file path, recording ID) destined for the NAT area. + """ + + unclustered: list[Path] + by_cluster: dict[tuple[str, str], list[Path]] + by_album: dict[str, AlbumItems] + nat_items: list[tuple[Path, str]] + class UIStateManager: """Manage UI-related state such as album expansion and delayed updates.""" @@ -534,8 +601,52 @@ def load_from_path(self, path: str | Path) -> None: Notes ----- - Orchestrates reading the session, restoring configuration, loading - items and albums, applying overrides, and restoring UI state. + Executes a small pipeline: read file → restore options → + configure albums → group items → preload cache → load items → + apply overrides/metadata → restore UI. + """ + ctx = self._build_context(Path(path)) + + # Preload albums from embedded cache if available + if self._mb_cache: + self._progress.emit("preload_cache", details={'albums': len(self._mb_cache)}) + self._albums.preload_from_cache(self._mb_cache, ctx.grouped_items) + + # Emit progress and ensure needed albums are present + self._progress.emit("load_items", details={'files': self._compute_total_files(ctx.grouped_items)}) + self._albums.load_needed_albums(ctx.grouped_items, self._mb_cache) + + # Place files into their destinations (unclustered, clusters, albums, NAT) + self._load_grouped_items(ctx.grouped_items) + + # Unmatched albums without items + self._albums.load_unmatched_albums(ctx.data.get('unmatched_albums', []), self._mb_cache) + + # Apply overrides and deferred file metadata + self._progress.emit("apply_overrides") + self._overrides.apply(ctx.data, self._mb_cache) + if ctx.metadata_map: + self._schedule_metadata_application(ctx.metadata_map) + + # Finalize UI state + self._progress.emit("finalize") + expanded = 
set(ctx.data.get('expanded_albums', [])) + self._ui_state.apply_expansions_later(expanded) + + # --- Internal pipeline helpers -------------------------------------------------- + + def _build_context(self, path: Path) -> "SessionLoadContext": + """Build immutable inputs and compute initial grouping context. + + Parameters + ---------- + path : Path + Session file path to read. + + Returns + ------- + SessionLoadContext + Context holding parsed data, grouping, and metadata map. """ self._progress.emit("read", details={'path': str(path)}) data = self._file_reader.read(path) @@ -555,48 +666,54 @@ def load_from_path(self, path: str | Path) -> None: items = data.get('items', []) grouped_items = self._grouper.group(items) metadata_map = self._grouper.extract_metadata(items) - self._mb_cache = data.get('mb_cache', {}) - if self._mb_cache: - self._progress.emit("preload_cache", details={'albums': len(self._mb_cache)}) - self._albums.preload_from_cache(self._mb_cache, grouped_items) - total_files = ( + return SessionLoadContext( + path=path, + data=data, + grouped_items=grouped_items, + metadata_map=metadata_map, + ) + + def _compute_total_files(self, grouped_items: GroupedItems) -> int: + """Compute total number of files to be loaded from grouped items. + + Parameters + ---------- + grouped_items : GroupedItems + Items grouped by destination. + + Returns + ------- + int + Total count of files across all groups. + """ + return ( len(grouped_items.unclustered) + sum(len(v) for v in grouped_items.by_cluster.values()) + sum(len(g.unmatched) + len(g.tracks) for g in grouped_items.by_album.values()) ) - self._progress.emit("load_items", details={'files': total_files}) - # Load albums for items and place files accordingly - self._albums.load_needed_albums(grouped_items, self._mb_cache) + def _load_grouped_items(self, grouped_items: GroupedItems) -> None: + """Load grouped files into the tagger model (unclustered, clusters, albums, NAT). 
+ Parameters + ---------- + grouped_items : GroupedItems + Items grouped by destination. + """ if grouped_items.unclustered: self.tagger.add_files([str(p) for p in grouped_items.unclustered], target=self.tagger.unclustered_files) + for (title, artist), paths in grouped_items.by_cluster.items(): cluster = self.tagger.load_cluster(title, artist) self.tagger.add_files([str(p) for p in paths], target=cluster) + self._albums.load_album_files(grouped_items.by_album, self.track_mover) - # NAT items for fpath, rid in grouped_items.nat_items: self.track_mover.move_file_to_nat(fpath, rid) - # Unmatched albums - self._albums.load_unmatched_albums(data.get('unmatched_albums', []), self._mb_cache) - - # Apply overrides - self._progress.emit("apply_overrides") - self._overrides.apply(data, self._mb_cache) - - if metadata_map: - self._schedule_metadata_application(metadata_map) - - # Restore UI state - self._progress.emit("finalize") - expanded = set(data.get('expanded_albums', [])) - self._ui_state.apply_expansions_later(expanded) - # The following block of methods are retained for scheduling and lifecycle. def _schedule_metadata_application(self, metadata_map: dict[Path, dict[str, list[Any]]]) -> None: @@ -637,3 +754,25 @@ def finalize_loading(self) -> None: to handle cleanup tasks like unsetting the restoring flag. """ QtCore.QTimer.singleShot(SessionConstants.DEFAULT_RETRY_DELAY_MS, self._unset_restoring_flag_when_idle) + + +@dataclass(frozen=True) +class SessionLoadContext: + """Immutable context for a single session load operation. + + Attributes + ---------- + path : Path + Path to the session file. + data : dict[str, Any] + Parsed session data. + grouped_items : GroupedItems + Items grouped by destination. + metadata_map : dict[Path, dict[str, list[Any]]] + Per-file tag deltas to apply after load. 
+ """ + + path: Path + data: dict[str, Any] + grouped_items: GroupedItems + metadata_map: dict[Path, dict[str, list[Any]]] From 1c189b0ceba8d616518957c6d17f28c899babf29 Mon Sep 17 00:00:00 2001 From: Khoa Nguyen Date: Sun, 28 Sep 2025 19:31:03 -0400 Subject: [PATCH 30/30] Refactor: Centralize RESTORABLE_CONFIG_KEYS for session management --- picard/session/constants.py | 9 +++++++++ picard/session/session_exporter.py | 9 ++------- picard/session/session_loader.py | 6 +----- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/picard/session/constants.py b/picard/session/constants.py index c1d65c057a..1e347dd3a3 100644 --- a/picard/session/constants.py +++ b/picard/session/constants.py @@ -144,3 +144,12 @@ class SessionMessages: "Do not make MusicBrainz requests on restore (faster loads, risk of stale data)" ) SESSION_FOLDER_PATH_TITLE = N_("Sessions folder path (leave empty for default)") + + +# Configuration keys that should be persisted in and restored from session files +RESTORABLE_CONFIG_KEYS = [ + 'rename_files', + 'move_files', + # PICARD-3123: renamed from dont_write_tags, semantics reversed in upgrade + 'enable_tag_saving', +] diff --git a/picard/session/session_exporter.py b/picard/session/session_exporter.py index 7a167a9a6c..710a2dcb44 100644 --- a/picard/session/session_exporter.py +++ b/picard/session/session_exporter.py @@ -33,7 +33,7 @@ from picard.album import NatAlbum from picard.config import get_config from picard.const.defaults import EXCLUDED_OVERRIDE_TAGS, INTERNAL_TAG_PREFIX -from picard.session.constants import SessionConstants +from picard.session.constants import RESTORABLE_CONFIG_KEYS, SessionConstants from picard.session.location_detector import LocationDetector from picard.session.metadata_handler import MetadataHandler from picard.session.session_data import SessionItemLocation @@ -195,12 +195,7 @@ def _export_options(self, config: Any) -> dict[str, Any]: Dictionary containing the relevant configuration options. 
Values preserve their original types (not forced to bool). """ - return { - 'rename_files': config.setting['rename_files'], - 'move_files': config.setting['move_files'], - # PICARD-3123: renamed from dont_write_tags, semantics reversed in upgrade - 'enable_tag_saving': config.setting['enable_tag_saving'], - } + return {key: config.setting[key] for key in RESTORABLE_CONFIG_KEYS} def _export_file_item(self, file: Any) -> dict[str, Any]: """Export a single file item. diff --git a/picard/session/session_loader.py b/picard/session/session_loader.py index ce7832b39f..a179a44b28 100644 --- a/picard/session/session_loader.py +++ b/picard/session/session_loader.py @@ -41,16 +41,12 @@ from picard.const.defaults import EXCLUDED_OVERRIDE_TAGS from picard.i18n import gettext as _ from picard.metadata import Metadata -from picard.session.constants import SessionConstants +from picard.session.constants import RESTORABLE_CONFIG_KEYS, SessionConstants from picard.session.metadata_handler import MetadataHandler from picard.session.session_data import AlbumItems, GroupedItems from picard.session.track_mover import TrackMover -# Configuration keys that can be restored from session files -RESTORABLE_CONFIG_KEYS = ['rename_files', 'move_files', 'enable_tag_saving'] - - class ProgressReporter(Protocol): """Protocol for emitting session loading progress updates."""