diff --git a/beets/art.py b/beets/art.py index 2ff58c309e..682d35bbca 100644 --- a/beets/art.py +++ b/beets/art.py @@ -63,9 +63,7 @@ def embed_item( """Embed an image into the item's media file.""" # Conditions. if compare_threshold: - is_similar = check_art_similarity( - log, item, imagepath, compare_threshold - ) + is_similar = check_art_similarity(log, item, imagepath, compare_threshold) if is_similar is None: log.warning("Error while checking art similarity; skipping.") return @@ -148,9 +146,7 @@ def resize_image(log, imagepath, maxwidth, quality): maxwidth, quality, ) - imagepath = ArtResizer.shared.resize( - maxwidth, syspath(imagepath), quality=quality - ) + imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath), quality=quality) return imagepath diff --git a/beets/autotag/__init__.py b/beets/autotag/__init__.py index 42f957b0d5..a3e6dc4bae 100644 --- a/beets/autotag/__init__.py +++ b/beets/autotag/__init__.py @@ -104,9 +104,7 @@ def _apply_metadata( nullable_fields: Sequence[str] = [], ): """Set the db_obj's metadata to match the info.""" - special_fields = SPECIAL_FIELDS[ - "album" if isinstance(info, AlbumInfo) else "track" - ] + special_fields = SPECIAL_FIELDS["album" if isinstance(info, AlbumInfo) else "track"] for field, value in info.items(): # We only overwrite fields that are not already hardcoded. @@ -225,12 +223,8 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]): # Artist sort and credit names. item.artist_sort = track_info.artist_sort or album_info.artist_sort item.artists_sort = track_info.artists_sort or album_info.artists_sort - item.artist_credit = ( - track_info.artist_credit or album_info.artist_credit - ) - item.artists_credit = ( - track_info.artists_credit or album_info.artists_credit - ) + item.artist_credit = track_info.artist_credit or album_info.artist_credit + item.artists_credit = track_info.artists_credit or album_info.artists_credit item.albumartist_sort = album_info.artist_sort item.albumartists_sort = album_info.artists_sort item.albumartist_credit = album_info.artist_credit diff --git a/beets/autotag/hooks.py b/beets/autotag/hooks.py index 81cfd7bb22..335325a10c 100644 --- a/beets/autotag/hooks.py +++ b/beets/autotag/hooks.py @@ -661,15 +661,11 @@ def album_candidates( if config["musicbrainz"]["enabled"]: # Base candidates if we have album and artist to match. if artist and album: - yield from invoke_mb( - mb.match_album, artist, album, len(items), extra_tags - ) + yield from invoke_mb(mb.match_album, artist, album, len(items), extra_tags) # Also add VA matches from MusicBrainz where appropriate. if va_likely and album: - yield from invoke_mb( - mb.match_album, None, album, len(items), extra_tags - ) + yield from invoke_mb(mb.match_album, None, album, len(items), extra_tags) # Candidates from plugins. yield from plugins.candidates(items, artist, album, va_likely, extra_tags) diff --git a/beets/autotag/match.py b/beets/autotag/match.py index bc30ccea28..52d15781f4 100644 --- a/beets/autotag/match.py +++ b/beets/autotag/match.py @@ -138,9 +138,7 @@ def assign_items( # `tracks` list. Each value is either an index into the assigned item in # `items` list, or -1 if that track has no match. 
mapping = { - items[iidx]: t - for iidx, t in zip(assigned_item_idxs, tracks) - if iidx != -1 + items[iidx]: t for iidx, t in zip(assigned_item_idxs, tracks) if iidx != -1 } extra_items = list(set(items) - mapping.keys()) extra_items.sort(key=lambda i: (i.disc, i.track, i.title)) @@ -192,11 +190,7 @@ def track_distance( dist.add_string("track_title", item.title, track_info.title) # Artist. Only check if there is actually an artist in the track data. - if ( - incl_artist - and track_info.artist - and item.artist.lower() not in VA_ARTISTS - ): + if incl_artist and track_info.artist and item.artist.lower() not in VA_ARTISTS: dist.add_string("track_artist", item.artist, track_info.artist) # Track index. @@ -273,9 +267,7 @@ def distance( elif album_info.original_year: # Prefer matchest closest to the release year. diff = abs(likelies["year"] - album_info.year) - diff_max = abs( - datetime.date.today().year - album_info.original_year - ) + diff_max = abs(datetime.date.today().year - album_info.original_year) dist.add_ratio("year", diff, diff_max) else: # Full penalty when there is no original year. @@ -297,9 +289,7 @@ def distance( # Catalog number. if likelies["catalognum"] and album_info.catalognum: - dist.add_string( - "catalognum", likelies["catalognum"], album_info.catalognum - ) + dist.add_string("catalognum", likelies["catalognum"], album_info.catalognum) # Disambiguation. if likelies["albumdisambig"] and album_info.albumdisambig: @@ -309,9 +299,7 @@ def distance( # Album ID. if likelies["mb_albumid"]: - dist.add_equality( - "album_id", likelies["mb_albumid"], album_info.album_id - ) + dist.add_equality("album_id", likelies["mb_albumid"], album_info.album_id) # Tracks. dist.tracks = {} @@ -383,8 +371,7 @@ def _recommendation( # Only a single candidate. rec = Recommendation.low elif ( - results[1].distance - min_dist - >= config["match"]["rec_gap_thresh"].as_number() + results[1].distance - min_dist >= config["match"]["rec_gap_thresh"].as_number() ): # Gap between first two candidates is large. rec = Recommendation.low @@ -432,9 +419,7 @@ def _add_candidate( checking the track count, ordering the items, checking for duplicates, and calculating the distance. """ - log.debug( - "Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id - ) + log.debug("Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id) # Discard albums with zero tracks. if not info.tracks: @@ -447,9 +432,7 @@ def _add_candidate( return # Discard matches without required tags. - for req_tag in cast( - Sequence[str], config["match"]["required"].as_str_seq() - ): + for req_tag in cast(Sequence[str], config["match"]["required"].as_str_seq()): if getattr(info, req_tag) is None: log.debug("Ignored. Missing required tag: {0}", req_tag) return @@ -595,10 +578,7 @@ def tag_item( candidates[info.track_id] = hooks.TrackMatch(dist, info) # If this is a good match, then don't keep searching. 
rec = _recommendation(_sort_candidates(candidates.values())) - if ( - rec == Recommendation.strong - and not config["import"]["timid"] - ): + if rec == Recommendation.strong and not config["import"]["timid"]: log.debug("Track ID match.") return Proposal(_sort_candidates(candidates.values()), rec) diff --git a/beets/autotag/mb.py b/beets/autotag/mb.py index 6c2b604cd0..3d1dbd13fd 100644 --- a/beets/autotag/mb.py +++ b/beets/autotag/mb.py @@ -324,9 +324,7 @@ def track_info( info.artists, info.artists_sort, info.artists_credit, - ) = _multi_artist_credit( - recording["artist-credit"], include_join_phrase=False - ) + ) = _multi_artist_credit(recording["artist-credit"], include_join_phrase=False) info.artists_ids = _artist_ids(recording["artist-credit"]) info.artist_id = info.artists_ids[0] @@ -355,9 +353,7 @@ def track_info( if "disambiguation" in work_relation["work"]: info.work_disambig = work_relation["work"]["disambiguation"] - for artist_relation in work_relation["work"].get( - "artist-relation-list", () - ): + for artist_relation in work_relation["work"].get("artist-relation-list", ()): if "type" in artist_relation: type = artist_relation["type"] if type == "lyricist": @@ -425,9 +421,7 @@ def album_info(release: dict) -> beets.autotag.hooks.AlbumInfo: artists_names, artists_sort_names, artists_credit_names, - ) = _multi_artist_credit( - release["artist-credit"], include_join_phrase=False - ) + ) = _multi_artist_credit(release["artist-credit"], include_join_phrase=False) ntracks = sum(len(m["track-list"]) for m in release["medium-list"]) @@ -464,10 +458,7 @@ def album_info(release: dict) -> beets.autotag.hooks.AlbumInfo: continue all_tracks = medium["track-list"] - if ( - "data-track-list" in medium - and not config["match"]["ignore_data_tracks"] - ): + if "data-track-list" in medium and not config["match"]["ignore_data_tracks"]: all_tracks += medium["data-track-list"] track_count = len(all_tracks) @@ -558,9 +549,7 @@ def album_info(release: dict) -> beets.autotag.hooks.AlbumInfo: # Get the disambiguation strings at the release and release group level. if release["release-group"].get("disambiguation"): - info.releasegroupdisambig = release["release-group"].get( - "disambiguation" - ) + info.releasegroupdisambig = release["release-group"].get("disambiguation") if release.get("disambiguation"): info.albumdisambig = release.get("disambiguation") @@ -626,8 +615,7 @@ def album_info(release: dict) -> beets.autotag.hooks.AlbumInfo: for genreitem in source: genres[genreitem["name"]] += int(genreitem["count"]) info.genre = "; ".join( - genre - for genre, _count in sorted(genres.items(), key=lambda g: -g[1]) + genre for genre, _count in sorted(genres.items(), key=lambda g: -g[1]) ) # We might find links to external sources (Discogs, Bandcamp, ...) diff --git a/beets/dbcore/db.py b/beets/dbcore/db.py index dd8401935f..6600139325 100755 --- a/beets/dbcore/db.py +++ b/beets/dbcore/db.py @@ -642,9 +642,7 @@ def remove(self): db = self._check_db() with db.transaction() as tx: tx.mutate(f"DELETE FROM {self._table} WHERE id=?", (self.id,)) - tx.mutate( - f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,) - ) + tx.mutate(f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,)) def add(self, db: D | None = None): """Add the object to the library database. 
This object must be @@ -698,9 +696,7 @@ def evaluate_template( else: # Help out mypy t = template - return t.substitute( - self.formatted(for_path=for_path), self._template_funcs() - ) + return t.substitute(self.formatted(for_path=for_path), self._template_funcs()) # Parsing. @@ -836,9 +832,7 @@ def _get_indexed_flex_attrs(self) -> dict[int, FlexAttrs]: return flex_values - def _make_model( - self, row: sqlite3.Row, flex_values: FlexAttrs = {} - ) -> AnyModel: + def _make_model(self, row: sqlite3.Row, flex_values: FlexAttrs = {}) -> AnyModel: """Create a Model object for the given row""" cols = dict(row) values = {k: v for (k, v) in cols.items() if not k[:4] == "flex"} @@ -1003,9 +997,7 @@ class Database: def __init__(self, path, timeout: float = 5.0): if sqlite3.threadsafety == 0: - raise RuntimeError( - "sqlite3 must be compiled with multi-threading support" - ) + raise RuntimeError("sqlite3 must be compiled with multi-threading support") self.path = path self.timeout = timeout @@ -1035,23 +1027,23 @@ def __init__(self, path, timeout: float = 5.0): # Primitive access control: connections and transactions. - def _connection(self) -> Connection: - """Get a SQLite connection object to the underlying database. - One connection object is created per thread. - """ - thread_id = threading.current_thread().ident - # Help the type checker: ident can only be None if the thread has not - # been started yet; but since this results from current_thread(), that - # can't happen - assert thread_id is not None + def _connect(self) -> Connection: + """Create a new connection via `_create_connection`, raising a + clearer error when the database file cannot be opened. + """ + try: + return self._create_connection() + except sqlite3.OperationalError as e: + if "unable to open database file" in str(e).lower(): + raise sqlite3.OperationalError( + f"Unable to open the database file at {self.path}. " + "Check that the directory exists and is writable." + ) from e + raise + + def _connection(self) -> Connection: + """Get a SQLite connection object to the underlying database. + One connection object is created per thread. + """ + thread_id = threading.current_thread().ident + # Help the type checker: ident can only be None if the thread has not + # been started yet; but since this results from current_thread(), that + # can't happen + assert thread_id is not None + + with self._shared_map_lock: + if thread_id in self._connections: + return self._connections[thread_id] + else: + conn = self._connect() + self._connections[thread_id] = conn + return conn - with self._shared_map_lock: - if thread_id in self._connections: - return self._connections[thread_id] - else: - conn = self._create_connection() - self._connections[thread_id] = conn - return conn def _create_connection(self) -> Connection: """Create a SQLite connection to the underlying database. @@ -1140,9 +1132,7 @@ def transaction(self) -> Transaction: def load_extension(self, path: str): """Load an SQLite extension into all open connections.""" if not self.supports_extensions: - raise ValueError( - "this sqlite3 installation does not support extensions" - ) + raise ValueError("this sqlite3 installation does not support extensions") self._extensions.append(path) @@ -1171,9 +1161,7 @@ def _make_table(self, table: str, fields: Mapping[str, types.Type]): columns = [] for name, typ in fields.items(): columns.append(f"{name} {typ.sql}") - setup_sql = "CREATE TABLE {} ({});\n".format( - table, ", ".join(columns) - ) + setup_sql = "CREATE TABLE {} ({});\n".format(table, ", ".join(columns)) else: # Table exists does not match the field set. @@ -1203,7 +1191,9 @@ def _make_attribute_table(self, flex_table: str): UNIQUE(entity_id, key) ON CONFLICT REPLACE); CREATE INDEX IF NOT EXISTS {0}_by_entity ON {0} (entity_id); - """.format(flex_table) + """.format( + flex_table + ) ) # Querying.
diff --git a/beets/dbcore/query.py b/beets/dbcore/query.py index c7ca444524..b97d109505 100644 --- a/beets/dbcore/query.py +++ b/beets/dbcore/query.py @@ -130,9 +130,7 @@ class FieldQuery(Query, Generic[P]): @property def field(self) -> str: - return ( - f"{self.table}.{self.field_name}" if self.table else self.field_name - ) + return f"{self.table}.{self.field_name}" if self.table else self.field_name @property def field_names(self) -> set[str]: @@ -235,9 +233,7 @@ class StringQuery(StringFieldQuery[str]): def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: search = ( - self.pattern.replace("\\", "\\\\") - .replace("%", "\\%") - .replace("_", "\\_") + self.pattern.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_") ) clause = self.field + " like ? escape '\\'" subvals = [search] @@ -253,9 +249,7 @@ class SubstringQuery(StringFieldQuery[str]): def col_clause(self) -> tuple[str, Sequence[SQLiteType]]: pattern = ( - self.pattern.replace("\\", "\\\\") - .replace("%", "\\%") - .replace("_", "\\_") + self.pattern.replace("\\", "\\\\").replace("%", "\\%").replace("_", "\\_") ) search = "%" + pattern + "%" clause = self.field + " like ? escape '\\'" @@ -631,9 +625,7 @@ class Period: ("%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"), # second ) relative_units = {"y": 365, "m": 30, "w": 7, "d": 1} - relative_re = ( - "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])" - ) + relative_re = "(?P<sign>[+|-]?)(?P<quantity>[0-9]+)" + "(?P<timespan>[y|m|w|d])" def __init__(self, date: datetime, precision: str): """Create a period with the given date (a `datetime` object) and @@ -692,18 +684,13 @@ def find_date_and_format( # date. multiplier = -1 if sign == "-" else 1 days = cls.relative_units[timespan] - date = ( - datetime.now() - + timedelta(days=int(quantity) * days) * multiplier - ) + date = datetime.now() + timedelta(days=int(quantity) * days) * multiplier return cls(date, cls.precisions[5]) # Check for an absolute date. date, ordinal = find_date_and_format(string) if date is None or ordinal is None: - raise InvalidQueryArgumentValueError( - string, "a valid date/time string" - ) + raise InvalidQueryArgumentValueError(string, "a valid date/time string") precision = cls.precisions[ordinal] return cls(date, precision) @@ -839,9 +826,7 @@ def _convert(self, s: str) -> float | None: try: return float(s) except ValueError: - raise InvalidQueryArgumentValueError( - s, "a M:SS string or a float" - ) + raise InvalidQueryArgumentValueError(s, "a M:SS string or a float") # Sorting. @@ -1052,3 +1037,15 @@ def key(o): return val.lower() if self.case_insensitive else val return sorted(objs, key=key, reverse=not self.ascending) + + +from beets import util + + +class ArtQuery(FieldQuery): + """Query that matches albums with or without embedded art.""" + + def __new__(cls, field, pattern, *args, **kwargs): + # Interpret the pattern as a boolean ("true"/"false", "1"/"0") and + # delegate the actual matching to BooleanQuery. + val = util.str2bool(pattern) + return BooleanQuery(field, val, *args, **kwargs) diff --git a/beets/dbcore/queryparse.py b/beets/dbcore/queryparse.py index f84ed74365..a7fed368f7 100644 --- a/beets/dbcore/queryparse.py +++ b/beets/dbcore/queryparse.py @@ -138,9 +138,7 @@ def construct_query_part( # Use `model_cls` to build up a map from field (or query) names to # `Query` classes. query_classes: dict[str, FieldQueryType] = {} - for k, t in itertools.chain( - model_cls._fields.items(), model_cls._types.items() - ): + for k, t in itertools.chain(model_cls._fields.items(), model_cls._types.items()): query_classes[k] = t.query query_classes.update(model_cls._queries) # Non-field queries.
@@ -228,9 +226,7 @@ def sort_from_strings( else: sort = query.MultipleSort() for part in sort_parts: - sort.add_sort( - construct_sort_part(model_cls, part, case_insensitive) - ) + sort.add_sort(construct_sort_part(model_cls, part, case_insensitive)) return sort @@ -259,9 +255,7 @@ def parse_sorted_query( # Parse the subquery in to a single AndQuery # TODO: Avoid needlessly wrapping AndQueries containing 1 subquery? query_parts.append( - query_from_strings( - query.AndQuery, model_cls, prefixes, subquery_parts - ) + query_from_strings(query.AndQuery, model_cls, prefixes, subquery_parts) ) del subquery_parts[:] else: diff --git a/beets/importer.py b/beets/importer.py index 2bdb166697..b50dfbfcdd 100644 --- a/beets/importer.py +++ b/beets/importer.py @@ -30,6 +30,8 @@ from enum import Enum from tempfile import mkdtemp from typing import Callable, Iterable, Sequence +from mediafile import MediaFile +from beets.util import syspath import mediafile @@ -268,9 +270,7 @@ def set_config(self, config): iconfig["incremental"] = False if iconfig["reflink"]: - iconfig["reflink"] = iconfig["reflink"].as_choice( - ["auto", True, False] - ) + iconfig["reflink"] = iconfig["reflink"].as_choice(["auto", True, False]) # Copy, move, reflink, link, and hardlink are mutually exclusive. if iconfig["move"]: @@ -550,9 +550,7 @@ def __init__( self.is_album = True self.search_ids = [] # user-supplied candidate IDs. - def set_choice( - self, choice: action | autotag.AlbumMatch | autotag.TrackMatch - ): + def set_choice(self, choice: action | autotag.AlbumMatch | autotag.TrackMatch): """Given an AlbumMatch or TrackMatch object or an action constant, indicates that an action has been selected for this task. @@ -647,9 +645,7 @@ def remove_duplicates(self, lib: library.Library): for item in duplicate_items: item.remove() if lib.directory in util.ancestry(item.path): - log.debug( - "deleting duplicate {0}", util.displayable_path(item.path) - ) + log.debug("deleting duplicate {0}", util.displayable_path(item.path)) util.remove(item.path) util.prune_dirs(os.path.dirname(item.path), lib.directory) @@ -681,7 +677,8 @@ def finalize(self, session: ImportSession): self.save_progress() if session.config["incremental"] and not ( # Should we skip recording to incremental list? - self.skip and session.config["incremental_skip_later"] + self.skip + and session.config["incremental_skip_later"] ): self.save_history() @@ -738,9 +735,7 @@ def lookup_candidates(self): candidate IDs are stored in self.search_ids: if present, the initial lookup is restricted to only those IDs. """ - artist, album, prop = autotag.tag_album( - self.items, search_ids=self.search_ids - ) + artist, album, prop = autotag.tag_album(self.items, search_ids=self.search_ids) self.cur_artist = artist self.cur_album = album self.candidates = prop.candidates @@ -760,9 +755,7 @@ def find_duplicates(self, lib: library.Library): # Construct a query to find duplicates with this metadata. We # use a temporary Album object to generate any computed fields. tmp_album = library.Album(lib, **info) - keys: list[str] = config["import"]["duplicate_keys"][ - "album" - ].as_str_seq() + keys: list[str] = config["import"]["duplicate_keys"]["album"].as_str_seq() dup_query = tmp_album.duplicates_query(keys) # Don't count albums with the same files as duplicates. 
@@ -793,8 +786,7 @@ def align_album_level_fields(self): [i.albumartist or i.artist for i in self.items] ) if freq == len(self.items) or ( - freq > 1 - and float(freq) / len(self.items) >= SINGLE_ARTIST_THRESH + freq > 1 and float(freq) / len(self.items) >= SINGLE_ARTIST_THRESH ): # Single-artist album. changes["albumartist"] = plur_albumartist @@ -875,7 +867,15 @@ def add(self, lib: library.Library): self.record_replaced(lib) self.remove_replaced(lib) - self.album = lib.add_album(self.imported_items()) + # Detect embedded album art across imported items + has_art = False + for item in self.imported_items(): + mf = MediaFile(syspath(item.path)) + if mf.art: + has_art = True + break + + self.album = lib.add_album(self.imported_items(), art=has_art) if self.choice_flag == action.APPLY and isinstance( self.match, autotag.AlbumMatch ): @@ -896,15 +896,10 @@ def record_replaced(self, lib: library.Library): self.replaced_albums: dict[PathBytes, library.Album] = defaultdict() replaced_album_ids = set() for item in self.imported_items(): - dup_items = list( - lib.items(dbcore.query.BytesQuery("path", item.path)) - ) + dup_items = list(lib.items(dbcore.query.BytesQuery("path", item.path))) self.replaced_items[item] = dup_items for dup_item in dup_items: - if ( - not dup_item.album_id - or dup_item.album_id in replaced_album_ids - ): + if not dup_item.album_id or dup_item.album_id in replaced_album_ids: continue replaced_album = dup_item._cached_album if replaced_album: @@ -957,8 +952,7 @@ def _reduce_and_log(new_obj, existing_fields, overwrite_keys): self.album.artpath = replaced_album.artpath self.album.store() log.debug( - "Reimported album {}. Preserving attribute ['added']. " - "Path: {}", + "Reimported album {}. Preserving attribute ['added']. " "Path: {}", self.album.id, displayable_path(self.album.path), ) @@ -1086,9 +1080,7 @@ def find_duplicates(self, lib): # Query for existing items using the same metadata. We use a # temporary `Item` object to generate any computed fields. tmp_item = library.Item(lib, **info) - keys: list[str] = config["import"]["duplicate_keys"][ - "item" - ].as_str_seq() + keys: list[str] = config["import"]["duplicate_keys"]["item"].as_str_seq() dup_query = tmp_item.duplicates_query(keys) found_items = [] @@ -1376,9 +1368,7 @@ def paths(self): def singleton(self, path: PathBytes): """Return a `SingletonImportTask` for the music file.""" if self.session.already_imported(self.toppath, [path]): - log.debug( - "Skipping previously-imported path: {0}", displayable_path(path) - ) + log.debug("Skipping previously-imported path: {0}", displayable_path(path)) self.skipped += 1 return None @@ -1399,9 +1389,7 @@ def album(self, paths: Iterable[PathBytes], dirs=None): dirs = list({os.path.dirname(p) for p in paths}) if self.session.already_imported(self.toppath, dirs): - log.debug( - "Skipping previously-imported path: {0}", displayable_path(dirs) - ) + log.debug("Skipping previously-imported path: {0}", displayable_path(dirs)) self.skipped += 1 return None @@ -1431,8 +1419,7 @@ def unarchive(self): if not (self.session.config["move"] or self.session.config["copy"]): log.warning( - "Archive importing requires either " - "'copy' or 'move' to be enabled." + "Archive importing requires either " "'copy' or 'move' to be enabled." 
) return @@ -1646,9 +1633,7 @@ def resolve_duplicates(session: ImportSession, task: ImportTask): if task.choice_flag in (action.ASIS, action.APPLY, action.RETAG): found_duplicates = task.find_duplicates(session.lib) if found_duplicates: - log.debug( - "found duplicates: {}".format([o.id for o in found_duplicates]) - ) + log.debug("found duplicates: {}".format([o.id for o in found_duplicates])) # Get the default action to follow from config. duplicate_action = config["import"]["duplicate_action"].as_choice( diff --git a/beets/library.py b/beets/library.py index d4ec63200d..cf79a93746 100644 --- a/beets/library.py +++ b/beets/library.py @@ -26,6 +26,7 @@ from functools import cached_property from pathlib import Path from typing import TYPE_CHECKING +from beets.dbcore.query import ArtQuery import platformdirs from mediafile import MediaFile, UnreadableFileError @@ -130,9 +131,7 @@ def is_path_query(cls, query_part): # Test both `sep` and `altsep` (i.e., both slash and backslash on # Windows). - if not ( - os.sep in query_part or (os.altsep and os.altsep in query_part) - ): + if not (os.sep in query_part or (os.altsep and os.altsep in query_part)): return False if cls.force_implicit_query_detection: @@ -408,17 +407,12 @@ def any_field_query(cls, *args, **kwargs) -> dbcore.OrQuery: @classmethod def any_writable_media_field_query(cls, *args, **kwargs) -> dbcore.OrQuery: fields = cls.writable_media_fields - return dbcore.OrQuery( - [cls.field_query(f, *args, **kwargs) for f in fields] - ) + return dbcore.OrQuery([cls.field_query(f, *args, **kwargs) for f in fields]) def duplicates_query(self, fields: list[str]) -> dbcore.AndQuery: """Return a query for entities with same values in the given fields.""" return dbcore.AndQuery( - [ - self.field_query(f, self.get(f), dbcore.MatchQuery) - for f in fields - ] + [self.field_query(f, self.get(f), dbcore.MatchQuery) for f in fields] ) @@ -453,10 +447,7 @@ def album_keys(self): if self.included_keys == self.ALL_KEYS: # Performance note: this triggers a database query. for key in self.album.keys(computed=True): - if ( - key in Album.item_keys - or key not in self.item._fields.keys() - ): + if key in Album.item_keys or key not in self.item._fields.keys(): album_keys.append(key) else: album_keys = self.included_keys @@ -625,9 +616,7 @@ class Item(LibModel): # Any kind of field (fixed, flexible, and computed) may be a media # field. Only these fields are read from disk in `read` and written in # `write`. - _media_fields = set(MediaFile.readable_fields()).intersection( - _fields.keys() - ) + _media_fields = set(MediaFile.readable_fields()).intersection(_fields.keys()) # Set of item fields that are backed by *writable* `MediaFile` tag # fields. 
@@ -639,7 +628,10 @@ class Item(LibModel): _sorts = {"artist": dbcore.query.SmartArtistSort} - _queries = {"singleton": SingletonQuery} + _queries = { + "singleton": SingletonQuery, + "art": ArtQuery, + } _format_config_key = "format_item" @@ -696,9 +688,7 @@ def _getters(cls): def duplicates_query(self, fields: list[str]) -> dbcore.AndQuery: """Return a query for entities with same values in the given fields.""" - return super().duplicates_query(fields) & dbcore.query.NoneQuery( - "album_id" - ) + return super().duplicates_query(fields) & dbcore.query.NoneQuery("album_id") @classmethod def from_path(cls, path): @@ -745,8 +735,7 @@ def __repr__(self): return "{}({})".format( type(self).__name__, ", ".join( - "{}={!r}".format(k, self[k]) - for k in self.keys(with_album=False) + "{}={!r}".format(k, self[k]) for k in self.keys(with_album=False) ), ) @@ -940,19 +929,13 @@ def move_file(self, dest, operation=MoveOperation.MOVE): destination=dest, ) util.move(self.path, dest) - plugins.send( - "item_moved", item=self, source=self.path, destination=dest - ) + plugins.send("item_moved", item=self, source=self.path, destination=dest) elif operation == MoveOperation.COPY: util.copy(self.path, dest) - plugins.send( - "item_copied", item=self, source=self.path, destination=dest - ) + plugins.send("item_copied", item=self, source=self.path, destination=dest) elif operation == MoveOperation.LINK: util.link(self.path, dest) - plugins.send( - "item_linked", item=self, source=self.path, destination=dest - ) + plugins.send("item_linked", item=self, source=self.path, destination=dest) elif operation == MoveOperation.HARDLINK: util.hardlink(self.path, dest) plugins.send( @@ -1173,6 +1156,7 @@ class Album(LibModel): _fields = { "id": types.PRIMARY_ID, "artpath": PathType(True), + "art": types.BOOLEAN, "added": DateType(), "albumartist": types.STRING, "albumartist_sort": types.STRING, @@ -1460,9 +1444,7 @@ def art_destination(self, image, item_dir=None): subpath = util.asciify_path( subpath, beets.config["path_sep_replace"].as_str() ) - subpath = util.sanitize_path( - subpath, replacements=self._db.replacements - ) + subpath = util.sanitize_path(subpath, replacements=self._db.replacements) subpath = bytestring_path(subpath) _, ext = os.path.splitext(image) @@ -1637,7 +1619,7 @@ def add(self, obj): self._memotable = {} return obj.id - def add_album(self, items): + def add_album(self, items, art=None): """Create a new album consisting of a list of items. The items are added to the database if they don't yet have an @@ -1650,6 +1632,8 @@ def add_album(self, items): # Create the album structure using metadata from the first item. values = {key: items[0][key] for key in Album.item_keys} album = Album(self, **values) + if art is not None: + album.art = art # Add the album structure and set the items' album_id fields. # Store or add the items. @@ -1662,6 +1646,7 @@ def add_album(self, items): else: item.store() + album.store(inherit=False, fields=["art"]) return album # Querying. 
@@ -1692,16 +1677,12 @@ def _fetch(self, model_cls, query, sort=None): @staticmethod def get_default_album_sort(): """Get a :class:`Sort` object for albums from the config option.""" - return dbcore.sort_from_strings( - Album, beets.config["sort_album"].as_str_seq() - ) + return dbcore.sort_from_strings(Album, beets.config["sort_album"].as_str_seq()) @staticmethod def get_default_item_sort(): """Get a :class:`Sort` object for items from the config option.""" - return dbcore.sort_from_strings( - Item, beets.config["sort_item"].as_str_seq() - ) + return dbcore.sort_from_strings(Item, beets.config["sort_item"].as_str_seq()) def albums(self, query=None, sort=None) -> Results[Album]: """Get :class:`Album` objects matching the query.""" diff --git a/beets/plugins.py b/beets/plugins.py index 2ca98649e4..0a592503f9 100644 --- a/beets/plugins.py +++ b/beets/plugins.py @@ -103,8 +103,7 @@ def commands(self): def _set_stage_log_level(self, stages): """Adjust all the stages in `stages` to WARNING logging level.""" return [ - self._set_log_level_and_params(logging.WARNING, stage) - for stage in stages + self._set_log_level_and_params(logging.WARNING, stage) for stage in stages ] def get_early_import_stages(self): @@ -393,9 +392,7 @@ def album_distance(items, album_info, mapping): def candidates(items, artist, album, va_likely, extra_tags=None): """Gets MusicBrainz candidates for an album from each plugin.""" for plugin in find_plugins(): - yield from plugin.candidates( - items, artist, album, va_likely, extra_tags - ) + yield from plugin.candidates(items, artist, album, va_likely, extra_tags) def item_candidates(item, artist, title): diff --git a/beets/random.py b/beets/random.py index f3318054c8..0ae6cbdad0 100644 --- a/beets/random.py +++ b/beets/random.py @@ -84,9 +84,7 @@ def _take_time(iter, secs, album): return out -def random_objs( - objs, album, number=1, time=None, equal_chance=False, random_gen=None -): +def random_objs(objs, album, number=1, time=None, equal_chance=False, random_gen=None): """Get a random subset of the provided `objs`. If `number` is provided, produce that many matches. Otherwise, if diff --git a/beets/ui/__init__.py b/beets/ui/__init__.py index 386410a090..e0696667cf 100644 --- a/beets/ui/__init__.py +++ b/beets/ui/__init__.py @@ -172,8 +172,7 @@ def should_move(move_opt=None): """ return _bool_fallback( move_opt, - config["import"]["move"].get(bool) - or config["import"]["copy"].get(bool), + config["import"]["move"].get(bool) or config["import"]["copy"].get(bool), ) @@ -261,10 +260,7 @@ def input_options( # Mark the option's shortcut letter for display. if not require and ( (default is None and not numrange and first) - or ( - isinstance(default, str) - and found_letter.lower() == default.lower() - ) + or (isinstance(default, str) and found_letter.lower() == default.lower()) ): # The first option is the default; mark it. show_letter = "[%s]" % found_letter.upper() @@ -319,9 +315,7 @@ def input_options( # Start prompt with U+279C: Heavy Round-Tipped Rightwards Arrow prompt = colorize("action", "\u279c ") line_length = 0 - for i, (part, length) in enumerate( - zip(prompt_parts, prompt_part_lengths) - ): + for i, (part, length) in enumerate(zip(prompt_parts, prompt_part_lengths)): # Add punctuation. 
if i == len(prompt_parts) - 1: part += colorize("action_description", "?") @@ -854,8 +848,7 @@ def split_into_lines(string, width_tuple): # Colorize each word with pre/post escapes # Reconstruct colored words words += [ - m.group("esc") + raw_word + RESET_COLOR - for raw_word in raw_words + m.group("esc") + raw_word + RESET_COLOR for raw_word in raw_words ] elif raw_words: # Pretext stops mid-word @@ -975,26 +968,18 @@ def print_column_layout( # Wrap into columns if "width" not in left or "width" not in right: # If widths have not been defined, set to share space. - left["width"] = ( - max_width - len(indent_str) - color_len(separator) - ) // 2 - right["width"] = ( - max_width - len(indent_str) - color_len(separator) - ) // 2 + left["width"] = (max_width - len(indent_str) - color_len(separator)) // 2 + right["width"] = (max_width - len(indent_str) - color_len(separator)) // 2 # On the first line, account for suffix as well as prefix left_width_tuple = ( - left["width"] - - color_len(left["prefix"]) - - color_len(left["suffix"]), + left["width"] - color_len(left["prefix"]) - color_len(left["suffix"]), left["width"] - color_len(left["prefix"]), left["width"] - color_len(left["prefix"]), ) left_split = split_into_lines(left["contents"], left_width_tuple) right_width_tuple = ( - right["width"] - - color_len(right["prefix"]) - - color_len(right["suffix"]), + right["width"] - color_len(right["prefix"]) - color_len(right["suffix"]), right["width"] - color_len(right["prefix"]), right["width"] - color_len(right["prefix"]), ) @@ -1056,9 +1041,7 @@ def print_column_layout( right_part_len = 0 # Padding until end of column - padding = ( - right["width"] - color_len(right["prefix"]) - right_part_len - ) + padding = right["width"] - color_len(right["prefix"]) - right_part_len # Remove some padding on the first line to display # length if i == 0: @@ -1293,9 +1276,7 @@ def _store_dict(option, opt_str, value, parser): raise ValueError except ValueError: raise UserError( - "supplied argument `{}' is not of the form `key=value'".format( - value - ) + "supplied argument `{}' is not of the form `key=value'".format(value) ) option_values[key] = value @@ -1496,7 +1477,9 @@ def __init__(self, *args, **kwargs): """ # A more helpful default usage. if "usage" not in kwargs: - kwargs["usage"] = """ + kwargs[ + "usage" + ] = """ %prog COMMAND [ARGS...] %prog help COMMAND""" kwargs["add_help_option"] = False @@ -1565,10 +1548,7 @@ def format_help(self, formatter=None): help_line = help_lines[0] if help_lines else "" result.append("%*s%s\n" % (indent_first, "", help_line)) result.extend( - [ - "%*s%s\n" % (help_position, "", line) - for line in help_lines[1:] - ] + ["%*s%s\n" % (help_position, "", line) for line in help_lines[1:]] ) formatter.dedent() @@ -1643,17 +1623,13 @@ def _load_plugins(options, config): # If we were given any plugins on the command line, use those. 
if options.plugins is not None: - plugin_list = ( - options.plugins.split(",") if len(options.plugins) > 0 else [] - ) + plugin_list = options.plugins.split(",") if len(options.plugins) > 0 else [] else: plugin_list = config["plugins"].as_str_seq() # Exclude any plugins that were specified on the command line if options.exclude is not None: - plugin_list = [ - p for p in plugin_list if p not in options.exclude.split(",") - ] + plugin_list = [p for p in plugin_list if p not in options.exclude.split(",")] plugins.load_plugins(plugin_list) return plugins @@ -1717,9 +1693,7 @@ def _configure(options): log.set_global_level(logging.INFO) if overlay_path: - log.debug( - "overlaying configuration: {0}", util.displayable_path(overlay_path) - ) + log.debug("overlaying configuration: {0}", util.displayable_path(overlay_path)) config_path = config.user_config_path() if os.path.isfile(config_path): @@ -1832,11 +1806,7 @@ def _raw_main(args, lib=None): # Special case for the `config --edit` command: bypass _setup so # that an invalid configuration does not prevent the editor from # starting. - if ( - subargs - and subargs[0] == "config" - and ("-e" in subargs or "--edit" in subargs) - ): + if subargs and subargs[0] == "config" and ("-e" in subargs or "--edit" in subargs): from beets.ui.commands import config_edit return config_edit() diff --git a/beets/ui/commands.py b/beets/ui/commands.py index 99aa04f0ac..fe72c9d229 100755 --- a/beets/ui/commands.py +++ b/beets/ui/commands.py @@ -218,8 +218,7 @@ def get_singleton_disambig_fields(info: hooks.TrackInfo) -> Sequence[str]: "album": ( "[{}]".format(info.album) if ( - config["import"]["singleton_album_disambig"].get() - and info.get("album") + config["import"]["singleton_album_disambig"].get() and info.get("album") ) else "" ), @@ -339,9 +338,7 @@ def __init__(self): } ) - def print_layout( - self, indent, left, right, separator=" -> ", max_width=None - ): + def print_layout(self, indent, left, right, separator=" -> ", max_width=None): if not max_width: # If no max_width provided, use terminal width max_width = ui.term_width() @@ -359,9 +356,7 @@ def show_match_header(self): print_("") # 'Match' line and similarity. - print_( - self.indent_header + f"Match ({dist_string(self.match.distance)}):" - ) + print_(self.indent_header + f"Match ({dist_string(self.match.distance)}):") if self.match.info.get("album"): # Matching an album - print that @@ -374,8 +369,7 @@ def show_match_header(self): f"{self.match.info.artist}" + f" - {self.match.info.title}" ) print_( - self.indent_header - + dist_colorize(artist_album_str, self.match.distance) + self.indent_header + dist_colorize(artist_album_str, self.match.distance) ) # Penalties. @@ -455,9 +449,7 @@ def make_medium_info_line(self, track_info): track_media = track_info.get("media", "Media") # Build output string. if self.match.info.mediums > 1 and track_info.disctitle: - return ( - f"* {track_media} {track_info.medium}: {track_info.disctitle}" - ) + return f"* {track_media} {track_info.medium}: {track_info.disctitle}" elif self.match.info.mediums > 1: return f"* {track_media} {track_info.medium}" elif track_info.disctitle: @@ -554,17 +546,11 @@ def make_line(self, item, track_info): appropriately. Returns (lhs, rhs) for column printing. """ # Track titles. - lhs_title, rhs_title, diff_title = self.make_track_titles( - item, track_info - ) + lhs_title, rhs_title, diff_title = self.make_track_titles(item, track_info) # Track number change. 
- lhs_track, rhs_track, diff_track = self.make_track_numbers( - item, track_info - ) + lhs_track, rhs_track, diff_track = self.make_track_numbers(item, track_info) # Length change. - lhs_length, rhs_length, diff_length = self.make_track_lengths( - item, track_info - ) + lhs_length, rhs_length, diff_length = self.make_track_lengths(item, track_info) changed = diff_title or diff_track or diff_length @@ -605,9 +591,7 @@ def get_width(side): try: return len( ui.uncolorize( - " ".join( - [side["prefix"], side["contents"], side["suffix"]] - ) + " ".join([side["prefix"], side["contents"], side["suffix"]]) ) ) except KeyError: @@ -732,9 +716,7 @@ def show_change(cur_artist, cur_album, match): album's tags are changed according to `match`, which must be an AlbumMatch object. """ - change = AlbumChange( - cur_artist=cur_artist, cur_album=cur_album, match=match - ) + change = AlbumChange(cur_artist=cur_artist, cur_album=cur_album, match=match) # Print the match header. change.show_match_header() @@ -750,9 +732,7 @@ def show_item_change(item, match): """Print out the change that would occur by tagging `item` with the metadata from `match`, a TrackMatch object. """ - change = TrackChange( - cur_artist=item.artist, cur_title=item.title, match=match - ) + change = TrackChange(cur_artist=item.artist, cur_title=item.title, match=match) # Print the match header. change.show_match_header() # Print the match details. @@ -1049,9 +1029,7 @@ def choose_match(self, task): # Let plugins display info or prompt the user before we go through the # process of selecting candidate. - results = plugins.send( - "import_task_before_choice", session=self, task=task - ) + results = plugins.send("import_task_before_choice", session=self, task=task) actions = [action for action in results if action] if len(actions) == 1: @@ -1171,11 +1149,7 @@ def resolve_duplicate(self, task, found_duplicates): print_( "Old: " + summarize_items( - ( - list(duplicate.items()) - if task.is_album - else [duplicate] - ), + (list(duplicate.items()) if task.is_album else [duplicate]), not task.is_album, ) ) @@ -1197,9 +1171,7 @@ def resolve_duplicate(self, task, found_duplicates): for item in task.imported_items(): print(f" {item}") - sel = ui.input_options( - ("Skip new", "Keep all", "Remove old", "Merge all") - ) + sel = ui.input_options(("Skip new", "Keep all", "Remove old", "Merge all")) if sel == "s": # Skip new. @@ -1244,12 +1216,8 @@ def _get_choices(self, task): ] if task.is_album: choices += [ - PromptChoice( - "t", "as Tracks", lambda s, t: importer.action.TRACKS - ), - PromptChoice( - "g", "Group albums", lambda s, t: importer.action.ALBUMS - ), + PromptChoice("t", "as Tracks", lambda s, t: importer.action.TRACKS), + PromptChoice("g", "Group albums", lambda s, t: importer.action.ALBUMS), ] choices += [ PromptChoice("e", "Enter search", manual_search), @@ -1259,11 +1227,7 @@ def _get_choices(self, task): # Send the before_choose_candidate event and flatten list. extra_choices = list( - chain( - *plugins.send( - "before_choose_candidate", session=self, task=task - ) - ) + chain(*plugins.send("before_choose_candidate", session=self, task=task)) ) # Add a "dummy" choice for the other baked-in option, for @@ -1280,9 +1244,7 @@ def _get_choices(self, task): short_letters = [c.short for c in all_choices] if len(short_letters) != len(set(short_letters)): # Duplicate short letter has been found. 
- duplicates = [ - i for i, count in Counter(short_letters).items() if count > 1 - ] + duplicates = [i for i, count in Counter(short_letters).items() if count > 1] for short in duplicates: # Keep the first of the choices, removing the rest. dup_choices = [c for c in all_choices if c.short == short] @@ -1359,9 +1321,7 @@ def import_func(lib, opts, args): # what we need. On Python 3, we need to undo the "helpful" # conversion to Unicode strings to get the real bytestring # filename. - paths = [ - p.encode(util.arg_encoding(), "surrogateescape") for p in paths - ] + paths = [p.encode(util.arg_encoding(), "surrogateescape") for p in paths] paths_from_logfiles = [ p.encode(util.arg_encoding(), "surrogateescape") for p in paths_from_logfiles @@ -1371,9 +1331,7 @@ def import_func(lib, opts, args): for path in paths: if not os.path.exists(syspath(normpath(path))): raise ui.UserError( - "no such file or directory: {}".format( - displayable_path(path) - ) + "no such file or directory: {}".format(displayable_path(path)) ) # Check the directories from the logfiles, but don't throw an error in @@ -1383,9 +1341,7 @@ def import_func(lib, opts, args): for path in paths_from_logfiles: if not os.path.exists(syspath(normpath(path))): log.warning( - "No such file or directory: {}".format( - displayable_path(path) - ) + "No such file or directory: {}".format(displayable_path(path)) ) continue @@ -1399,9 +1355,7 @@ def import_func(lib, opts, args): import_files(lib, paths, query) -import_cmd = ui.Subcommand( - "import", help="import new music", aliases=("imp", "im") -) +import_cmd = ui.Subcommand("import", help="import new music", aliases=("imp", "im")) import_cmd.parser.add_option( "-c", "--copy", @@ -1604,9 +1558,7 @@ def list_func(lib, opts, args): list_cmd = ui.Subcommand("list", help="query the library", aliases=("ls",)) -list_cmd.parser.usage += ( - "\n" "Example: %prog -f '$album: $title' artist:beatles" -) +list_cmd.parser.usage += "\n" "Example: %prog -f '$album: $title' artist:beatles" list_cmd.parser.add_all_common_options() list_cmd.func = list_func default_commands.append(list_cmd) @@ -1671,9 +1623,7 @@ def update_items(lib, query, album, move, pretend, fields, exclude_fields=None): try: item.read() except library.ReadError as exc: - log.error( - "error reading {0}: {1}", displayable_path(item.path), exc - ) + log.error("error reading {0}: {1}", displayable_path(item.path), exc) continue # Special-case album artist when it matches track artist. (Hacky @@ -1853,9 +1803,7 @@ def fmt_album(a): fmt_obj(o) # Confirm with user. - objs = ui.input_select_objects( - prompt, objs, fmt_obj, prompt_all=prompt_all - ) + objs = ui.input_select_objects(prompt, objs, fmt_obj, prompt_all=prompt_all) if not objs: return @@ -1940,9 +1888,7 @@ def stats_func(lib, opts, args): show_stats(lib, decargs(args), opts.exact) -stats_cmd = ui.Subcommand( - "stats", help="show statistics about the library or a query" -) +stats_cmd = ui.Subcommand("stats", help="show statistics about the library or a query") stats_cmd.parser.add_option( "-e", "--exact", action="store_true", help="exact size and time" ) @@ -1990,9 +1936,7 @@ def modify_items(lib, mods, dels, query, write, move, album, confirm, inherit): # objects. 
print_("Modifying {} {}s.".format(len(objs), "album" if album else "item")) changed = [] - templates = { - key: functemplate.template(value) for key, value in mods.items() - } + templates = {key: functemplate.template(value) for key, value in mods.items()} for obj in objs: obj_mods = { key: model_cls._parse(key, obj.evaluate_template(templates[key])) @@ -2081,9 +2025,7 @@ def modify_func(lib, opts, args): ) -modify_cmd = ui.Subcommand( - "modify", help="change metadata fields", aliases=("mod",) -) +modify_cmd = ui.Subcommand("modify", help="change metadata fields", aliases=("mod",)) modify_cmd.parser.add_option( "-m", "--move", @@ -2132,9 +2074,7 @@ def modify_func(lib, opts, args): # move: Move/copy files to the library or a new base directory. -def move_items( - lib, dest, query, copy, album, pretend, confirm=False, export=False -): +def move_items(lib, dest, query, copy, album, pretend, confirm=False, export=False): """Moves or copies items to a new base directory, given by dest. If dest is None, then the library's base directory is used, making the command "consolidate" files. @@ -2190,9 +2130,7 @@ def isalbummoved(album): objs = ui.input_select_objects( "Really %s" % act, objs, - lambda o: show_path_changes( - [(o.path, o.destination(basedir=dest))] - ), + lambda o: show_path_changes([(o.path, o.destination(basedir=dest))]), ) for obj in objs: @@ -2200,9 +2138,7 @@ def isalbummoved(album): if export: # Copy without affecting the database. - obj.move( - operation=MoveOperation.COPY, basedir=dest, store=False - ) + obj.move(operation=MoveOperation.COPY, basedir=dest, store=False) else: # Ordinary move/copy: store the new path. if copy: @@ -2216,9 +2152,7 @@ def move_func(lib, opts, args): if dest is not None: dest = normpath(dest) if not os.path.isdir(syspath(dest)): - raise ui.UserError( - "no such directory: {}".format(displayable_path(dest)) - ) + raise ui.UserError("no such directory: {}".format(displayable_path(dest))) move_items( lib, @@ -2288,9 +2222,7 @@ def write_items(lib, query, pretend, force): try: clean_item = library.Item.from_path(item.path) except library.ReadError as exc: - log.error( - "error reading {0}: {1}", displayable_path(item.path), exc - ) + log.error("error reading {0}: {1}", displayable_path(item.path), exc) continue # Check for and display changes. @@ -2375,9 +2307,7 @@ def config_edit(): except OSError as exc: message = f"Could not edit configuration: {exc}" if not editor: - message += ( - ". Please set the VISUAL (or EDITOR) environment variable" - ) + message += ". 
Please set the VISUAL (or EDITOR) environment variable" raise ui.UserError(message) @@ -2466,9 +2396,7 @@ def completion_script(commands): else: option_type = "opts" - options[name][option_type].extend( - opts._short_opts + opts._long_opts - ) + options[name][option_type].extend(opts._short_opts + opts._long_opts) # Add global options options["_global"] = { @@ -2494,10 +2422,7 @@ def completion_script(commands): # Fields yield " fields='%s'\n" % " ".join( - set( - list(library.Item._fields.keys()) - + list(library.Album._fields.keys()) - ) + set(list(library.Item._fields.keys()) + list(library.Album._fields.keys())) ) # Command options diff --git a/beets/util/__init__.py b/beets/util/__init__.py index b882ed626d..331706f886 100644 --- a/beets/util/__init__.py +++ b/beets/util/__init__.py @@ -235,9 +235,7 @@ def sorted_walk( for pat in ignore_bytes: if fnmatch.fnmatch(base, pat): if logger: - logger.debug( - "ignoring '{}' due to ignore rule '{}'", base, pat - ) + logger.debug("ignoring '{}' due to ignore rule '{}'", base, pat) skip = True break if skip: @@ -405,9 +403,7 @@ def bytestring_path(path: PathLike) -> bytes: # On Windows, remove the magic prefix added by `syspath`. This makes # ``bytestring_path(syspath(X)) == X``, i.e., we can safely # round-trip through `syspath`. - if os.path.__name__ == "ntpath" and str_path.startswith( - WINDOWS_MAGIC_PREFIX - ): + if os.path.__name__ == "ntpath" and str_path.startswith(WINDOWS_MAGIC_PREFIX): str_path = str_path[len(WINDOWS_MAGIC_PREFIX) :] # Try to encode with default encodings, but fall back to utf-8. @@ -420,9 +416,7 @@ def bytestring_path(path: PathLike) -> bytes: PATH_SEP: bytes = bytestring_path(os.sep) -def displayable_path( - path: PathLike | Iterable[PathLike], separator: str = "; " -) -> str: +def displayable_path(path: PathLike | Iterable[PathLike], separator: str = "; ") -> str: """Attempts to decode a bytestring path to a unicode object for the purpose of displaying it to the user. If the `path` argument is a list or a tuple, the elements are joined with `separator`. @@ -485,9 +479,7 @@ def remove(path: PathLike, soft: bool = True): try: os.remove(str_path) except OSError as exc: - raise FilesystemError( - exc, "delete", (str_path,), traceback.format_exc() - ) + raise FilesystemError(exc, "delete", (str_path,), traceback.format_exc()) def copy(path: bytes, dest: bytes, replace: bool = False): @@ -505,9 +497,7 @@ def copy(path: bytes, dest: bytes, replace: bool = False): try: shutil.copyfile(str_path, str_dest) except OSError as exc: - raise FilesystemError( - exc, "copy", (str_path, str_dest), traceback.format_exc() - ) + raise FilesystemError(exc, "copy", (str_path, str_dest), traceback.format_exc()) def move(path: bytes, dest: bytes, replace: bool = False): @@ -563,9 +553,7 @@ def move(path: bytes, dest: bytes, replace: bool = False): tmp_filename = "" os.remove(syspath(path)) except OSError as exc: - raise FilesystemError( - exc, "move", (path, dest), traceback.format_exc() - ) + raise FilesystemError(exc, "move", (path, dest), traceback.format_exc()) finally: if tmp_filename: os.remove(tmp_filename) @@ -620,9 +608,7 @@ def hardlink(path: bytes, dest: bytes, replace: bool = False): traceback.format_exc(), ) else: - raise FilesystemError( - exc, "link", (path, dest), traceback.format_exc() - ) + raise FilesystemError(exc, "link", (path, dest), traceback.format_exc()) def reflink( @@ -952,9 +938,7 @@ def editor_command() -> str: in general. 
""" - return ( - os.environ.get("VISUAL") or os.environ.get("EDITOR") or open_anything() - ) + return os.environ.get("VISUAL") or os.environ.get("EDITOR") or open_anything() def interactive_open(targets: Sequence[str], command: str): @@ -1057,9 +1041,7 @@ def asciify_path(path: str, sep_replace: str) -> str: for index, item in enumerate(path_components): path_components[index] = unidecode(item).replace(os.sep, sep_replace) if os.altsep: - path_components[index] = unidecode(item).replace( - os.altsep, sep_replace - ) + path_components[index] = unidecode(item).replace(os.altsep, sep_replace) return os.sep.join(path_components) @@ -1133,9 +1115,7 @@ def get_temp_filename( tempdir = get_module_tempdir(module) tempdir.mkdir(parents=True, exist_ok=True) - descriptor, filename = tempfile.mkstemp( - dir=tempdir, prefix=prefix, suffix=suffix - ) + descriptor, filename = tempfile.mkstemp(dir=tempdir, prefix=prefix, suffix=suffix) os.close(descriptor) return bytestring_path(filename) diff --git a/beets/util/artresizer.py b/beets/util/artresizer.py index ffbc2edba0..24a2f03a87 100644 --- a/beets/util/artresizer.py +++ b/beets/util/artresizer.py @@ -123,9 +123,7 @@ def __init__(self): self.identify_cmd = ["magick", "identify"] self.compare_cmd = ["magick", "compare"] - def resize( - self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0 - ): + def resize(self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0): """Resize using ImageMagick. Use the ``magick`` program or ``convert`` on older versions. Return @@ -269,9 +267,7 @@ def compare(self, im1, im2, compare_threshold): "-", "null:", ] - log.debug( - "comparing images with pipeline {} | {}", convert_cmd, compare_cmd - ) + log.debug("comparing images with pipeline {} | {}", convert_cmd, compare_cmd) convert_proc = subprocess.Popen( convert_cmd, stdout=subprocess.PIPE, @@ -358,9 +354,7 @@ def __init__(self): """ self.version() - def resize( - self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0 - ): + def resize(self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0): """Resize using Python Imaging Library (PIL). Return the output path of resized image. """ @@ -415,9 +409,7 @@ def resize( optimize=True, progressive=False, ) - log.warning( - "PIL Failed to resize file to below {0}B", max_filesize - ) + log.warning("PIL Failed to resize file to below {0}B", max_filesize) return path_out else: @@ -436,9 +428,7 @@ def get_size(self, path_in): im = Image.open(syspath(path_in)) return im.size except OSError as exc: - log.error( - "PIL could not read file {}: {}", displayable_path(path_in), exc - ) + log.error("PIL could not read file {}: {}", displayable_path(path_in), exc) return None def deinterlace(self, path_in, path_out=None): @@ -560,9 +550,7 @@ def method(self): else: return "WEBPROXY" - def resize( - self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0 - ): + def resize(self, maxwidth, path_in, path_out=None, quality=0, max_filesize=0): """Manipulate an image file according to the method, returning a new path. For PIL or IMAGEMAGIC methods, resizes the image to a temporary file and encodes with the specified quality level. 
diff --git a/beets/util/bluelet.py b/beets/util/bluelet.py index b81b389e0d..65ef7e8e94 100644 --- a/beets/util/bluelet.py +++ b/beets/util/bluelet.py @@ -350,15 +350,11 @@ def kill_thread(coro): try: value = event.fire() except OSError as exc: - if ( - isinstance(exc.args, tuple) - and exc.args[0] == errno.EPIPE - ): + if isinstance(exc.args, tuple) and exc.args[0] == errno.EPIPE: # Broken pipe. Remote host disconnected. pass elif ( - isinstance(exc.args, tuple) - and exc.args[0] == errno.ECONNRESET + isinstance(exc.args, tuple) and exc.args[0] == errno.ECONNRESET ): # Connection was reset by peer. pass diff --git a/beets/util/functemplate.py b/beets/util/functemplate.py index b0daefac28..25b0716546 100644 --- a/beets/util/functemplate.py +++ b/beets/util/functemplate.py @@ -319,8 +319,7 @@ def parse_expression(self): special_char_re = re.compile( r"[%s]|\Z" % "".join( - re.escape(c) - for c in self.special_chars + extra_special_chars + re.escape(c) for c in self.special_chars + extra_special_chars ) ) @@ -333,8 +332,7 @@ def parse_expression(self): # A non-special character. Skip to the next special # character, treating the interstice as literal text. next_pos = ( - special_char_re.search(self.string[self.pos :]).start() - + self.pos + special_char_re.search(self.string[self.pos :]).start() + self.pos ) text_parts.append(self.string[self.pos : next_pos]) self.pos = next_pos @@ -417,18 +415,14 @@ def parse_symbol(self): # Closer found. ident = self.string[self.pos : closer] self.pos = closer + 1 - self.parts.append( - Symbol(ident, self.string[start_pos : self.pos]) - ) + self.parts.append(Symbol(ident, self.string[start_pos : self.pos])) else: # A bare-word symbol. ident = self._parse_ident() if ident: # Found a real symbol. - self.parts.append( - Symbol(ident, self.string[start_pos : self.pos]) - ) + self.parts.append(Symbol(ident, self.string[start_pos : self.pos])) else: # A standalone $. self.parts.append(SYMBOL_DELIM) @@ -488,10 +482,7 @@ def parse_argument_list(self): expressions.append(Expression(subparser.parts)) self.pos += subparser.pos - if ( - self.pos >= len(self.string) - or self.string[self.pos] == GROUP_CLOSE - ): + if self.pos >= len(self.string) or self.string[self.pos] == GROUP_CLOSE: # Argument list terminated by EOF or closing brace. break diff --git a/beets/util/m3u.py b/beets/util/m3u.py index b6e355e06b..9b88485dfe 100644 --- a/beets/util/m3u.py +++ b/beets/util/m3u.py @@ -47,9 +47,7 @@ def load(self): with open(syspath(pl_normpath), "rb") as pl_file: raw_contents = pl_file.readlines() except OSError as exc: - raise FilesystemError( - exc, "read", (pl_normpath,), traceback.format_exc() - ) + raise FilesystemError(exc, "read", (pl_normpath,), traceback.format_exc()) self.extm3u = True if raw_contents[0].rstrip() == b"#EXTM3U" else False for line in raw_contents[1:]: @@ -92,6 +90,4 @@ def write(self): pl_file.write(line + b"\n") pl_file.write(b"\n") # Final linefeed to prevent noeol file. 
except OSError as exc: - raise FilesystemError( - exc, "create", (pl_normpath,), traceback.format_exc() - ) + raise FilesystemError(exc, "create", (pl_normpath,), traceback.format_exc()) diff --git a/beets/util/pipeline.py b/beets/util/pipeline.py index 98a1addce9..beb64e1600 100644 --- a/beets/util/pipeline.py +++ b/beets/util/pipeline.py @@ -429,9 +429,7 @@ def run_parallel(self, queue_size=DEFAULT_QUEUE_SIZE): for i in range(1, queue_count): for coro in self.stages[i]: threads.append( - MiddlePipelineThread( - coro, queues[i - 1], queues[i], threads - ) + MiddlePipelineThread(coro, queues[i - 1], queues[i], threads) ) # Last stage.