Skip to content

Add art:true/false album query to detect embedded cover art #5730

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 3 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 2 additions & 6 deletions beets/art.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,7 @@ def embed_item(
"""Embed an image into the item's media file."""
# Conditions.
if compare_threshold:
is_similar = check_art_similarity(
log, item, imagepath, compare_threshold
)
is_similar = check_art_similarity(log, item, imagepath, compare_threshold)
if is_similar is None:
log.warning("Error while checking art similarity; skipping.")
return
Expand Down Expand Up @@ -148,9 +146,7 @@ def resize_image(log, imagepath, maxwidth, quality):
maxwidth,
quality,
)
imagepath = ArtResizer.shared.resize(
maxwidth, syspath(imagepath), quality=quality
)
imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath), quality=quality)
return imagepath


Expand Down
12 changes: 3 additions & 9 deletions beets/autotag/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,9 +104,7 @@ def _apply_metadata(
nullable_fields: Sequence[str] = [],
):
"""Set the db_obj's metadata to match the info."""
special_fields = SPECIAL_FIELDS[
"album" if isinstance(info, AlbumInfo) else "track"
]
special_fields = SPECIAL_FIELDS["album" if isinstance(info, AlbumInfo) else "track"]

for field, value in info.items():
# We only overwrite fields that are not already hardcoded.
Expand Down Expand Up @@ -225,12 +223,8 @@ def apply_metadata(album_info: AlbumInfo, mapping: Mapping[Item, TrackInfo]):
# Artist sort and credit names.
item.artist_sort = track_info.artist_sort or album_info.artist_sort
item.artists_sort = track_info.artists_sort or album_info.artists_sort
item.artist_credit = (
track_info.artist_credit or album_info.artist_credit
)
item.artists_credit = (
track_info.artists_credit or album_info.artists_credit
)
item.artist_credit = track_info.artist_credit or album_info.artist_credit
item.artists_credit = track_info.artists_credit or album_info.artists_credit
item.albumartist_sort = album_info.artist_sort
item.albumartists_sort = album_info.artists_sort
item.albumartist_credit = album_info.artist_credit
Expand Down
8 changes: 2 additions & 6 deletions beets/autotag/hooks.py
Original file line number Diff line number Diff line change
Expand Up @@ -661,15 +661,11 @@ def album_candidates(
if config["musicbrainz"]["enabled"]:
# Base candidates if we have album and artist to match.
if artist and album:
yield from invoke_mb(
mb.match_album, artist, album, len(items), extra_tags
)
yield from invoke_mb(mb.match_album, artist, album, len(items), extra_tags)

# Also add VA matches from MusicBrainz where appropriate.
if va_likely and album:
yield from invoke_mb(
mb.match_album, None, album, len(items), extra_tags
)
yield from invoke_mb(mb.match_album, None, album, len(items), extra_tags)

# Candidates from plugins.
yield from plugins.candidates(items, artist, album, va_likely, extra_tags)
Expand Down
38 changes: 9 additions & 29 deletions beets/autotag/match.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,9 +138,7 @@ def assign_items(
# `tracks` list. Each value is either an index into the assigned item in
# `items` list, or -1 if that track has no match.
mapping = {
items[iidx]: t
for iidx, t in zip(assigned_item_idxs, tracks)
if iidx != -1
items[iidx]: t for iidx, t in zip(assigned_item_idxs, tracks) if iidx != -1
}
extra_items = list(set(items) - mapping.keys())
extra_items.sort(key=lambda i: (i.disc, i.track, i.title))
Expand Down Expand Up @@ -192,11 +190,7 @@ def track_distance(
dist.add_string("track_title", item.title, track_info.title)

# Artist. Only check if there is actually an artist in the track data.
if (
incl_artist
and track_info.artist
and item.artist.lower() not in VA_ARTISTS
):
if incl_artist and track_info.artist and item.artist.lower() not in VA_ARTISTS:
dist.add_string("track_artist", item.artist, track_info.artist)

# Track index.
Expand Down Expand Up @@ -273,9 +267,7 @@ def distance(
elif album_info.original_year:
# Prefer matches closest to the release year.
diff = abs(likelies["year"] - album_info.year)
diff_max = abs(
datetime.date.today().year - album_info.original_year
)
diff_max = abs(datetime.date.today().year - album_info.original_year)
dist.add_ratio("year", diff, diff_max)
else:
# Full penalty when there is no original year.
Expand All @@ -297,9 +289,7 @@ def distance(

# Catalog number.
if likelies["catalognum"] and album_info.catalognum:
dist.add_string(
"catalognum", likelies["catalognum"], album_info.catalognum
)
dist.add_string("catalognum", likelies["catalognum"], album_info.catalognum)

# Disambiguation.
if likelies["albumdisambig"] and album_info.albumdisambig:
Expand All @@ -309,9 +299,7 @@ def distance(

# Album ID.
if likelies["mb_albumid"]:
dist.add_equality(
"album_id", likelies["mb_albumid"], album_info.album_id
)
dist.add_equality("album_id", likelies["mb_albumid"], album_info.album_id)

# Tracks.
dist.tracks = {}
Expand Down Expand Up @@ -383,8 +371,7 @@ def _recommendation(
# Only a single candidate.
rec = Recommendation.low
elif (
results[1].distance - min_dist
>= config["match"]["rec_gap_thresh"].as_number()
results[1].distance - min_dist >= config["match"]["rec_gap_thresh"].as_number()
):
# Gap between first two candidates is large.
rec = Recommendation.low
Expand Down Expand Up @@ -432,9 +419,7 @@ def _add_candidate(
checking the track count, ordering the items, checking for
duplicates, and calculating the distance.
"""
log.debug(
"Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id
)
log.debug("Candidate: {0} - {1} ({2})", info.artist, info.album, info.album_id)

# Discard albums with zero tracks.
if not info.tracks:
Expand All @@ -447,9 +432,7 @@ def _add_candidate(
return

# Discard matches without required tags.
for req_tag in cast(
Sequence[str], config["match"]["required"].as_str_seq()
):
for req_tag in cast(Sequence[str], config["match"]["required"].as_str_seq()):
if getattr(info, req_tag) is None:
log.debug("Ignored. Missing required tag: {0}", req_tag)
return
Expand Down Expand Up @@ -595,10 +578,7 @@ def tag_item(
candidates[info.track_id] = hooks.TrackMatch(dist, info)
# If this is a good match, then don't keep searching.
rec = _recommendation(_sort_candidates(candidates.values()))
if (
rec == Recommendation.strong
and not config["import"]["timid"]
):
if rec == Recommendation.strong and not config["import"]["timid"]:
log.debug("Track ID match.")
return Proposal(_sort_candidates(candidates.values()), rec)

Expand Down
24 changes: 6 additions & 18 deletions beets/autotag/mb.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@
)


def _preferred_alias(aliases: list):

Check failure on line 146 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "list"

Check failure on line 146 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "list"
"""Given an list of alias structures for an artist credit, select
and return the user's preferred alias alias or None if no matching
alias is found.
Expand Down Expand Up @@ -184,7 +184,7 @@
default release event if a preferred event is not found.
"""
countries = config["match"]["preferred"]["countries"].as_str_seq()
countries = cast(Sequence, countries)

Check failure on line 187 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "Sequence"

Check failure on line 187 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "Sequence"

for country in countries:
for event in release.get("release-event-list", {}):
Expand All @@ -198,7 +198,7 @@


def _multi_artist_credit(
credit: list[dict], include_join_phrase: bool

Check failure on line 201 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "dict"

Check failure on line 201 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "dict"
) -> tuple[list[str], list[str], list[str]]:
"""Given a list representing an ``artist-credit`` block, accumulate
data into a triple of joined artist name lists: canonical, sort, and
Expand Down Expand Up @@ -246,7 +246,7 @@
)


def _flatten_artist_credit(credit: list[dict]) -> tuple[str, str, str]:

Check failure on line 249 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "dict"

Check failure on line 249 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "dict"
"""Given a list representing an ``artist-credit`` block, flatten the
data into a triple of joined artist name strings: canonical, sort, and
credit.
Expand All @@ -261,7 +261,7 @@
)


def _artist_ids(credit: list[dict]) -> list[str]:

Check failure on line 264 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "dict"

Check failure on line 264 in beets/autotag/mb.py

View workflow job for this annotation

GitHub Actions / Check types with mypy

Missing type parameters for generic type "dict"
"""
Given a list representing an ``artist-credit``,
return a list of artist IDs
Expand Down Expand Up @@ -324,9 +324,7 @@
info.artists,
info.artists_sort,
info.artists_credit,
) = _multi_artist_credit(
recording["artist-credit"], include_join_phrase=False
)
) = _multi_artist_credit(recording["artist-credit"], include_join_phrase=False)

info.artists_ids = _artist_ids(recording["artist-credit"])
info.artist_id = info.artists_ids[0]
Expand Down Expand Up @@ -355,9 +353,7 @@
if "disambiguation" in work_relation["work"]:
info.work_disambig = work_relation["work"]["disambiguation"]

for artist_relation in work_relation["work"].get(
"artist-relation-list", ()
):
for artist_relation in work_relation["work"].get("artist-relation-list", ()):
if "type" in artist_relation:
type = artist_relation["type"]
if type == "lyricist":
Expand Down Expand Up @@ -425,9 +421,7 @@
artists_names,
artists_sort_names,
artists_credit_names,
) = _multi_artist_credit(
release["artist-credit"], include_join_phrase=False
)
) = _multi_artist_credit(release["artist-credit"], include_join_phrase=False)

ntracks = sum(len(m["track-list"]) for m in release["medium-list"])

Expand Down Expand Up @@ -464,10 +458,7 @@
continue

all_tracks = medium["track-list"]
if (
"data-track-list" in medium
and not config["match"]["ignore_data_tracks"]
):
if "data-track-list" in medium and not config["match"]["ignore_data_tracks"]:
all_tracks += medium["data-track-list"]
track_count = len(all_tracks)

Expand Down Expand Up @@ -558,9 +549,7 @@

# Get the disambiguation strings at the release and release group level.
if release["release-group"].get("disambiguation"):
info.releasegroupdisambig = release["release-group"].get(
"disambiguation"
)
info.releasegroupdisambig = release["release-group"].get("disambiguation")
if release.get("disambiguation"):
info.albumdisambig = release.get("disambiguation")

Expand Down Expand Up @@ -626,8 +615,7 @@
for genreitem in source:
genres[genreitem["name"]] += int(genreitem["count"])
info.genre = "; ".join(
genre
for genre, _count in sorted(genres.items(), key=lambda g: -g[1])
genre for genre, _count in sorted(genres.items(), key=lambda g: -g[1])
)

# We might find links to external sources (Discogs, Bandcamp, ...)
Expand Down
60 changes: 25 additions & 35 deletions beets/dbcore/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -642,9 +642,7 @@ def remove(self):
db = self._check_db()
with db.transaction() as tx:
tx.mutate(f"DELETE FROM {self._table} WHERE id=?", (self.id,))
tx.mutate(
f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,)
)
tx.mutate(f"DELETE FROM {self._flex_table} WHERE entity_id=?", (self.id,))

def add(self, db: D | None = None):
"""Add the object to the library database. This object must be
Expand Down Expand Up @@ -698,9 +696,7 @@ def evaluate_template(
else:
# Help out mypy
t = template
return t.substitute(
self.formatted(for_path=for_path), self._template_funcs()
)
return t.substitute(self.formatted(for_path=for_path), self._template_funcs())

# Parsing.

Expand Down Expand Up @@ -836,9 +832,7 @@ def _get_indexed_flex_attrs(self) -> dict[int, FlexAttrs]:

return flex_values

def _make_model(
self, row: sqlite3.Row, flex_values: FlexAttrs = {}
) -> AnyModel:
def _make_model(self, row: sqlite3.Row, flex_values: FlexAttrs = {}) -> AnyModel:
"""Create a Model object for the given row"""
cols = dict(row)
values = {k: v for (k, v) in cols.items() if not k[:4] == "flex"}
Expand Down Expand Up @@ -1003,9 +997,7 @@ class Database:

def __init__(self, path, timeout: float = 5.0):
if sqlite3.threadsafety == 0:
raise RuntimeError(
"sqlite3 must be compiled with multi-threading support"
)
raise RuntimeError("sqlite3 must be compiled with multi-threading support")

self.path = path
self.timeout = timeout
Expand Down Expand Up @@ -1035,23 +1027,23 @@ def __init__(self, path, timeout: float = 5.0):

# Primitive access control: connections and transactions.

def _connection(self) -> Connection:
"""Get a SQLite connection object to the underlying database.
One connection object is created per thread.
"""
thread_id = threading.current_thread().ident
# Help the type checker: ident can only be None if the thread has not
# been started yet; but since this results from current_thread(), that
# can't happen
assert thread_id is not None
def _connect(self) -> sqlite3.Connection:
try:
conn = sqlite3.connect(self.path, timeout=self.timeout)
except sqlite3.OperationalError as e:
if "unable to open database file" in str(e).lower():
raise sqlite3.OperationalError(
f"Unable to open the database file at {self.path}. "
f"Check that the directory exists and is writable."
) from e
raise
return conn

def _connection(self) -> sqlite3.Connection:
# Legacy support for code that still calls _connection()
# Reuse the same logic as _connect
return self._connect()

with self._shared_map_lock:
if thread_id in self._connections:
return self._connections[thread_id]
else:
conn = self._create_connection()
self._connections[thread_id] = conn
return conn

def _create_connection(self) -> Connection:
"""Create a SQLite connection to the underlying database.
Expand Down Expand Up @@ -1140,9 +1132,7 @@ def transaction(self) -> Transaction:
def load_extension(self, path: str):
"""Load an SQLite extension into all open connections."""
if not self.supports_extensions:
raise ValueError(
"this sqlite3 installation does not support extensions"
)
raise ValueError("this sqlite3 installation does not support extensions")

self._extensions.append(path)

Expand Down Expand Up @@ -1171,9 +1161,7 @@ def _make_table(self, table: str, fields: Mapping[str, types.Type]):
columns = []
for name, typ in fields.items():
columns.append(f"{name} {typ.sql}")
setup_sql = "CREATE TABLE {} ({});\n".format(
table, ", ".join(columns)
)
setup_sql = "CREATE TABLE {} ({});\n".format(table, ", ".join(columns))

else:
# Table exists does not match the field set.
Expand Down Expand Up @@ -1203,7 +1191,9 @@ def _make_attribute_table(self, flex_table: str):
UNIQUE(entity_id, key) ON CONFLICT REPLACE);
CREATE INDEX IF NOT EXISTS {0}_by_entity
ON {0} (entity_id);
""".format(flex_table)
""".format(
flex_table
)
)

# Querying.
Expand Down
Loading
Loading