class Album(DataObject, Item):
    """A MusicBrainz release displayed in the tagger.

    Loads the release XML (and, separately, the release-group version
    list) from the MusicBrainz web service, builds ``Track`` objects and
    their metadata, runs album/track metadata plugins and the user's
    tagger script, and matches loose files to tracks.  Files that cannot
    be matched live in the special ``unmatched_files`` cluster.
    """

    # Emitted when the release-group request has finished, whether it
    # succeeded or failed (see _release_group_request_finished).
    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, id, discid=None):
        DataObject.__init__(self, id)
        self.metadata = Metadata()
        self.tracks = []
        self.format_str = ""
        self.loaded = False
        self.load_task = None          # handle of the in-flight web-service task
        self.rgloaded = False
        self.rgid = None
        self._files = 0                # files currently linked to this album's tracks
        self._requests = 0             # outstanding web-service requests
        self._tracks_loaded = False
        self._discid = discid
        self._after_load_callbacks = queue.Queue()
        self.other_versions = []       # other releases in the same release group
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True,
                                       related_album=self, hide_if_empty=True)

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata[u"album"])

    def iterfiles(self, save=False):
        """Yield all files on this album; include unmatched files unless *save*."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def _parse_release(self, document):
        """Parse the release XML into self._new_metadata.

        Returns True when parsing proceeded, False when the release id
        redirected to an album that is already loaded (in which case this
        album object is removed).
        """
        self.log.debug("Loading release %r", self.id)
        self._tracks_loaded = False

        release_node = document.metadata[0].release[0]
        if release_node.id != self.id:
            # The requested MBID was merged/redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_node.id
            album = self.tagger.albums.get(release_node.id)
            if album:
                self.log.debug("Release %r already loaded", release_node.id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                # Re-key this album under the canonical id.
                del self.tagger.albums[self.id]
                self.tagger.albums[release_node.id] = self
                self.id = release_node.id

        # Get release metadata
        m = self._new_metadata
        m.length = 0
        release_to_metadata(release_node, m, config=self.config, album=self)
        self.format_str = media_formats_from_node(release_node.medium_list[0])
        self.rgid = release_node.release_group[0].id
        if self._discid:
            m['musicbrainz_discid'] = self._discid

        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = self.config.setting['va_name']

        # Convert Unicode punctuation
        if self.config.setting['convert_punctuation']:
            m.apply_func(asciipunct)

        m['totaldiscs'] = release_node.medium_list[0].count

        # Run album metadata plugins.  A plugin failure must not abort
        # loading, but do not swallow KeyboardInterrupt/SystemExit
        # (previously a bare "except:").
        try:
            run_album_metadata_processors(self, m, release_node)
        except Exception:
            self.log.error(traceback.format_exc())

        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web-service callback for the release request."""
        if self.load_task is None:
            # Loading was cancelled via stop_loading().
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.log.error("%r", unicode(http.errorString()))
                # Fix for broken NAT releases: a 404 may mean the files
                # belong to the pseudo "non-album tracks" release.
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = self.config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        trackid = file.metadata["musicbrainz_trackid"]
                        if mbid_validate(trackid) and file.metadata["album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, trackid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except Exception:
                    error = True
                    self.log.error(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)

    def _parse_release_group(self, document):
        """Collect the other releases of this release group into other_versions."""
        for node in document.metadata[0].release_list[0].release:
            v = {}
            v["mbid"] = node.id
            v["date"] = node.date[0].text if "date" in node.children else ""
            v["country"] = node.country[0].text if "country" in node.children else ""
            labels, catnums = label_info_from_node(node.label_info_list[0])
            v["labels"] = ", ".join(set(labels))
            v["catnums"] = ", ".join(set(catnums))
            v["tracks"] = " + ".join([m.track_list[0].count
                                      for m in node.medium_list[0].medium])
            v["format"] = media_formats_from_node(node.medium_list[0])
            self.other_versions.append(v)
        self.other_versions.sort(key=lambda x: x["date"])

    def _release_group_request_finished(self, document, http, error):
        """Web-service callback for the release-group request."""
        try:
            if error:
                self.log.error("%r", unicode(http.errorString()))
            else:
                try:
                    self._parse_release_group(document)
                except Exception:
                    error = True
                    self.log.error(traceback.format_exc())
        finally:
            # Always flag completion and notify listeners, even on error.
            self.rgloaded = True
            self.release_group_loaded.emit()

    def _finalize_loading(self, error):
        """Build tracks from the parsed release and publish the new metadata.

        Called after each request finishes; the real work only happens
        once all outstanding requests are done (self._requests == 0).
        """
        if error:
            self.metadata.clear()
            self.metadata['album'] = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return

        if self._requests > 0:
            return

        if not self._tracks_loaded:
            artists = set()
            totalalbumtracks = 0
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars

            for medium_node in self._release_node.medium_list[0].medium:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                totalalbumtracks += int(mm["totaltracks"])
                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)
                for track_node in medium_node.track_list[0].track:
                    track = Track(track_node.recording[0].id, self)
                    self._new_tracks.append(track)
                    # Get track metadata
                    tm = track.metadata
                    tm.copy(mm)
                    track_to_metadata(track_node, track, self.config)
                    track._customize_metadata()
                    self._new_metadata.length += tm.length
                    artists.add(tm["musicbrainz_artistid"])
                    # Run track metadata plugins; log failures but keep going.
                    try:
                        run_track_metadata_processors(self, tm, self._release_node,
                                                      track_node)
                    except Exception:
                        self.log.error(traceback.format_exc())

            totalalbumtracks = str(totalalbumtracks)
            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    # More than one track artist => mark as compilation.
                    track.metadata["compilation"] = "1"
            del self._release_node
            self._tracks_loaded = True

        if not self._requests:
            # Prepare parser for user's script
            if self.config.setting["enable_tagger_script"]:
                script = self.config.setting["tagger_script"]
                if script:
                    parser = ScriptParser()
                    for track in self._new_tracks:
                        # Run tagger script for each track
                        try:
                            parser.eval(script, track.metadata)
                        except Exception:
                            self.log.error(traceback.format_exc())
                        # Strip leading/trailing whitespace
                        track.metadata.strip_whitespace()
                    # Run tagger script for the album itself
                    try:
                        parser.eval(script, self._new_metadata)
                    except Exception:
                        self.log.error(traceback.format_exc())
                    self._new_metadata.strip_whitespace()

            # Detach any files still linked to the old tracks before the
            # old track list is replaced, then re-match everything.
            for track in self.tracks:
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.match_files(self.unmatched_files.files)
            self.update()
            self.tagger.window.set_statusbar_message(_('Album %s loaded'), self.id,
                                                     timeout=3000)
            while self._after_load_callbacks.qsize() > 0:
                func = self._after_load_callbacks.get()
                func()

    def load(self):
        """Start (re)loading this album from the web service."""
        if self._requests:
            self.log.info("Not reloading, some requests are still active.")
            return
        # Wrapped in _() for translation, consistent with the
        # 'Album %s loaded' message (was previously untranslated).
        self.tagger.window.set_statusbar_message(_('Loading album %s...'), self.id)
        self.loaded = False
        self.rgloaded = False
        self.rgid = None
        self.other_versions = []
        self.metadata.clear()
        self.folksonomy_tags.clear()
        self.metadata['album'] = _("[loading album information]")
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        require_authentication = False
        inc = ['release-groups', 'media', 'recordings', 'puids',
               'artist-credits', 'artists', 'aliases', 'labels', 'isrcs']
        if self.config.setting['release_ars'] or self.config.setting['track_ars']:
            inc += ['artist-rels', 'release-rels', 'url-rels', 'recording-rels',
                    'work-rels']
            if self.config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        if self.config.setting['folksonomy_tags']:
            if self.config.setting['only_my_tags']:
                require_authentication = True
                inc += ['user-tags']
            else:
                inc += ['tags']
        if self.config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.xmlws.get_release_by_id(
            self.id, self._release_request_finished, inc=inc,
            mblogin=require_authentication)

    def run_when_loaded(self, func):
        """Run *func* now if loaded, otherwise queue it for after loading."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.put(func)

    def stop_loading(self):
        """Cancel the in-flight load request, if any."""
        if self.load_task:
            self.tagger.xmlws.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        """Refresh the UI item for this album (and optionally its tracks)."""
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        self._files += 1
        self.update(update_tracks=False)

    def _remove_file(self, track, file):
        self._files -= 1
        self.update(update_tracks=False)

    def match_files(self, files, use_trackid=True):
        """Match files to tracks on this album, based on metadata similarity or trackid."""
        for file in list(files):
            if file.state == File.REMOVED:
                continue
            matches = []
            trackid = file.metadata['musicbrainz_trackid']
            if use_trackid and mbid_validate(trackid):
                matches = self._get_trackid_matches(file, trackid)
            if not matches:
                for track in self.tracks:
                    sim = track.metadata.compare(file.orig_metadata)
                    if sim >= self.config.setting['track_matching_threshold']:
                        matches.append((sim, track))
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
            else:
                file.move(self.unmatched_files)

    def match_file(self, file, trackid=None):
        """Match the file on a track on this album, based on trackid or metadata similarity."""
        if file.state == File.REMOVED:
            return
        if trackid is not None:
            matches = self._get_trackid_matches(file, trackid)
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
                return
        self.match_files([file], use_trackid=False)

    def _get_trackid_matches(self, file, trackid):
        """Score tracks sharing *trackid*: 4.0 = track+disc number match,
        3.0 = track number match, 2.0 = id only."""
        matches = []
        tracknumber = file.metadata['tracknumber']
        discnumber = file.metadata['discnumber']
        for track in self.tracks:
            tm = track.metadata
            if trackid == tm['musicbrainz_trackid']:
                if tracknumber == tm['tracknumber']:
                    if discnumber == tm['discnumber']:
                        matches.append((4.0, track))
                        break
                    else:
                        matches.append((3.0, track))
                else:
                    matches.append((2.0, track))
        return matches

    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def is_complete(self):
        """True when every track has exactly one linked file."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if track.num_linked_files != 1:
                return False
        else:
            return True

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Text for the given tree-view column of this album row."""
        if column == 'title':
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E = left-to-right mark, keeps the counts readable
                # next to RTL album titles.
                text = u'%s\u200E (%d/%d' % (self.metadata['album'],
                                             linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched,)
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved,)
                return text + ')'
            else:
                return self.metadata['album']
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        else:
            return ''

    def switch_release_version(self, mbid):
        """Switch this album to another release (*mbid*) of the same group."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            del self.tagger.albums[self.id]
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load()
class MetadataTest(PicardTestCase):
    """Regression tests for the Metadata mapping type.

    Covers single-/multi-valued tags, deleted-tag bookkeeping,
    copy/update semantics, apply_func, similarity scoring and the
    attached image list.
    """

    # Class-level fixtures; not mutated by the tests below.
    original = None
    tags = []

    def setUp(self):
        """Build one Metadata with single, multi and hidden tags, plus
        three small dict-seeded Metadata fixtures (d1..d3)."""
        super().setUp()
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique twice with the same value must keep a single entry.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        # Tags starting with "~" are hidden/internal.
        self.metadata["~hidden"] = "hidden-value"
        self.metadata_d1 = Metadata({'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': ''})
        self.metadata_d2 = Metadata({'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': 'z'})
        self.metadata_d3 = Metadata({'c': 3, 'd': ['u', 'w'], 'x': 'p'})

    def tearDown(self):
        pass

    def test_metadata_setitem(self):
        # All setter styles must store a list internally (getraw).
        self.assertEqual(["single1-value"], self.metadata.getraw("single1"))
        self.assertEqual(["single2-value"], self.metadata.getraw("single2"))
        self.assertEqual(self.multi1, self.metadata.getraw("multi1"))
        self.assertEqual(self.multi2, self.metadata.getraw("multi2"))
        self.assertEqual(self.multi3, self.metadata.getraw("multi3"))
        self.assertEqual(["hidden-value"], self.metadata.getraw("~hidden"))

    def test_metadata_get(self):
        # [] / get() return a joined string, getall()/getraw() the list.
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        self.assertEqual(["single1-value"], self.metadata.getraw("single1"))

        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1),
                         self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1),
                         self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        self.assertEqual(self.multi1, self.metadata.getraw("multi1"))

        # Missing keys: [] yields "", get() None, getall() [],
        # only getraw() raises.
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertRaises(KeyError, self.metadata.getraw, "nonexistent")

        self.assertEqual(self.metadata._store.items(), self.metadata.rawitems())
        # items() flattens multi-values into one (key, value) pair per value.
        metadata_items = [(x, z) for (x, y) in self.metadata.rawitems() for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

    def test_metadata_implicit_delete(self):
        # Assigning "" to an existing tag records a deletion ...
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)
        # ... but assigning "" to an unknown tag is a no-op.
        self.metadata["unknown"] = ""
        self.assertNotIn("unknown", self.metadata)
        self.assertNotIn("unknown", self.metadata.deleted_tags)

    def test_metadata_set_explicit_empty(self):
        # set() with an empty list re-creates the tag and un-deletes it.
        self.metadata.delete("single1")
        self.metadata.set("single1", [])
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)
        self.assertEqual([], self.metadata.getall("single1"))

    def test_metadata_undelete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        # Re-assigning a value removes the tag from deleted_tags.
        self.metadata["single1"] = "value1"
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_copy(self):
        # copy() replaces the whole store (the old "old" tag is gone).
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.copy(self.metadata)
        self.assertEqual(self.metadata._store, m._store)
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.assertEqual(self.metadata.length, m.length)
        self.assertEqual(self.metadata.images, m.images)

    def test_metadata_copy_without_images(self):
        m = Metadata()
        m.copy(self.metadata, copy_images=False)
        self.assertEqual(self.metadata._store, m._store)
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.assertEqual(self.metadata.length, m.length)
        self.assertEqual(ImageList(), m.images)

    def test_metadata_update(self):
        # update() merges: existing tags survive, deletions propagate.
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.assertEqual(self.metadata.images, m.images)

        self.metadata["old"] = "old-value"
        self.assertEqual(self.metadata._store, m._store)

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_clear_deleted(self):
        self.metadata.delete("single1")
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata.clear_deleted()
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_applyfunc(self):
        # apply_func maps each individual value, not the joined string.
        def func(x):
            return x[1:]
        self.metadata.apply_func(func)

        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))

        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)),
                         self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)),
                         self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)),
                         self.metadata.getall("multi1"))

    def test_metadata_applyfunc_preserve_tags(self):
        # Tags listed in PRESERVED_TAGS must not be transformed.
        self.assertTrue(len(PRESERVED_TAGS) > 0)
        m = Metadata()
        m[PRESERVED_TAGS[0]] = 'value1'
        m['not_preserved'] = 'value2'

        def func(x):
            return x[1:]
        m.apply_func(func)

        self.assertEqual("value1", m[PRESERVED_TAGS[0]])
        self.assertEqual("alue2", m['not_preserved'])

    def test_metadata_applyfunc_delete_tags(self):
        # A func returning None deletes every tag it is applied to.
        def func(x):
            return None
        metadata = Metadata(self.metadata)
        metadata.apply_func(func)
        self.assertEqual(0, len(metadata.rawitems()))
        self.assertEqual(self.metadata.keys(), metadata.deleted_tags)

    def test_length_score(self):
        # Score decays linearly with the absolute length difference.
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))

    def test_compare_is_equal(self):
        m1 = Metadata()
        m1["title"] = "title1"
        m1["tracknumber"] = "2"
        m1.length = 360
        m2 = Metadata()
        m2["title"] = "title1"
        m2["tracknumber"] = "2"
        m2.length = 360
        # compare() must be symmetric and 1 for identical metadata.
        self.assertEqual(m1.compare(m2), m2.compare(m1))
        self.assertEqual(m1.compare(m2), 1)

    def test_compare_lengths(self):
        m1 = Metadata()
        m1.length = 360
        m2 = Metadata()
        m2.length = 300
        self.assertAlmostEqual(m1.compare(m2), 0.998)

    def test_compare_tracknumber_difference(self):
        m1 = Metadata()
        m1["tracknumber"] = "1"
        m2 = Metadata()
        m2["tracknumber"] = "2"
        self.assertEqual(m1.compare(m2), 0)

    def test_compare_deleted(self):
        m1 = Metadata()
        m1["artist"] = "TheArtist"
        m1["title"] = "title1"
        m2 = Metadata()
        m2["artist"] = "TheArtist"
        m2.delete("title")
        self.assertTrue(m1.compare(m2) < 1)

    def test_strip_whitespace(self):
        m1 = Metadata()
        m1["artist"] = "  TheArtist  "
        m1["title"] = "\t\u00A0  tit le1 \r\n"
        m1["genre"] = " \t"
        m1.strip_whitespace()
        self.assertEqual(m1["artist"], "TheArtist")
        self.assertEqual(m1["title"], "tit le1")

    def test_metadata_mapping_init(self):
        d = {'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': '', 'z': {'u', 'w'}}
        deleted_tags = set('c')
        m = Metadata(d, deleted_tags=deleted_tags, length=1234)
        self.assertTrue('a' in m)
        self.assertEqual(m.getraw('a'), ['b'])
        self.assertEqual(m['d'], MULTI_VALUED_JOINER.join(d['d']))
        self.assertNotIn('c', m)
        # 'length' is a kwarg, never a stored tag.
        self.assertNotIn('length', m)
        self.assertIn('c', m.deleted_tags)
        self.assertEqual(m.length, 1234)

    def test_metadata_mapping_init_zero(self):
        # 0 is a real value ('0'); '' and None are discarded.
        m = Metadata(tag1='a', tag2=0, tag3='', tag4=None)
        m['tag5'] = 0
        m['tag1'] = ''
        self.assertIn('tag1', m.deleted_tags)
        self.assertEqual(m['tag2'], '0')
        self.assertNotIn('tag3', m)
        self.assertNotIn('tag4', m)
        self.assertEqual(m['tag5'], '0')

    def test_metadata_mapping_del(self):
        m = self.metadata_d1
        self.assertEqual(m.getraw('a'), ['b'])
        self.assertNotIn('a', m.deleted_tags)

        self.assertNotIn('x', m.deleted_tags)
        self.assertRaises(KeyError, m.getraw, 'x')

        del m['a']
        self.assertRaises(KeyError, m.getraw, 'a')
        self.assertIn('a', m.deleted_tags)

        # NOTE: historic behavior of Metadata.delete()
        # an attempt to delete an non-existing tag, will add it to the list
        # of deleted tags
        # so this will not raise a KeyError
        # as is it differs from dict or even defaultdict behavior
        del m['unknown']
        self.assertIn('unknown', m.deleted_tags)

    def test_metadata_mapping_iter(self):
        l = set(self.metadata_d1)
        self.assertEqual(l, {'a', 'c', 'd'})

    def test_metadata_mapping_keys(self):
        l = set(self.metadata_d1.keys())
        self.assertEqual(l, {'a', 'c', 'd'})

    def test_metadata_mapping_values(self):
        l = set(self.metadata_d1.values())
        self.assertEqual(l, {'b', '2', 'x; y'})

    def test_metadata_mapping_len(self):
        m = self.metadata_d1
        self.assertEqual(len(m), 3)
        # Deleting the never-stored 'x' does not change the length.
        del m['x']
        self.assertEqual(len(m), 3)
        del m['c']
        self.assertEqual(len(m), 2)

    def _check_mapping_update(self, m):
        # Shared expectations for the update() variants below.
        self.assertEqual(m['a'], 'b')
        self.assertEqual(m['c'], '3')
        self.assertEqual(m.getraw('d'), ['u', 'w'])
        self.assertEqual(m['x'], '')
        self.assertIn('x', m.deleted_tags)

    def test_metadata_mapping_update(self):
        # update from Metadata
        m = self.metadata_d2
        m2 = self.metadata_d3
        del m2['x']
        m.update(m2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_dict(self):
        # update from dict
        m = self.metadata_d2
        d2 = {'c': 3, 'd': ['u', 'w'], 'x': ''}
        m.update(d2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_tuple(self):
        # update from tuple
        m = self.metadata_d2
        d2 = (('c', 3), ('d', ['u', 'w']), ('x', ''))
        m.update(d2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_dictlike(self):
        # update from kwargs
        m = self.metadata_d2
        m.update(c=3, d=['u', 'w'], x='')
        self._check_mapping_update(m)

    def test_metadata_mapping_update_noparam(self):
        # update without parameter
        m = self.metadata_d2
        self.assertRaises(TypeError, m.update)
        self.assertEqual(m['a'], 'b')

    def test_metadata_mapping_update_intparam(self):
        # update without parameter
        m = self.metadata_d2
        self.assertRaises(TypeError, m.update, 123)

    def test_metadata_mapping_update_strparam(self):
        # update without parameter
        m = self.metadata_d2
        self.assertRaises(ValueError, m.update, 'abc')

    def test_metadata_mapping_update_kw(self):
        m = Metadata(tag1='a', tag2='b')
        m.update(tag1='c')
        self.assertEqual(m['tag1'], 'c')
        self.assertEqual(m['tag2'], 'b')
        m.update(tag2='')
        self.assertIn('tag2', m.deleted_tags)

    def test_metadata_mapping_update_kw_del(self):
        # Deletions on either side must merge correctly.
        m = Metadata(tag1='a', tag2='b')
        del m['tag1']
        m2 = Metadata(tag1='c', tag2='d')
        del m2['tag2']
        m.update(m2)
        self.assertEqual(m['tag1'], 'c')
        self.assertNotIn('tag2', m)
        self.assertNotIn('tag1', m.deleted_tags)
        self.assertIn('tag2', m.deleted_tags)

    def test_metadata_mapping_images(self):
        image1 = create_image(b'A', comment='A')
        image2 = create_image(b'B', comment='B')

        m1 = Metadata(a='b', length=1234, images=[image1])
        self.assertEqual(m1.images[0], image1)
        self.assertEqual(len(m1), 2)  # one tag, one image

        m1.images.append(image2)
        self.assertEqual(m1.images[1], image2)

        m1.images.pop(0)
        self.assertEqual(m1.images[0], image2)

        # update() replaces the image list as well.
        m2 = Metadata(a='c', length=4567, images=[image1])
        m1.update(m2)
        self.assertEqual(m1.images[0], image1)

        m1.images.pop(0)
        self.assertEqual(len(m1), 1)  # one tag, zero image
        self.assertFalse(m1.images)

    def test_metadata_mapping_iterable(self):
        # Any iterable value is stored element-wise; strings stay whole.
        m = Metadata(tag_tuple=('a', 0))
        m['tag_set'] = {'c', 'd'}
        m['tag_dict'] = {'e': 1, 'f': 2}
        m['tag_str'] = 'gh'
        self.assertIn('0', m.getraw('tag_tuple'))
        self.assertIn('c', m.getraw('tag_set'))
        self.assertIn('e', m.getraw('tag_dict'))
        self.assertIn('gh', m.getraw('tag_str'))
class Album(DataObject, Item):
    """A MusicBrainz release displayed in the tagger.

    Loads the release XML (and release-group version list) from the
    MusicBrainz web service, builds Track objects and their metadata,
    runs metadata plugins and the user's tagger script, and matches
    loose files to tracks.  Unmatched files live in the special
    ``unmatched_files`` cluster.
    """

    # Emitted when the release-group request has finished, whether it
    # succeeded or failed (see _release_group_request_finished).
    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, id, discid=None):
        DataObject.__init__(self, id)
        self.metadata = Metadata()
        self.tracks = []
        self.format_str = ""
        self.loaded = False
        self.load_task = None          # handle of the in-flight web-service task
        self.rgloaded = False
        self.rgid = None
        self._files = 0                # files currently linked to this album's tracks
        self._requests = 0             # outstanding web-service requests
        self._tracks_loaded = False
        self._discid = discid
        self._after_load_callbacks = queue.Queue()
        self.other_versions = []       # other releases in the same release group
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True,
                                       related_album=self, hide_if_empty=True)

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata[u"album"])

    def iterfiles(self, save=False):
        """Yield all files on this album; include unmatched files unless *save*."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def _parse_release(self, document):
        """Parse the release XML into self._new_metadata.

        Returns True when parsing proceeded, False when the release id
        redirected to an album that is already loaded (in which case this
        album object is removed).
        """
        self.log.debug("Loading release %r", self.id)
        self._tracks_loaded = False

        release_node = document.metadata[0].release[0]
        if release_node.id != self.id:
            # The requested MBID was merged/redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_node.id
            album = self.tagger.albums.get(release_node.id)
            if album:
                self.log.debug("Release %r already loaded", release_node.id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                # Re-key this album under the canonical id.
                del self.tagger.albums[self.id]
                self.tagger.albums[release_node.id] = self
                self.id = release_node.id

        # Get release metadata
        m = self._new_metadata
        m.length = 0
        release_to_metadata(release_node, m, config=self.config, album=self)
        self.format_str = media_formats_from_node(release_node.medium_list[0])
        self.rgid = release_node.release_group[0].id
        if self._discid:
            m['musicbrainz_discid'] = self._discid

        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = self.config.setting[
                'va_name']

        # Convert Unicode punctuation
        if self.config.setting['convert_punctuation']:
            m.apply_func(asciipunct)

        m['totaldiscs'] = release_node.medium_list[0].count

        # Run album metadata plugins; a plugin failure is logged and
        # must not abort loading.
        try:
            run_album_metadata_processors(self, m, release_node)
        except:
            self.log.error(traceback.format_exc())

        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web-service callback for the release request."""
        if self.load_task is None:
            # Loading was cancelled via stop_loading().
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.log.error("%r", unicode(http.errorString()))
                # Fix for broken NAT releases: a 404 may mean the files
                # belong to the pseudo "non-album tracks" release.
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = self.config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        trackid = file.metadata["musicbrainz_trackid"]
                        if mbid_validate(trackid) and file.metadata[
                                "album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, trackid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except:
                    error = True
                    self.log.error(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)

    def _parse_release_group(self, document):
        """Collect the other releases of this release group into other_versions."""
        for node in document.metadata[0].release_list[0].release:
            v = {}
            v["mbid"] = node.id
            v["date"] = node.date[0].text if "date" in node.children else ""
            v["country"] = node.country[
                0].text if "country" in node.children else ""
            labels, catnums = label_info_from_node(node.label_info_list[0])
            v["labels"] = ", ".join(set(labels))
            v["catnums"] = ", ".join(set(catnums))
            v["tracks"] = " + ".join(
                [m.track_list[0].count for m in node.medium_list[0].medium])
            v["format"] = media_formats_from_node(node.medium_list[0])
            self.other_versions.append(v)
        self.other_versions.sort(key=lambda x: x["date"])

    def _release_group_request_finished(self, document, http, error):
        """Web-service callback for the release-group request."""
        try:
            if error:
                self.log.error("%r", unicode(http.errorString()))
            else:
                try:
                    self._parse_release_group(document)
                except:
                    error = True
                    self.log.error(traceback.format_exc())
        finally:
            # Always flag completion and notify listeners, even on error.
            self.rgloaded = True
            self.release_group_loaded.emit()

    def _finalize_loading(self, error):
        """Build tracks from the parsed release and publish the new metadata.

        Called after each request finishes; the real work only happens
        once all outstanding requests are done (self._requests == 0).
        """
        if error:
            self.metadata.clear()
            self.metadata['album'] = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return

        if self._requests > 0:
            return

        if not self._tracks_loaded:
            artists = set()
            totalalbumtracks = 0
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars

            for medium_node in self._release_node.medium_list[0].medium:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                totalalbumtracks += int(mm["totaltracks"])
                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)
                for track_node in medium_node.track_list[0].track:
                    track = Track(track_node.recording[0].id, self)
                    self._new_tracks.append(track)
                    # Get track metadata
                    tm = track.metadata
                    tm.copy(mm)
                    track_to_metadata(track_node, track, self.config)
                    track._customize_metadata()
                    self._new_metadata.length += tm.length
                    artists.add(tm["musicbrainz_artistid"])
                    # Run track metadata plugins; log failures but keep going.
                    try:
                        run_track_metadata_processors(self, tm,
                                                      self._release_node,
                                                      track_node)
                    except:
                        self.log.error(traceback.format_exc())

            totalalbumtracks = str(totalalbumtracks)
            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    # More than one track artist => mark as compilation.
                    track.metadata["compilation"] = "1"
            del self._release_node
            self._tracks_loaded = True

        if not self._requests:
            # Prepare parser for user's script
            if self.config.setting["enable_tagger_script"]:
                script = self.config.setting["tagger_script"]
                if script:
                    parser = ScriptParser()
                    for track in self._new_tracks:
                        # Run tagger script for each track
                        try:
                            parser.eval(script, track.metadata)
                        except:
                            self.log.error(traceback.format_exc())
                        # Strip leading/trailing whitespace
                        track.metadata.strip_whitespace()
                    # Run tagger script for the album itself
                    try:
                        parser.eval(script, self._new_metadata)
                    except:
                        self.log.error(traceback.format_exc())
                    self._new_metadata.strip_whitespace()

            # Detach files still linked to the old tracks before the old
            # track list is replaced, then re-match everything.
            for track in self.tracks:
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.match_files(self.unmatched_files.files)
            self.update()
            # NOTE(review): message is not wrapped in _() here, unlike
            # other user-visible strings in this class — confirm intended.
            self.tagger.window.set_statusbar_message('Album %s loaded',
                                                     self.id, timeout=3000)
            while self._after_load_callbacks.qsize() > 0:
                func = self._after_load_callbacks.get()
                func()

    def load(self):
        """Start (re)loading this album from the web service."""
        if self._requests:
            self.log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message('Loading album %s...',
                                                 self.id)
        self.loaded = False
        self.rgloaded = False
        self.rgid = None
        self.other_versions = []
        self.metadata.clear()
        self.metadata['album'] = _("[loading album information]")
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        require_authentication = False
        inc = [
            'release-groups', 'media', 'recordings', 'puids',
            'artist-credits', 'artists', 'aliases', 'labels', 'isrcs'
        ]
        if self.config.setting['release_ars'] or self.config.setting[
                'track_ars']:
            inc += [
                'artist-rels', 'release-rels', 'url-rels', 'recording-rels',
                'work-rels'
            ]
            if self.config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        if self.config.setting['folksonomy_tags']:
            if self.config.setting['only_my_tags']:
                require_authentication = True
                inc += ['user-tags']
            else:
                inc += ['tags']
        if self.config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.xmlws.get_release_by_id(
            self.id, self._release_request_finished, inc=inc,
            mblogin=require_authentication)

    def run_when_loaded(self, func):
        """Run *func* now if loaded, otherwise queue it for after loading."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.put(func)

    def stop_loading(self):
        """Cancel the in-flight load request, if any."""
        if self.load_task:
            self.tagger.xmlws.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        """Refresh the UI item for this album (and optionally its tracks)."""
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        self._files += 1
        self.update(False)  # update_tracks=False

    def _remove_file(self, track, file):
        self._files -= 1
        self.update(False)  # update_tracks=False

    def match_files(self, files, use_trackid=True):
        """Match files to tracks on this album, based on metadata similarity or trackid."""
        for file in list(files):
            if file.state == File.REMOVED:
                continue
            matches = []
            trackid = file.metadata['musicbrainz_trackid']
            if use_trackid and mbid_validate(trackid):
                matches = self._get_trackid_matches(file, trackid)
            if not matches:
                for track in self.tracks:
                    sim = track.metadata.compare(file.orig_metadata)
                    if sim >= self.config.setting['track_matching_threshold']:
                        matches.append((sim, track))
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
            else:
                file.move(self.unmatched_files)

    def match_file(self, file, trackid=None):
        """Match the file on a track on this album, based on trackid or metadata similarity."""
        if file.state == File.REMOVED:
            return
        if trackid is not None:
            matches = self._get_trackid_matches(file, trackid)
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
                return
        self.match_files([file], use_trackid=False)

    def _get_trackid_matches(self, file, trackid):
        """Score tracks sharing *trackid*: 4.0 = track+disc number match,
        3.0 = track number match, 2.0 = id only."""
        matches = []
        tracknumber = file.metadata['tracknumber']
        discnumber = file.metadata['discnumber']
        for track in self.tracks:
            tm = track.metadata
            if trackid == tm['musicbrainz_trackid']:
                if tracknumber == tm['tracknumber']:
                    if discnumber == tm['discnumber']:
                        matches.append((4.0, track))
                        break
                    else:
                        matches.append((3.0, track))
                else:
                    matches.append((2.0, track))
        return matches

    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        # NOTE(review): returns False in this version of the class —
        # confirm this is the intended capability.
        return False

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def get_num_matched_tracks(self):
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def is_complete(self):
        """True when every track has exactly one linked file."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if track.num_linked_files != 1:
                return False
        else:
            return True

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Text for the given tree-view column of this album row."""
        if column == 'title':
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E = left-to-right mark, keeps the counts readable
                # next to RTL album titles.
                text = u'%s\u200E (%d/%d' % (self.metadata['album'],
                                             linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched, )
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved, )
                return text + ')'
            else:
                return self.metadata['album']
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        else:
            return ''

    def switch_release_version(self, mbid):
        """Switch this album to another release (*mbid*) of the same group."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            del self.tagger.albums[self.id]
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load()
class Album(DataObject, Item):
    """A MusicBrainz release being loaded/tagged, with its tracks and matched files.

    NOTE(review): this file defines `Album` more than once; a later definition
    re-binds the name, so this version is shadowed at import time.
    """

    # Qt signal emitted when release-group data for this album finishes loading.
    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, id, discid=None):
        DataObject.__init__(self, id)
        self.metadata = Metadata()
        self.tracks = []
        self.loaded = False
        self.load_task = None
        self.release_group = None
        # Count of files linked to tracks on this album.
        self._files = 0
        # Count of outstanding web-service requests; loading finalizes at 0.
        self._requests = 0
        self._tracks_loaded = False
        self._discid = discid
        self._after_load_callbacks = []
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True, related_album=self, hide_if_empty=True)
        self.errors = []
        # Status text shown in place of the title while loading / on failure.
        self.status = None
        self._album_artists = []

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata[u"album"])

    def iterfiles(self, save=False):
        """Yield all files on this album; include unmatched files unless saving."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def append_album_artist(self, id):
        """Append artist id to the list of album artists and return an AlbumArtist instance"""
        album_artist = AlbumArtist(id)
        self._album_artists.append(album_artist)
        return album_artist

    def get_album_artists(self):
        """Returns the list of album artists (as AlbumArtist objects)"""
        return self._album_artists

    def _parse_release(self, document):
        """Parse a release XML document into self._new_metadata.

        Returns True on success, False when the release redirected to an
        album that is already loaded (in which case this album is removed).
        """
        log.debug("Loading release %r ...", self.id)
        self._tracks_loaded = False
        release_node = document.metadata[0].release[0]
        if release_node.id != self.id:
            # The requested MBID was merged/redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_node.id
            album = self.tagger.albums.get(release_node.id)
            if album:
                log.debug("Release %r already loaded", release_node.id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                # Re-key this album object under the redirected MBID.
                del self.tagger.albums[self.id]
                self.tagger.albums[release_node.id] = self
                self.id = release_node.id
        # Get release metadata
        m = self._new_metadata
        m.length = 0
        rg_node = release_node.release_group[0]
        rg = self.release_group = self.tagger.get_release_group_by_id(rg_node.id)
        rg.loaded_albums.add(self.id)
        rg.refcount += 1
        release_group_to_metadata(rg_node, rg.metadata, rg)
        m.copy(rg.metadata)
        release_to_metadata(release_node, m, album=self)
        if self._discid:
            m['musicbrainz_discid'] = self._discid
        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = config.setting['va_name']
        # Convert Unicode punctuation
        if config.setting['convert_punctuation']:
            m.apply_func(asciipunct)
        m['totaldiscs'] = release_node.medium_list[0].count
        # Add album to collections
        if "collection_list" in release_node.children:
            for node in release_node.collection_list[0].collection:
                # Only register collections owned by the logged-in user.
                if node.editor[0].text.lower() == config.persist["oauth_username"].lower():
                    if node.id not in user_collections:
                        user_collections[node.id] = \
                            Collection(node.id, node.name[0].text, node.release_list[0].count)
                    user_collections[node.id].releases.add(self.id)
        # Run album metadata plugins
        try:
            run_album_metadata_processors(self, m, release_node)
        except:
            self.error_append(traceback.format_exc())
        # Keep the raw node around for _finalize_loading's track pass.
        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web-service callback for the release request; parses or records errors."""
        if self.load_task is None:
            # Loading was cancelled (stop_loading); ignore the late reply.
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.error_append(unicode(http.errorString()))
                # Fix for broken NAT releases
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        recordingid = file.metadata["musicbrainz_recordingid"]
                        if mbid_validate(recordingid) and file.metadata["album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, recordingid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        # All files were NAT recordings; treat as handled, not an error.
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except:
                    error = True
                    self.error_append(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)
        # does http need to be set to None to free the memory used by the network response?
        # http://pyqt.sourceforge.net/Docs/PyQt4/qnetworkaccessmanager.html says:
        # After the request has finished, it is the responsibility of the user
        # to delete the QNetworkReply object at an appropriate time.
        # Do not directly delete it inside the slot connected to finished().
        # You can use the deleteLater() function.

    def error_append(self, msg):
        """Log an error message and record it on this album."""
        log.error(msg)
        self.errors.append(msg)

    def _finalize_loading(self, error):
        """Finish loading once all requests are done: build tracks, run scripts,
        swap in the new metadata, and re-match files."""
        if error:
            self.metadata.clear()
            self.status = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return
        if self._requests > 0:
            # More requests still pending; finalize when the last one lands.
            return
        if not self._tracks_loaded:
            artists = set()
            totalalbumtracks = 0
            absolutetracknumber = 0
            va = self._new_metadata['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars
            for medium_node in self._release_node.medium_list[0].medium:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                discpregap = False
                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)
                if "pregap" in medium_node.children:
                    discpregap = True
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(medium_node.pregap[0], mm, artists, va, absolutetracknumber, discpregap)
                    track.metadata['~pregap'] = "1"
                for track_node in medium_node.track_list[0].track:
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)
                if "data_track_list" in medium_node.children:
                    for track_node in medium_node.data_track_list[0].track:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)
                        track.metadata['~datatrack'] = "1"
            totalalbumtracks = str(absolutetracknumber)
            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    track.metadata["~multiartist"] = "1"
            del self._release_node
            self._tracks_loaded = True
        if not self._requests:
            # Prepare parser for user's script
            if config.setting["enable_tagger_script"]:
                script = config.setting["tagger_script"]
                if script:
                    parser = ScriptParser()
                    for track in self._new_tracks:
                        # Run tagger script for each track
                        try:
                            parser.eval(script, track.metadata)
                        except:
                            self.error_append(traceback.format_exc())
                        # Strip leading/trailing whitespace
                        track.metadata.strip_whitespace()
                    # Run tagger script for the album itself
                    try:
                        parser.eval(script, self._new_metadata)
                    except:
                        self.error_append(traceback.format_exc())
                    self._new_metadata.strip_whitespace()
            # Detach files from the old track objects before swapping in the
            # new track list, then re-match them below.
            for track in self.tracks:
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.status = None
            self.match_files(self.unmatched_files.files)
            self.update()
            self.tagger.window.set_statusbar_message(
                N_('Album %(id)s loaded: %(artist)s - %(album)s'),
                {
                    'id': self.id,
                    'artist': self.metadata['albumartist'],
                    'album': self.metadata['album']
                },
                timeout=3000)
            for func in self._after_load_callbacks:
                func()
            self._after_load_callbacks = []

    def _finalize_loading_track(self, track_node, metadata, artists, va, absolutetracknumber, discpregap):
        """Create one Track from its XML node, fill its metadata, and return it."""
        track = Track(track_node.recording[0].id, self)
        self._new_tracks.append(track)
        # Get track metadata
        tm = track.metadata
        tm.copy(metadata)
        track_to_metadata(track_node, track)
        track.metadata["~absolutetracknumber"] = absolutetracknumber
        track._customize_metadata()
        self._new_metadata.length += tm.length
        artists.add(tm["artist"])
        if va:
            tm["compilation"] = "1"
        if discpregap:
            tm["~discpregap"] = "1"
        # Run track metadata plugins
        try:
            run_track_metadata_processors(self, tm, self._release_node, track_node)
        except:
            self.error_append(traceback.format_exc())
        return track

    def load(self, priority=False, refresh=False):
        """Start (re)loading this album from the MusicBrainz web service."""
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message(
            N_('Loading album %(id)s ...'), {'id': self.id})
        self.loaded = False
        self.status = _("[loading album information]")
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.folksonomy_tags.clear()
        self.metadata.clear()
        self.folksonomy_tags.clear()
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        self.errors = []
        # Build the ws/2 `inc` parameter list from the user's settings; some
        # includes (user tags/ratings) require an authenticated request.
        require_authentication = False
        inc = [
            'release-groups', 'media', 'recordings', 'artist-credits',
            'artists', 'aliases', 'labels', 'isrcs', 'collections'
        ]
        if config.setting['release_ars'] or config.setting['track_ars']:
            inc += [
                'artist-rels', 'release-rels', 'url-rels', 'recording-rels',
                'work-rels'
            ]
            if config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        if config.setting['folksonomy_tags']:
            if config.setting['only_my_tags']:
                require_authentication = True
                inc += ['user-tags']
            else:
                inc += ['tags']
        if config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.xmlws.get_release_by_id(
            self.id, self._release_request_finished, inc=inc,
            mblogin=require_authentication, priority=priority,
            refresh=refresh)

    def run_when_loaded(self, func):
        """Run func now if loaded, otherwise queue it for after loading."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.append(func)

    def stop_loading(self):
        """Cancel the in-flight load request, if any."""
        if self.load_task:
            self.tagger.xmlws.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        """Refresh this album's UI item (and optionally its track items)."""
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        self._files += 1
        self.update(update_tracks=False)

    def _remove_file(self, track, file):
        self._files -= 1
        self.update(update_tracks=False)

    def match_files(self, files, use_recordingid=True):
        """Match files to tracks on this album, based on metadata similarity or recordingid."""
        for file in list(files):
            if file.state == File.REMOVED:
                continue
            matches = []
            recordingid = file.metadata['musicbrainz_recordingid']
            if use_recordingid and mbid_validate(recordingid):
                matches = self._get_recordingid_matches(file, recordingid)
            if not matches:
                # Fall back to fuzzy metadata similarity above the threshold.
                for track in self.tracks:
                    sim = track.metadata.compare(file.orig_metadata)
                    if sim >= config.setting['track_matching_threshold']:
                        matches.append((sim, track))
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
            else:
                file.move(self.unmatched_files)

    def match_file(self, file, recordingid=None):
        """Match the file on a track on this album, based on recordingid or metadata similarity."""
        if file.state == File.REMOVED:
            return
        if recordingid is not None:
            matches = self._get_recordingid_matches(file, recordingid)
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
                return
        self.match_files([file], use_recordingid=False)

    def _get_recordingid_matches(self, file, recordingid):
        # Score candidates sharing the recording MBID: 4.0 = track+disc number
        # both match (stop), 3.0 = track number matches, 2.0 = MBID only.
        matches = []
        tracknumber = file.metadata['tracknumber']
        discnumber = file.metadata['discnumber']
        for track in self.tracks:
            tm = track.metadata
            if recordingid == tm['musicbrainz_recordingid']:
                if tracknumber == tm['tracknumber']:
                    if discnumber == tm['discnumber']:
                        matches.append((4.0, track))
                        break
                    else:
                        matches.append((3.0, track))
                else:
                    matches.append((2.0, track))
        return matches

    # Capability predicates queried by the UI to enable/disable actions.
    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def can_view_info(self):
        return (self.loaded and self.metadata and self.metadata.images) or self.errors

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        """Return how many tracks on this album have at least one linked file."""
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def get_num_total_files(self):
        return self._files + len(self.unmatched_files.files)

    def is_complete(self):
        """Return True when every track reports itself complete."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if not track.is_complete():
                return False
        else:
            # for/else: reached only when no track returned False above.
            return True

    def is_modified(self):
        """Return True if any linked file has unsaved changes."""
        if self.tracks:
            for track in self.tracks:
                for file in track.linked_files:
                    if not file.is_saved():
                        return True
        return False

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Return the display text for the given UI column of this album row."""
        if column == 'title':
            # Show loading/error status in place of the title when set.
            if self.status is not None:
                title = self.status
            else:
                title = self.metadata['album']
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E keeps the "(n/m)" suffix ordered after RTL titles.
                text = u'%s\u200E (%d/%d' % (title, linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched, )
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved, )
                text += ungettext("; %i image", "; %i images",
                                  len(self.metadata.images)) % len(self.metadata.images)
                return text + ')'
            else:
                return title
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        else:
            return ''

    def switch_release_version(self, mbid):
        """Switch this album to another release (by MBID), re-matching its files."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            del self.tagger.albums[self.id]
            self.release_group.loaded_albums.discard(self.id)
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load(priority=True, refresh=True)
class Album(DataObject, Item):
    """A MusicBrainz release being loaded/tagged, with its tracks, matched files
    and cover-art image state.

    NOTE(review): this file defines `Album` several times; at import time the
    last definition wins.
    """

    # Qt signal emitted when release-group data for this album finishes loading.
    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, album_id, discid=None):
        DataObject.__init__(self, album_id)
        self.metadata = Metadata()
        # Metadata as originally loaded, before scripts/plugins modify it.
        self.orig_metadata = Metadata()
        self.tracks = []
        self.loaded = False
        self.load_task = None
        self.release_group = None
        # Count of files linked to tracks on this album.
        self._files = 0
        # Count of outstanding web-service requests; loading finalizes at 0.
        self._requests = 0
        self._tracks_loaded = False
        self._discids = set()
        if discid:
            self._discids.add(discid)
        self._after_load_callbacks = []
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True, related_album=self, hide_if_empty=True)
        self.errors = []
        # Status text shown in place of the title while loading / on failure.
        self.status = None
        self._album_artists = []
        # Gate for update_metadata_images(); disabled during bulk operations.
        self.update_metadata_images_enabled = True

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata["album"])

    def iterfiles(self, save=False):
        """Yield all files on this album; include unmatched files unless saving."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def enable_update_metadata_images(self, enabled):
        """Enable/disable metadata-image updates (used to batch bulk changes)."""
        self.update_metadata_images_enabled = enabled

    def append_album_artist(self, album_artist_id):
        """Append artist id to the list of album artists and return an AlbumArtist instance"""
        album_artist = AlbumArtist(album_artist_id)
        self._album_artists.append(album_artist)
        return album_artist

    def add_discid(self, discid):
        """Register a disc id and propagate it to tracks on matching media."""
        if not discid:
            return
        self._discids.add(discid)
        for track in self.tracks:
            medium_discids = track.metadata.getall('~musicbrainz_discids')
            track_discids = list(self._discids.intersection(medium_discids))
            if track_discids:
                track.metadata['musicbrainz_discid'] = track_discids
                track.update()
                for file in track.linked_files:
                    file.metadata['musicbrainz_discid'] = track_discids
                    file.update()

    def get_album_artists(self):
        """Returns the list of album artists (as AlbumArtist objects)"""
        return self._album_artists

    def _parse_release(self, release_node):
        """Parse a release JSON node into self._new_metadata.

        Returns True on success, False when the release redirected to an
        album that is already loaded (in which case this album is removed).
        """
        log.debug("Loading release %r ...", self.id)
        self._tracks_loaded = False
        release_id = release_node['id']
        if release_id != self.id:
            # The requested MBID was merged/redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_id
            album = self.tagger.albums.get(release_id)
            if album:
                log.debug("Release %r already loaded", release_id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                del self.tagger.albums[self.id]
                self.tagger.albums[release_id] = self
                self.id = release_id
        # Make the release artist nodes available, since they may
        # contain supplementary data (aliases, tags, genres, ratings)
        # which aren't present in the release group, track, or
        # recording artist nodes. We can copy them into those places
        # wherever the IDs match, so that the data is shared and
        # available for use in mbjson.py and external plugins.
        self._release_artist_nodes = _create_artist_node_dict(release_node)
        # Get release metadata
        m = self._new_metadata
        m.length = 0
        rg_node = release_node['release-group']
        rg = self.release_group = self.tagger.get_release_group_by_id(rg_node['id'])
        rg.loaded_albums.add(self.id)
        rg.refcount += 1
        _copy_artist_nodes(self._release_artist_nodes, rg_node)
        release_group_to_metadata(rg_node, rg.metadata, rg)
        m.copy(rg.metadata)
        release_to_metadata(release_node, m, album=self)
        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = config.setting['va_name']
        # Convert Unicode punctuation
        if config.setting['convert_punctuation']:
            m.apply_func(asciipunct)
        m['totaldiscs'] = len(release_node['media'])
        # Add album to collections
        add_release_to_user_collections(release_node)
        # Run album metadata plugins
        try:
            run_album_metadata_processors(self, m, release_node)
        except BaseException:
            self.error_append(traceback.format_exc())
        # Keep the raw node for _finalize_loading's track pass.
        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web-service callback for the release request; parses or records errors."""
        if self.load_task is None:
            # Loading was cancelled (stop_loading); ignore the late reply.
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.error_append(http.errorString())
                # Fix for broken NAT releases
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        recordingid = file.metadata["musicbrainz_recordingid"]
                        if mbid_validate(recordingid) and file.metadata["album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, recordingid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        # All files were NAT recordings; treat as handled, not an error.
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except Exception:
                    error = True
                    self.error_append(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)
        # does http need to be set to None to free the memory used by the network response?
        # http://qt-project.org/doc/qt-5/qnetworkaccessmanager.html says:
        # After the request has finished, it is the responsibility of the user
        # to delete the QNetworkReply object at an appropriate time.
        # Do not directly delete it inside the slot connected to finished().
        # You can use the deleteLater() function.

    def error_append(self, msg):
        """Log an error message and record it on this album."""
        log.error(msg)
        self.errors.append(msg)

    def _finalize_loading(self, error):
        """Finish loading once all requests are done: build tracks, run scripts,
        swap in the new metadata, and re-match files."""
        if error:
            self.metadata.clear()
            self.status = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return
        if self._requests > 0:
            # More requests still pending; finalize when the last one lands.
            return
        if not self._tracks_loaded:
            artists = set()
            all_media = []
            absolutetracknumber = 0
            va = self._new_metadata['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars
            for medium_node in self._release_node['media']:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                discpregap = False
                format = medium_node.get('format')
                if format:
                    all_media.append(format)
                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)
                if 'discs' in medium_node:
                    discids = [disc.get('id') for disc in medium_node['discs']]
                    mm['~musicbrainz_discids'] = discids
                    mm['musicbrainz_discid'] = list(self._discids.intersection(discids))
                if "pregap" in medium_node:
                    discpregap = True
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(medium_node['pregap'], mm, artists, va, absolutetracknumber, discpregap)
                    track.metadata['~pregap'] = "1"
                track_count = medium_node['track-count']
                if track_count:
                    tracklist_node = medium_node['tracks']
                    for track_node in tracklist_node:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)
                if "data-tracks" in medium_node:
                    for track_node in medium_node['data-tracks']:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)
                        track.metadata['~datatrack'] = "1"
            totalalbumtracks = absolutetracknumber
            self._new_metadata['~totalalbumtracks'] = totalalbumtracks
            # Generate a list of unique media, but keep order of first appearance
            self._new_metadata['media'] = " / ".join(list(OrderedDict.fromkeys(all_media)))
            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    track.metadata["~multiartist"] = "1"
            del self._release_node
            del self._release_artist_nodes
            self._tracks_loaded = True
        if not self._requests:
            # Suspend image updates while tracks/files are shuffled around.
            self.enable_update_metadata_images(False)
            # Prepare parser for user's script
            for s_name, s_text in enabled_tagger_scripts_texts():
                parser = ScriptParser()
                for track in self._new_tracks:
                    # Run tagger script for each track
                    try:
                        parser.eval(s_text, track.metadata)
                    except ScriptError:
                        log.exception("Failed to run tagger script %s on track", s_name)
                    track.metadata.strip_whitespace()
                # Run tagger script for the album itself
                try:
                    parser.eval(s_text, self._new_metadata)
                except ScriptError:
                    log.exception("Failed to run tagger script %s on album", s_name)
                self._new_metadata.strip_whitespace()
            # Detach files from the old track objects before swapping in the
            # new track list, then re-match them below.
            for track in self.tracks:
                track.metadata_images_changed.connect(self.update_metadata_images)
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.status = None
            self.match_files(self.unmatched_files.files)
            self.enable_update_metadata_images(True)
            self.update()
            self.tagger.window.set_statusbar_message(
                N_('Album %(id)s loaded: %(artist)s - %(album)s'),
                {
                    'id': self.id,
                    'artist': self.metadata['albumartist'],
                    'album': self.metadata['album']
                },
                timeout=3000
            )
            for func in self._after_load_callbacks:
                func()
            self._after_load_callbacks = []

    def _finalize_loading_track(self, track_node, metadata, artists, va, absolutetracknumber, discpregap):
        """Create one Track from its JSON node, fill its metadata, and return it."""
        # As noted in `_parse_release` above, the release artist nodes
        # may contain supplementary data that isn't present in track
        # artist nodes. Similarly, the track artists may contain
        # information which the recording artists don't. Copy this
        # information across to wherever the artist IDs match.
        _copy_artist_nodes(self._release_artist_nodes, track_node)
        _copy_artist_nodes(self._release_artist_nodes, track_node['recording'])
        _copy_artist_nodes(_create_artist_node_dict(track_node), track_node['recording'])
        track = Track(track_node['recording']['id'], self)
        self._new_tracks.append(track)
        # Get track metadata
        tm = track.metadata
        tm.copy(metadata)
        track_to_metadata(track_node, track)
        tm["~absolutetracknumber"] = absolutetracknumber
        # Snapshot pre-customization metadata for later matching/reverts.
        track.orig_metadata.copy(tm)
        track._customize_metadata()
        self._new_metadata.length += tm.length
        artists.add(tm["artist"])
        if va:
            tm["compilation"] = "1"
        else:
            del tm["compilation"]
        if discpregap:
            tm["~discpregap"] = "1"
        # Run track metadata plugins
        try:
            run_track_metadata_processors(self, tm, track_node, self._release_node)
        except BaseException:
            self.error_append(traceback.format_exc())
        return track

    def load(self, priority=False, refresh=False):
        """Start (re)loading this album from the MusicBrainz web service."""
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message(
            N_('Loading album %(id)s ...'),
            {'id': self.id}
        )
        self.loaded = False
        self.status = _("[loading album information]")
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.genres.clear()
        self.metadata.clear()
        self.genres.clear()
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        self.errors = []
        # Build the ws/2 `inc` parameter list from the user's settings; some
        # includes (user collections/ratings) require an authenticated request.
        require_authentication = False
        inc = ['release-groups', 'media', 'discids', 'recordings', 'artist-credits',
               'artists', 'aliases', 'labels', 'isrcs', 'collections']
        if self.tagger.webservice.oauth_manager.is_authorized():
            require_authentication = True
            inc += ['user-collections']
        if config.setting['release_ars'] or config.setting['track_ars']:
            inc += ['artist-rels', 'release-rels', 'url-rels', 'recording-rels', 'work-rels']
            if config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        require_authentication = self.set_genre_inc_params(inc) or require_authentication
        if config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.mb_api.get_release_by_id(
            self.id, self._release_request_finished, inc=inc,
            mblogin=require_authentication, priority=priority, refresh=refresh)

    def run_when_loaded(self, func):
        """Run func now if loaded, otherwise queue it for after loading."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.append(func)

    def stop_loading(self):
        """Cancel the in-flight load request, if any."""
        if self.load_task:
            self.tagger.webservice.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        """Refresh this album's UI item (and optionally its track items)."""
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        self._files += 1
        self.update(update_tracks=False)
        add_metadata_images(self, [file])
        file.metadata_images_changed.connect(self.update_metadata_images)

    def _remove_file(self, track, file):
        self._files -= 1
        self.update(update_tracks=False)
        file.metadata_images_changed.disconnect(self.update_metadata_images)
        remove_metadata_images(self, [file])

    def _match_files(self, files, recordingid=None, threshold=0):
        """Match files to tracks on this album, based on metadata similarity or recordingid."""
        # Lazily built lookup: (recordingid[, tracknumber[, discnumber]]) -> track.
        tracks_cache = defaultdict(lambda: None)

        def build_tracks_cache():
            for track in self.tracks:
                tm_recordingid = track.orig_metadata['musicbrainz_recordingid']
                tm_tracknumber = track.orig_metadata['tracknumber']
                tm_discnumber = track.orig_metadata['discnumber']
                for tup in (
                        (tm_recordingid, tm_tracknumber, tm_discnumber),
                        (tm_recordingid, tm_tracknumber),
                        (tm_recordingid, )):
                    tracks_cache[tup] = track

        SimMatchAlbum = namedtuple('SimMatchAlbum', 'similarity track')

        for file in list(files):
            if file.state == File.REMOVED:
                continue
            # if we have a recordingid to match against, use that in priority
            recid = recordingid or file.metadata['musicbrainz_recordingid']
            if recid and mbid_validate(recid):
                if not tracks_cache:
                    build_tracks_cache()
                tracknumber = file.metadata['tracknumber']
                discnumber = file.metadata['discnumber']
                # Most-specific key first, then progressively looser lookups.
                track = (tracks_cache[(recid, tracknumber, discnumber)]
                         or tracks_cache[(recid, tracknumber)]
                         or tracks_cache[(recid, )])
                if track:
                    yield (file, track)
                    continue

            # try to match by similarity
            def candidates():
                for track in self.tracks:
                    yield SimMatchAlbum(
                        similarity=track.metadata.compare(file.orig_metadata),
                        track=track
                    )

            no_match = SimMatchAlbum(similarity=-1, track=self.unmatched_files)
            best_match = find_best_match(candidates, no_match)
            if best_match.similarity < threshold:
                yield (file, no_match.track)
            else:
                yield (file, best_match.result.track)

    def match_files(self, files, recordingid=None):
        """Match and move files to tracks on this album, based on metadata similarity or recordingid."""
        moves = self._match_files(files, recordingid=recordingid,
                                  threshold=config.setting['track_matching_threshold'])
        for file, target in moves:
            file.move(target)

    # Capability predicates queried by the UI to enable/disable actions.
    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def can_view_info(self):
        return (self.loaded and (self.metadata.images or self.orig_metadata.images)) or self.errors

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        """Return how many tracks on this album have at least one linked file."""
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def get_num_total_files(self):
        return self._files + len(self.unmatched_files.files)

    def is_complete(self):
        """Return True when every track is complete and no unmatched files remain."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if not track.is_complete():
                return False
        if self.get_num_unmatched_files():
            return False
        else:
            return True

    def is_modified(self):
        """Return True if any linked file has unsaved changes."""
        if self.tracks:
            for track in self.tracks:
                for file in track.linked_files:
                    if not file.is_saved():
                        return True
        return False

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Return the display text for the given UI column of this album row."""
        if column == 'title':
            # Show loading/error status in place of the title when set.
            if self.status is not None:
                title = self.status
            else:
                title = self.metadata['album']
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E keeps the "(n/m)" suffix ordered after RTL titles.
                text = '%s\u200E (%d/%d' % (title, linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched,)
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved,)
                # CoverArt.set_metadata uses the orig_metadata.images if metadata.images is empty
                # in order to show existing cover art if there's no cover art for a release. So
                # we do the same here in order to show the number of images consistently.
                if self.metadata.images:
                    metadata = self.metadata
                else:
                    metadata = self.orig_metadata
                number_of_images = len(metadata.images)
                if getattr(metadata, 'has_common_images', True):
                    text += ngettext("; %i image", "; %i images",
                                     number_of_images) % number_of_images
                else:
                    text += ngettext("; %i image not in all tracks",
                                     "; %i different images among tracks",
                                     number_of_images) % number_of_images
                return text + ')'
            else:
                return title
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        elif column == 'tracknumber':
            return self.metadata['~totalalbumtracks']
        elif column == 'discnumber':
            return self.metadata['totaldiscs']
        else:
            return self.metadata[column]

    def switch_release_version(self, mbid):
        """Switch this album to another release (by MBID), re-matching its files."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            del self.tagger.albums[self.id]
            self.release_group.loaded_albums.discard(self.id)
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load(priority=True, refresh=True)

    def update_metadata_images(self):
        """Recompute this album's aggregated images unless updates are suspended."""
        if not self.update_metadata_images_enabled:
            return
        update_metadata_images(self)
        self.update(False)

    def keep_original_images(self):
        """Revert all tracks and unmatched files to their original images."""
        # Batch: suppress per-file image updates, recompute once at the end.
        self.enable_update_metadata_images(False)
        for track in self.tracks:
            track.keep_original_images()
        for file in list(self.unmatched_files.files):
            file.keep_original_images()
        self.enable_update_metadata_images(True)
        self.update_metadata_images()
class Album(DataObject, Item):
    """A MusicBrainz release loaded (asynchronously) from the web service.

    Owns the release's Metadata, its Track objects and a special
    "Unmatched Files" Cluster.  Loading is driven by the tagger's
    web-service layer: load() fires the request, _release_request_finished()
    receives the reply, _parse_release()/_finalize_loading() build the
    track list and run plugin/script hooks.
    """

    # Qt signal; NOTE(review): no emitter is visible in this class —
    # presumably emitted elsewhere (e.g. by the release-group loader).
    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, album_id, discid=None):
        DataObject.__init__(self, album_id)
        self.metadata = Metadata()
        self.orig_metadata = Metadata()
        self.tracks = []
        self.loaded = False
        self.load_task = None
        self.release_group = None
        # _files counts files matched to tracks; unmatched files live in
        # self.unmatched_files instead (see get_num_total_files()).
        self._files = 0
        self._requests = 0
        self._tracks_loaded = False
        self._discids = set()
        if discid:
            self._discids.add(discid)
        self._after_load_callbacks = []
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True, related_album=self, hide_if_empty=True)
        self.errors = []
        self.status = None
        self._album_artists = []
        self.update_metadata_images_enabled = True

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata["album"])

    def iterfiles(self, save=False):
        """Yield all files of this album; unmatched files are skipped when save=True."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def enable_update_metadata_images(self, enabled):
        # Gate used to batch image updates (see keep_original_images()).
        self.update_metadata_images_enabled = enabled

    def append_album_artist(self, album_artist_id):
        """Append artist id to the list of album artists
        and return an AlbumArtist instance"""
        album_artist = AlbumArtist(album_artist_id)
        self._album_artists.append(album_artist)
        return album_artist

    def add_discid(self, discid):
        """Register an additional disc ID and re-tag tracks whose medium matches it."""
        if not discid:
            return
        self._discids.add(discid)
        for track in self.tracks:
            medium_discids = track.metadata.getall('~musicbrainz_discids')
            track_discids = list(self._discids.intersection(medium_discids))
            if track_discids:
                track.metadata['musicbrainz_discid'] = track_discids
                track.update()
                for file in track.linked_files:
                    file.metadata['musicbrainz_discid'] = track_discids
                    file.update()

    def get_album_artists(self):
        """Returns the list of album artists (as AlbumArtist objects)"""
        return self._album_artists

    def _parse_release(self, release_node):
        """Parse the release JSON node into self._new_metadata.

        Returns True when parsing succeeded, False when this album was
        merged into an already-loaded album (after an MBID redirect).
        """
        log.debug("Loading release %r ...", self.id)
        self._tracks_loaded = False
        release_id = release_node['id']
        # The server may answer with a different MBID (merged/redirected
        # release); re-key this album, or fold into an existing one.
        if release_id != self.id:
            self.tagger.mbid_redirects[self.id] = release_id
            album = self.tagger.albums.get(release_id)
            if album:
                log.debug("Release %r already loaded", release_id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                del self.tagger.albums[self.id]
                self.tagger.albums[release_id] = self
                self.id = release_id

        # Make the release artist nodes available, since they may
        # contain supplementary data (aliases, tags, genres, ratings)
        # which aren't present in the release group, track, or
        # recording artist nodes. We can copy them into those places
        # wherever the IDs match, so that the data is shared and
        # available for use in mbjson.py and external plugins.
        self._release_artist_nodes = _create_artist_node_dict(release_node)

        # Get release metadata
        m = self._new_metadata
        m.length = 0

        rg_node = release_node['release-group']
        rg = self.release_group = self.tagger.get_release_group_by_id(rg_node['id'])
        rg.loaded_albums.add(self.id)
        rg.refcount += 1

        _copy_artist_nodes(self._release_artist_nodes, rg_node)
        release_group_to_metadata(rg_node, rg.metadata, rg)
        m.copy(rg.metadata)
        release_to_metadata(release_node, m, album=self)

        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = config.setting['va_name']

        # Convert Unicode punctuation
        if config.setting['convert_punctuation']:
            m.apply_func(asciipunct)

        m['totaldiscs'] = len(release_node['media'])

        # Add album to collections
        add_release_to_user_collections(release_node)

        # Run album metadata plugins
        try:
            run_album_metadata_processors(self, m, release_node)
        except BaseException:
            # Plugin failures are recorded, never fatal to loading.
            self.error_append(traceback.format_exc())

        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web-service callback for the release lookup started in load()."""
        if self.load_task is None:
            # Request was cancelled via stop_loading().
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.error_append(http.errorString())
                # Fix for broken NAT releases
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        recordingid = file.metadata["musicbrainz_recordingid"]
                        if mbid_validate(recordingid) and file.metadata["album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, recordingid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except Exception:
                    error = True
                    self.error_append(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)
        # does http need to be set to None to free the memory used by the network response?
        # http://qt-project.org/doc/qt-5/qnetworkaccessmanager.html says:
        #     After the request has finished, it is the responsibility of the user
        #     to delete the QNetworkReply object at an appropriate time.
        #     Do not directly delete it inside the slot connected to finished().
        #     You can use the deleteLater() function.

    def error_append(self, msg):
        """Log msg and record it so the UI can display load errors."""
        log.error(msg)
        self.errors.append(msg)

    def _finalize_loading(self, error):
        """Finish loading once all outstanding requests are done.

        On error, resets the album and shows an error status.  Otherwise
        builds Track objects from the parsed release node (once), then —
        when no requests remain — runs tagger scripts, swaps in the new
        metadata/track list and re-matches the unmatched files.
        """
        if error:
            self.metadata.clear()
            self.status = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return

        if self._requests > 0:
            return

        if not self._tracks_loaded:
            artists = set()
            totalalbumtracks = 0
            absolutetracknumber = 0
            va = self._new_metadata['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID

            # _djmix_ars is optionally attached by a plugin/processor;
            # maps disc number -> DJ-mixer names.  TODO confirm producer.
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars

            for medium_node in self._release_node['media']:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                discpregap = False

                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)

                if 'discs' in medium_node:
                    discids = [disc.get('id') for disc in medium_node['discs']]
                    mm['~musicbrainz_discids'] = discids
                    mm['musicbrainz_discid'] = list(self._discids.intersection(discids))

                # Pregap and data tracks are numbered into the same
                # absolute sequence as regular tracks.
                if "pregap" in medium_node:
                    discpregap = True
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(medium_node['pregap'], mm, artists, va, absolutetracknumber, discpregap)
                    track.metadata['~pregap'] = "1"

                track_count = medium_node['track-count']
                if track_count:
                    tracklist_node = medium_node['tracks']
                    for track_node in tracklist_node:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)

                if "data-tracks" in medium_node:
                    for track_node in medium_node['data-tracks']:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)
                        track.metadata['~datatrack'] = "1"

            totalalbumtracks = str(absolutetracknumber)

            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    track.metadata["~multiartist"] = "1"
            del self._release_node
            del self._release_artist_nodes
            self._tracks_loaded = True

        if not self._requests:
            # Image updates are suspended while files are shuffled around.
            self.enable_update_metadata_images(False)
            # Prepare parser for user's script
            for s_name, s_text in enabled_tagger_scripts_texts():
                parser = ScriptParser()
                for track in self._new_tracks:
                    # Run tagger script for each track
                    try:
                        parser.eval(s_text, track.metadata)
                    except ScriptError:
                        log.exception("Failed to run tagger script %s on track", s_name)
                    track.metadata.strip_whitespace()
                # Run tagger script for the album itself
                try:
                    parser.eval(s_text, self._new_metadata)
                except ScriptError:
                    log.exception("Failed to run tagger script %s on album", s_name)
                self._new_metadata.strip_whitespace()

            # Detach all files so they can be re-matched against the
            # freshly loaded track list below.
            for track in self.tracks:
                track.metadata_images_changed.connect(self.update_metadata_images)
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.status = None
            self.match_files(self.unmatched_files.files)
            self.enable_update_metadata_images(True)
            self.update()
            self.tagger.window.set_statusbar_message(
                N_('Album %(id)s loaded: %(artist)s - %(album)s'),
                {
                    'id': self.id,
                    'artist': self.metadata['albumartist'],
                    'album': self.metadata['album']
                },
                timeout=3000
            )
            for func in self._after_load_callbacks:
                func()
            self._after_load_callbacks = []

    def _finalize_loading_track(self, track_node, metadata, artists, va, absolutetracknumber, discpregap):
        """Build one Track from its JSON node and append it to self._new_tracks.

        metadata is the per-medium template copied into the track;
        artists accumulates distinct artist names (for ~multiartist);
        va/discpregap flag Various-Artists releases and pregap discs.
        """
        # As noted in `_parse_release` above, the release artist nodes
        # may contain supplementary data that isn't present in track
        # artist nodes. Similarly, the track artists may contain
        # information which the recording artists don't. Copy this
        # information across to wherever the artist IDs match.
        _copy_artist_nodes(self._release_artist_nodes, track_node)
        _copy_artist_nodes(self._release_artist_nodes, track_node['recording'])
        _copy_artist_nodes(_create_artist_node_dict(track_node), track_node['recording'])

        track = Track(track_node['recording']['id'], self)
        self._new_tracks.append(track)

        # Get track metadata
        tm = track.metadata
        tm.copy(metadata)
        track_to_metadata(track_node, track)
        tm["~absolutetracknumber"] = absolutetracknumber
        track.orig_metadata.copy(tm)
        track._customize_metadata()

        self._new_metadata.length += tm.length
        artists.add(tm["artist"])
        if va:
            tm["compilation"] = "1"
        else:
            del tm["compilation"]
        if discpregap:
            tm["~discpregap"] = "1"

        # Run track metadata plugins
        try:
            run_track_metadata_processors(self, tm, self._release_node, track_node)
        except BaseException:
            self.error_append(traceback.format_exc())

        return track

    def load(self, priority=False, refresh=False):
        """Start (re)loading this album from the MusicBrainz web service."""
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message(
            N_('Loading album %(id)s ...'),
            {'id': self.id}
        )
        self.loaded = False
        self.status = _("[loading album information]")
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.genres.clear()
        self.metadata.clear()
        self.genres.clear()
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        # Counted down in _release_request_finished().
        self._requests = 1
        self.errors = []
        require_authentication = False
        inc = ['release-groups', 'media', 'discids', 'recordings', 'artist-credits',
               'artists', 'aliases', 'labels', 'isrcs', 'collections']
        if self.tagger.webservice.oauth_manager.is_authorized():
            require_authentication = True
            inc += ['user-collections']
        if config.setting['release_ars'] or config.setting['track_ars']:
            inc += ['artist-rels', 'release-rels', 'url-rels', 'recording-rels', 'work-rels']
            if config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        require_authentication = self.set_genre_inc_params(inc) or require_authentication
        if config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.mb_api.get_release_by_id(
            self.id, self._release_request_finished, inc=inc,
            mblogin=require_authentication, priority=priority, refresh=refresh)

    def run_when_loaded(self, func):
        """Call func now if loaded, otherwise queue it for after loading."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.append(func)

    def stop_loading(self):
        if self.load_task:
            self.tagger.webservice.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        # self.item is the UI tree item; may be unset before display.
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        """Bookkeeping when a file is linked to one of this album's tracks."""
        self._files += 1
        self.update(update_tracks=False)
        add_metadata_images(self, [file])
        file.metadata_images_changed.connect(self.update_metadata_images)

    def _remove_file(self, track, file):
        """Bookkeeping when a file is unlinked from one of this album's tracks."""
        self._files -= 1
        self.update(update_tracks=False)
        file.metadata_images_changed.disconnect(self.update_metadata_images)
        remove_metadata_images(self, [file])

    def _match_files(self, files, recordingid=None, threshold=0):
        """Match files to tracks on this album, based on metadata similarity or recordingid.

        Generator yielding (file, target) pairs; target is a Track or the
        unmatched-files cluster when similarity falls below threshold.
        """
        tracks_cache = defaultdict(lambda: None)

        def build_tracks_cache():
            # Lazily index tracks by (recordingid[, tracknumber[, discnumber]])
            # so exact-ID lookups beat similarity scoring.
            for track in self.tracks:
                tm_recordingid = track.orig_metadata['musicbrainz_recordingid']
                tm_tracknumber = track.orig_metadata['tracknumber']
                tm_discnumber = track.orig_metadata['discnumber']
                for tup in (
                    (tm_recordingid, tm_tracknumber, tm_discnumber),
                    (tm_recordingid, tm_tracknumber),
                    (tm_recordingid, )):
                    tracks_cache[tup] = track

        SimMatchAlbum = namedtuple('SimMatchAlbum', 'similarity track')

        for file in list(files):
            if file.state == File.REMOVED:
                continue
            # if we have a recordingid to match against, use that in priority
            recid = recordingid or file.metadata['musicbrainz_recordingid']
            if recid and mbid_validate(recid):
                if not tracks_cache:
                    build_tracks_cache()
                tracknumber = file.metadata['tracknumber']
                discnumber = file.metadata['discnumber']
                track = (tracks_cache[(recid, tracknumber, discnumber)]
                         or tracks_cache[(recid, tracknumber)]
                         or tracks_cache[(recid, )])
                if track:
                    yield (file, track)
                    continue

            # try to match by similarity
            def candidates():
                for track in self.tracks:
                    yield SimMatchAlbum(
                        similarity=track.metadata.compare(file.orig_metadata),
                        track=track
                    )

            no_match = SimMatchAlbum(similarity=-1, track=self.unmatched_files)
            best_match = find_best_match(candidates, no_match)

            if best_match.similarity < threshold:
                yield (file, no_match.track)
            else:
                yield (file, best_match.result.track)

    def match_files(self, files, recordingid=None):
        """Match and move files to tracks on this album, based on metadata similarity or recordingid."""
        moves = self._match_files(files, recordingid=recordingid,
                                  threshold=config.setting['track_matching_threshold'])
        for file, target in moves:
            file.move(target)

    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def can_view_info(self):
        return (self.loaded and (self.metadata.images or self.orig_metadata.images)) or self.errors

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def get_num_total_files(self):
        return self._files + len(self.unmatched_files.files)

    def is_complete(self):
        """True when every track has a file and nothing is left unmatched."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if not track.is_complete():
                return False
        if self.get_num_unmatched_files():
            return False
        else:
            return True

    def is_modified(self):
        if self.tracks:
            for track in self.tracks:
                for file in track.linked_files:
                    if not file.is_saved():
                        return True
        return False

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Return the display text for the given UI column name."""
        if column == 'title':
            if self.status is not None:
                title = self.status
            else:
                title = self.metadata['album']
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E (LTR mark) keeps the "(n/m ...)" suffix rendering
                # correctly next to RTL album titles.
                text = '%s\u200E (%d/%d' % (title, linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched,)
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved,)
                # CoverArt.set_metadata uses the orig_metadata.images if metadata.images is empty
                # in order to show existing cover art if there's no cover art for a release. So
                # we do the same here in order to show the number of images consistently.
                if self.metadata.images:
                    metadata = self.metadata
                else:
                    metadata = self.orig_metadata

                number_of_images = len(metadata.images)
                if getattr(metadata, 'has_common_images', True):
                    text += ngettext("; %i image", "; %i images",
                                     number_of_images) % number_of_images
                else:
                    text += ngettext("; %i image not in all tracks", "; %i different images among tracks",
                                     number_of_images) % number_of_images
                return text + ')'
            else:
                return title
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        else:
            return ''

    def switch_release_version(self, mbid):
        """Re-point this album at another release MBID and reload it."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            # Target version already loaded: merge into it and drop self.
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            del self.tagger.albums[self.id]
            self.release_group.loaded_albums.discard(self.id)
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load(priority=True, refresh=True)

    def update_metadata_images(self):
        if not self.update_metadata_images_enabled:
            return
        update_metadata_images(self)
        self.update(False)

    def keep_original_images(self):
        """Restore original embedded images on all tracks and unmatched files."""
        self.enable_update_metadata_images(False)
        for track in self.tracks:
            track.keep_original_images()
        for file in list(self.unmatched_files.files):
            file.keep_original_images()
        self.enable_update_metadata_images(True)
        self.update_metadata_images()
class MetadataTest(unittest.TestCase):
    """Unit tests for the Metadata mapping (dict-backed implementation).

    setUp builds a Metadata with single-valued, multi-valued and hidden
    (``~``-prefixed) tags; several tests peek at the raw storage through
    unbound ``dict`` methods, which assumes Metadata subclasses dict.
    """

    original = None
    tags = []

    def setUp(self):
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique called twice on purpose: the duplicate must be dropped.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        self.metadata["~hidden"] = "hidden-value"

    def tearDown(self):
        pass

    def test_metadata_set(self):
        # Raw storage is always a list of values, even for single tags.
        self.assertEqual(["single1-value"], dict.get(self.metadata, "single1"))
        self.assertEqual(["single2-value"], dict.get(self.metadata, "single2"))
        self.assertEqual(self.multi1, dict.get(self.metadata, "multi1"))
        self.assertEqual(self.multi2, dict.get(self.metadata, "multi2"))
        self.assertEqual(self.multi3, dict.get(self.metadata, "multi3"))
        self.assertEqual(["hidden-value"], dict.get(self.metadata, "~hidden"))

    def test_metadata_get(self):
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))

        # Multi-valued tags are joined for scalar access.
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))

        # Missing tags: "" via [], None via get(), [] via getall().
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))

        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        # Explicit delete() records the tag in deleted_tags.
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

        # Assigning an empty value is an implicit delete.
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)

    def test_metadata_update(self):
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        # Keys absent from the source survive; deletions propagate.
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)

        # After adding "old" back, both mappings must agree key-for-key.
        self.metadata["old"] = "old-value"
        for (key, value) in dict.items(self.metadata):
            self.assertIn(key, m)
            self.assertEqual(value, dict.get(m, key))
        for (key, value) in dict.items(m):
            self.assertIn(key, self.metadata)
            self.assertEqual(value, dict.get(self.metadata, key))

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_applyfunc(self):
        def func(x):
            # Strip the first character of every value.
            return x[1:]
        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))

        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)), self.metadata.getall("multi1"))

        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_length_score(self):
        # (length_a_ms, length_b_ms, expected similarity in [0, 1]).
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))
class MetadataTest(PicardTestCase): original = None tags = [] def setUp(self): super().setUp() config.setting = settings.copy() self.metadata = Metadata() self.metadata["single1"] = "single1-value" self.metadata.add_unique("single2", "single2-value") self.metadata.add_unique("single2", "single2-value") self.multi1 = ["multi1-value", "multi1-value"] self.metadata.add("multi1", self.multi1[0]) self.metadata.add("multi1", self.multi1[1]) self.multi2 = ["multi2-value1", "multi2-value2"] self.metadata["multi2"] = self.multi2 self.multi3 = ["multi3-value1", "multi3-value2"] self.metadata.set("multi3", self.multi3) self.metadata["~hidden"] = "hidden-value" self.metadata_d1 = Metadata({'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': ''}) self.metadata_d2 = Metadata({'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': 'z'}) self.metadata_d3 = Metadata({'c': 3, 'd': ['u', 'w'], 'x': 'p'}) def tearDown(self): pass def test_metadata_setitem(self): self.assertEqual(["single1-value"], self.metadata.getraw("single1")) self.assertEqual(["single2-value"], self.metadata.getraw("single2")) self.assertEqual(self.multi1, self.metadata.getraw("multi1")) self.assertEqual(self.multi2, self.metadata.getraw("multi2")) self.assertEqual(self.multi3, self.metadata.getraw("multi3")) self.assertEqual(["hidden-value"], self.metadata.getraw("~hidden")) def test_metadata_set_all_values_as_string(self): for val in (0, 2, True): str_val = str(val) self.metadata.set('val1', val) self.assertEqual([str_val], self.metadata.getraw("val1")) self.metadata['val2'] = val self.assertEqual([str_val], self.metadata.getraw("val2")) del self.metadata['val3'] self.metadata.add('val3', val) self.assertEqual([str_val], self.metadata.getraw("val3")) del self.metadata['val4'] self.metadata.add_unique('val4', val) self.assertEqual([str_val], self.metadata.getraw("val4")) def test_metadata_get(self): self.assertEqual("single1-value", self.metadata["single1"]) self.assertEqual("single1-value", self.metadata.get("single1")) 
self.assertEqual(["single1-value"], self.metadata.getall("single1")) self.assertEqual(["single1-value"], self.metadata.getraw("single1")) self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata["multi1"]) self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata.get("multi1")) self.assertEqual(self.multi1, self.metadata.getall("multi1")) self.assertEqual(self.multi1, self.metadata.getraw("multi1")) self.assertEqual("", self.metadata["nonexistent"]) self.assertEqual(None, self.metadata.get("nonexistent")) self.assertEqual([], self.metadata.getall("nonexistent")) self.assertRaises(KeyError, self.metadata.getraw, "nonexistent") self.assertEqual(self.metadata._store.items(), self.metadata.rawitems()) metadata_items = [(x, z) for (x, y) in self.metadata.rawitems() for z in y] self.assertEqual(metadata_items, list(self.metadata.items())) def test_metadata_unset(self): self.metadata.unset("single1") self.assertNotIn("single1", self.metadata) self.assertNotIn("single1", self.metadata.deleted_tags) self.assertRaises(KeyError, self.metadata.unset, 'unknown_tag') def test_metadata_delete(self): del self.metadata["single1"] self.assertNotIn("single1", self.metadata) self.assertIn("single1", self.metadata.deleted_tags) def test_metadata_legacy_delete(self): self.metadata.delete("single1") self.assertNotIn("single1", self.metadata) self.assertIn("single1", self.metadata.deleted_tags) def test_metadata_implicit_delete(self): self.metadata["single2"] = "" self.assertNotIn("single2", self.metadata) self.assertIn("single2", self.metadata.deleted_tags) self.metadata["unknown"] = "" self.assertNotIn("unknown", self.metadata) self.assertNotIn("unknown", self.metadata.deleted_tags) def test_metadata_undelete(self): self.metadata.delete("single1") self.assertNotIn("single1", self.metadata) self.assertIn("single1", self.metadata.deleted_tags) self.metadata["single1"] = "value1" self.assertIn("single1", self.metadata) self.assertNotIn("single1", 
self.metadata.deleted_tags) def test_normalize_tag(self): self.assertEqual('sometag', Metadata.normalize_tag('sometag')) self.assertEqual('sometag', Metadata.normalize_tag('sometag:')) self.assertEqual('sometag', Metadata.normalize_tag('sometag::')) self.assertEqual('sometag:desc', Metadata.normalize_tag('sometag:desc')) def test_metadata_tag_trailing_colon(self): self.metadata['tag:'] = 'Foo' self.assertIn('tag', self.metadata) self.assertIn('tag:', self.metadata) self.assertEqual('Foo', self.metadata['tag']) self.assertEqual('Foo', self.metadata['tag:']) del self.metadata['tag'] self.assertNotIn('tag', self.metadata) self.assertNotIn('tag:', self.metadata) def test_metadata_copy(self): m = Metadata() m["old"] = "old-value" self.metadata.delete("single1") m.copy(self.metadata) self.assertEqual(self.metadata._store, m._store) self.assertEqual(self.metadata.deleted_tags, m.deleted_tags) self.assertEqual(self.metadata.length, m.length) self.assertEqual(self.metadata.images, m.images) def test_metadata_copy_without_images(self): m = Metadata() m.copy(self.metadata, copy_images=False) self.assertEqual(self.metadata._store, m._store) self.assertEqual(self.metadata.deleted_tags, m.deleted_tags) self.assertEqual(self.metadata.length, m.length) self.assertEqual(ImageList(), m.images) def test_metadata_update(self): m = Metadata() m["old"] = "old-value" self.metadata.delete("single1") m.update(self.metadata) self.assertIn("old", m) self.assertNotIn("single1", m) self.assertIn("single1", m.deleted_tags) self.assertEqual("single2-value", m["single2"]) self.assertEqual(self.metadata.deleted_tags, m.deleted_tags) self.assertEqual(self.metadata.images, m.images) self.metadata["old"] = "old-value" self.assertEqual(self.metadata._store, m._store) def test_metadata_clear(self): self.metadata.clear() self.assertEqual(0, len(self.metadata)) def test_metadata_clear_deleted(self): self.metadata.delete("single1") self.assertIn("single1", self.metadata.deleted_tags) 
self.metadata.clear_deleted() self.assertNotIn("single1", self.metadata.deleted_tags) def test_metadata_applyfunc(self): def func(x): return x[1:] self.metadata.apply_func(func) self.assertEqual("ingle1-value", self.metadata["single1"]) self.assertEqual("ingle1-value", self.metadata.get("single1")) self.assertEqual(["ingle1-value"], self.metadata.getall("single1")) self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata["multi1"]) self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata.get("multi1")) self.assertEqual(list(map(func, self.multi1)), self.metadata.getall("multi1")) def test_metadata_applyfunc_preserve_tags(self): self.assertTrue(len(PRESERVED_TAGS) > 0) m = Metadata() m[PRESERVED_TAGS[0]] = 'value1' m['not_preserved'] = 'value2' def func(x): return x[1:] m.apply_func(func) self.assertEqual("value1", m[PRESERVED_TAGS[0]]) self.assertEqual("alue2", m['not_preserved']) def test_metadata_applyfunc_delete_tags(self): def func(x): return None metadata = Metadata(self.metadata) metadata.apply_func(func) self.assertEqual(0, len(metadata.rawitems())) self.assertEqual(self.metadata.keys(), metadata.deleted_tags) def test_length_score(self): results = [(20000, 0, 0.333333333333), (20000, 10000, 0.666666666667), (20000, 20000, 1.0), (20000, 30000, 0.666666666667), (20000, 40000, 0.333333333333), (20000, 50000, 0.0)] for (a, b, expected) in results: actual = Metadata.length_score(a, b) self.assertAlmostEqual(expected, actual, msg="a={a}, b={b}".format(a=a, b=b)) def test_compare_is_equal(self): m1 = Metadata() m1["title"] = "title1" m1["tracknumber"] = "2" m1.length = 360 m2 = Metadata() m2["title"] = "title1" m2["tracknumber"] = "2" m2.length = 360 self.assertEqual(m1.compare(m2), m2.compare(m1)) self.assertEqual(m1.compare(m2), 1) def test_compare_with_ignored(self): m1 = Metadata() m1["title"] = "title1" m1["tracknumber"] = "2" m1.length = 360 m2 = Metadata() m2["title"] = "title1" m2["tracknumber"] = "3" 
m2.length = 300 self.assertNotEqual(m1.compare(m2), 1) self.assertEqual(m1.compare(m2, ignored=['tracknumber', '~length']), 1) def test_compare_lengths(self): m1 = Metadata() m1.length = 360 m2 = Metadata() m2.length = 300 self.assertAlmostEqual(m1.compare(m2), 0.998) def test_compare_tracknumber_difference(self): m1 = Metadata() m1["tracknumber"] = "1" m2 = Metadata() m2["tracknumber"] = "2" m3 = Metadata() m3["tracknumber"] = "2" self.assertEqual(m1.compare(m2), 0) self.assertEqual(m2.compare(m3), 1) def test_compare_discnumber_difference(self): m1 = Metadata() m1["discnumber"] = "1" m2 = Metadata() m2["discnumber"] = "2" m3 = Metadata() m3["discnumber"] = "2" self.assertEqual(m1.compare(m2), 0) self.assertEqual(m2.compare(m3), 1) def test_compare_deleted(self): m1 = Metadata() m1["artist"] = "TheArtist" m1["title"] = "title1" m2 = Metadata() m2["artist"] = "TheArtist" m2.delete("title") self.assertTrue(m1.compare(m2) < 1) def test_strip_whitespace(self): m1 = Metadata() m1["artist"] = " TheArtist " m1["title"] = "\t\u00A0 tit le1 \r\n" m1["genre"] = " \t" m1.strip_whitespace() self.assertEqual(m1["artist"], "TheArtist") self.assertEqual(m1["title"], "tit le1") def test_metadata_mapping_init(self): d = {'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': '', 'z': {'u', 'w'}} deleted_tags = set('c') m = Metadata(d, deleted_tags=deleted_tags, length=1234) self.assertIn('a', m) self.assertEqual(m.getraw('a'), ['b']) self.assertEqual(m['d'], MULTI_VALUED_JOINER.join(d['d'])) self.assertNotIn('c', m) self.assertNotIn('length', m) self.assertIn('c', m.deleted_tags) self.assertEqual(m.length, 1234) def test_metadata_mapping_init_zero(self): m = Metadata(tag1='a', tag2=0, tag3='', tag4=None) m['tag5'] = 0 m['tag1'] = '' self.assertIn('tag1', m.deleted_tags) self.assertEqual(m['tag2'], '0') self.assertNotIn('tag3', m) self.assertNotIn('tag4', m) self.assertEqual(m['tag5'], '0') def test_metadata_mapping_del(self): m = self.metadata_d1 self.assertEqual(m.getraw('a'), ['b']) 
self.assertNotIn('a', m.deleted_tags) self.assertNotIn('x', m.deleted_tags) self.assertRaises(KeyError, m.getraw, 'x') del m['a'] self.assertRaises(KeyError, m.getraw, 'a') self.assertIn('a', m.deleted_tags) # NOTE: historic behavior of Metadata.delete() # an attempt to delete an non-existing tag, will add it to the list # of deleted tags # so this will not raise a KeyError # as is it differs from dict or even defaultdict behavior del m['unknown'] self.assertIn('unknown', m.deleted_tags) def test_metadata_mapping_iter(self): self.assertEqual(set(self.metadata_d1), {'a', 'c', 'd'}) def test_metadata_mapping_keys(self): self.assertEqual(set(self.metadata_d1.keys()), {'a', 'c', 'd'}) def test_metadata_mapping_values(self): self.assertEqual(set(self.metadata_d1.values()), {'b', '2', 'x; y'}) def test_metadata_mapping_len(self): m = self.metadata_d1 self.assertEqual(len(m), 3) del m['x'] self.assertEqual(len(m), 3) del m['c'] self.assertEqual(len(m), 2) def _check_mapping_update(self, m): self.assertEqual(m['a'], 'b') self.assertEqual(m['c'], '3') self.assertEqual(m.getraw('d'), ['u', 'w']) self.assertEqual(m['x'], '') self.assertIn('x', m.deleted_tags) def test_metadata_mapping_update(self): # update from Metadata m = self.metadata_d2 m2 = self.metadata_d3 del m2['x'] m.update(m2) self._check_mapping_update(m) def test_metadata_mapping_update_dict(self): # update from dict m = self.metadata_d2 d2 = {'c': 3, 'd': ['u', 'w'], 'x': ''} m.update(d2) self._check_mapping_update(m) def test_metadata_mapping_update_tuple(self): # update from tuple m = self.metadata_d2 d2 = (('c', 3), ('d', ['u', 'w']), ('x', '')) m.update(d2) self._check_mapping_update(m) def test_metadata_mapping_update_dictlike(self): # update from kwargs m = self.metadata_d2 m.update(c=3, d=['u', 'w'], x='') self._check_mapping_update(m) def test_metadata_mapping_update_noparam(self): # update without parameter m = self.metadata_d2 self.assertRaises(TypeError, m.update) self.assertEqual(m['a'], 'b') def 
test_metadata_mapping_update_intparam(self): # update without parameter m = self.metadata_d2 self.assertRaises(TypeError, m.update, 123) def test_metadata_mapping_update_strparam(self): # update without parameter m = self.metadata_d2 self.assertRaises(ValueError, m.update, 'abc') def test_metadata_mapping_update_kw(self): m = Metadata(tag1='a', tag2='b') m.update(tag1='c') self.assertEqual(m['tag1'], 'c') self.assertEqual(m['tag2'], 'b') m.update(tag2='') self.assertIn('tag2', m.deleted_tags) def test_metadata_mapping_update_kw_del(self): m = Metadata(tag1='a', tag2='b') del m['tag1'] m2 = Metadata(tag1='c', tag2='d') del m2['tag2'] m.update(m2) self.assertEqual(m['tag1'], 'c') self.assertNotIn('tag2', m) self.assertNotIn('tag1', m.deleted_tags) self.assertIn('tag2', m.deleted_tags) def test_metadata_mapping_images(self): image1 = create_image(b'A', comment='A') image2 = create_image(b'B', comment='B') m1 = Metadata(a='b', length=1234, images=[image1]) self.assertEqual(m1.images[0], image1) self.assertEqual(len(m1), 2) # one tag, one image m1.images.append(image2) self.assertEqual(m1.images[1], image2) m1.images.pop(0) self.assertEqual(m1.images[0], image2) m2 = Metadata(a='c', length=4567, images=[image1]) m1.update(m2) self.assertEqual(m1.images[0], image1) m1.images.pop(0) self.assertEqual(len(m1), 1) # one tag, zero image self.assertFalse(m1.images) def test_metadata_mapping_iterable(self): m = Metadata(tag_tuple=('a', 0)) m['tag_set'] = {'c', 'd'} m['tag_dict'] = {'e': 1, 'f': 2} m['tag_str'] = 'gh' self.assertIn('0', m.getraw('tag_tuple')) self.assertIn('c', m.getraw('tag_set')) self.assertIn('e', m.getraw('tag_dict')) self.assertIn('gh', m.getraw('tag_str')) def test_compare_to_release(self): release = load_test_json('release.json') metadata = Metadata() release_to_metadata(release, metadata) match = metadata.compare_to_release(release, Cluster.comparison_weights) self.assertEqual(1.0, match.similarity) self.assertEqual(release, match.release) def 
test_compare_to_release_with_score(self): release = load_test_json('release.json') metadata = Metadata() release_to_metadata(release, metadata) for score, sim in ((42, 0.42), ('42', 0.42), ('foo', 1.0), (None, 1.0)): release['score'] = score match = metadata.compare_to_release(release, Cluster.comparison_weights) self.assertEqual(sim, match.similarity) def test_weights_from_release_type_scores(self): release = load_test_json('release.json') parts = [] weights_from_release_type_scores(parts, release, {'Album': 0.75}, 666) self.assertEqual( parts[0], (0.75, 666) ) weights_from_release_type_scores(parts, release, {}, 666) self.assertEqual( parts[1], (0.5, 666) ) del release['release-group'] weights_from_release_type_scores(parts, release, {}, 777) self.assertEqual( parts[2], (0.0, 777) ) def test_preferred_countries(self): release = load_test_json('release.json') parts = [] weights_from_preferred_countries(parts, release, [], 666) self.assertFalse(parts) weights_from_preferred_countries(parts, release, ['FR'], 666) self.assertEqual(parts[0], (0.0, 666)) weights_from_preferred_countries(parts, release, ['GB'], 666) self.assertEqual(parts[1], (1.0, 666)) def test_preferred_formats(self): release = load_test_json('release.json') parts = [] weights_from_preferred_formats(parts, release, [], 777) self.assertFalse(parts) weights_from_preferred_formats(parts, release, ['Digital Media'], 777) self.assertEqual(parts[0], (0.0, 777)) weights_from_preferred_formats(parts, release, ['12" Vinyl'], 777) self.assertEqual(parts[1], (1.0, 777)) def test_compare_to_track(self): track_json = load_test_json('track.json') track = Track(track_json['id']) track_to_metadata(track_json, track) match = track.metadata.compare_to_track(track_json, File.comparison_weights) self.assertEqual(1.0, match.similarity) self.assertEqual(track_json, match.track) def test_compare_to_track_with_score(self): track_json = load_test_json('track.json') track = Track(track_json['id']) 
track_to_metadata(track_json, track) for score, sim in ((42, 0.42), ('42', 0.42), ('foo', 1.0), (None, 1.0)): track_json['score'] = score match = track.metadata.compare_to_track(track_json, File.comparison_weights) self.assertEqual(sim, match.similarity)
class Album(DataObject, Item): metadata_images_changed = QtCore.pyqtSignal() def __init__(self, album_id, discid=None): DataObject.__init__(self, album_id) self.metadata = Metadata() self.orig_metadata = Metadata() self.tracks = [] self.loaded = False self.load_task = None self.release_group = None self._files_count = 0 self._requests = 0 self._tracks_loaded = False self._discids = set() if discid: self._discids.add(discid) self._after_load_callbacks = [] self.unmatched_files = Cluster(_("Unmatched Files"), special=True, related_album=self, hide_if_empty=True) self.unmatched_files.metadata_images_changed.connect( self.update_metadata_images) self.status = AlbumStatus.NONE self._album_artists = [] self.update_metadata_images_enabled = True def __repr__(self): return '<Album %s %r>' % (self.id, self.metadata["album"]) def iterfiles(self, save=False): for track in self.tracks: yield from track.iterfiles() if not save: yield from self.unmatched_files.iterfiles() def enable_update_metadata_images(self, enabled): self.update_metadata_images_enabled = enabled def append_album_artist(self, album_artist_id): """Append artist id to the list of album artists and return an AlbumArtist instance""" album_artist = AlbumArtist(album_artist_id) self._album_artists.append(album_artist) return album_artist def add_discid(self, discid): if not discid: return self._discids.add(discid) for track in self.tracks: medium_discids = track.metadata.getall('~musicbrainz_discids') track_discids = list(self._discids.intersection(medium_discids)) if track_discids: track.metadata['musicbrainz_discid'] = track_discids track.update() for file in track.files: file.metadata['musicbrainz_discid'] = track_discids file.update() def get_next_track(self, track): try: index = self.tracks.index(track) return self.tracks[index + 1] except (IndexError, ValueError): return None def get_album_artists(self): """Returns the list of album artists (as AlbumArtist objects)""" return self._album_artists def 
_parse_release(self, release_node): log.debug("Loading release %r ...", self.id) self._tracks_loaded = False release_id = release_node['id'] if release_id != self.id: self.tagger.mbid_redirects[self.id] = release_id album = self.tagger.albums.get(release_id) if album: log.debug("Release %r already loaded", release_id) album.match_files(self.unmatched_files.files) album.update() self.tagger.remove_album(self) return False else: del self.tagger.albums[self.id] self.tagger.albums[release_id] = self self.id = release_id # Make the release artist nodes available, since they may # contain supplementary data (aliases, tags, genres, ratings) # which aren't present in the release group, track, or # recording artist nodes. We can copy them into those places # wherever the IDs match, so that the data is shared and # available for use in mbjson.py and external plugins. self._release_artist_nodes = _create_artist_node_dict(release_node) # Get release metadata m = self._new_metadata m.length = 0 rg_node = release_node['release-group'] rg = self.release_group = self.tagger.get_release_group_by_id( rg_node['id']) rg.loaded_albums.add(self.id) rg.refcount += 1 _copy_artist_nodes(self._release_artist_nodes, rg_node) release_group_to_metadata(rg_node, rg.metadata, rg) m.copy(rg.metadata) release_to_metadata(release_node, m, album=self) config = get_config() # Custom VA name if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID: m['albumartistsort'] = m['albumartist'] = config.setting['va_name'] # Convert Unicode punctuation if config.setting['convert_punctuation']: m.apply_func(asciipunct) m['totaldiscs'] = len(release_node['media']) # Add album to collections add_release_to_user_collections(release_node) # Run album metadata plugins try: run_album_metadata_processors(self, m, release_node) except BaseException: self.error_append(traceback.format_exc()) self._release_node = release_node return True def _release_request_finished(self, document, http, error): if self.load_task is 
None: return self.load_task = None parsed = False try: if error: self.error_append(http.errorString()) # Fix for broken NAT releases if error == QtNetwork.QNetworkReply.ContentNotFoundError: config = get_config() nats = False nat_name = config.setting["nat_name"] files = list(self.unmatched_files.files) for file in files: recordingid = file.metadata["musicbrainz_recordingid"] if mbid_validate(recordingid) and file.metadata[ "album"] == nat_name: nats = True self.tagger.move_file_to_nat(file, recordingid) self.tagger.nats.update() if nats and not self.get_num_unmatched_files(): self.tagger.remove_album(self) error = False else: try: parsed = self._parse_release(document) except Exception: error = True self.error_append(traceback.format_exc()) finally: self._requests -= 1 if parsed or error: self._finalize_loading(error) def _finalize_loading(self, error): if error: self.metadata.clear() self.status = AlbumStatus.ERROR del self._new_metadata del self._new_tracks self.update() if not self._requests: self.loaded = True for func, always in self._after_load_callbacks: if always: func() return if self._requests > 0: return if not self._tracks_loaded: artists = set() all_media = [] absolutetracknumber = 0 va = self._new_metadata[ 'musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID djmix_ars = {} if hasattr(self._new_metadata, "_djmix_ars"): djmix_ars = self._new_metadata._djmix_ars for medium_node in self._release_node['media']: mm = Metadata() mm.copy(self._new_metadata) medium_to_metadata(medium_node, mm) format = medium_node.get('format') if format: all_media.append(format) for dj in djmix_ars.get(mm["discnumber"], []): mm.add("djmixer", dj) if va: mm["compilation"] = "1" else: del mm["compilation"] if 'discs' in medium_node: discids = [disc.get('id') for disc in medium_node['discs']] mm['~musicbrainz_discids'] = discids mm['musicbrainz_discid'] = list( self._discids.intersection(discids)) if "pregap" in medium_node: absolutetracknumber += 1 mm['~discpregap'] = '1' 
extra_metadata = { '~pregap': '1', '~absolutetracknumber': absolutetracknumber, } self._finalize_loading_track(medium_node['pregap'], mm, artists, extra_metadata) track_count = medium_node['track-count'] if track_count: tracklist_node = medium_node['tracks'] for track_node in tracklist_node: absolutetracknumber += 1 extra_metadata = { '~absolutetracknumber': absolutetracknumber, } self._finalize_loading_track(track_node, mm, artists, extra_metadata) if "data-tracks" in medium_node: for track_node in medium_node['data-tracks']: absolutetracknumber += 1 extra_metadata = { '~datatrack': '1', '~absolutetracknumber': absolutetracknumber, } self._finalize_loading_track(track_node, mm, artists, extra_metadata) totalalbumtracks = absolutetracknumber self._new_metadata['~totalalbumtracks'] = totalalbumtracks # Generate a list of unique media, but keep order of first appearance self._new_metadata['media'] = " / ".join( list(OrderedDict.fromkeys(all_media))) for track in self._new_tracks: track.metadata["~totalalbumtracks"] = totalalbumtracks if len(artists) > 1: track.metadata["~multiartist"] = "1" del self._release_node del self._release_artist_nodes self._tracks_loaded = True if not self._requests: self.enable_update_metadata_images(False) for track in self._new_tracks: track.orig_metadata.copy(track.metadata) track.metadata_images_changed.connect( self.update_metadata_images) # Prepare parser for user's script for s_name, s_text in enabled_tagger_scripts_texts(): parser = ScriptParser() for track in self._new_tracks: # Run tagger script for each track try: parser.eval(s_text, track.metadata) except ScriptError: log.exception( "Failed to run tagger script %s on track", s_name) track.metadata.strip_whitespace() track.scripted_metadata.update(track.metadata) # Run tagger script for the album itself try: parser.eval(s_text, self._new_metadata) except ScriptError: log.exception("Failed to run tagger script %s on album", s_name) self._new_metadata.strip_whitespace() 
unmatched_files = [ file for track in self.tracks for file in track.files ] self.metadata = self._new_metadata self.orig_metadata.copy(self.metadata) self.orig_metadata.images.clear() self.tracks = self._new_tracks del self._new_metadata del self._new_tracks self.loaded = True self.status = AlbumStatus.LOADED self.match_files(unmatched_files + self.unmatched_files.files) self.enable_update_metadata_images(True) self.update_metadata_images() self.update() self.tagger.window.set_statusbar_message( N_('Album %(id)s loaded: %(artist)s - %(album)s'), { 'id': self.id, 'artist': self.metadata['albumartist'], 'album': self.metadata['album'] }, timeout=3000) for func, always in self._after_load_callbacks: func() self._after_load_callbacks = [] if self.item.isSelected(): self.tagger.window.refresh_metadatabox() def _finalize_loading_track(self, track_node, metadata, artists, extra_metadata=None): # As noted in `_parse_release` above, the release artist nodes # may contain supplementary data that isn't present in track # artist nodes. Similarly, the track artists may contain # information which the recording artists don't. Copy this # information across to wherever the artist IDs match. 
_copy_artist_nodes(self._release_artist_nodes, track_node) _copy_artist_nodes(self._release_artist_nodes, track_node['recording']) _copy_artist_nodes(_create_artist_node_dict(track_node), track_node['recording']) track = Track(track_node['recording']['id'], self) self._new_tracks.append(track) # Get track metadata tm = track.metadata tm.copy(metadata) track_to_metadata(track_node, track) track._customize_metadata() self._new_metadata.length += tm.length artists.add(tm["artist"]) if extra_metadata: tm.update(extra_metadata) # Run track metadata plugins try: run_track_metadata_processors(self, tm, track_node, self._release_node) except BaseException: self.error_append(traceback.format_exc()) return track def load(self, priority=False, refresh=False): if self._requests: log.info("Not reloading, some requests are still active.") return self.tagger.window.set_statusbar_message( N_('Loading album %(id)s ...'), {'id': self.id}) self.loaded = False self.status = AlbumStatus.LOADING if self.release_group: self.release_group.loaded = False self.release_group.genres.clear() self.metadata.clear() self.genres.clear() self.update(update_selection=False) self._new_metadata = Metadata() self._new_tracks = [] self._requests = 1 self.clear_errors() config = get_config() require_authentication = False inc = { 'aliases', 'annotation', 'artist-credits', 'artists', 'collections', 'discids', 'isrcs', 'labels', 'media', 'recordings', 'release-groups', } if self.tagger.webservice.oauth_manager.is_authorized(): require_authentication = True inc |= {'user-collections'} if config.setting['release_ars'] or config.setting['track_ars']: inc |= { 'artist-rels', 'recording-rels', 'release-rels', 'url-rels', 'work-rels' } if config.setting['track_ars']: inc |= { 'recording-level-rels', 'work-level-rels', } require_authentication = self.set_genre_inc_params( inc, config) or require_authentication if config.setting['enable_ratings']: require_authentication = True inc |= {'user-ratings'} 
self.load_task = self.tagger.mb_api.get_release_by_id( self.id, self._release_request_finished, inc=tuple(inc), mblogin=require_authentication, priority=priority, refresh=refresh) def run_when_loaded(self, func, always=False): if self.loaded: func() else: self._after_load_callbacks.append((func, always)) def stop_loading(self): if self.load_task: self.tagger.webservice.remove_task(self.load_task) self.load_task = None def update(self, update_tracks=True, update_selection=True): if self.item: self.item.update(update_tracks, update_selection=update_selection) def add_file(self, track, file, new_album=True): self._files_count += 1 if new_album: self.update(update_tracks=False) add_metadata_images(self, [file]) def remove_file(self, track, file, new_album=True): self._files_count -= 1 if new_album: self.update(update_tracks=False) remove_metadata_images(self, [file]) @staticmethod def _match_files(files, tracks, unmatched_files, threshold=0, use_events_iter=False): """Match files to tracks on this album, based on metadata similarity or recordingid.""" if use_events_iter: # TODO: get rid of this completely at some point events_iter = process_events_iter else: def _events_iter(seq): return seq events_iter = _events_iter tracks_cache = defaultdict(lambda: None) def build_tracks_cache(): for track in tracks: tm_recordingid = track.orig_metadata['musicbrainz_recordingid'] tm_tracknumber = track.orig_metadata['tracknumber'] tm_discnumber = track.orig_metadata['discnumber'] for tup in ((tm_recordingid, tm_tracknumber, tm_discnumber), (tm_recordingid, tm_tracknumber), (tm_recordingid, )): tracks_cache[tup] = track SimMatchAlbum = namedtuple('SimMatchAlbum', 'similarity track') no_match = SimMatchAlbum(similarity=-1, track=unmatched_files) for file in list(files): if file.state == File.REMOVED: continue # if we have a recordingid to match against, use that in priority recid = file.match_recordingid or file.metadata[ 'musicbrainz_recordingid'] if recid and mbid_validate(recid): 
if not tracks_cache: build_tracks_cache() tracknumber = file.metadata['tracknumber'] discnumber = file.metadata['discnumber'] track = (tracks_cache[(recid, tracknumber, discnumber)] or tracks_cache[(recid, tracknumber)] or tracks_cache[(recid, )]) if track: yield (file, track) continue # try to match by similarity def candidates(): for track in events_iter(tracks): similarity = track.metadata.compare(file.orig_metadata) if similarity >= threshold: yield SimMatchAlbum(similarity=similarity, track=track) best_match = find_best_match(candidates, no_match) yield (file, best_match.result.track) def match_files(self, files): """Match and move files to tracks on this album, based on metadata similarity or recordingid.""" if self.loaded: config = get_config() threshold = config.setting['track_matching_threshold'] moves = self._match_files(files, self.tracks, self.unmatched_files, threshold=threshold) for file, target in moves: file.move(target) else: for file in list(files): file.move(self.unmatched_files) def can_save(self): return self._files_count > 0 def can_remove(self): return True def can_edit_tags(self): return True def can_analyze(self): return False def can_autotag(self): return False def can_refresh(self): return True def can_view_info(self): return self.loaded or bool(self.errors) def can_extract(self): return any(track.can_extract() for track in self.tracks) def is_album_like(self): return True def get_num_matched_tracks(self): return sum(1 for track in self.tracks if track.is_linked()) def get_num_unmatched_files(self): return len(self.unmatched_files.files) def get_num_total_files(self): return self._files_count + len(self.unmatched_files.files) def is_complete(self): if not self.tracks: return False for track in self.tracks: if not track.is_complete(): return False return not self.get_num_unmatched_files() def is_modified(self): return any(self._iter_unsaved_files()) def get_num_unsaved_files(self): return sum(1 for file in self._iter_unsaved_files()) def 
_iter_unsaved_files(self): yield from (file for file in self.iterfiles(save=True) if not file.is_saved()) def column(self, column): if column == 'title': if self.status == AlbumStatus.LOADING: title = _("[loading album information]") elif self.status == AlbumStatus.ERROR: title = _("[could not load album %s]") % self.id else: title = self.metadata['album'] if self.tracks: elems = [ '%d/%d' % (self.get_num_matched_tracks(), len(self.tracks)) ] unmatched = self.get_num_unmatched_files() if unmatched: elems.append('%d?' % (unmatched, )) unsaved = self.get_num_unsaved_files() if unsaved: elems.append('%d*' % (unsaved, )) ca_detailed = self.cover_art_description_detailed() if ca_detailed: elems.append(ca_detailed) return '%s\u200E (%s)' % (title, '; '.join(elems)) else: return title elif column == '~length': length = self.metadata.length if length: return format_time(length) else: return '' elif column == 'artist': return self.metadata['albumartist'] elif column == 'tracknumber': return self.metadata['~totalalbumtracks'] elif column == 'discnumber': return self.metadata['totaldiscs'] elif column == 'covercount': return self.cover_art_description() else: return self.metadata[column] def switch_release_version(self, mbid): if mbid == self.id: return for file in list(self.iterfiles(True)): file.move(self.unmatched_files) album = self.tagger.albums.get(mbid) if album: album.match_files(self.unmatched_files.files) album.update() self.tagger.remove_album(self) else: del self.tagger.albums[self.id] self.release_group.loaded_albums.discard(self.id) self.id = mbid self.tagger.albums[mbid] = self self.load(priority=True, refresh=True) def update_metadata_images(self): if not self.update_metadata_images_enabled: return if update_metadata_images(self): self.update(False) self.metadata_images_changed.emit() def keep_original_images(self): self.enable_update_metadata_images(False) for track in self.tracks: track.keep_original_images() for file in list(self.unmatched_files.files): 
file.keep_original_images() self.enable_update_metadata_images(True) self.update_metadata_images()
class Album(DataObject, Item): release_group_loaded = QtCore.pyqtSignal() def __init__(self, id, discid=None): DataObject.__init__(self, id) self.metadata = Metadata() self.tracks = [] self.loaded = False self.load_task = None self.release_group = None self._files = 0 self._requests = 0 self._tracks_loaded = False self._discid = discid self._after_load_callbacks = [] self.unmatched_files = Cluster(_("Unmatched Files"), special=True, related_album=self, hide_if_empty=True) self.errors = [] self.status = None self._album_artists = [] def __repr__(self): return '<Album %s %r>' % (self.id, self.metadata[u"album"]) def iterfiles(self, save=False): for track in self.tracks: for file in track.iterfiles(): yield file if not save: for file in self.unmatched_files.iterfiles(): yield file def append_album_artist(self, id): """Append artist id to the list of album artists and return an AlbumArtist instance""" album_artist = AlbumArtist(id) self._album_artists.append(album_artist) return album_artist def get_album_artists(self): """Returns the list of album artists (as AlbumArtist objects)""" return self._album_artists def _parse_release(self, document): log.debug("Loading release %r ...", self.id) self._tracks_loaded = False release_node = document.metadata[0].release[0] if release_node.id != self.id: self.tagger.mbid_redirects[self.id] = release_node.id album = self.tagger.albums.get(release_node.id) if album: log.debug("Release %r already loaded", release_node.id) album.match_files(self.unmatched_files.files) album.update() self.tagger.remove_album(self) return False else: del self.tagger.albums[self.id] self.tagger.albums[release_node.id] = self self.id = release_node.id # Get release metadata m = self._new_metadata m.length = 0 rg_node = release_node.release_group[0] rg = self.release_group = self.tagger.get_release_group_by_id(rg_node.id) rg.loaded_albums.add(self.id) rg.refcount += 1 release_group_to_metadata(rg_node, rg.metadata, rg) m.copy(rg.metadata) 
release_to_metadata(release_node, m, album=self) if self._discid: m['musicbrainz_discid'] = self._discid # Custom VA name if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID: m['albumartistsort'] = m['albumartist'] = config.setting['va_name'] # Convert Unicode punctuation if config.setting['convert_punctuation']: m.apply_func(asciipunct) m['totaldiscs'] = release_node.medium_list[0].count # Add album to collections if "collection_list" in release_node.children: for node in release_node.collection_list[0].collection: if node.editor[0].text.lower() == config.persist["oauth_username"].lower(): if node.id not in user_collections: user_collections[node.id] = \ Collection(node.id, node.name[0].text, node.release_list[0].count) user_collections[node.id].releases.add(self.id) # Run album metadata plugins try: run_album_metadata_processors(self, m, release_node) except: self.error_append(traceback.format_exc()) self._release_node = release_node return True def _release_request_finished(self, document, http, error): if self.load_task is None: return self.load_task = None parsed = False try: if error: self.error_append(unicode(http.errorString())) # Fix for broken NAT releases if error == QtNetwork.QNetworkReply.ContentNotFoundError: nats = False nat_name = config.setting["nat_name"] files = list(self.unmatched_files.files) for file in files: recordingid = file.metadata["musicbrainz_recordingid"] if mbid_validate(recordingid) and file.metadata["album"] == nat_name: nats = True self.tagger.move_file_to_nat(file, recordingid) self.tagger.nats.update() if nats and not self.get_num_unmatched_files(): self.tagger.remove_album(self) error = False else: try: parsed = self._parse_release(document) except: error = True self.error_append(traceback.format_exc()) finally: self._requests -= 1 if parsed or error: self._finalize_loading(error) # does http need to be set to None to free the memory used by the network response? 
# http://pyqt.sourceforge.net/Docs/PyQt4/qnetworkaccessmanager.html says: # After the request has finished, it is the responsibility of the user # to delete the QNetworkReply object at an appropriate time. # Do not directly delete it inside the slot connected to finished(). # You can use the deleteLater() function. def error_append(self, msg): log.error(msg) self.errors.append(msg) def _finalize_loading(self, error): if error: self.metadata.clear() self.status = _("[could not load album %s]") % self.id del self._new_metadata del self._new_tracks self.update() return if self._requests > 0: return if not self._tracks_loaded: artists = set() totalalbumtracks = 0 absolutetracknumber = 0 va = self._new_metadata['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID djmix_ars = {} if hasattr(self._new_metadata, "_djmix_ars"): djmix_ars = self._new_metadata._djmix_ars for medium_node in self._release_node.medium_list[0].medium: mm = Metadata() mm.copy(self._new_metadata) medium_to_metadata(medium_node, mm) discpregap = False for dj in djmix_ars.get(mm["discnumber"], []): mm.add("djmixer", dj) if "pregap" in medium_node.children: discpregap = True absolutetracknumber += 1 track = self._finalize_loading_track(medium_node.pregap[0], mm, artists, va, absolutetracknumber, discpregap) track.metadata['~pregap'] = "1" for track_node in medium_node.track_list[0].track: absolutetracknumber += 1 track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap) if "data_track_list" in medium_node.children: for track_node in medium_node.data_track_list[0].track: absolutetracknumber += 1 track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap) track.metadata['~datatrack'] = "1" totalalbumtracks = str(absolutetracknumber) for track in self._new_tracks: track.metadata["~totalalbumtracks"] = totalalbumtracks if len(artists) > 1: track.metadata["~multiartist"] = "1" del self._release_node self._tracks_loaded = True if 
not self._requests: # Prepare parser for user's script if config.setting["enable_tagger_script"]: script = config.setting["tagger_script"] if script: parser = ScriptParser() for track in self._new_tracks: # Run tagger script for each track try: parser.eval(script, track.metadata) except: self.error_append(traceback.format_exc()) # Strip leading/trailing whitespace track.metadata.strip_whitespace() # Run tagger script for the album itself try: parser.eval(script, self._new_metadata) except: self.error_append(traceback.format_exc()) self._new_metadata.strip_whitespace() for track in self.tracks: for file in list(track.linked_files): file.move(self.unmatched_files) self.metadata = self._new_metadata self.tracks = self._new_tracks del self._new_metadata del self._new_tracks self.loaded = True self.status = None self.match_files(self.unmatched_files.files) self.update() self.tagger.window.set_statusbar_message( N_('Album %(id)s loaded: %(artist)s - %(album)s'), { 'id': self.id, 'artist': self.metadata['albumartist'], 'album': self.metadata['album'] }, timeout=3000 ) for func in self._after_load_callbacks: func() self._after_load_callbacks = [] def _finalize_loading_track(self, track_node, metadata, artists, va, absolutetracknumber, discpregap): track = Track(track_node.recording[0].id, self) self._new_tracks.append(track) # Get track metadata tm = track.metadata tm.copy(metadata) track_to_metadata(track_node, track) track.metadata["~absolutetracknumber"] = absolutetracknumber track._customize_metadata() self._new_metadata.length += tm.length artists.add(tm["artist"]) if va: tm["compilation"] = "1" if discpregap: tm["~discpregap"] = "1" # Run track metadata plugins try: run_track_metadata_processors(self, tm, self._release_node, track_node) except: self.error_append(traceback.format_exc()) return track def load(self, priority=False, refresh=False): if self._requests: log.info("Not reloading, some requests are still active.") return 
self.tagger.window.set_statusbar_message( N_('Loading album %(id)s ...'), {'id': self.id} ) self.loaded = False self.status = _("[loading album information]") if self.release_group: self.release_group.loaded = False self.release_group.folksonomy_tags.clear() self.metadata.clear() self.folksonomy_tags.clear() self.update() self._new_metadata = Metadata() self._new_tracks = [] self._requests = 1 self.errors = [] require_authentication = False inc = ['release-groups', 'media', 'recordings', 'artist-credits', 'artists', 'aliases', 'labels', 'isrcs', 'collections'] if config.setting['release_ars'] or config.setting['track_ars']: inc += ['artist-rels', 'release-rels', 'url-rels', 'recording-rels', 'work-rels'] if config.setting['track_ars']: inc += ['recording-level-rels', 'work-level-rels'] if config.setting['folksonomy_tags']: if config.setting['only_my_tags']: require_authentication = True inc += ['user-tags'] else: inc += ['tags'] if config.setting['enable_ratings']: require_authentication = True inc += ['user-ratings'] self.load_task = self.tagger.xmlws.get_release_by_id( self.id, self._release_request_finished, inc=inc, mblogin=require_authentication, priority=priority, refresh=refresh) def run_when_loaded(self, func): if self.loaded: func() else: self._after_load_callbacks.append(func) def stop_loading(self): if self.load_task: self.tagger.xmlws.remove_task(self.load_task) self.load_task = None def update(self, update_tracks=True): if self.item: self.item.update(update_tracks) def _add_file(self, track, file): self._files += 1 self.update(update_tracks=False) def _remove_file(self, track, file): self._files -= 1 self.update(update_tracks=False) def match_files(self, files, use_recordingid=True): """Match files to tracks on this album, based on metadata similarity or recordingid.""" for file in list(files): if file.state == File.REMOVED: continue matches = [] recordingid = file.metadata['musicbrainz_recordingid'] if use_recordingid and mbid_validate(recordingid): 
matches = self._get_recordingid_matches(file, recordingid) if not matches: for track in self.tracks: sim = track.metadata.compare(file.orig_metadata) if sim >= config.setting['track_matching_threshold']: matches.append((sim, track)) if matches: matches.sort(reverse=True) file.move(matches[0][1]) else: file.move(self.unmatched_files) def match_file(self, file, recordingid=None): """Match the file on a track on this album, based on recordingid or metadata similarity.""" if file.state == File.REMOVED: return if recordingid is not None: matches = self._get_recordingid_matches(file, recordingid) if matches: matches.sort(reverse=True) file.move(matches[0][1]) return self.match_files([file], use_recordingid=False) def _get_recordingid_matches(self, file, recordingid): matches = [] tracknumber = file.metadata['tracknumber'] discnumber = file.metadata['discnumber'] for track in self.tracks: tm = track.metadata if recordingid == tm['musicbrainz_recordingid']: if tracknumber == tm['tracknumber']: if discnumber == tm['discnumber']: matches.append((4.0, track)) break else: matches.append((3.0, track)) else: matches.append((2.0, track)) return matches def can_save(self): return self._files > 0 def can_remove(self): return True def can_edit_tags(self): return True def can_analyze(self): return False def can_autotag(self): return False def can_refresh(self): return True def can_view_info(self): return (self.loaded and self.metadata and self.metadata.images) or self.errors def is_album_like(self): return True def get_num_matched_tracks(self): num = 0 for track in self.tracks: if track.is_linked(): num += 1 return num def get_num_unmatched_files(self): return len(self.unmatched_files.files) def get_num_total_files(self): return self._files + len(self.unmatched_files.files) def is_complete(self): if not self.tracks: return False for track in self.tracks: if not track.is_complete(): return False else: return True def is_modified(self): if self.tracks: for track in self.tracks: for 
file in track.linked_files: if not file.is_saved(): return True return False def get_num_unsaved_files(self): count = 0 for track in self.tracks: for file in track.linked_files: if not file.is_saved(): count += 1 return count def column(self, column): if column == 'title': if self.status is not None: title = self.status else: title = self.metadata['album'] if self.tracks: linked_tracks = 0 for track in self.tracks: if track.is_linked(): linked_tracks += 1 text = u'%s\u200E (%d/%d' % (title, linked_tracks, len(self.tracks)) unmatched = self.get_num_unmatched_files() if unmatched: text += '; %d?' % (unmatched,) unsaved = self.get_num_unsaved_files() if unsaved: text += '; %d*' % (unsaved,) text += ungettext("; %i image", "; %i images", len(self.metadata.images)) % len(self.metadata.images) return text + ')' else: return title elif column == '~length': length = self.metadata.length if length: return format_time(length) else: return '' elif column == 'artist': return self.metadata['albumartist'] else: return '' def switch_release_version(self, mbid): if mbid == self.id: return for file in list(self.iterfiles(True)): file.move(self.unmatched_files) album = self.tagger.albums.get(mbid) if album: album.match_files(self.unmatched_files.files) album.update() self.tagger.remove_album(self) else: del self.tagger.albums[self.id] self.release_group.loaded_albums.discard(self.id) self.id = mbid self.tagger.albums[mbid] = self self.load(priority=True, refresh=True)
class MetadataTest(unittest.TestCase):
    """Unit tests for the Metadata multi-value mapping.

    Covers raw dict storage, joined-string access, deletion tracking via
    ``deleted_tags``, merging with ``update`` and bulk transformation with
    ``apply_func``.
    """

    # NOTE(review): apparently unused leftovers — confirm before deleting.
    original = None
    tags = []

    def setUp(self):
        """Build one Metadata instance with single-, multi- and hidden tags."""
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique must ignore the second, duplicate value.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        self.metadata["~hidden"] = "hidden-value"

    # The former no-op ``tearDown`` override was removed:
    # unittest.TestCase.tearDown is already a no-op, so the override was dead code.

    def test_metadata_set(self):
        """Values are stored internally as lists, regardless of how they were set."""
        self.assertEqual(["single1-value"], dict.get(self.metadata, "single1"))
        self.assertEqual(["single2-value"], dict.get(self.metadata, "single2"))
        self.assertEqual(self.multi1, dict.get(self.metadata, "multi1"))
        self.assertEqual(self.multi2, dict.get(self.metadata, "multi2"))
        self.assertEqual(self.multi3, dict.get(self.metadata, "multi3"))
        self.assertEqual(["hidden-value"], dict.get(self.metadata, "~hidden"))

    def test_metadata_get(self):
        """Item access joins multi-values; getall returns the raw list."""
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        # Missing keys: "" for item access, None for get, [] for getall.
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        """Explicit delete and assigning "" both remove the tag and record it."""
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)

    def test_metadata_update(self):
        """update() copies values and propagates deletions to the target."""
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        # After re-adding "old" to the source, both mappings must agree key-for-key.
        self.metadata["old"] = "old-value"
        for (key, value) in dict.items(self.metadata):
            self.assertIn(key, m)
            self.assertEqual(value, dict.get(m, key))
        for (key, value) in dict.items(m):
            self.assertIn(key, self.metadata)
            self.assertEqual(value, dict.get(self.metadata, key))

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_applyfunc(self):
        """apply_func transforms every stored value in place."""
        def func(x):
            # Strip the first character of each value.
            return x[1:]
        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)), self.metadata.getall("multi1"))
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))
class MetadataTest(PicardTestCase):
    """Unit tests for the Metadata mapping (PicardTestCase variant).

    Exercises raw storage, joined access, the deleted-tags bookkeeping
    (explicit, implicit and undelete paths), update/clear semantics,
    apply_func, and the length/compare scoring helpers.
    """

    # NOTE(review): apparently unused leftovers — confirm before deleting.
    original = None
    tags = []

    def setUp(self):
        """Chain to PicardTestCase.setUp, then build a populated Metadata."""
        super().setUp()
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique must ignore the second, duplicate value.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        self.metadata["~hidden"] = "hidden-value"

    # NOTE(review): the former ``def tearDown(self): pass`` overrode
    # PicardTestCase.tearDown with a no-op, suppressing base-class cleanup
    # even though setUp chains via super(). Removed so the base teardown
    # runs — confirm PicardTestCase.tearDown performs real cleanup.

    def test_metadata_setitem(self):
        """Values are stored internally as lists, regardless of how they were set."""
        self.assertEqual(["single1-value"], dict.get(self.metadata, "single1"))
        self.assertEqual(["single2-value"], dict.get(self.metadata, "single2"))
        self.assertEqual(self.multi1, dict.get(self.metadata, "multi1"))
        self.assertEqual(self.multi2, dict.get(self.metadata, "multi2"))
        self.assertEqual(self.multi3, dict.get(self.metadata, "multi3"))
        self.assertEqual(["hidden-value"], dict.get(self.metadata, "~hidden"))

    def test_metadata_get(self):
        """Item access joins multi-values; getall returns the raw list."""
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        # Missing keys: "" for item access, None for get, [] for getall.
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

    def test_metadata_implicit_delete(self):
        """Assigning "" deletes an existing tag but not an unknown one."""
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)
        self.metadata["unknown"] = ""
        self.assertNotIn("unknown", self.metadata)
        self.assertNotIn("unknown", self.metadata.deleted_tags)

    def test_metadata_set_explicit_empty(self):
        """set() with an empty list re-creates the tag and clears its deletion."""
        self.metadata.delete("single1")
        self.metadata.set("single1", [])
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)
        self.assertEqual([], self.metadata.getall("single1"))

    def test_metadata_undelete(self):
        """Re-assigning a deleted tag removes it from deleted_tags."""
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata["single1"] = "value1"
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_update(self):
        """update() copies values and propagates deletions to the target."""
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        # After re-adding "old" to the source, both mappings must agree key-for-key.
        self.metadata["old"] = "old-value"
        for (key, value) in dict.items(self.metadata):
            self.assertIn(key, m)
            self.assertEqual(value, dict.get(m, key))
        for (key, value) in dict.items(m):
            self.assertIn(key, self.metadata)
            self.assertEqual(value, dict.get(self.metadata, key))

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_clear_deleted(self):
        self.metadata.delete("single1")
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata.clear_deleted()
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_applyfunc(self):
        """apply_func transforms every stored value in place."""
        def func(x):
            # Strip the first character of each value.
            return x[1:]
        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)), self.metadata.getall("multi1"))
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_length_score(self):
        """length_score degrades linearly with the length difference."""
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))

    def test_compare_is_equal(self):
        """compare() is symmetric and returns 1 for identical metadata."""
        m1 = Metadata()
        m1["title"] = "title1"
        m1["tracknumber"] = "2"
        m1.length = 360
        m2 = Metadata()
        m2["title"] = "title1"
        m2["tracknumber"] = "2"
        m2.length = 360
        self.assertEqual(m1.compare(m2), m2.compare(m1))
        self.assertEqual(m1.compare(m2), 1)

    def test_compare_lengths(self):
        m1 = Metadata()
        m1.length = 360
        m2 = Metadata()
        m2.length = 300
        self.assertAlmostEqual(m1.compare(m2), 0.998)

    def test_compare_tracknumber_difference(self):
        m1 = Metadata()
        m1["tracknumber"] = "1"
        m2 = Metadata()
        m2["tracknumber"] = "2"
        self.assertEqual(m1.compare(m2), 0)

    def test_compare_deleted(self):
        """A deleted tag on one side must lower the similarity below 1."""
        m1 = Metadata()
        m1["artist"] = "TheArtist"
        m1["title"] = "title1"
        m2 = Metadata()
        m2["artist"] = "TheArtist"
        m2.delete("title")
        self.assertTrue(m1.compare(m2) < 1)
class MetadataTest(PicardTestCase):
    """Unit tests for the Metadata mapping.

    Duplicate of the preceding variant; exercises raw storage, joined
    access, deleted-tags bookkeeping, update/clear semantics, apply_func,
    and the length/compare scoring helpers.
    """

    # NOTE(review): apparently unused leftovers — confirm before deleting.
    original = None
    tags = []

    def setUp(self):
        """Chain to PicardTestCase.setUp, then build a populated Metadata."""
        super().setUp()
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique must ignore the second, duplicate value.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        self.metadata["~hidden"] = "hidden-value"

    # NOTE(review): the former ``def tearDown(self): pass`` overrode
    # PicardTestCase.tearDown with a no-op, suppressing base-class cleanup
    # even though setUp chains via super(). Removed so the base teardown
    # runs — confirm PicardTestCase.tearDown performs real cleanup.

    def test_metadata_setitem(self):
        """Values are stored internally as lists, regardless of how they were set."""
        self.assertEqual(["single1-value"], dict.get(self.metadata, "single1"))
        self.assertEqual(["single2-value"], dict.get(self.metadata, "single2"))
        self.assertEqual(self.multi1, dict.get(self.metadata, "multi1"))
        self.assertEqual(self.multi2, dict.get(self.metadata, "multi2"))
        self.assertEqual(self.multi3, dict.get(self.metadata, "multi3"))
        self.assertEqual(["hidden-value"], dict.get(self.metadata, "~hidden"))

    def test_metadata_get(self):
        """Item access joins multi-values; getall returns the raw list."""
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        # Missing keys: "" for item access, None for get, [] for getall.
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

    def test_metadata_implicit_delete(self):
        """Assigning "" deletes an existing tag but not an unknown one."""
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)
        self.metadata["unknown"] = ""
        self.assertNotIn("unknown", self.metadata)
        self.assertNotIn("unknown", self.metadata.deleted_tags)

    def test_metadata_set_explicit_empty(self):
        """set() with an empty list re-creates the tag and clears its deletion."""
        self.metadata.delete("single1")
        self.metadata.set("single1", [])
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)
        self.assertEqual([], self.metadata.getall("single1"))

    def test_metadata_undelete(self):
        """Re-assigning a deleted tag removes it from deleted_tags."""
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata["single1"] = "value1"
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_update(self):
        """update() copies values and propagates deletions to the target."""
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        # After re-adding "old" to the source, both mappings must agree key-for-key.
        self.metadata["old"] = "old-value"
        for (key, value) in dict.items(self.metadata):
            self.assertIn(key, m)
            self.assertEqual(value, dict.get(m, key))
        for (key, value) in dict.items(m):
            self.assertIn(key, self.metadata)
            self.assertEqual(value, dict.get(self.metadata, key))

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_clear_deleted(self):
        self.metadata.delete("single1")
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata.clear_deleted()
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_applyfunc(self):
        """apply_func transforms every stored value in place."""
        def func(x):
            # Strip the first character of each value.
            return x[1:]
        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)), self.metadata.getall("multi1"))
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata) for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_length_score(self):
        """length_score degrades linearly with the length difference."""
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))

    def test_compare_is_equal(self):
        """compare() is symmetric and returns 1 for identical metadata."""
        m1 = Metadata()
        m1["title"] = "title1"
        m1["tracknumber"] = "2"
        m1.length = 360
        m2 = Metadata()
        m2["title"] = "title1"
        m2["tracknumber"] = "2"
        m2.length = 360
        self.assertEqual(m1.compare(m2), m2.compare(m1))
        self.assertEqual(m1.compare(m2), 1)

    def test_compare_lengths(self):
        m1 = Metadata()
        m1.length = 360
        m2 = Metadata()
        m2.length = 300
        self.assertAlmostEqual(m1.compare(m2), 0.998)

    def test_compare_tracknumber_difference(self):
        m1 = Metadata()
        m1["tracknumber"] = "1"
        m2 = Metadata()
        m2["tracknumber"] = "2"
        self.assertEqual(m1.compare(m2), 0)

    def test_compare_deleted(self):
        """A deleted tag on one side must lower the similarity below 1."""
        m1 = Metadata()
        m1["artist"] = "TheArtist"
        m1["title"] = "title1"
        m2 = Metadata()
        m2["artist"] = "TheArtist"
        m2.delete("title")
        self.assertTrue(m1.compare(m2) < 1)
class Album(DataObject, Item):
    """A MusicBrainz release loaded into the tagger.

    Owns its Track list, an "Unmatched Files" cluster, and the async web
    service request that loads release data. Loading is two-phase: the
    release XML is parsed into ``_new_metadata``/``_new_tracks`` and only
    swapped in once all requests finish (see _finalize_loading).
    """

    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, id, discid=None):
        DataObject.__init__(self, id)
        self.metadata = Metadata()
        self.tracks = []
        self.loaded = False
        self.load_task = None
        self.release_group = None
        self._files = 0          # number of files linked to tracks
        self._requests = 0       # in-flight web service requests
        self._tracks_loaded = False
        self._discid = discid
        self._after_load_callbacks = []
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True,
                                       related_album=self, hide_if_empty=True)
        self.errors = []

    def __repr__(self):
        return "<Album %s %r>" % (self.id, self.metadata[u"album"])

    def iterfiles(self, save=False):
        """Yield all files on this album; include unmatched ones unless saving."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def _parse_release(self, document):
        """Parse the release XML into _new_metadata.

        Returns True when parsing succeeded, False when this album was
        merged into an already-loaded one (after an MBID redirect).
        """
        log.debug("Loading release %r", self.id)
        self._tracks_loaded = False
        release_node = document.metadata[0].release[0]
        if release_node.id != self.id:
            # The requested MBID was redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_node.id
            album = self.tagger.albums.get(release_node.id)
            if album:
                log.debug("Release %r already loaded", release_node.id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                # Re-key this album under the canonical MBID.
                del self.tagger.albums[self.id]
                self.tagger.albums[release_node.id] = self
                self.id = release_node.id
        # Get release metadata
        m = self._new_metadata
        m.length = 0
        rg_node = release_node.release_group[0]
        rg = self.release_group = self.tagger.get_release_group_by_id(rg_node.id)
        rg.loaded_albums.add(self.id)
        rg.refcount += 1
        release_group_to_metadata(rg_node, rg.metadata, rg)
        m.copy(rg.metadata)
        release_to_metadata(release_node, m, album=self)
        if self._discid:
            m["musicbrainz_discid"] = self._discid
        # Custom VA name
        if m["musicbrainz_albumartistid"] == VARIOUS_ARTISTS_ID:
            m["albumartistsort"] = m["albumartist"] = config.setting["va_name"]
        # Convert Unicode punctuation
        if config.setting["convert_punctuation"]:
            m.apply_func(asciipunct)
        m["totaldiscs"] = release_node.medium_list[0].count
        # Add album to collections owned by the logged-in user.
        if "collection_list" in release_node.children:
            for node in release_node.collection_list[0].collection:
                if node.editor[0].text.lower() == config.setting["username"].lower():
                    if node.id not in user_collections:
                        user_collections[node.id] = Collection(
                            node.id, node.name[0].text, node.release_list[0].count)
                    user_collections[node.id].releases.add(self.id)
        # Run album metadata plugins; plugin failures are recorded but must
        # not abort the load. "except Exception" (not bare except) so
        # KeyboardInterrupt/SystemExit still propagate.
        try:
            run_album_metadata_processors(self, m, release_node)
        except Exception:
            self.error_append(traceback.format_exc())
        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web service callback for the release request."""
        if self.load_task is None:
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.error_append(unicode(http.errorString()))
                # Fix for broken NAT releases: a 404 may mean the tracks are
                # really standalone (non-album) recordings.
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        trackid = file.metadata["musicbrainz_trackid"]
                        if mbid_validate(trackid) and file.metadata["album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, trackid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except Exception:
                    error = True
                    self.error_append(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)

    def error_append(self, msg):
        """Log an error message and keep it for display in can_view_info."""
        log.error(msg)
        self.errors.append(msg)

    def _finalize_loading(self, error):
        """Build tracks from the parsed release and publish the new metadata.

        Runs once all outstanding requests are finished; on error the album
        shows a placeholder title instead.
        """
        if error:
            self.metadata.clear()
            self.metadata["album"] = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return
        if self._requests > 0:
            return
        if not self._tracks_loaded:
            artists = set()
            totalalbumtracks = 0
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars
            for medium_node in self._release_node.medium_list[0].medium:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                totalalbumtracks += int(mm["totaltracks"])
                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)
                for track_node in medium_node.track_list[0].track:
                    track = Track(track_node.recording[0].id, self)
                    self._new_tracks.append(track)
                    # Get track metadata
                    tm = track.metadata
                    tm.copy(mm)
                    track_to_metadata(track_node, track)
                    track._customize_metadata()
                    self._new_metadata.length += tm.length
                    artists.add(tm["musicbrainz_artistid"])
                    # Run track metadata plugins; failures are logged,
                    # not fatal (narrowed from a bare except).
                    try:
                        run_track_metadata_processors(self, tm, self._release_node, track_node)
                    except Exception:
                        self.error_append(traceback.format_exc())
            totalalbumtracks = str(totalalbumtracks)
            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    track.metadata["compilation"] = "1"
            del self._release_node
            self._tracks_loaded = True
        if not self._requests:
            # Prepare parser for user's script
            if config.setting["enable_tagger_script"]:
                script = config.setting["tagger_script"]
                if script:
                    parser = ScriptParser()
                    for track in self._new_tracks:
                        # Run tagger script for each track
                        try:
                            parser.eval(script, track.metadata)
                        except Exception:
                            self.error_append(traceback.format_exc())
                        # Strip leading/trailing whitespace
                        track.metadata.strip_whitespace()
                    # Run tagger script for the album itself
                    try:
                        parser.eval(script, self._new_metadata)
                    except Exception:
                        self.error_append(traceback.format_exc())
                    self._new_metadata.strip_whitespace()
            # Move previously linked files back so they can be re-matched
            # against the freshly loaded tracks.
            for track in self.tracks:
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.match_files(self.unmatched_files.files)
            self.update()
            self.tagger.window.set_statusbar_message(_("Album %s loaded"), self.id, timeout=3000)
            for func in self._after_load_callbacks:
                func()
            self._after_load_callbacks = []

    def load(self):
        """Start (re)loading this album from the MusicBrainz web service."""
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message("Loading album %s...", self.id)
        self.loaded = False
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.folksonomy_tags.clear()
        self.metadata.clear()
        self.folksonomy_tags.clear()
        self.metadata["album"] = _("[loading album information]")
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        self.errors = []
        require_authentication = False
        inc = ["release-groups", "media", "recordings", "artist-credits",
               "artists", "aliases", "labels", "isrcs", "collections"]
        if config.setting["release_ars"] or config.setting["track_ars"]:
            inc += ["artist-rels", "release-rels", "url-rels",
                    "recording-rels", "work-rels"]
            if config.setting["track_ars"]:
                inc += ["recording-level-rels", "work-level-rels"]
        if config.setting["folksonomy_tags"]:
            if config.setting["only_my_tags"]:
                require_authentication = True
                inc += ["user-tags"]
            else:
                inc += ["tags"]
        if config.setting["enable_ratings"]:
            require_authentication = True
            inc += ["user-ratings"]
        self.load_task = self.tagger.xmlws.get_release_by_id(
            self.id, self._release_request_finished, inc=inc,
            mblogin=require_authentication)

    def run_when_loaded(self, func):
        """Call func now if loaded, otherwise after loading finishes."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.append(func)

    def stop_loading(self):
        if self.load_task:
            self.tagger.xmlws.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        self._files += 1
        self.update(update_tracks=False)

    def _remove_file(self, track, file):
        self._files -= 1
        self.update(update_tracks=False)

    def match_files(self, files, use_trackid=True):
        """Match files to tracks on this album, based on metadata similarity or trackid."""
        for file in list(files):
            if file.state == File.REMOVED:
                continue
            matches = []
            trackid = file.metadata["musicbrainz_trackid"]
            if use_trackid and mbid_validate(trackid):
                matches = self._get_trackid_matches(file, trackid)
            if not matches:
                # Fall back to fuzzy metadata comparison.
                for track in self.tracks:
                    sim = track.metadata.compare(file.orig_metadata)
                    if sim >= config.setting["track_matching_threshold"]:
                        matches.append((sim, track))
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
            else:
                file.move(self.unmatched_files)

    def match_file(self, file, trackid=None):
        """Match the file on a track on this album, based on trackid or metadata similarity."""
        if file.state == File.REMOVED:
            return
        if trackid is not None:
            matches = self._get_trackid_matches(file, trackid)
            if matches:
                matches.sort(reverse=True)
                file.move(matches[0][1])
                return
        self.match_files([file], use_trackid=False)

    def _get_trackid_matches(self, file, trackid):
        """Score tracks sharing the given trackid.

        4.0 = trackid + track + disc number match (best; stops the search),
        3.0 = trackid + track number, 2.0 = trackid only.
        """
        matches = []
        tracknumber = file.metadata["tracknumber"]
        discnumber = file.metadata["discnumber"]
        for track in self.tracks:
            tm = track.metadata
            if trackid == tm["musicbrainz_trackid"]:
                if tracknumber == tm["tracknumber"]:
                    if discnumber == tm["discnumber"]:
                        matches.append((4.0, track))
                        break
                    else:
                        matches.append((3.0, track))
                else:
                    matches.append((2.0, track))
        return matches

    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def can_view_info(self):
        return (self.loaded and self.metadata and self.metadata.images) or self.errors

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        return sum(1 for track in self.tracks if track.is_linked())

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def is_complete(self):
        """True when every track has exactly one linked file."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if track.num_linked_files != 1:
                return False
        # Former "for...else: return True" simplified — the loop has no
        # break, so the else clause always ran.
        return True

    def is_modified(self):
        if self.tracks:
            for track in self.tracks:
                for file in track.linked_files:
                    if not file.is_saved():
                        return True
        return False

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Return the display text for the given tree-view column."""
        if column == "title":
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E (LTR mark) keeps the count readable next to RTL titles.
                text = u"%s\u200E (%d/%d" % (self.metadata["album"],
                                             linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += "; %d?" % (unmatched,)
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += "; %d*" % (unsaved,)
                text += ungettext("; %i image", "; %i images",
                                  len(self.metadata.images)) % len(self.metadata.images)
                return text + ")"
            else:
                return self.metadata["album"]
        elif column == "~length":
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ""
        elif column == "artist":
            return self.metadata["albumartist"]
        else:
            return ""

    def switch_release_version(self, mbid):
        """Switch this album to another release of the same release group."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            # Target already loaded: merge into it and drop this album.
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            # Re-key this album under the new MBID and reload it.
            del self.tagger.albums[self.id]
            self.release_group.loaded_albums.discard(self.id)
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load()
class MetadataTest(PicardTestCase):
    """Unit tests for Metadata: tag get/set/delete semantics, the deleted-tags
    bookkeeping, update/merge behavior, similarity scoring and image handling.
    """

    original = None
    # NOTE(review): mutable class attribute shared by all instances — appears
    # unused in the visible tests; confirm before relying on it.
    tags = []

    def setUp(self):
        """Build the shared fixtures: one richly-populated Metadata object and
        three small dict-backed ones used by the mapping tests."""
        super().setUp()
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique twice with the same value must still yield one entry.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        # multi1 intentionally holds a duplicated value (add() keeps both).
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        # "~"-prefixed tags are Picard's hidden/internal tags.
        self.metadata["~hidden"] = "hidden-value"
        self.metadata_d1 = Metadata({'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': ''})
        self.metadata_d2 = Metadata({'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': 'z'})
        self.metadata_d3 = Metadata({'c': 3, 'd': ['u', 'w'], 'x': 'p'})

    def tearDown(self):
        pass

    def test_metadata_setitem(self):
        # Every write path (setitem, add_unique, add, set) stores a list.
        self.assertEqual(["single1-value"], self.metadata.getraw("single1"))
        self.assertEqual(["single2-value"], self.metadata.getraw("single2"))
        self.assertEqual(self.multi1, self.metadata.getraw("multi1"))
        self.assertEqual(self.multi2, self.metadata.getraw("multi2"))
        self.assertEqual(self.multi3, self.metadata.getraw("multi3"))
        self.assertEqual(["hidden-value"], self.metadata.getraw("~hidden"))

    def test_metadata_get(self):
        # Single-valued tag: [], get() and getall()/getraw() agree.
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        self.assertEqual(["single1-value"], self.metadata.getraw("single1"))
        # Multi-valued tag: scalar access joins with MULTI_VALUED_JOINER.
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1), self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        self.assertEqual(self.multi1, self.metadata.getraw("multi1"))
        # Missing tag: [] yields "", get() None, getall() [], getraw() raises.
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertRaises(KeyError, self.metadata.getraw, "nonexistent")
        self.assertEqual(self.metadata._store.items(), self.metadata.rawitems())
        # items() flattens multi-values into repeated (key, value) pairs.
        metadata_items = [(x, z) for (x, y) in self.metadata.rawitems() for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

    def test_metadata_implicit_delete(self):
        # Assigning "" to an existing tag records it as deleted ...
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)
        # ... but assigning "" to an unknown tag does not.
        self.metadata["unknown"] = ""
        self.assertNotIn("unknown", self.metadata)
        self.assertNotIn("unknown", self.metadata.deleted_tags)

    def test_metadata_set_explicit_empty(self):
        # set() with an empty list resurrects a deleted tag as empty.
        self.metadata.delete("single1")
        self.metadata.set("single1", [])
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)
        self.assertEqual([], self.metadata.getall("single1"))

    def test_metadata_undelete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        # Re-assigning a value clears the deleted state.
        self.metadata["single1"] = "value1"
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_update(self):
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        # Tags only in the target survive; deletions propagate.
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.metadata["old"] = "old-value"
        # After adding "old" to the source, both objects hold identical tags.
        for (key, value) in self.metadata.rawitems():
            self.assertIn(key, m)
            self.assertEqual(value, m.getraw(key))
        for (key, value) in m.rawitems():
            self.assertIn(key, self.metadata)
            self.assertEqual(value, self.metadata.getraw(key))

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_clear_deleted(self):
        self.metadata.delete("single1")
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata.clear_deleted()
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_applyfunc(self):
        # apply_func maps func over every value, single- and multi-valued.
        def func(x):
            return x[1:]
        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)), self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)), self.metadata.getall("multi1"))

    def test_metadata_applyfunc_preserve_tags(self):
        # Tags listed in PRESERVED_TAGS must be left untouched by apply_func.
        self.assertTrue(len(PRESERVED_TAGS) > 0)
        m = Metadata()
        m[PRESERVED_TAGS[0]] = 'value1'
        m['not_preserved'] = 'value2'

        def func(x):
            return x[1:]
        m.apply_func(func)
        self.assertEqual("value1", m[PRESERVED_TAGS[0]])
        self.assertEqual("alue2", m['not_preserved'])

    def test_length_score(self):
        # (length_a, length_b, expected_score): score decays symmetrically
        # with the difference and hits 0.0 at a 30s gap.
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))

    def test_compare_is_equal(self):
        m1 = Metadata()
        m1["title"] = "title1"
        m1["tracknumber"] = "2"
        m1.length = 360
        m2 = Metadata()
        m2["title"] = "title1"
        m2["tracknumber"] = "2"
        m2.length = 360
        # compare() is symmetric and identical metadata scores 1.
        self.assertEqual(m1.compare(m2), m2.compare(m1))
        self.assertEqual(m1.compare(m2), 1)

    def test_compare_lengths(self):
        m1 = Metadata()
        m1.length = 360
        m2 = Metadata()
        m2.length = 300
        self.assertAlmostEqual(m1.compare(m2), 0.998)

    def test_compare_tracknumber_difference(self):
        m1 = Metadata()
        m1["tracknumber"] = "1"
        m2 = Metadata()
        m2["tracknumber"] = "2"
        # Differing track numbers alone drop the similarity to 0.
        self.assertEqual(m1.compare(m2), 0)

    def test_compare_deleted(self):
        m1 = Metadata()
        m1["artist"] = "TheArtist"
        m1["title"] = "title1"
        m2 = Metadata()
        m2["artist"] = "TheArtist"
        m2.delete("title")
        # A deleted tag on one side must lower the similarity below 1.
        self.assertTrue(m1.compare(m2) < 1)

    def test_strip_whitespace(self):
        m1 = Metadata()
        m1["artist"] = " TheArtist "
        # Includes NO-BREAK SPACE, tab and CRLF to exercise Unicode stripping.
        m1["title"] = "\t\u00A0 tit le1 \r\n"
        m1.strip_whitespace()
        self.assertEqual(m1["artist"], "TheArtist")
        self.assertEqual(m1["title"], "tit le1")

    def test_metadata_mapping_init(self):
        d = {'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': '', 'z': {'u', 'w'}}
        deleted_tags = set('c')
        # Constructor accepts a mapping plus deleted_tags/length keywords.
        m = Metadata(d, deleted_tags=deleted_tags, length=1234)
        self.assertTrue('a' in m)
        self.assertEqual(m.getraw('a'), ['b'])
        self.assertEqual(m['d'], MULTI_VALUED_JOINER.join(d['d']))
        self.assertNotIn('c', m)
        # length is an attribute, not a stored tag.
        self.assertNotIn('length', m)
        self.assertIn('c', m.deleted_tags)
        self.assertEqual(m.length, 1234)

    def test_metadata_mapping_init_zero(self):
        # 0 is a real value (stringified), '' and None are dropped.
        m = Metadata(tag1='a', tag2=0, tag3='', tag4=None)
        m['tag5'] = 0
        m['tag1'] = ''
        self.assertIn('tag1', m.deleted_tags)
        self.assertEqual(m['tag2'], '0')
        self.assertNotIn('tag3', m)
        self.assertNotIn('tag4', m)
        self.assertEqual(m['tag5'], '0')

    def test_metadata_mapping_del(self):
        m = self.metadata_d1
        self.assertEqual(m.getraw('a'), ['b'])
        self.assertNotIn('a', m.deleted_tags)
        self.assertNotIn('x', m.deleted_tags)
        self.assertRaises(KeyError, m.getraw, 'x')
        del m['a']
        self.assertRaises(KeyError, m.getraw, 'a')
        self.assertIn('a', m.deleted_tags)
        # NOTE: historic behavior of Metadata.delete()
        # an attempt to delete an non-existing tag, will add it to the list
        # of deleted tags
        # so this will not raise a KeyError
        # as is it differs from dict or even defaultdict behavior
        del m['unknown']
        self.assertIn('unknown', m.deleted_tags)

    def test_metadata_mapping_iter(self):
        l = set(self.metadata_d1)
        self.assertEqual(l, {'a', 'c', 'd'})

    def test_metadata_mapping_keys(self):
        l = set(self.metadata_d1.keys())
        self.assertEqual(l, {'a', 'c', 'd'})

    def test_metadata_mapping_values(self):
        l = set(self.metadata_d1.values())
        self.assertEqual(l, {'b', '2', 'x; y'})

    def test_metadata_mapping_len(self):
        m = self.metadata_d1
        self.assertEqual(len(m), 3)
        # Deleting a tag that was never stored ('x' held '') keeps len stable.
        del m['x']
        self.assertEqual(len(m), 3)
        del m['c']
        self.assertEqual(len(m), 2)

    def _check_mapping_update(self, m):
        # Shared post-conditions for all the update() variants below.
        self.assertEqual(m['a'], 'b')
        self.assertEqual(m['c'], '3')
        self.assertEqual(m.getraw('d'), ['u', 'w'])
        self.assertEqual(m['x'], '')
        self.assertIn('x', m.deleted_tags)

    def test_metadata_mapping_update(self):
        # update from Metadata
        m = self.metadata_d2
        m2 = self.metadata_d3
        del m2['x']
        m.update(m2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_dict(self):
        # update from dict
        m = self.metadata_d2
        d2 = {'c': 3, 'd': ['u', 'w'], 'x': ''}
        m.update(d2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_tuple(self):
        # update from tuple
        m = self.metadata_d2
        d2 = (('c', 3), ('d', ['u', 'w']), ('x', ''))
        m.update(d2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_dictlike(self):
        # update from kwargs
        m = self.metadata_d2
        m.update(c=3, d=['u', 'w'], x='')
        self._check_mapping_update(m)

    def test_metadata_mapping_update_noparam(self):
        # update without parameter
        m = self.metadata_d2
        self.assertRaises(TypeError, m.update)
        self.assertEqual(m['a'], 'b')

    def test_metadata_mapping_update_intparam(self):
        # update with a non-iterable (int) parameter
        m = self.metadata_d2
        self.assertRaises(TypeError, m.update, 123)

    def test_metadata_mapping_update_strparam(self):
        # update with a plain string parameter
        m = self.metadata_d2
        self.assertRaises(ValueError, m.update, 'abc')

    def test_metadata_mapping_update_kw(self):
        m = Metadata(tag1='a', tag2='b')
        m.update(tag1='c')
        self.assertEqual(m['tag1'], 'c')
        self.assertEqual(m['tag2'], 'b')
        # Empty-string kwarg behaves like a delete.
        m.update(tag2='')
        self.assertIn('tag2', m.deleted_tags)

    def test_metadata_mapping_update_kw_del(self):
        m = Metadata(tag1='a', tag2='b')
        del m['tag1']
        m2 = Metadata(tag1='c', tag2='d')
        del m2['tag2']
        m.update(m2)
        # Source values override, source deletions propagate, and a tag
        # re-set by the source is removed from the target's deleted set.
        self.assertEqual(m['tag1'], 'c')
        self.assertNotIn('tag2', m)
        self.assertNotIn('tag1', m.deleted_tags)
        self.assertIn('tag2', m.deleted_tags)

    def test_metadata_mapping_images(self):
        image1 = create_image(b'A', comment='A')
        image2 = create_image(b'B', comment='B')
        m1 = Metadata(a='b', length=1234, images=[image1])
        self.assertEqual(m1.images[0], image1)
        self.assertEqual(len(m1), 2)  # one tag, one image
        m1.images.append(image2)
        self.assertEqual(m1.images[1], image2)
        m1.images.pop(0)
        self.assertEqual(m1.images[0], image2)
        # update() replaces the image list with the source's.
        m2 = Metadata(a='c', length=4567, images=[image1])
        m1.update(m2)
        self.assertEqual(m1.images[0], image1)
        m1.images.pop(0)
        self.assertEqual(len(m1), 1)  # one tag, zero image
        self.assertFalse(m1.images)

    def test_metadata_mapping_iterable(self):
        # Arbitrary iterables (tuple/set/dict/str) are accepted as values;
        # elements are stringified, and a plain str stays one value.
        m = Metadata(tag_tuple=('a', 0))
        m['tag_set'] = {'c', 'd'}
        m['tag_dict'] = {'e': 1, 'f': 2}
        m['tag_str'] = 'gh'
        self.assertIn('0', m.getraw('tag_tuple'))
        self.assertIn('c', m.getraw('tag_set'))
        self.assertIn('e', m.getraw('tag_dict'))
        self.assertIn('gh', m.getraw('tag_str'))