def test_strip_whitespace(self):
    """strip_whitespace() trims surrounding blanks (incl. NBSP and CRLF) in place."""
    meta = Metadata()
    meta["artist"] = " TheArtist "
    meta["title"] = "\t\u00A0 tit le1 \r\n"
    meta.strip_whitespace()
    expected = {"artist": "TheArtist", "title": "tit le1"}
    for tag in ("artist", "title"):
        self.assertEqual(meta[tag], expected[tag])
def _test_cover_art(self, filename):
    """Round-trip oversized jpg/png cover images through the format's save/load."""
    self._set_up(filename)
    try:
        # Use reasonable large data > 64kb.
        # This checks a mutagen error with ASF files.
        padding = "a" * 1024 * 128
        cases = {
            'jpg': {'mime': 'image/jpeg', 'head': 'JFIF'},
            'png': {'mime': 'image/png', 'head': 'PNG'},
        }
        for case in cases.values():
            fmt_file = picard.formats.open(self.filename)
            metadata = Metadata()
            payload = case['head'] + padding
            metadata.add_image(case['mime'], payload)
            fmt_file._save(self.filename, metadata)
            fmt_file = picard.formats.open(self.filename)
            loaded = fmt_file._load(self.filename)
            image = loaded.images[0]
            self.assertEqual(image["mime"], case['mime'])
            self.assertEqual(image["data"], payload)
    finally:
        self._tear_down()
def _test_cover_art(self, filename):
    """Round-trip large CoverArtImage payloads through the format's save/load."""
    self._set_up(filename)
    try:
        # Use reasonable large data > 64kb.
        # This checks a mutagen error with ASF files.
        padding = "a" * 1024 * 128
        cases = {
            'jpg': {'mime': 'image/jpeg', 'data': self.jpegdata + padding},
            'png': {'mime': 'image/png', 'data': self.pngdata + padding},
        }
        for case in cases.values():
            fmt_file = picard.formats.open(self.filename)
            metadata = Metadata()
            payload = case['data']
            metadata.append_image(CoverArtImage(data=payload))
            fmt_file._save(self.filename, metadata)
            fmt_file = picard.formats.open(self.filename)
            loaded = fmt_file._load(self.filename)
            image = loaded.images[0]
            self.assertEqual(image.mimetype, case['mime'])
            self.assertEqual(image.data, payload)
    finally:
        self._tear_down()
def save(pf):
    """Delete all existing tags from the file, then save a copy of its metadata.

    Returns whatever _save_and_rename returns (the resulting file path).
    """
    snapshot = Metadata()
    snapshot.copy(pf.metadata)
    tagfile = MFile(pf.filename)
    if tagfile is not None:
        tagfile.delete()
    return pf._save_and_rename(pf.filename, snapshot)
def _load(self, filename):
    """Read ASF/WMA tags from *filename* into a new Metadata object."""
    log.debug("Loading file %r", filename)
    file = ASF(encode_filename(filename))
    metadata = Metadata()
    for name, values in file.tags.items():
        if name == 'WM/Picture':
            # Embedded cover art: unpack each picture blob and attach it.
            for image in values:
                (mime, data, type, description) = unpack_image(image.value)
                extras = {
                    'desc': description,
                    'type': image_type_from_id3_num(type)
                }
                metadata.add_image(mime, data, extras=extras)
            continue
        elif name not in self.__RTRANS:
            # ASF tag with no Picard-side mapping; skip it.
            continue
        elif name == 'WM/SharedUserRating':
            # Rating in WMA ranges from 0 to 99, normalize this to the range 0 to 5
            values[0] = int(round(int(unicode(values[0])) / 99.0 * (config.setting['rating_steps'] - 1)))
        # Translate the ASF tag name to Picard's internal name.
        name = self.__RTRANS[name]
        # Drop empty values; only store the tag when something remains.
        values = filter(bool, map(unicode, values))
        if values:
            metadata[name] = values
    self._info(metadata, file)
    return metadata
def __init__(self, filename):
    """Create a pending File item for *filename* with three metadata views."""
    super(File, self).__init__()
    self.id = self.new_id()
    self.filename = filename
    self.base_filename = os.path.basename(filename)
    self._state = File.UNDEFINED
    self.state = File.PENDING
    self.error = None
    # Three views of the tags: as read from disk, user edits, server data.
    self.orig_metadata = Metadata()
    self.user_metadata = Metadata()
    self.server_metadata = Metadata()
    self.saved_metadata = self.server_metadata
    self.metadata = self.user_metadata
    # Seed every view with the file name as a provisional title.
    self.orig_metadata["title"] = os.path.basename(self.filename)
    for view in (self.user_metadata, self.server_metadata):
        view.copy(self.orig_metadata)
    self.similarity = 1.0
    self.parent = None
    self.lookup_task = None
    self.comparison_weights = dict(
        title=13, artist=4, album=5, length=10, totaltracks=4,
        releasetype=20, releasecountry=2, format=2)
def save(self):
    """Apply the edits made in the tag editor dialog to all selected files.

    Collects changed tags from the UI tree, optionally submits ratings to
    the server, then updates each file's metadata in place.
    """
    metadata = Metadata()
    # Gather only the tags the user actually changed in the dialog.
    for i in range(self.ui.tags.topLevelItemCount()):
        item = self.ui.tags.topLevelItem(i)
        name = unicode(item.data(0, QtCore.Qt.UserRole).toString())
        if name in self.changed:
            value = unicode(item.text(1))
            metadata.add(name, value)
    # Rate the different tracks
    if self.config.setting['enable_ratings']:
        rating = self.ui.rating.getRating()
        metadata['~rating'] = unicode(rating)
        # Every distinct track backing the selected files gets the rating.
        tracks = set([file.parent for file in self.files if isinstance(file.parent, Track)])
        ratings = {}
        for track in tracks:
            ratings[('recording', track.id)] = rating
            track.metadata['~rating'] = rating
        if self.config.setting['submit_ratings']:
            self.tagger.xmlws.submit_ratings(ratings, None)
    # Remove the old values of changed tags, then merge in the new ones.
    for file in self.files:
        for name in self.changed:
            try:
                del file.metadata[name]
            except KeyError:
                pass
        file.metadata.update(metadata)
        file.update()
def new_metadata(self):
    """Return metadata adjusted for ID3v2.3 limitations.

    When the write_id3v23 option is off, the unmodified metadata is
    returned.  Otherwise a copy is produced with dates truncated to what
    ID3v2.3 can store, and multi-valued fields joined into single values
    (except TIPL/TMCL roles, which stay multi-valued).
    """
    if not config.setting["write_id3v23"]:
        return self.metadata
    copy = Metadata()
    copy.copy(self.metadata)
    join_with = config.setting["id3v23_join_with"]
    copy.multi_valued_joiner = join_with
    for name, values in copy.rawitems():
        # ID3v23 can only save TDOR dates in YYYY format. Mutagen cannot
        # handle ID3v23 dates which are YYYY-MM rather than YYYY or
        # YYYY-MM-DD.
        if name == "originaldate":
            values = [v[:4] for v in values]
        elif name == "date":
            values = [(v[:4] if len(v) < 10 else v) for v in values]
        # If this is a multi-valued field, then it needs to be flattened,
        # unless it's TIPL or TMCL which can still be multi-valued.
        if (len(values) > 1
                and name not in ID3File._rtipl_roles
                and not name.startswith("performer:")):
            values = [join_with.join(values)]
        copy[name] = values
    return copy
def _load(self, filename):
    """Read ASF/WMA tags and embedded cover art from *filename* into Metadata."""
    log.debug("Loading file %r", filename)
    file = ASF(encode_filename(filename))
    metadata = Metadata()
    for name, values in file.tags.items():
        if name == 'WM/Picture':
            # Embedded cover art: unpack each picture blob and wrap it in a
            # TagCoverArtImage; bad image data is logged and skipped.
            for image in values:
                (mime, data, type, description) = unpack_image(image.value)
                try:
                    coverartimage = TagCoverArtImage(
                        file=filename,
                        tag=name,
                        types=types_from_id3(type),
                        comment=description,
                        support_types=True,
                        data=data,
                    )
                except CoverArtImageError as e:
                    log.error('Cannot load image from %r: %s' % (filename, e))
                else:
                    metadata.append_image(coverartimage)
            continue
        elif name not in self.__RTRANS:
            # ASF tag with no Picard-side mapping; skip it.
            continue
        elif name == 'WM/SharedUserRating':
            # Rating in WMA ranges from 0 to 99, normalize this to the range 0 to 5
            values[0] = int(round(int(unicode(values[0])) / 99.0 * (config.setting['rating_steps'] - 1)))
        # Translate the ASF tag name to Picard's internal name.
        name = self.__RTRANS[name]
        # Drop empty values; only store the tag when something remains.
        values = filter(bool, map(unicode, values))
        if values:
            metadata[name] = values
    self._info(metadata, file)
    return metadata
def test_metadata_mapping_update_kw(self):
    """update() with keyword args overwrites values; empty string marks deletion."""
    meta = Metadata(tag1='a', tag2='b')
    meta.update(tag1='c')
    self.assertEqual(meta['tag1'], 'c')
    self.assertEqual(meta['tag2'], 'b')
    meta.update(tag2='')
    self.assertIn('tag2', meta.deleted_tags)
def save(self, next, settings):
    """Snapshot the current metadata and enqueue the save on the tagger's queue."""
    self.set_pending()
    snapshot = Metadata()
    snapshot.copy(self.metadata)
    task = partial(self._save_and_rename, self.filename, snapshot, settings)
    done = partial(self._saving_finished, next)
    self.tagger.save_queue.put((task, done, QtCore.Qt.LowEventPriority + 2))
def test_compare_deleted(self):
    """A pending deletion on one side makes compare() less than a perfect 1.0."""
    left = Metadata()
    left["artist"] = "TheArtist"
    left["title"] = "title1"
    right = Metadata()
    right["artist"] = "TheArtist"
    right.delete("title")
    self.assertTrue(left.compare(right) < 1)
def save(self):
    """Snapshot the current metadata and save it on the background thread pool."""
    self.set_pending()
    snapshot = Metadata()
    snapshot.copy(self.metadata)
    thread.run_task(
        partial(self._save_and_rename, self.filename, snapshot),
        self._saving_finished,
        priority=2,
        thread_pool=self.tagger.save_thread_pool)
def test_metadata_mapping_iterable(self):
    """Assigning iterables stores raw multi-values; plain strings are kept whole."""
    meta = Metadata(tag_tuple=('a', 0))
    meta['tag_set'] = {'c', 'd'}
    meta['tag_dict'] = {'e': 1, 'f': 2}
    meta['tag_str'] = 'gh'
    for tag, member in (('tag_tuple', '0'), ('tag_set', 'c'),
                        ('tag_dict', 'e'), ('tag_str', 'gh')):
        self.assertIn(member, meta.getraw(tag))
def _load(self, filename):
    """Read technical properties from a RIFF/WAVE file into a new Metadata.

    Only stream info is read (channels, bit depth, sample rate, length);
    WAVE files carry no tags here.  The wave reader is always closed, even
    on error — the original leaked the open file handle.
    """
    self.log.debug("Loading file %r", filename)
    f = wave.open(encode_filename(filename), "rb")
    try:
        metadata = Metadata()
        metadata['~#channels'] = f.getnchannels()
        metadata['~#bits_per_sample'] = f.getsampwidth() * 8
        metadata['~#sample_rate'] = f.getframerate()
        # Duration in milliseconds.
        metadata.length = 1000 * f.getnframes() / f.getframerate()
    finally:
        f.close()
    metadata['~format'] = 'Microsoft WAVE'
    return metadata
def _load(self, filename):
    """Read APEv2 tags (incl. embedded cover art) from *filename* into Metadata."""
    log.debug("Loading file %r", filename)
    file = self._File(encode_filename(filename))
    metadata = Metadata()
    if file.tags:
        for origname, values in file.tags.items():
            # Binary "Cover Art (...)" items hold "<description>\0<imagedata>".
            if origname.lower().startswith("cover art") and values.kind == mutagen.apev2.BINARY:
                if '\0' in values.value:
                    descr, data = values.value.split('\0', 1)
                    try:
                        coverartimage = TagCoverArtImage(
                            file=filename,
                            tag=origname,
                            data=data,
                        )
                    except CoverArtImageError as e:
                        log.error('Cannot load image from %r: %s' % (filename, e))
                    else:
                        metadata.append_image(coverartimage)
            # skip EXTERNAL and BINARY values
            if values.kind != mutagen.apev2.TEXT:
                continue
            for value in values:
                name = origname
                if name == "Year":
                    name = "date"
                    value = sanitize_date(value)
                elif name == "Track":
                    # "Track" may be "n" or "n/total"; split total into its own tag.
                    name = "tracknumber"
                    track = value.split("/")
                    if len(track) > 1:
                        metadata["totaltracks"] = track[1]
                        value = track[0]
                elif name == "Disc":
                    # "Disc" may be "n" or "n/total"; split total into its own tag.
                    name = "discnumber"
                    disc = value.split("/")
                    if len(disc) > 1:
                        metadata["totaldiscs"] = disc[1]
                        value = disc[0]
                elif name == 'Performer' or name == 'Comment':
                    # "Performer=Joe Barr (Piano)" becomes "performer:Piano=Joe Barr".
                    name = name.lower() + ':'
                    if value.endswith(')'):
                        start = value.rfind(' (')
                        if start > 0:
                            name += value[start + 2:-1]
                            value = value[:start]
                elif name in self.__translate:
                    name = self.__translate[name]
                else:
                    name = name.lower()
                metadata.add(name, value)
    self._info(metadata, file)
    return metadata
def test_metadata_applyfunc_preserve_tags(self):
    """apply_func() must leave PRESERVED_TAGS untouched while mapping other tags."""
    self.assertTrue(len(PRESERVED_TAGS) > 0)
    meta = Metadata()
    meta[PRESERVED_TAGS[0]] = 'value1'
    meta['not_preserved'] = 'value2'
    meta.apply_func(lambda value: value[1:])
    self.assertEqual("value1", meta[PRESERVED_TAGS[0]])
    self.assertEqual("alue2", meta['not_preserved'])
def test_id3_ufid_delete(self):
    """Deleting musicbrainz_recordingid drops the UFID frame on the next save."""
    metadata = Metadata()
    for tag, val in self.tags.items():
        metadata[tag] = val
    metadata['musicbrainz_recordingid'] = "Foo"
    before = save_and_load_metadata(self.filename, metadata)
    metadata.delete('musicbrainz_recordingid')
    after = save_and_load_metadata(self.filename, metadata)
    self.assertIn('musicbrainz_recordingid', before)
    self.assertNotIn('musicbrainz_recordingid', after)
def test_metadata_mapping_init(self):
    """Constructor normalizes values and honors deleted_tags and length kwargs."""
    seed = {'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': '', 'z': {'u', 'w'}}
    meta = Metadata(seed, deleted_tags=set('c'), length=1234)
    self.assertTrue('a' in meta)
    self.assertEqual(meta.getraw('a'), ['b'])
    self.assertEqual(meta['d'], MULTI_VALUED_JOINER.join(seed['d']))
    self.assertNotIn('c', meta)
    self.assertNotIn('length', meta)
    self.assertIn('c', meta.deleted_tags)
    self.assertEqual(meta.length, 1234)
def _load(self, filename):
    """Read technical stream properties from a RIFF/WAVE file into Metadata.

    Only stream info is read (channels, bit depth, sample rate, length).
    The wave reader is always closed, even on error — the original leaked
    the open file handle.
    """
    log.debug("Loading file %r", filename)
    f = wave.open(filename, "rb")
    try:
        metadata = Metadata()
        metadata['~channels'] = f.getnchannels()
        metadata['~bits_per_sample'] = f.getsampwidth() * 8
        metadata['~sample_rate'] = f.getframerate()
        # Duration in milliseconds (integer division as in the original).
        metadata.length = 1000 * f.getnframes() // f.getframerate()
    finally:
        f.close()
    metadata['~format'] = self.NAME
    self._add_path_to_metadata(metadata)
    return metadata
def test_id3_freeform_delete(self):
    """Deleting a freeform (TXXX) tag removes it from the file on the next save."""
    metadata = Metadata()
    for tag, val in self.tags.items():
        metadata[tag] = val
    metadata['Foo'] = 'Foo'
    before = save_and_load_metadata(self.filename, metadata)
    metadata.delete('Foo')
    after = save_and_load_metadata(self.filename, metadata)
    self.assertIn('Foo', before)
    self.assertNotIn('Foo', after)
def test_metadata_mapping_update_kw_del(self):
    """update() propagates both new values and deletions from the other mapping."""
    meta = Metadata(tag1='a', tag2='b')
    del meta['tag1']
    other = Metadata(tag1='c', tag2='d')
    del other['tag2']
    meta.update(other)
    self.assertEqual(meta['tag1'], 'c')
    self.assertNotIn('tag2', meta)
    self.assertNotIn('tag1', meta.deleted_tags)
    self.assertIn('tag2', meta.deleted_tags)
def test_comment_delete(self):
    """Deleting one comment:<desc> tag must not disturb the other comment tags."""
    metadata = Metadata()
    for tag, val in self.tags.items():
        metadata[tag] = val
    metadata['comment:bar'] = 'Foo'
    before = save_and_load_metadata(self.filename, metadata)
    metadata.delete('comment:bar')
    after = save_and_load_metadata(self.filename, metadata)
    for present in ('comment:foo', 'comment:bar'):
        self.assertIn(present, before)
    self.assertIn('comment:foo', after)
    self.assertNotIn('comment:bar', after)
def _script_to_filename(self, format, file_metadata, settings=None):
    """Evaluate the file-naming script *format* against this file's metadata.

    Returns a filename string sanitized for use as a path: values are made
    path-safe, and platform/ASCII restrictions from *settings* are applied.
    """
    if settings is None:
        settings = config.setting
    metadata = Metadata()
    # Base the naming either on the new tags alone or layered over the
    # original tags, depending on the clear_existing_tags option.
    if config.setting["clear_existing_tags"]:
        metadata.copy(file_metadata)
    else:
        metadata.copy(self.orig_metadata)
        metadata.update(file_metadata)
    # make sure every metadata can safely be used in a path name
    for name in metadata.keys():
        if isinstance(metadata[name], basestring):
            metadata[name] = sanitize_filename(metadata[name])
    # Tabs/newlines in the script are layout only; strip before evaluating.
    format = format.replace("\t", "").replace("\n", "")
    filename = ScriptParser().eval(format, metadata, self)
    if settings["ascii_filenames"]:
        if isinstance(filename, unicode):
            filename = unaccent(filename)
        filename = replace_non_ascii(filename)
    # replace incompatible characters
    if settings["windows_compatibility"] or sys.platform == "win32":
        filename = replace_win32_incompat(filename)
    # remove null characters
    filename = filename.replace("\x00", "")
    return filename
def _load(self, filename):
    """Read Vorbis-comment tags (Ogg/FLAC) from *filename* into Metadata.

    Translates Vorbis tag conventions to Picard names and loads embedded
    pictures (FLAC picture blocks and legacy COVERART fields).
    """
    self.log.debug("Loading file %r", filename)
    file = self._File(encode_filename(filename))
    file.tags = file.tags or {}
    metadata = Metadata()
    for origname, values in file.tags.items():
        for value in values:
            name = origname
            if name == "date" or name == "originaldate":
                # YYYY-00-00 => YYYY
                value = sanitize_date(value)
            elif name == 'performer' or name == 'comment':
                # transform "performer=Joe Barr (Piano)" to "performer:Piano=Joe Barr"
                name += ':'
                if value.endswith(')'):
                    start = value.rfind(' (')
                    if start > 0:
                        name += value[start + 2:-1]
                        value = value[:start]
            elif name.startswith('rating'):
                # Only accept the rating stored for the configured user;
                # convert the 0..1 float to Picard's 0..rating_steps scale.
                try:
                    name, email = name.split(':', 1)
                except ValueError:
                    email = ''
                if email != self.config.setting['rating_user_email']:
                    continue
                name = '~rating'
                value = unicode(int(round((float(value) * (self.config.setting['rating_steps'] - 1)))))
            elif name == "fingerprint" and value.startswith("MusicMagic Fingerprint"):
                name = "musicip_fingerprint"
                value = value[22:]
            elif name == "tracktotal":
                # Prefer an explicit "totaltracks" tag over "tracktotal".
                if "totaltracks" in file.tags:
                    continue
                name = "totaltracks"
            elif name == "disctotal":
                # Prefer an explicit "totaldiscs" tag over "disctotal".
                if "totaldiscs" in file.tags:
                    continue
                name = "totaldiscs"
            elif name == "metadata_block_picture":
                # Base64-encoded FLAC picture block embedded as a comment.
                image = mutagen.flac.Picture(base64.standard_b64decode(value))
                metadata.add_image(image.mime, image.data)
                continue
            metadata.add(name, value)
    if self._File == mutagen.flac.FLAC:
        for image in file.pictures:
            metadata.add_image(image.mime, image.data)
    # Read the unofficial COVERART tags, for backward compatibillity only
    if not "metadata_block_picture" in file.tags:
        try:
            for index, data in enumerate(file["COVERART"]):
                metadata.add_image(file["COVERARTMIME"][index], base64.standard_b64decode(data))
        except KeyError:
            pass
    self._info(metadata, file)
    return metadata
def _load_tracks(self, release_node, album):
    """Build Track objects for every medium/track of the release node."""
    # this happens after the album metadata processor in picard
    self.tracks = []
    for medium_node in release_node.medium_list[0].medium:
        medium_metadata = Metadata()
        medium_metadata.copy(album._new_metadata)
        medium_to_metadata(medium_node, medium_metadata)
        for track_node in medium_node.track_list[0].track:
            track = Track(track_node.recording[0].id, album)
            self.tracks.append(track)
            # Get track metadata
            track_metadata = track.metadata
            track_metadata.copy(medium_metadata)
            self._track_to_metadata(track_node, track)
            track._customize_metadata()
def test_delete_complex_tags(self):
    """Deleting totaldiscs removes it on save (m4a stores a literal '0' instead)."""
    metadata = Metadata()
    for tag, val in self.tags.items():
        metadata[tag] = val
    before = save_and_load_metadata(self.filename, metadata)
    metadata.delete('totaldiscs')
    after = save_and_load_metadata(self.filename, metadata)
    self.assertIn('totaldiscs', before)
    if self.testfile_ext == '.m4a':
        self.assertEqual(u'0', after['totaldiscs'])
    else:
        self.assertNotIn('totaldiscs', after)
def test_delete_performer(self):
    """Deleting one performer:<role> tag leaves the other performer roles intact."""
    # Only meaningful for formats whose fixture tags include performer roles.
    if 'performer:guest vocal' not in self.tags:
        return
    metadata = Metadata()
    for tag, val in self.tags.items():
        metadata[tag] = val
    metadata['performer:piano'] = 'Foo'
    before = save_and_load_metadata(self.filename, metadata)
    metadata.delete('performer:piano')
    after = save_and_load_metadata(self.filename, metadata)
    self.assertIn('performer:guest vocal', before)
    self.assertIn('performer:guest vocal', after)
    self.assertIn('performer:piano', before)
    self.assertNotIn('performer:piano', after)
def __init__(self, filename):
    """Initialize a pending, unparented File item for *filename*."""
    super(File, self).__init__()
    self.filename = filename
    self.base_filename = os.path.basename(filename)
    self._state = File.UNDEFINED
    self.state = File.PENDING
    self.error = None
    # Tags as read from disk vs. the (editable) working copy.
    self.orig_metadata = Metadata()
    self.metadata = Metadata()
    self.similarity = 1.0
    self.parent = self.lookup_task = self.item = None
def _load(self, lines):
    """Parse cue-sheet lines into Metadata, collecting INDEX entries separately."""
    metadata = Metadata()
    metadata.copy(self.metadata)
    # The sheet-level "title" is really the album title.
    metadata.add("album", self.metadata.get("title"))
    del metadata["title"]
    for line in lines:
        tokens = line.split()  # linetokv?
        key = tokens[0]
        value = " ".join(tokens[1:])
        if key == "INDEX":
            index, value = value.split()
            self.indexes[index] = value
        elif key != "TRACK":
            self.kv_to_metadata(key, value, metadata)
    return metadata
def test_license_single_url(self):
    """A single license URL round-trips and is stored in the WCOP frame."""
    metadata = Metadata({'license': 'http://example.com'})
    reloaded = save_and_load_metadata(self.filename, metadata)
    self.assertEqual(metadata['license'], reloaded['license'])
    raw = load_raw(self.filename)
    self.assertEqual(metadata['license'], raw['WCOP'])
def test_recording(self):
    """recording_to_metadata() on this fixture must leave the metadata empty."""
    meta = Metadata()
    track = Track("1")
    recording_to_metadata(parse_recording(self.json_doc), meta, track)
    self.assertEqual(meta, {})
def _finalize_loading(self, error):
    """Finish loading the album once all web requests have returned.

    Builds Track objects from the release node, runs the user's tagger
    scripts, swaps the newly built metadata/tracks into place and
    re-matches any unmatched files.  On *error* the album is marked failed.
    """
    if error:
        self.metadata.clear()
        self.status = _("[could not load album %s]") % self.id
        del self._new_metadata
        del self._new_tracks
        self.update()
        return
    # Wait until every outstanding request has completed.
    if self._requests > 0:
        return
    if not self._tracks_loaded:
        artists = set()
        all_media = []
        absolutetracknumber = 0
        va = self._new_metadata[
            'musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID
        djmix_ars = {}
        if hasattr(self._new_metadata, "_djmix_ars"):
            djmix_ars = self._new_metadata._djmix_ars
        for medium_node in self._release_node['media']:
            # Per-medium metadata, layered on top of the album metadata.
            mm = Metadata()
            mm.copy(self._new_metadata)
            medium_to_metadata(medium_node, mm)
            discpregap = False
            format = medium_node.get('format')
            if format:
                all_media.append(format)
            for dj in djmix_ars.get(mm["discnumber"], []):
                mm.add("djmixer", dj)
            if 'discs' in medium_node:
                discids = [disc.get('id') for disc in medium_node['discs']]
                mm['~musicbrainz_discids'] = discids
                mm['musicbrainz_discid'] = list(
                    self._discids.intersection(discids))
            if "pregap" in medium_node:
                discpregap = True
                absolutetracknumber += 1
                track = self._finalize_loading_track(
                    medium_node['pregap'], mm, artists, va,
                    absolutetracknumber, discpregap)
                track.metadata['~pregap'] = "1"
            track_count = medium_node['track-count']
            if track_count:
                tracklist_node = medium_node['tracks']
                for track_node in tracklist_node:
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(
                        track_node, mm, artists, va,
                        absolutetracknumber, discpregap)
            if "data-tracks" in medium_node:
                for track_node in medium_node['data-tracks']:
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(
                        track_node, mm, artists, va,
                        absolutetracknumber, discpregap)
                    track.metadata['~datatrack'] = "1"
        totalalbumtracks = absolutetracknumber
        self._new_metadata['~totalalbumtracks'] = totalalbumtracks
        # Generate a list of unique media, but keep order of first appearance
        self._new_metadata['media'] = " / ".join(
            list(OrderedDict.fromkeys(all_media)))
        for track in self._new_tracks:
            track.metadata["~totalalbumtracks"] = totalalbumtracks
            if len(artists) > 1:
                track.metadata["~multiartist"] = "1"
        del self._release_node
        del self._release_artist_nodes
        self._tracks_loaded = True
    if not self._requests:
        # Suspend image updates while scripts rewrite track metadata.
        self.enable_update_metadata_images(False)
        # Prepare parser for user's script
        for s_name, s_text in enabled_tagger_scripts_texts():
            parser = ScriptParser()
            for track in self._new_tracks:
                # Run tagger script for each track
                try:
                    parser.eval(s_text, track.metadata)
                except ScriptError:
                    log.exception(
                        "Failed to run tagger script %s on track", s_name)
                track.metadata.strip_whitespace()
            # Run tagger script for the album itself
            try:
                parser.eval(s_text, self._new_metadata)
            except ScriptError:
                log.exception("Failed to run tagger script %s on album", s_name)
            self._new_metadata.strip_whitespace()
        # Detach files from the old tracks before swapping in the new ones.
        for track in self.tracks:
            track.metadata_images_changed.connect(
                self.update_metadata_images)
            for file in list(track.linked_files):
                file.move(self.unmatched_files)
        self.metadata = self._new_metadata
        self.orig_metadata.copy(self.metadata)
        self.tracks = self._new_tracks
        del self._new_metadata
        del self._new_tracks
        self.loaded = True
        self.status = None
        self.match_files(self.unmatched_files.files)
        self.enable_update_metadata_images(True)
        self.update()
        self.tagger.window.set_statusbar_message(
            N_('Album %(id)s loaded: %(artist)s - %(album)s'),
            {
                'id': self.id,
                'artist': self.metadata['albumartist'],
                'album': self.metadata['album']
            },
            timeout=3000)
        for func in self._after_load_callbacks:
            func()
        self._after_load_callbacks = []
        if self.item.isSelected():
            self.tagger.window.refresh_metadatabox()
def test_cmd_unset_prefix(self):
    """$unset(_name) maps the leading underscore to '~' and removes the hidden tag."""
    meta = Metadata()
    meta['title'] = u'Foo'
    meta['~rating'] = u'4'
    self.parser.eval("$unset(_rating)", meta)
    self.assertNotIn('~rating', meta)
def test_cmd_copymerge_nosource(self):
    """$copymerge with a missing source must leave the target tag unchanged."""
    ctx = Metadata()
    expected = ["tag1", "tag2"]
    ctx["target"] = expected
    self._eval_and_check_copymerge(ctx, expected)
class File(QtCore.QObject, Item): metadata_images_changed = QtCore.pyqtSignal() UNDEFINED = -1 PENDING = 0 NORMAL = 1 CHANGED = 2 ERROR = 3 REMOVED = 4 comparison_weights = { "title": 13, "artist": 4, "album": 5, "length": 10, "totaltracks": 4, "releasetype": 20, "releasecountry": 2, "format": 2, } def __init__(self, filename): super().__init__() self.filename = filename self.base_filename = os.path.basename(filename) self._state = File.UNDEFINED self.state = File.PENDING self.error = None self.orig_metadata = Metadata() self.metadata = Metadata() self.similarity = 1.0 self.parent = None self.lookup_task = None self.item = None def __repr__(self): return '<File %r>' % self.base_filename @property def new_metadata(self): return self.metadata def load(self, callback): thread.run_task(partial(self._load_check, self.filename), partial(self._loading_finished, callback), priority=1) def _load_check(self, filename): # Check that file has not been removed since thread was queued # Don't load if we are stopping. if self.state != File.PENDING: log.debug("File not loaded because it was removed: %r", self.filename) return None if self.tagger.stopping: log.debug("File not loaded because %s is stopping: %r", PICARD_APP_NAME, self.filename) return None return self._load(filename) def _load(self, filename): """Load metadata from the file.""" raise NotImplementedError def _loading_finished(self, callback, result=None, error=None): if self.state != File.PENDING or self.tagger.stopping: return if error is not None: self.error = string_(error) self.state = self.ERROR from picard.formats import supported_extensions file_name, file_extension = os.path.splitext(self.base_filename) if file_extension not in supported_extensions(): self.remove() log.error( 'Unsupported media file %r wrongly loaded. 
Removing ...', self) return else: self.error = None self.state = self.NORMAL self._copy_loaded_metadata(result) self.update() callback(self) def _copy_loaded_metadata(self, metadata): filename, _ = os.path.splitext(self.base_filename) metadata['~length'] = format_time(metadata.length) if 'title' not in metadata: metadata['title'] = filename if 'tracknumber' not in metadata: tracknumber = tracknum_from_filename(self.base_filename) if tracknumber != -1: tracknumber = string_(tracknumber) metadata['tracknumber'] = tracknumber if metadata['title'] == filename: stripped_filename = filename.lstrip('0') tnlen = len(tracknumber) if stripped_filename[:tnlen] == tracknumber: metadata['title'] = stripped_filename[tnlen:].lstrip() self.orig_metadata = metadata self.metadata.copy(metadata) def copy_metadata(self, metadata): acoustid = self.metadata["acoustid_id"] preserve = config.setting["preserved_tags"].strip() saved_metadata = {} for tag in re.split(r"\s*,\s*", preserve) + PRESERVED_TAGS: values = self.orig_metadata.getall(tag) if values: saved_metadata[tag] = values deleted_tags = self.metadata.deleted_tags self.metadata.copy(metadata) self.metadata.deleted_tags = deleted_tags for tag, values in saved_metadata.items(): self.metadata.set(tag, values) if acoustid: self.metadata["acoustid_id"] = acoustid self.metadata_images_changed.emit() def keep_original_images(self): self.metadata.images = self.orig_metadata.images[:] self.update() self.metadata_images_changed.emit() def has_error(self): return self.state == File.ERROR def save(self): self.set_pending() metadata = Metadata() metadata.copy(self.metadata) thread.run_task(partial(self._save_and_rename, self.filename, metadata), self._saving_finished, priority=2, thread_pool=self.tagger.save_thread_pool) def _save_and_rename(self, old_filename, metadata): """Save the metadata.""" # Check that file has not been removed since thread was queued # Also don't save if we are stopping. 
if self.state == File.REMOVED: log.debug("File not saved because it was removed: %r", self.filename) return None if self.tagger.stopping: log.debug("File not saved because %s is stopping: %r", PICARD_APP_NAME, self.filename) return None new_filename = old_filename if not config.setting["dont_write_tags"]: encoded_old_filename = encode_filename(old_filename) info = os.stat(encoded_old_filename) self._save(old_filename, metadata) if config.setting["preserve_timestamps"]: try: os.utime(encoded_old_filename, (info.st_atime, info.st_mtime)) except OSError: log.warning("Couldn't preserve timestamp for %r", old_filename) # Rename files if config.setting["rename_files"] or config.setting["move_files"]: new_filename = self._rename(old_filename, metadata) # Move extra files (images, playlists, etc.) if config.setting["move_files"] and config.setting[ "move_additional_files"]: self._move_additional_files(old_filename, new_filename) # Delete empty directories if config.setting["delete_empty_dirs"]: dirname = encode_filename(os.path.dirname(old_filename)) try: self._rmdir(dirname) head, tail = os.path.split(dirname) if not tail: head, tail = os.path.split(head) while head and tail: try: self._rmdir(head) except: break head, tail = os.path.split(head) except EnvironmentError: pass # Save cover art images if config.setting["save_images_to_files"]: self._save_images(os.path.dirname(new_filename), metadata) return new_filename @staticmethod def _rmdir(path): junk_files = (".DS_Store", "desktop.ini", "Desktop.ini", "Thumbs.db") if not set(os.listdir(path)) - set(junk_files): shutil.rmtree(path, False) else: raise OSError def _saving_finished(self, result=None, error=None): # Handle file removed before save # Result is None if save was skipped if ((self.state == File.REMOVED or self.tagger.stopping) and result is None): return old_filename = new_filename = self.filename if error is not None: self.error = string_(error) self.set_state(File.ERROR, update=True) else: self.filename = 
new_filename = result self.base_filename = os.path.basename(new_filename) length = self.orig_metadata.length temp_info = {} for info in ('~bitrate', '~sample_rate', '~channels', '~bits_per_sample', '~format'): temp_info[info] = self.orig_metadata[info] # Data is copied from New to Original because New may be a subclass to handle id3v23 if config.setting["clear_existing_tags"]: self.orig_metadata.copy(self.new_metadata) else: self.orig_metadata.update(self.new_metadata) self.orig_metadata.length = length self.orig_metadata['~length'] = format_time(length) for k, v in temp_info.items(): self.orig_metadata[k] = v self.error = None # Force update to ensure file status icon changes immediately after save self.clear_pending(force_update=True) self._add_path_to_metadata(self.orig_metadata) self.metadata_images_changed.emit() if self.state != File.REMOVED: del self.tagger.files[old_filename] self.tagger.files[new_filename] = self if self.tagger.stopping: log.debug("Save of %r completed before stopping Picard", self.filename) def _save(self, filename, metadata): """Save the metadata.""" raise NotImplementedError def _script_to_filename(self, naming_format, file_metadata, settings=None): if settings is None: settings = config.setting metadata = Metadata() if config.setting["clear_existing_tags"]: metadata.copy(file_metadata) else: metadata.copy(self.orig_metadata) metadata.update(file_metadata) # make sure every metadata can safely be used in a path name for name in metadata.keys(): if isinstance(metadata[name], str): metadata[name] = sanitize_filename(metadata[name]) naming_format = naming_format.replace("\t", "").replace("\n", "") filename = ScriptParser().eval(naming_format, metadata, self) if settings["ascii_filenames"]: if isinstance(filename, str): filename = unaccent(filename) filename = replace_non_ascii(filename) # replace incompatible characters if settings["windows_compatibility"] or sys.platform == "win32": filename = replace_win32_incompat(filename) # remove 
null characters if isinstance(filename, (bytes, bytearray)): filename = filename.replace(b"\x00", "") return filename def _fixed_splitext(self, filename): # In case the filename is blank and only has the extension # the real extension is in new_filename and ext is blank new_filename, ext = os.path.splitext(filename) if ext == '' and new_filename.lower() in self.EXTENSIONS: ext = new_filename new_filename = '' return new_filename, ext def _make_filename(self, filename, metadata, settings=None): """Constructs file name based on metadata and file naming formats.""" if settings is None: settings = config.setting if settings["move_files"]: new_dirname = settings["move_files_to"] if not os.path.isabs(new_dirname): new_dirname = os.path.normpath( os.path.join(os.path.dirname(filename), new_dirname)) else: new_dirname = os.path.dirname(filename) new_filename = os.path.basename(filename) if settings["rename_files"]: new_filename, ext = self._fixed_splitext(new_filename) ext = ext.lower() new_filename = new_filename + ext # expand the naming format naming_format = settings['file_naming_format'] if len(naming_format) > 0: new_filename = self._script_to_filename( naming_format, metadata, settings) # NOTE: the _script_to_filename strips the extension away new_filename = new_filename + ext if not settings['move_files']: new_filename = os.path.basename(new_filename) new_filename = make_short_filename( new_dirname, new_filename, config.setting['windows_compatibility'], config.setting['windows_compatibility_drive_root']) # TODO: move following logic under util.filenaming # (and reconsider its necessity) # win32 compatibility fixes if settings['windows_compatibility'] or sys.platform == 'win32': new_filename = new_filename.replace('./', '_/').replace( '.\\', '_\\') # replace . 
at the beginning of file and directory names new_filename = new_filename.replace('/.', '/_').replace( '\\.', '\\_') if new_filename and new_filename[0] == '.': new_filename = '_' + new_filename[1:] # Fix for precomposed characters on OSX if sys.platform == "darwin": new_filename = unicodedata.normalize("NFD", new_filename) return os.path.realpath(os.path.join(new_dirname, new_filename)) def _rename(self, old_filename, metadata): new_filename, ext = os.path.splitext( self._make_filename(old_filename, metadata)) if old_filename == new_filename + ext: return old_filename new_dirname = os.path.dirname(new_filename) if not os.path.isdir(encode_filename(new_dirname)): os.makedirs(new_dirname) tmp_filename = new_filename i = 1 while (not pathcmp(old_filename, new_filename + ext) and os.path.exists(encode_filename(new_filename + ext))): new_filename = "%s (%d)" % (tmp_filename, i) i += 1 new_filename = new_filename + ext log.debug("Moving file %r => %r", old_filename, new_filename) shutil.move(encode_filename(old_filename), encode_filename(new_filename)) return new_filename def _save_images(self, dirname, metadata): """Save the cover images to disk.""" if not metadata.images: return counters = defaultdict(lambda: 0) images = [] if config.setting["caa_save_single_front_image"]: images = metadata.get_single_front_image() if not images: images = metadata.images for image in images: image.save(dirname, metadata, counters) def _move_additional_files(self, old_filename, new_filename): """Move extra files, like playlists...""" old_path = encode_filename(os.path.dirname(old_filename)) new_path = encode_filename(os.path.dirname(new_filename)) patterns = encode_filename( config.setting["move_additional_files_pattern"]) patterns = [string_(p.strip()) for p in patterns.split() if p.strip()] try: names = list(map(encode_filename, os.listdir(old_path))) except os.error: log.error("Error: {} directory not found".naming_format(old_path)) return filtered_names = [name for name in names if 
name[0] != "."] for pattern in patterns: pattern_regex = re.compile( encode_filename(fnmatch.translate(pattern)), re.IGNORECASE) file_names = names if pattern[0] != '.': file_names = filtered_names for old_file in file_names: if pattern_regex.match(old_file): new_file = os.path.join(new_path, old_file) old_file = os.path.join(old_path, old_file) # FIXME we shouldn't do this from a thread! if self.tagger.files.get(decode_filename(old_file)): log.debug("File loaded in the tagger, not moving %r", old_file) continue log.debug("Moving %r to %r", old_file, new_file) shutil.move(old_file, new_file) def remove(self, from_parent=True): if from_parent and self.parent: log.debug("Removing %r from %r", self, self.parent) self.parent.remove_file(self) self.tagger.acoustidmanager.remove(self) self.state = File.REMOVED def move(self, parent): if parent != self.parent: log.debug("Moving %r from %r to %r", self, self.parent, parent) self.clear_lookup_task() self.tagger._acoustid.stop_analyze(self) if self.parent: self.clear_pending() self.parent.remove_file(self) self.parent = parent self.parent.add_file(self) self.tagger.acoustidmanager.update( self, self.metadata['musicbrainz_recordingid']) def _move(self, parent): if parent != self.parent: log.debug("Moving %r from %r to %r", self, self.parent, parent) if self.parent: self.parent.remove_file(self) self.parent = parent self.tagger.acoustidmanager.update( self, self.metadata['musicbrainz_recordingid']) def supports_tag(self, name): """Returns whether tag ``name`` can be saved to the file.""" return True def is_saved(self): return self.similarity == 1.0 and self.state == File.NORMAL def update(self, signal=True): new_metadata = self.new_metadata names = set(new_metadata.keys()) names.update(self.orig_metadata.keys()) clear_existing_tags = config.setting["clear_existing_tags"] for name in names: if not name.startswith('~') and self.supports_tag(name): new_values = new_metadata.getall(name) if not (new_values or clear_existing_tags): 
continue orig_values = self.orig_metadata.getall(name) if orig_values != new_values: self.similarity = self.orig_metadata.compare(new_metadata) if self.state in (File.CHANGED, File.NORMAL): self.state = File.CHANGED break else: if (self.metadata.images and self.orig_metadata.images != self.metadata.images): self.state = File.CHANGED else: self.similarity = 1.0 if self.state in (File.CHANGED, File.NORMAL): self.state = File.NORMAL if signal: log.debug("Updating file %r", self) if self.item: self.item.update() def can_save(self): """Return if this object can be saved.""" return True def can_remove(self): """Return if this object can be removed.""" return True def can_edit_tags(self): """Return if this object supports tag editing.""" return True def can_analyze(self): """Return if this object can be fingerprinted.""" return True def can_autotag(self): return True def can_refresh(self): return False def can_view_info(self): return True def _info(self, metadata, file): if hasattr(file.info, 'length'): metadata.length = int(file.info.length * 1000) if hasattr(file.info, 'bitrate') and file.info.bitrate: metadata['~bitrate'] = file.info.bitrate / 1000.0 if hasattr(file.info, 'sample_rate') and file.info.sample_rate: metadata['~sample_rate'] = file.info.sample_rate if hasattr(file.info, 'channels') and file.info.channels: metadata['~channels'] = file.info.channels if hasattr(file.info, 'bits_per_sample') and file.info.bits_per_sample: metadata['~bits_per_sample'] = file.info.bits_per_sample metadata['~format'] = self.__class__.__name__.replace('File', '') self._add_path_to_metadata(metadata) def _add_path_to_metadata(self, metadata): metadata['~dirname'] = os.path.dirname(self.filename) filename, extension = os.path.splitext(os.path.basename(self.filename)) metadata['~filename'] = filename metadata['~extension'] = extension.lower()[1:] def get_state(self): return self._state # in order to significantly speed up performance, the number of pending # files is cached 
num_pending_files = 0 def set_state(self, state, update=False): if state != self._state: if state == File.PENDING: File.num_pending_files += 1 elif self._state == File.PENDING: File.num_pending_files -= 1 self._state = state if update: self.update() self.tagger.tagger_stats_changed.emit() state = property(get_state, set_state) def column(self, column): m = self.metadata if column == "title" and not m["title"]: return self.base_filename return m[column] def _lookup_finished(self, lookuptype, document, http, error): self.lookup_task = None if self.state == File.REMOVED: return if error: log.error( "Network error encountered during the lookup for %s. Error code: %s", self.filename, error) try: if lookuptype == "metadata": tracks = document['recordings'] elif lookuptype == "acoustid": tracks = document['recordings'] except (KeyError, TypeError): tracks = None # no matches if not tracks: self.tagger.window.set_statusbar_message( N_("No matching tracks for file '%(filename)s'"), {'filename': self.filename}, timeout=3000) self.clear_pending() return # multiple matches -- calculate similarities to each of them match = sorted( (self.metadata.compare_to_track(track, self.comparison_weights) for track in tracks), reverse=True, key=itemgetter(0))[0] if lookuptype != 'acoustid' and match[0] < config.setting[ 'file_lookup_threshold']: self.tagger.window.set_statusbar_message(N_( "No matching tracks above the threshold for file '%(filename)s'" ), {'filename': self.filename}, timeout=3000) self.clear_pending() return self.tagger.window.set_statusbar_message( N_("File '%(filename)s' identified!"), {'filename': self.filename}, timeout=3000) self.clear_pending() rg, release, track = match[1:] if lookuptype == 'acoustid': self.tagger.acoustidmanager.add(self, track['id']) if release: self.tagger.get_release_group_by_id(rg['id']).loaded_albums.add( release['id']) self.tagger.move_file_to_track(self, release['id'], track['id']) else: self.tagger.move_file_to_nat(self, track['id'], 
node=track) def lookup_metadata(self): """Try to identify the file using the existing metadata.""" if self.lookup_task: return self.tagger.window.set_statusbar_message( N_("Looking up the metadata for file %(filename)s ..."), {'filename': self.filename}) self.clear_lookup_task() metadata = self.metadata self.set_pending() self.lookup_task = self.tagger.mb_api.find_tracks( partial(self._lookup_finished, 'metadata'), track=metadata['title'], artist=metadata['artist'], release=metadata['album'], tnum=metadata['tracknumber'], tracks=metadata['totaltracks'], qdur=string_(metadata.length // 2000), isrc=metadata['isrc'], limit=QUERY_LIMIT) def clear_lookup_task(self): if self.lookup_task: self.tagger.webservice.remove_task(self.lookup_task) self.lookup_task = None def set_pending(self): if self.state != File.REMOVED: self.state = File.PENDING self.update() def clear_pending(self, force_update=False): if self.state == File.PENDING: self.state = File.NORMAL self.update() elif force_update: self.update() def iterfiles(self, save=False): yield self def _get_tracknumber(self): try: return self.metadata["tracknumber"] except: return 0 tracknumber = property(_get_tracknumber, doc="The track number as an int.") def _get_discnumber(self): try: return self.metadata["discnumber"] except: return 0 discnumber = property(_get_discnumber, doc="The disc number as an int.")
def test_cmd_setmulti_empty_splitter_does_nothing(self):
    """An empty separator argument must leave the value unsplit."""
    metadata = Metadata()
    # $setmulti has no return value, so eval yields the empty string.
    result = self.parser.eval("$setmulti(test,multi; valued,)", metadata)
    self.assertEqual("", result)
    self.assertEqual(["multi; valued"], metadata.getall("test"))
def test_lyrics_with_description(self):
    """Lyrics tags carrying a description suffix (``lyrics:foo``) must
    survive a save/load round trip unchanged.

    Renamed from the misspelled ``test_lyrcis_with_description``; the
    ``test_`` prefix keeps it discovered by the test runner.
    """
    metadata = Metadata({'lyrics:foo': 'bar'})
    loaded_metadata = save_and_load_metadata(self.filename, metadata)
    self.assertEqual(metadata['lyrics:foo'], loaded_metadata['lyrics:foo'])
class MetadataTest(unittest.TestCase):
    """Unit tests for the Metadata multi-value mapping: set/get/delete
    semantics, deleted-tag tracking, update/clear, apply_func and the
    length_score similarity helper."""

    original = None
    tags = []

    def setUp(self):
        # Fresh settings and a fixture populated through every write API:
        # __setitem__, add_unique, add and set.
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        self.metadata.add_unique("single2", "single2-value")
        # Second add_unique of the same value must be a no-op.
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        self.metadata["~hidden"] = "hidden-value"

    def tearDown(self):
        pass

    def test_metadata_setitem(self):
        # Raw dict storage always holds lists, even for single values.
        self.assertEqual(["single1-value"], dict.get(self.metadata, "single1"))
        self.assertEqual(["single2-value"], dict.get(self.metadata, "single2"))
        self.assertEqual(self.multi1, dict.get(self.metadata, "multi1"))
        self.assertEqual(self.multi2, dict.get(self.metadata, "multi2"))
        self.assertEqual(self.multi3, dict.get(self.metadata, "multi3"))
        self.assertEqual(["hidden-value"], dict.get(self.metadata, "~hidden"))

    def test_metadata_get(self):
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        # Multi-value reads join with MULTI_VALUED_JOINER.
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1),
                         self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1),
                         self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        # Missing keys: "" via [], None via get(), [] via getall().
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        # items() flattens each value list into (key, value) pairs.
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata)
                          for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

    def test_metadata_implicit_delete(self):
        # Assigning "" to an existing tag records a deletion ...
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)
        # ... but assigning "" to an unknown tag does not.
        self.metadata["unknown"] = ""
        self.assertNotIn("unknown", self.metadata)
        self.assertNotIn("unknown", self.metadata.deleted_tags)

    def test_metadata_set_explicit_empty(self):
        # set() with an empty list re-creates the tag and undoes deletion.
        self.metadata.delete("single1")
        self.metadata.set("single1", [])
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)
        self.assertEqual([], self.metadata.getall("single1"))

    def test_metadata_undelete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        # Writing a real value again removes the deletion marker.
        self.metadata["single1"] = "value1"
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_update(self):
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        # update() must propagate both values and deletions.
        m.update(self.metadata)
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.metadata["old"] = "old-value"
        # After syncing "old", both mappings must agree in both directions.
        for (key, value) in dict.items(self.metadata):
            self.assertIn(key, m)
            self.assertEqual(value, dict.get(m, key))
        for (key, value) in dict.items(m):
            self.assertIn(key, self.metadata)
            self.assertEqual(value, dict.get(self.metadata, key))

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_clear_deleted(self):
        self.metadata.delete("single1")
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata.clear_deleted()
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_applyfunc(self):
        # apply_func maps every stored value; func drops the first char.
        def func(x):
            return x[1:]
        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)),
                         self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)),
                         self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)),
                         self.metadata.getall("multi1"))
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertEqual(dict.items(self.metadata), self.metadata.rawitems())
        metadata_items = [(x, z) for (x, y) in dict.items(self.metadata)
                          for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_length_score(self):
        # (length_a, length_b, expected similarity in [0, 1])
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))
def _load(self, filename):
    """Read a Vorbis-comment based file (Ogg/FLAC family) into a Metadata
    object: translate tag names, normalize dates/performers/ratings, and
    collect embedded cover art from metadata_block_picture, FLAC picture
    blocks and legacy COVERART tags."""
    log.debug("Loading file %r", filename)
    file = self._File(encode_filename(filename))
    file.tags = file.tags or {}
    metadata = Metadata()
    for origname, values in file.tags.items():
        for value in values:
            name = origname
            if name == "date" or name == "originaldate":
                # YYYY-00-00 => YYYY
                value = sanitize_date(value)
            elif name == 'performer' or name == 'comment':
                # transform "performer=Joe Barr (Piano)" to "performer:Piano=Joe Barr"
                name += ':'
                if value.endswith(')'):
                    # Scan backwards, balancing parentheses, to find the
                    # opening '(' of the trailing role annotation.
                    start = len(value) - 2
                    count = 1
                    while count > 0 and start > 0:
                        if value[start] == ')':
                            count += 1
                        elif value[start] == '(':
                            count -= 1
                        start -= 1
                    if start > 0:
                        name += value[start + 2:-1]
                        value = value[:start]
            elif name.startswith('rating'):
                # Only import the rating stored for the configured user.
                try:
                    name, email = name.split(':', 1)
                except ValueError:
                    email = ''
                if email != config.setting['rating_user_email']:
                    continue
                name = '~rating'
                # Stored as a 0..1 float; rescale to 0..rating_steps-1.
                value = unicode(
                    int(
                        round((float(value) *
                               (config.setting['rating_steps'] - 1)))))
            elif name == "fingerprint" and value.startswith(
                    "MusicMagic Fingerprint"):
                name = "musicip_fingerprint"
                value = value[22:]
            elif name == "tracktotal":
                # Prefer an explicit totaltracks tag if both are present.
                if "totaltracks" in file.tags:
                    continue
                name = "totaltracks"
            elif name == "disctotal":
                if "totaldiscs" in file.tags:
                    continue
                name = "totaldiscs"
            elif name == "metadata_block_picture":
                # Base64-encoded FLAC Picture structure embedded in a tag.
                image = mutagen.flac.Picture(
                    base64.standard_b64decode(value))
                try:
                    coverartimage = TagCoverArtImage(
                        file=filename,
                        tag=name,
                        types=types_from_id3(image.type),
                        comment=image.desc,
                        support_types=True,
                        data=image.data,
                    )
                except CoverArtImageError as e:
                    log.error('Cannot load image from %r: %s' %
                              (filename, e))
                else:
                    metadata.append_image(coverartimage)
                continue
            elif name in self.__translate:
                name = self.__translate[name]
            metadata.add(name, value)
    if self._File == mutagen.flac.FLAC:
        # Native FLAC picture blocks live outside the Vorbis comments.
        for image in file.pictures:
            try:
                coverartimage = TagCoverArtImage(
                    file=filename,
                    tag='FLAC/PICTURE',
                    types=types_from_id3(image.type),
                    comment=image.desc,
                    support_types=True,
                    data=image.data,
                )
            except CoverArtImageError as e:
                log.error('Cannot load image from %r: %s' % (filename, e))
            else:
                metadata.append_image(coverartimage)
    # Read the unofficial COVERART tags, for backward compatibility only
    # (idiom fix: was "if not ... in ...").
    if "metadata_block_picture" not in file.tags:
        try:
            for data in file["COVERART"]:
                try:
                    coverartimage = TagCoverArtImage(
                        file=filename,
                        tag='COVERART',
                        data=base64.standard_b64decode(data))
                except CoverArtImageError as e:
                    log.error('Cannot load image from %r: %s' %
                              (filename, e))
                else:
                    metadata.append_image(coverartimage)
        except KeyError:
            pass
    self._info(metadata, file)
    return metadata
class Album(DataObject, Item):
    """A MusicBrainz release being loaded/tagged: owns its tracks, an
    "Unmatched Files" cluster, and the asynchronous release-loading state
    machine (_requests counter, _new_metadata/_new_tracks staging)."""

    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, album_id, discid=None):
        DataObject.__init__(self, album_id)
        self.metadata = Metadata()
        self.orig_metadata = Metadata()
        self.tracks = []
        self.loaded = False
        self.load_task = None
        self.release_group = None
        self._files = 0           # files matched to tracks (not unmatched)
        self._requests = 0        # outstanding network requests
        self._tracks_loaded = False
        self._discid = discid
        self._after_load_callbacks = []
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True,
                                       related_album=self, hide_if_empty=True)
        self.errors = []
        self.status = None
        self._album_artists = []
        self.update_metadata_images_enabled = True

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata["album"])

    def iterfiles(self, save=False):
        # When saving, unmatched files are skipped.
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def enable_update_metadata_images(self, enabled):
        # Gate for update_metadata_images(); used to batch image updates.
        self.update_metadata_images_enabled = enabled

    def append_album_artist(self, album_artist_id):
        """Append artist id to the list of album artists and return an
        AlbumArtist instance"""
        album_artist = AlbumArtist(album_artist_id)
        self._album_artists.append(album_artist)
        return album_artist

    def get_album_artists(self):
        """Returns the list of album artists (as AlbumArtist objects)"""
        return self._album_artists

    def _parse_release(self, release_node):
        """Parse the release JSON node into staging metadata. Returns False
        when the release redirected to an album that is already loaded
        (this album is then removed), True otherwise."""
        log.debug("Loading release %r ...", self.id)
        self._tracks_loaded = False
        release_id = release_node['id']
        if release_id != self.id:
            # The requested MBID was merged/redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_id
            album = self.tagger.albums.get(release_id)
            if album:
                log.debug("Release %r already loaded", release_id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                del self.tagger.albums[self.id]
                self.tagger.albums[release_id] = self
                self.id = release_id
        # Get release metadata
        m = self._new_metadata
        m.length = 0
        rg_node = release_node['release-group']
        rg = self.release_group = self.tagger.get_release_group_by_id(
            rg_node['id'])
        rg.loaded_albums.add(self.id)
        rg.refcount += 1
        release_group_to_metadata(rg_node, rg.metadata, rg)
        m.copy(rg.metadata)
        release_to_metadata(release_node, m, album=self)
        if self._discid:
            m['musicbrainz_discid'] = self._discid
        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = config.setting['va_name']
        # Convert Unicode punctuation
        if config.setting['convert_punctuation']:
            m.apply_func(asciipunct)
        m['totaldiscs'] = len(release_node['media'])
        # Add album to collections
        add_release_to_user_collections(release_node)
        # Run album metadata plugins
        try:
            run_album_metadata_processors(self, m, release_node)
        except:
            self.error_append(traceback.format_exc())
        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Network callback for the release request; parses on success,
        salvages NAT files on 404, and always finalizes loading."""
        if self.load_task is None:
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.error_append(http.errorString())
                # Fix for broken NAT releases
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        recordingid = file.metadata["musicbrainz_recordingid"]
                        if mbid_validate(recordingid) and file.metadata[
                                "album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, recordingid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except Exception:
                    error = True
                    self.error_append(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)
        # does http need to be set to None to free the memory used by the network response?
        # http://qt-project.org/doc/qt-5/qnetworkaccessmanager.html says:
        # After the request has finished, it is the responsibility of the user
        # to delete the QNetworkReply object at an appropriate time.
        # Do not directly delete it inside the slot connected to finished().
        # You can use the deleteLater() function.

    def error_append(self, msg):
        # Log and remember the error for later display.
        log.error(msg)
        self.errors.append(msg)

    def _finalize_loading(self, error):
        """Finish loading once all requests are done: build Track objects
        per medium (pregap/tracks/data tracks), run tagger scripts, then
        swap the staged metadata/tracks in and re-match files."""
        if error:
            self.metadata.clear()
            self.status = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return
        if self._requests > 0:
            return
        if not self._tracks_loaded:
            artists = set()
            totalalbumtracks = 0
            absolutetracknumber = 0
            va = self._new_metadata[
                'musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID
            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                djmix_ars = self._new_metadata._djmix_ars
            for medium_node in self._release_node['media']:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                discpregap = False
                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)
                if "pregap" in medium_node:
                    discpregap = True
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(
                        medium_node['pregap'], mm, artists, va,
                        absolutetracknumber, discpregap)
                    track.metadata['~pregap'] = "1"
                track_count = medium_node['track-count']
                if track_count:
                    tracklist_node = medium_node['tracks']
                    for track_node in tracklist_node:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(
                            track_node, mm, artists, va,
                            absolutetracknumber, discpregap)
                if "data-tracks" in medium_node:
                    for track_node in medium_node['data-tracks']:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(
                            track_node, mm, artists, va,
                            absolutetracknumber, discpregap)
                        track.metadata['~datatrack'] = "1"
            totalalbumtracks = string_(absolutetracknumber)
            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    track.metadata["~multiartist"] = "1"
            del self._release_node
            self._tracks_loaded = True
        if not self._requests:
            # Suspend image updates while tracks/files are shuffled around.
            self.enable_update_metadata_images(False)
            # Prepare parser for user's script
            if config.setting["enable_tagger_scripts"]:
                for s_pos, s_name, s_enabled, s_text in config.setting[
                        "list_of_scripts"]:
                    if s_enabled and s_text:
                        parser = ScriptParser()
                        for track in self._new_tracks:
                            # Run tagger script for each track
                            try:
                                parser.eval(s_text, track.metadata)
                            except:
                                self.error_append(traceback.format_exc())
                            # Strip leading/trailing whitespace
                            track.metadata.strip_whitespace()
                        # Run tagger script for the album itself
                        try:
                            parser.eval(s_text, self._new_metadata)
                        except:
                            self.error_append(traceback.format_exc())
                        self._new_metadata.strip_whitespace()
            for track in self.tracks:
                track.metadata_images_changed.connect(
                    self.update_metadata_images)
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.status = None
            self.match_files(self.unmatched_files.files)
            self.enable_update_metadata_images(True)
            self.update()
            self.tagger.window.set_statusbar_message(
                N_('Album %(id)s loaded: %(artist)s - %(album)s'), {
                    'id': self.id,
                    'artist': self.metadata['albumartist'],
                    'album': self.metadata['album']
                },
                timeout=3000)
            for func in self._after_load_callbacks:
                func()
            self._after_load_callbacks = []

    def _finalize_loading_track(self, track_node, metadata, artists, va,
                                absolutetracknumber, discpregap):
        """Create a Track from its JSON node, seed its metadata from the
        medium-level metadata and run track plugins. Mutates artists and
        the staged album length as side effects."""
        track = Track(track_node['recording']['id'], self)
        self._new_tracks.append(track)
        # Get track metadata
        tm = track.metadata
        tm.copy(metadata)
        track_to_metadata(track_node, track)
        # NOTE(review): stores the int directly; other tags are strings —
        # presumably consumers coerce, verify against Metadata usage.
        track.metadata["~absolutetracknumber"] = absolutetracknumber
        track._customize_metadata()
        self._new_metadata.length += tm.length
        artists.add(tm["artist"])
        if va:
            tm["compilation"] = "1"
        if discpregap:
            tm["~discpregap"] = "1"
        # Run track metadata plugins
        try:
            run_track_metadata_processors(self, tm, self._release_node,
                                          track_node)
        except:
            self.error_append(traceback.format_exc())
        return track

    def load(self, priority=False, refresh=False):
        """Start (re)loading this release from the MusicBrainz web service,
        building the inc= parameter list from the user's settings."""
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message(
            N_('Loading album %(id)s ...'), {'id': self.id})
        self.loaded = False
        self.status = _("[loading album information]")
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.folksonomy_tags.clear()
        self.metadata.clear()
        self.folksonomy_tags.clear()
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        self.errors = []
        require_authentication = False
        inc = [
            'release-groups', 'media', 'recordings', 'artist-credits',
            'artists', 'aliases', 'labels', 'isrcs', 'collections'
        ]
        if config.setting['release_ars'] or config.setting['track_ars']:
            inc += [
                'artist-rels', 'release-rels', 'url-rels', 'recording-rels',
                'work-rels'
            ]
            if config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        if config.setting['folksonomy_tags']:
            if config.setting['only_my_tags']:
                require_authentication = True
                inc += ['user-tags']
            else:
                inc += ['tags']
        if config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.mb_api.get_release_by_id(
            self.id,
            self._release_request_finished,
            inc=inc,
            mblogin=require_authentication,
            priority=priority,
            refresh=refresh)

    def run_when_loaded(self, func):
        # Invoke immediately if loaded, otherwise defer to _finalize_loading.
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.append(func)

    def stop_loading(self):
        if self.load_task:
            self.tagger.webservice.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        # Called by Track when a file is linked to it.
        self._files += 1
        self.update(update_tracks=False)
        file.metadata_images_changed.connect(self.update_metadata_images)
        self.update_metadata_images()

    def _remove_file(self, track, file):
        # Called by Track when a file is unlinked from it.
        self._files -= 1
        self.update(update_tracks=False)
        file.metadata_images_changed.disconnect(self.update_metadata_images)
        self.update_metadata_images()

    def match_files(self, files, use_recordingid=True):
        """Match files to tracks on this album, based on metadata
        similarity or recordingid."""
        for file in list(files):
            if file.state == File.REMOVED:
                continue
            matches = []
            recordingid = file.metadata['musicbrainz_recordingid']
            if use_recordingid and mbid_validate(recordingid):
                matches = self._get_recordingid_matches(file, recordingid)
            if not matches:
                for track in self.tracks:
                    sim = track.metadata.compare(file.orig_metadata)
                    if sim >= config.setting['track_matching_threshold']:
                        matches.append((sim, track))
            if matches:
                matches.sort(key=itemgetter(0), reverse=True)
                file.move(matches[0][1])
            else:
                file.move(self.unmatched_files)

    def match_file(self, file, recordingid=None):
        """Match the file on a track on this album, based on recordingid
        or metadata similarity."""
        if file.state == File.REMOVED:
            return
        if recordingid is not None:
            matches = self._get_recordingid_matches(file, recordingid)
            if matches:
                matches.sort(key=itemgetter(0), reverse=True)
                file.move(matches[0][1])
                return
        self.match_files([file], use_recordingid=False)

    def _get_recordingid_matches(self, file, recordingid):
        # Score: 4.0 = recording+track+disc number match (exact, stop),
        # 3.0 = recording+track number, 2.0 = recording id only.
        matches = []
        tracknumber = file.metadata['tracknumber']
        discnumber = file.metadata['discnumber']
        for track in self.tracks:
            tm = track.metadata
            if recordingid == tm['musicbrainz_recordingid']:
                if tracknumber == tm['tracknumber']:
                    if discnumber == tm['discnumber']:
                        matches.append((4.0, track))
                        break
                    else:
                        matches.append((3.0, track))
                else:
                    matches.append((2.0, track))
        return matches

    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def can_view_info(self):
        return (self.loaded and (self.metadata.images
                                 or self.orig_metadata.images)) or self.errors

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def get_num_total_files(self):
        return self._files + len(self.unmatched_files.files)

    def is_complete(self):
        # Complete = every track has a file and nothing is unmatched.
        if not self.tracks:
            return False
        for track in self.tracks:
            if not track.is_complete():
                return False
        if self.get_num_unmatched_files():
            return False
        else:
            return True

    def is_modified(self):
        if self.tracks:
            for track in self.tracks:
                for file in track.linked_files:
                    if not file.is_saved():
                        return True
        return False

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Render the album's value for a view column: 'title' with
        matched/unmatched/unsaved/image counts, '~length', or 'artist'."""
        if column == 'title':
            if self.status is not None:
                title = self.status
            else:
                title = self.metadata['album']
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E = left-to-right mark, keeps the count readable
                # next to RTL titles.
                text = '%s\u200E (%d/%d' % (title, linked_tracks,
                                            len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched, )
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved, )
                # CoverArt.set_metadata uses the orig_metadata.images if metadata.images is empty
                # in order to show existing cover art if there's no cover art for a release. So
                # we do the same here in order to show the number of images consistently.
                if self.metadata.images:
                    metadata = self.metadata
                else:
                    metadata = self.orig_metadata
                number_of_images = len(metadata.images)
                if getattr(metadata, 'has_common_images', True):
                    text += ngettext("; %i image", "; %i images",
                                     number_of_images) % number_of_images
                else:
                    text += ngettext("; %i image not in all tracks",
                                     "; %i different images among tracks",
                                     number_of_images) % number_of_images
                return text + ')'
            else:
                return title
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        else:
            return ''

    def switch_release_version(self, mbid):
        """Re-point this album at another release of the same group; files
        are parked in unmatched_files and re-matched after the reload."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            del self.tagger.albums[self.id]
            self.release_group.loaded_albums.discard(self.id)
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load(priority=True, refresh=True)

    def update_metadata_images(self):
        if not self.update_metadata_images_enabled:
            return
        update_metadata_images(self)
        self.update(False)

    def keep_original_images(self):
        # Batch the per-track/file image resets behind a single update.
        self.enable_update_metadata_images(False)
        for track in self.tracks:
            track.keep_original_images()
        for file in list(self.unmatched_files.files):
            file.keep_original_images()
        self.enable_update_metadata_images(True)
        self.update_metadata_images()
def _finalize_loading(self, error):
    """Finish loading this album once all web-service requests have returned.

    Called after each request completes; bails out early while requests are
    still outstanding (self._requests > 0).  On error, clears the metadata,
    sets an error status and discards the partially built state.  Otherwise it
    (1) materialises Track objects from the cached release node, then
    (2) runs the user's tagger scripts and swaps the new metadata/tracks in.
    """
    if error:
        # Loading failed: drop everything built so far and show the error.
        self.metadata.clear()
        self.status = _("[could not load album %s]") % self.id
        del self._new_metadata
        del self._new_tracks
        self.update()
        return
    if self._requests > 0:
        # More responses still pending; a later call will finalize.
        return
    if not self._tracks_loaded:
        artists = set()  # distinct track artists, to set ~multiartist below
        totalalbumtracks = 0
        absolutetracknumber = 0  # running track index across all media
        # Whether the album artist is the special Various Artists entity.
        va = self._new_metadata[
            'musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID

        # Per-disc DJ-mixer credits, attached by a metadata processor
        # (optional attribute — hence the hasattr check).
        djmix_ars = {}
        if hasattr(self._new_metadata, "_djmix_ars"):
            djmix_ars = self._new_metadata._djmix_ars

        for medium_node in self._release_node['media']:
            # Start each medium from a copy of the album-level metadata.
            mm = Metadata()
            mm.copy(self._new_metadata)
            medium_to_metadata(medium_node, mm)
            discpregap = False

            for dj in djmix_ars.get(mm["discnumber"], []):
                mm.add("djmixer", dj)

            if "pregap" in medium_node:
                # Hidden pregap track (track 0 on the disc).
                discpregap = True
                absolutetracknumber += 1
                track = self._finalize_loading_track(
                    medium_node['pregap'], mm, artists, va,
                    absolutetracknumber, discpregap)
                track.metadata['~pregap'] = "1"

            track_count = medium_node['track-count']
            if track_count:
                tracklist_node = medium_node['tracks']
                for track_node in tracklist_node:
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(
                        track_node, mm, artists, va,
                        absolutetracknumber, discpregap)

            if "data-tracks" in medium_node:
                # CD data tracks, marked so naming scripts can skip them.
                for track_node in medium_node['data-tracks']:
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(
                        track_node, mm, artists, va,
                        absolutetracknumber, discpregap)
                    track.metadata['~datatrack'] = "1"

        totalalbumtracks = string_(absolutetracknumber)

        for track in self._new_tracks:
            track.metadata["~totalalbumtracks"] = totalalbumtracks
            if len(artists) > 1:
                track.metadata["~multiartist"] = "1"
        # Release node no longer needed once tracks are built.
        del self._release_node
        self._tracks_loaded = True

    if not self._requests:
        # Suspend image bookkeeping while we rewire tracks and files.
        self.enable_update_metadata_images(False)
        # Prepare parser for user's script
        if config.setting["enable_tagger_scripts"]:
            for s_pos, s_name, s_enabled, s_text in config.setting[
                    "list_of_scripts"]:
                if s_enabled and s_text:
                    parser = ScriptParser()
                    for track in self._new_tracks:
                        # Run tagger script for each track
                        try:
                            parser.eval(s_text, track.metadata)
                        except:  # noqa: E722 — user scripts may raise anything; errors are surfaced via error_append
                            self.error_append(traceback.format_exc())
                        # Strip leading/trailing whitespace
                        track.metadata.strip_whitespace()
                    # Run tagger script for the album itself
                    try:
                        parser.eval(s_text, self._new_metadata)
                    except:  # noqa: E722 — see above
                        self.error_append(traceback.format_exc())
                    self._new_metadata.strip_whitespace()

        # Detach files from old tracks; match_files() below re-matches them.
        for track in self.tracks:
            track.metadata_images_changed.connect(
                self.update_metadata_images)
            for file in list(track.linked_files):
                file.move(self.unmatched_files)
        # Atomically swap in the freshly loaded state.
        self.metadata = self._new_metadata
        self.tracks = self._new_tracks
        del self._new_metadata
        del self._new_tracks
        self.loaded = True
        self.status = None
        self.match_files(self.unmatched_files.files)
        self.enable_update_metadata_images(True)
        self.update()
        self.tagger.window.set_statusbar_message(
            N_('Album %(id)s loaded: %(artist)s - %(album)s'), {
                'id': self.id,
                'artist': self.metadata['albumartist'],
                'album': self.metadata['album']
            },
            timeout=3000)
        # Run (and clear) any callbacks queued while loading was in progress.
        for func in self._after_load_callbacks:
            func()
        self._after_load_callbacks = []
def test_cmd_copymerge_nonlist(self):
    """$copymerge of two single-valued tags yields both values as a multi-value."""
    ctx = Metadata()
    for tag, value in (("target", "targetval"), ("source", "sourceval")):
        ctx[tag] = value
    self._eval_and_check_copymerge(ctx, ["targetval", "sourceval"])
class MetadataTest(PicardTestCase):
    """Unit tests for the Metadata container: tag get/set/delete semantics,
    copy/update merging, apply_func, comparison scoring and the dict-like
    mapping protocol (including deleted-tag tracking and attached images)."""

    original = None
    tags = []

    def setUp(self):
        """Build a fixture Metadata with single-, unique- and multi-valued
        tags plus three small mapping fixtures (metadata_d1..d3)."""
        super().setUp()
        config.setting = settings.copy()
        self.metadata = Metadata()
        self.metadata["single1"] = "single1-value"
        # add_unique twice with the same value must store it only once.
        self.metadata.add_unique("single2", "single2-value")
        self.metadata.add_unique("single2", "single2-value")
        self.multi1 = ["multi1-value", "multi1-value"]
        self.metadata.add("multi1", self.multi1[0])
        self.metadata.add("multi1", self.multi1[1])
        self.multi2 = ["multi2-value1", "multi2-value2"]
        self.metadata["multi2"] = self.multi2
        self.multi3 = ["multi3-value1", "multi3-value2"]
        self.metadata.set("multi3", self.multi3)
        # Leading "~" marks a hidden/internal tag.
        self.metadata["~hidden"] = "hidden-value"
        self.metadata_d1 = Metadata({
            'a': 'b',
            'c': 2,
            'd': ['x', 'y'],
            'x': ''
        })
        self.metadata_d2 = Metadata({
            'a': 'b',
            'c': 2,
            'd': ['x', 'y'],
            'x': 'z'
        })
        self.metadata_d3 = Metadata({'c': 3, 'd': ['u', 'w'], 'x': 'p'})

    def tearDown(self):
        pass

    def test_metadata_setitem(self):
        # getraw() always returns the stored list of values.
        self.assertEqual(["single1-value"], self.metadata.getraw("single1"))
        self.assertEqual(["single2-value"], self.metadata.getraw("single2"))
        self.assertEqual(self.multi1, self.metadata.getraw("multi1"))
        self.assertEqual(self.multi2, self.metadata.getraw("multi2"))
        self.assertEqual(self.multi3, self.metadata.getraw("multi3"))
        self.assertEqual(["hidden-value"], self.metadata.getraw("~hidden"))

    def test_metadata_get(self):
        """[] and get() join multi-values with MULTI_VALUED_JOINER;
        getall()/getraw() return lists; missing keys differ per accessor."""
        self.assertEqual("single1-value", self.metadata["single1"])
        self.assertEqual("single1-value", self.metadata.get("single1"))
        self.assertEqual(["single1-value"], self.metadata.getall("single1"))
        self.assertEqual(["single1-value"], self.metadata.getraw("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1),
                         self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(self.multi1),
                         self.metadata.get("multi1"))
        self.assertEqual(self.multi1, self.metadata.getall("multi1"))
        self.assertEqual(self.multi1, self.metadata.getraw("multi1"))
        # Missing keys: [] -> "", get() -> None, getall() -> [], getraw() raises.
        self.assertEqual("", self.metadata["nonexistent"])
        self.assertEqual(None, self.metadata.get("nonexistent"))
        self.assertEqual([], self.metadata.getall("nonexistent"))
        self.assertRaises(KeyError, self.metadata.getraw, "nonexistent")
        self.assertEqual(self.metadata._store.items(),
                         self.metadata.rawitems())
        # items() flattens multi-values into one (key, value) pair per value.
        metadata_items = [(x, z) for (x, y) in self.metadata.rawitems()
                          for z in y]
        self.assertEqual(metadata_items, list(self.metadata.items()))

    def test_metadata_delete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)

    def test_metadata_implicit_delete(self):
        # Assigning "" to an existing tag deletes it and records the deletion.
        self.metadata["single2"] = ""
        self.assertNotIn("single2", self.metadata)
        self.assertIn("single2", self.metadata.deleted_tags)
        # Assigning "" to an unknown tag is a no-op (not recorded as deleted).
        self.metadata["unknown"] = ""
        self.assertNotIn("unknown", self.metadata)
        self.assertNotIn("unknown", self.metadata.deleted_tags)

    def test_metadata_set_explicit_empty(self):
        # set() with an empty list re-creates the tag and undoes the deletion.
        self.metadata.delete("single1")
        self.metadata.set("single1", [])
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)
        self.assertEqual([], self.metadata.getall("single1"))

    def test_metadata_undelete(self):
        self.metadata.delete("single1")
        self.assertNotIn("single1", self.metadata)
        self.assertIn("single1", self.metadata.deleted_tags)
        # Re-assigning a deleted tag removes it from deleted_tags again.
        self.metadata["single1"] = "value1"
        self.assertIn("single1", self.metadata)
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_copy(self):
        """copy() fully replaces the target: store, deletions, length, images."""
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.copy(self.metadata)
        self.assertEqual(self.metadata._store, m._store)
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.assertEqual(self.metadata.length, m.length)
        self.assertEqual(self.metadata.images, m.images)

    def test_metadata_copy_without_images(self):
        m = Metadata()
        m.copy(self.metadata, copy_images=False)
        self.assertEqual(self.metadata._store, m._store)
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.assertEqual(self.metadata.length, m.length)
        # Images were not copied.
        self.assertEqual(ImageList(), m.images)

    def test_metadata_update(self):
        """update() merges: keeps existing tags, applies the source's
        deletions, and takes over the source's images."""
        m = Metadata()
        m["old"] = "old-value"
        self.metadata.delete("single1")
        m.update(self.metadata)
        self.assertIn("old", m)
        self.assertNotIn("single1", m)
        self.assertIn("single1", m.deleted_tags)
        self.assertEqual("single2-value", m["single2"])
        self.assertEqual(self.metadata.deleted_tags, m.deleted_tags)
        self.assertEqual(self.metadata.images, m.images)
        self.metadata["old"] = "old-value"
        self.assertEqual(self.metadata._store, m._store)

    def test_metadata_clear(self):
        self.metadata.clear()
        self.assertEqual(0, len(self.metadata))

    def test_metadata_clear_deleted(self):
        self.metadata.delete("single1")
        self.assertIn("single1", self.metadata.deleted_tags)
        self.metadata.clear_deleted()
        self.assertNotIn("single1", self.metadata.deleted_tags)

    def test_metadata_applyfunc(self):
        # apply_func maps every stored value through the function.
        def func(x):
            return x[1:]

        self.metadata.apply_func(func)
        self.assertEqual("ingle1-value", self.metadata["single1"])
        self.assertEqual("ingle1-value", self.metadata.get("single1"))
        self.assertEqual(["ingle1-value"], self.metadata.getall("single1"))
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)),
                         self.metadata["multi1"])
        self.assertEqual(MULTI_VALUED_JOINER.join(map(func, self.multi1)),
                         self.metadata.get("multi1"))
        self.assertEqual(list(map(func, self.multi1)),
                         self.metadata.getall("multi1"))

    def test_metadata_applyfunc_preserve_tags(self):
        # Tags listed in PRESERVED_TAGS must not be transformed.
        self.assertTrue(len(PRESERVED_TAGS) > 0)
        m = Metadata()
        m[PRESERVED_TAGS[0]] = 'value1'
        m['not_preserved'] = 'value2'

        def func(x):
            return x[1:]

        m.apply_func(func)
        self.assertEqual("value1", m[PRESERVED_TAGS[0]])
        self.assertEqual("alue2", m['not_preserved'])

    def test_length_score(self):
        # (length_a, length_b, expected_score); score decays linearly with the
        # length difference and bottoms out at 0.0.
        results = [(20000, 0, 0.333333333333),
                   (20000, 10000, 0.666666666667),
                   (20000, 20000, 1.0),
                   (20000, 30000, 0.666666666667),
                   (20000, 40000, 0.333333333333),
                   (20000, 50000, 0.0)]
        for (a, b, expected) in results:
            actual = Metadata.length_score(a, b)
            self.assertAlmostEqual(expected, actual,
                                   msg="a={a}, b={b}".format(a=a, b=b))

    def test_compare_is_equal(self):
        # compare() is symmetric and returns 1 for identical metadata.
        m1 = Metadata()
        m1["title"] = "title1"
        m1["tracknumber"] = "2"
        m1.length = 360
        m2 = Metadata()
        m2["title"] = "title1"
        m2["tracknumber"] = "2"
        m2.length = 360
        self.assertEqual(m1.compare(m2), m2.compare(m1))
        self.assertEqual(m1.compare(m2), 1)

    def test_compare_lengths(self):
        m1 = Metadata()
        m1.length = 360
        m2 = Metadata()
        m2.length = 300
        self.assertAlmostEqual(m1.compare(m2), 0.998)

    def test_compare_tracknumber_difference(self):
        m1 = Metadata()
        m1["tracknumber"] = "1"
        m2 = Metadata()
        m2["tracknumber"] = "2"
        self.assertEqual(m1.compare(m2), 0)

    def test_compare_deleted(self):
        # A tag deleted on one side lowers similarity below 1.
        m1 = Metadata()
        m1["artist"] = "TheArtist"
        m1["title"] = "title1"
        m2 = Metadata()
        m2["artist"] = "TheArtist"
        m2.delete("title")
        self.assertTrue(m1.compare(m2) < 1)

    def test_strip_whitespace(self):
        # \u00A0 is a non-breaking space; inner whitespace is preserved.
        m1 = Metadata()
        m1["artist"] = "  TheArtist  "
        m1["title"] = "\t\u00A0  tit le1 \r\n"
        m1.strip_whitespace()
        self.assertEqual(m1["artist"], "TheArtist")
        self.assertEqual(m1["title"], "tit le1")

    def test_metadata_mapping_init(self):
        d = {'a': 'b', 'c': 2, 'd': ['x', 'y'], 'x': '', 'z': {'u', 'w'}}
        deleted_tags = set('c')
        m = Metadata(d, deleted_tags=deleted_tags, length=1234)
        self.assertTrue('a' in m)
        self.assertEqual(m.getraw('a'), ['b'])
        self.assertEqual(m['d'], MULTI_VALUED_JOINER.join(d['d']))
        self.assertNotIn('c', m)
        # length is a constructor kwarg, not a tag.
        self.assertNotIn('length', m)
        self.assertIn('c', m.deleted_tags)
        self.assertEqual(m.length, 1234)

    def test_metadata_mapping_init_zero(self):
        # 0 is a real value (stringified); '' and None are dropped.
        m = Metadata(tag1='a', tag2=0, tag3='', tag4=None)
        m['tag5'] = 0
        m['tag1'] = ''
        self.assertIn('tag1', m.deleted_tags)
        self.assertEqual(m['tag2'], '0')
        self.assertNotIn('tag3', m)
        self.assertNotIn('tag4', m)
        self.assertEqual(m['tag5'], '0')

    def test_metadata_mapping_del(self):
        m = self.metadata_d1
        self.assertEqual(m.getraw('a'), ['b'])
        self.assertNotIn('a', m.deleted_tags)
        # 'x' was created empty, so it is absent but not "deleted".
        self.assertNotIn('x', m.deleted_tags)
        self.assertRaises(KeyError, m.getraw, 'x')
        del m['a']
        self.assertRaises(KeyError, m.getraw, 'a')
        self.assertIn('a', m.deleted_tags)
        # NOTE: historic behavior of Metadata.delete()
        # an attempt to delete an non-existing tag, will add it to the list
        # of deleted tags
        # so this will not raise a KeyError
        # as is it differs from dict or even defaultdict behavior
        del m['unknown']
        self.assertIn('unknown', m.deleted_tags)

    def test_metadata_mapping_iter(self):
        l = set(self.metadata_d1)
        self.assertEqual(l, {'a', 'c', 'd'})

    def test_metadata_mapping_keys(self):
        l = set(self.metadata_d1.keys())
        self.assertEqual(l, {'a', 'c', 'd'})

    def test_metadata_mapping_values(self):
        l = set(self.metadata_d1.values())
        self.assertEqual(l, {'b', '2', 'x; y'})

    def test_metadata_mapping_len(self):
        m = self.metadata_d1
        self.assertEqual(len(m), 3)
        # Deleting a non-stored tag ('x' was empty) does not change len.
        del m['x']
        self.assertEqual(len(m), 3)
        del m['c']
        self.assertEqual(len(m), 2)

    def _check_mapping_update(self, m):
        # Shared assertions for the update() variants below.
        self.assertEqual(m['a'], 'b')
        self.assertEqual(m['c'], '3')
        self.assertEqual(m.getraw('d'), ['u', 'w'])
        self.assertEqual(m['x'], '')
        self.assertIn('x', m.deleted_tags)

    def test_metadata_mapping_update(self):
        # update from Metadata
        m = self.metadata_d2
        m2 = self.metadata_d3
        del m2['x']
        m.update(m2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_dict(self):
        # update from dict
        m = self.metadata_d2
        d2 = {'c': 3, 'd': ['u', 'w'], 'x': ''}
        m.update(d2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_tuple(self):
        # update from tuple
        m = self.metadata_d2
        d2 = (('c', 3), ('d', ['u', 'w']), ('x', ''))
        m.update(d2)
        self._check_mapping_update(m)

    def test_metadata_mapping_update_dictlike(self):
        # update from kwargs
        m = self.metadata_d2
        m.update(c=3, d=['u', 'w'], x='')
        self._check_mapping_update(m)

    def test_metadata_mapping_update_noparam(self):
        # update without parameter
        m = self.metadata_d2
        self.assertRaises(TypeError, m.update)
        self.assertEqual(m['a'], 'b')

    def test_metadata_mapping_update_intparam(self):
        # update with a non-iterable parameter
        m = self.metadata_d2
        self.assertRaises(TypeError, m.update, 123)

    def test_metadata_mapping_update_strparam(self):
        # update with a plain string parameter
        m = self.metadata_d2
        self.assertRaises(ValueError, m.update, 'abc')

    def test_metadata_mapping_update_kw(self):
        m = Metadata(tag1='a', tag2='b')
        m.update(tag1='c')
        self.assertEqual(m['tag1'], 'c')
        self.assertEqual(m['tag2'], 'b')
        # Empty-string kwarg deletes the tag.
        m.update(tag2='')
        self.assertIn('tag2', m.deleted_tags)

    def test_metadata_mapping_update_kw_del(self):
        # Deletions on either side survive the merge correctly.
        m = Metadata(tag1='a', tag2='b')
        del m['tag1']
        m2 = Metadata(tag1='c', tag2='d')
        del m2['tag2']
        m.update(m2)
        self.assertEqual(m['tag1'], 'c')
        self.assertNotIn('tag2', m)
        self.assertNotIn('tag1', m.deleted_tags)
        self.assertIn('tag2', m.deleted_tags)

    def test_metadata_mapping_images(self):
        image1 = create_image(b'A', comment='A')
        image2 = create_image(b'B', comment='B')

        m1 = Metadata(a='b', length=1234, images=[image1])
        self.assertEqual(m1.images[0], image1)
        # len() counts tags plus images.
        self.assertEqual(len(m1), 2)  # one tag, one image

        m1.images.append(image2)
        self.assertEqual(m1.images[1], image2)

        m1.images.pop(0)
        self.assertEqual(m1.images[0], image2)

        m2 = Metadata(a='c', length=4567, images=[image1])
        m1.update(m2)
        # update() replaces the image list with the source's.
        self.assertEqual(m1.images[0], image1)

        m1.images.pop(0)
        self.assertEqual(len(m1), 1)  # one tag, zero image
        self.assertFalse(m1.images)

    def test_metadata_mapping_iterable(self):
        # Any iterable value is accepted and its items stringified.
        m = Metadata(tag_tuple=('a', 0))
        m['tag_set'] = {'c', 'd'}
        m['tag_dict'] = {'e': 1, 'f': 2}
        m['tag_str'] = 'gh'
        self.assertIn('0', m.getraw('tag_tuple'))
        self.assertIn('c', m.getraw('tag_set'))
        self.assertIn('e', m.getraw('tag_dict'))
        # A plain string is stored as a single value, not split into chars.
        self.assertIn('gh', m.getraw('tag_str'))
class Cluster(QtCore.QObject, Item):
    """A group of files believed to belong to the same release.

    Clusters are shown in the UI, can be looked up against MusicBrainz, and
    carry an aggregate Metadata (album/albumartist/totaltracks/length).
    Note: this chunk is Python-2-era code (xrange, u"" literals).
    """

    # Weights for different elements when comparing a cluster to a release
    comparison_weights = {
        'album': 17,
        'albumartist': 6,
        'totaltracks': 5,
        'releasecountry': 2,
        'format': 2,
    }

    def __init__(self, name, artist="", special=False, related_album=None,
                 hide_if_empty=False):
        QtCore.QObject.__init__(self)
        self.item = None  # UI tree item; set externally before add/remove calls
        self.metadata = Metadata()
        self.metadata['album'] = name
        self.metadata['albumartist'] = artist
        self.metadata['totaltracks'] = 0
        # "special" clusters (e.g. Unmatched Files) cannot be removed.
        self.special = special
        self.hide_if_empty = hide_if_empty
        self.related_album = related_album
        self.files = []
        self.lookup_task = None  # pending web-service task, if any

    def __repr__(self):
        return '<Cluster %r>' % self.metadata['album']

    def __len__(self):
        return len(self.files)

    def add_files(self, files):
        """Move several files into this cluster and update aggregates."""
        for file in files:
            self.metadata.length += file.metadata.length
            file._move(self)
            file.update(signal=False)
        self.files.extend(files)
        self.metadata['totaltracks'] = len(self.files)
        self.item.add_files(files)

    def add_file(self, file):
        """Move a single file into this cluster and update aggregates."""
        self.metadata.length += file.metadata.length
        self.files.append(file)
        self.metadata['totaltracks'] = len(self.files)
        file._move(self)
        file.update(signal=False)
        self.item.add_file(file)

    def remove_file(self, file):
        """Remove a file; a non-special cluster left empty removes itself."""
        self.metadata.length -= file.metadata.length
        self.files.remove(file)
        self.metadata['totaltracks'] = len(self.files)
        self.item.remove_file(file)
        if not self.special and self.get_num_files() == 0:
            self.tagger.remove_cluster(self)

    def update(self):
        if self.item:
            self.item.update()

    def get_num_files(self):
        return len(self.files)

    def iterfiles(self, save=False):
        # `save` is accepted for interface compatibility; unused here.
        for file in self.files:
            yield file

    def can_save(self):
        """Return if this object can be saved."""
        if self.files:
            return True
        else:
            return False

    def can_remove(self):
        """Return if this object can be removed."""
        return not self.special

    def can_edit_tags(self):
        """Return if this object supports tag editing."""
        return True

    def can_analyze(self):
        """Return if this object can be fingerprinted."""
        return any([_file.can_analyze() for _file in self.files])

    def can_autotag(self):
        return True

    def can_refresh(self):
        return False

    def can_browser_lookup(self):
        return not self.special

    def can_view_info(self):
        if self.files:
            return True
        else:
            return False

    def is_album_like(self):
        return True

    def column(self, column):
        """Return the display text for a UI column of this cluster."""
        if column == 'title':
            return '%s (%d)' % (self.metadata['album'], len(self.files))
        elif (column == '~length' and self.special) or column == 'album':
            return ''
        elif column == '~length':
            return format_time(self.metadata.length)
        elif column == 'artist':
            return self.metadata['albumartist']
        return self.metadata[column]

    def _lookup_finished(self, document, http, error):
        """Handle the release-search response for lookup_metadata()."""
        self.lookup_task = None

        try:
            releases = document.metadata[0].release_list[0].release
        except (AttributeError, IndexError):
            releases = None

        mparms = {'album': self.metadata['album']}

        # no matches
        if not releases:
            self.tagger.window.set_statusbar_message(
                N_("No matching releases for cluster %(album)s"),
                mparms, timeout=3000)
            return

        # multiple matches -- calculate similarities to each of them
        match = sorted((self.metadata.compare_to_release(
            release, Cluster.comparison_weights) for release in releases),
            reverse=True, key=itemgetter(0))[0]

        if match[0] < config.setting['cluster_lookup_threshold']:
            self.tagger.window.set_statusbar_message(
                N_("No matching releases for cluster %(album)s"),
                mparms, timeout=3000)
            return
        self.tagger.window.set_statusbar_message(
            N_("Cluster %(album)s identified!"),
            mparms, timeout=3000)
        self.tagger.move_files_to_album(self.files, match[1].id)

    def lookup_metadata(self):
        """Try to identify the cluster using the existing metadata."""
        if self.lookup_task:
            return  # a lookup is already running
        self.tagger.window.set_statusbar_message(
            N_("Looking up the metadata for cluster %(album)s..."),
            {'album': self.metadata['album']})
        self.lookup_task = self.tagger.xmlws.find_releases(
            self._lookup_finished,
            artist=self.metadata['albumartist'],
            release=self.metadata['album'],
            tracks=str(len(self.files)),
            limit=25)

    def clear_lookup_task(self):
        """Cancel a pending lookup, if any."""
        if self.lookup_task:
            self.tagger.xmlws.remove_task(self.lookup_task)
            self.lookup_task = None

    @staticmethod
    def cluster(files, threshold):
        """Group *files* into (album_name, artist_name, files) clusters.

        Builds artist and album token dictionaries, clusters each with the
        given similarity *threshold*, then assigns every track to an album
        cluster and picks the most frequent artist cluster as its artist.
        Yields one tuple per detected album.
        """
        artistDict = ClusterDict()
        albumDict = ClusterDict()
        tracks = []
        for file in files:
            artist = file.metadata["albumartist"] or file.metadata["artist"]
            album = file.metadata["album"]
            # Improve clustering from directory structure if no existing tags
            # Only used for grouping and to provide cluster title / artist - not added to file tags.
            filename = file.filename
            if config.setting[
                    "windows_compatibility"] or sys.platform == "win32":
                filename = ntpath.splitdrive(filename)[1]
            album, artist = album_artist_from_path(filename, album, artist)
            # For each track, record the index of the artist and album within the clusters
            tracks.append((artistDict.add(artist), albumDict.add(album)))

        artist_cluster_engine = ClusterEngine(artistDict)
        artist_cluster_engine.cluster(threshold)

        album_cluster_engine = ClusterEngine(albumDict)
        album_cluster_engine.cluster(threshold)

        # Arrange tracks into albums
        albums = {}
        for i in xrange(len(tracks)):
            cluster = album_cluster_engine.getClusterFromId(tracks[i][1])
            if cluster is not None:
                albums.setdefault(cluster, []).append(i)

        # Now determine the most prominent names in the cluster and build the
        # final cluster list
        for album_id, album in albums.items():
            album_name = album_cluster_engine.getClusterTitle(album_id)

            # Majority vote over the artist clusters of the album's tracks.
            artist_max = 0
            artist_id = None
            artist_hist = {}
            for track_id in album:
                cluster = artist_cluster_engine.getClusterFromId(
                    tracks[track_id][0])
                if cluster is not None:
                    cnt = artist_hist.get(cluster, 0) + 1
                    if cnt > artist_max:
                        artist_max = cnt
                        artist_id = cluster
                    artist_hist[cluster] = cnt

            if artist_id is None:
                artist_name = u"Various Artists"
            else:
                artist_name = artist_cluster_engine.getClusterTitle(artist_id)

            yield album_name, artist_name, (files[i] for i in album)
def test_compare_lengths(self):
    """A 60-unit length difference yields a similarity of ~0.998."""
    shorter = Metadata()
    shorter.length = 300
    longer = Metadata()
    longer.length = 360
    self.assertAlmostEqual(longer.compare(shorter), 0.998)
def test_cmd_copymerge_removedupes(self):
    """$copymerge drops values the target already contains."""
    ctx = Metadata()
    target_values = ["tag1", "tag2"]
    source_values = ["tag2", "tag3"]
    ctx["target"] = target_values
    ctx["source"] = source_values
    # "tag2" appears in both, so it must occur only once in the result.
    self._eval_and_check_copymerge(ctx, ["tag1", "tag2", "tag3"])
def test_compare_tracknumber_difference(self):
    """Metadata differing only in tracknumber compares as 0."""
    first = Metadata()
    second = Metadata()
    first["tracknumber"] = "1"
    second["tracknumber"] = "2"
    self.assertEqual(first.compare(second), 0)
def test_cmd_copymerge_notarget(self):
    """With no target tag present, $copymerge simply copies the source."""
    expected = ["tag1", "tag2"]
    ctx = Metadata()
    ctx["source"] = expected
    self._eval_and_check_copymerge(ctx, expected)
def _load(self, filename):
    """Read an ID3-tagged file and translate its frames into a Metadata.

    Upgrades legacy ID3v2.3 frames to their 2.4 equivalents first, then
    dispatches on each frame's FrameID: text/comment frames, people lists
    (TMCL/TIPL), free-text TXXX, lyrics (USLT), the MusicBrainz UFID,
    regex-parsed frames, cover art (APIC) and ratings (POPM).
    Note: Python-2-era code (unicode(), iteritems()).
    """
    log.debug("Loading file %r", filename)
    file = self._File(encode_filename(filename), ID3=compatid3.CompatID3)
    tags = file.tags or {}
    # upgrade custom 2.3 frames to 2.4
    for old, new in self.__upgrade.items():
        if old in tags and new not in tags:
            f = tags.pop(old)
            tags.add(getattr(id3, new)(encoding=f.encoding, text=f.text))
    metadata = Metadata()
    for frame in tags.values():
        frameid = frame.FrameID
        if frameid in self.__translate:
            name = self.__translate[frameid]
            if frameid.startswith('T'):
                # Text frames may hold multiple values.
                for text in frame.text:
                    if text:
                        metadata.add(name, unicode(text))
            elif frameid == 'COMM':
                # Comments are keyed by their description.
                for text in frame.text:
                    if text:
                        metadata.add('%s:%s' % (name, frame.desc),
                                     unicode(text))
            else:
                metadata.add(name, unicode(frame))
        elif frameid == "TMCL":
            # Musician credits list: (role, person) pairs.
            for role, name in frame.people:
                if role or name:
                    metadata.add('performer:%s' % role, name)
        elif frameid == "TIPL":
            # If file is ID3v2.3, TIPL tag could contain TMCL
            # so we will test for TMCL values and add to TIPL if not TMCL
            for role, name in frame.people:
                if role in self._tipl_roles and name:
                    metadata.add(self._tipl_roles[role], name)
                else:
                    metadata.add('performer:%s' % role, name)
        elif frameid == 'TXXX':
            name = frame.desc
            if name in self.__translate_freetext:
                name = self.__translate_freetext[name]
            elif ((name in self.__rtranslate) !=
                  (name in self.__rtranslate_freetext)):
                # If the desc of a TXXX frame conflicts with the name of a
                # Picard tag, load it into ~id3:TXXX:desc rather than desc.
                #
                # This basically performs an XOR, making sure that 'name'
                # is in __rtranslate or __rtranslate_freetext, but not
                # both. (Being in both implies we support reading it both
                # ways.) Currently, the only tag in both is license.
                name = '~id3:TXXX:' + name
            for text in frame.text:
                metadata.add(name, unicode(text))
        elif frameid == 'USLT':
            # Unsynchronised lyrics, optionally keyed by description.
            name = 'lyrics'
            if frame.desc:
                name += ':%s' % frame.desc
            metadata.add(name, unicode(frame.text))
        elif frameid == 'UFID' and frame.owner == 'http://musicbrainz.org':
            # MusicBrainz recording ID stored as a unique file identifier.
            metadata['musicbrainz_recordingid'] = frame.data.decode(
                'ascii', 'ignore')
        elif frameid in self.__tag_re_parse.keys():
            # Frames whose text encodes several tags, parsed via named groups.
            m = self.__tag_re_parse[frameid].search(frame.text[0])
            if m:
                for name, value in m.groupdict().iteritems():
                    if value is not None:
                        metadata[name] = value
            else:
                log.error("Invalid %s value '%s' dropped in %r", frameid,
                          frame.text[0], filename)
        elif frameid == 'APIC':
            # Embedded cover art.
            try:
                coverartimage = TagCoverArtImage(
                    file=filename,
                    tag=frameid,
                    types=types_from_id3(frame.type),
                    comment=frame.desc,
                    support_types=True,
                    data=frame.data,
                )
            except CoverArtImageError as e:
                log.error('Cannot load image from %r: %s' % (filename, e))
            else:
                metadata.append_image(coverartimage)
        elif frameid == 'POPM':
            # Rating in ID3 ranges from 0 to 255, normalize this to the range 0 to 5
            if frame.email == config.setting['rating_user_email']:
                rating = unicode(
                    int(
                        round(frame.rating / 255.0 *
                              (config.setting['rating_steps'] - 1))))
                metadata.add('~rating', rating)
    if 'date' in metadata:
        # Keep only a cleanly formatted date value.
        sanitized = sanitize_date(metadata.getall('date')[0])
        if sanitized:
            metadata['date'] = sanitized
    self._info(metadata, file)
    return metadata
def test_cmd_inmulti(self):
    """$inmulti matches only complete values of a multi-value tag, unlike
    $in's substring search; a third argument overrides the separator."""
    context = Metadata()
    check = self.assertScriptResultEquals

    # Single-value string: %foo% is one value that contains "; " literally.
    context["foo"] = "First:A; Second:B; Third:C"
    # $in substring behaviour, for comparison
    check("$in(%foo%,Second:B)", "1", context)
    check("$in(%foo%,irst:A; Second:B; Thi)", "1", context)
    check("$in(%foo%,First:A; Second:B; Third:C)", "1", context)
    # $inmulti: only the full single value matches
    check("$inmulti(%foo%,Second:B)", "", context)
    check("$inmulti(%foo%,irst:A; Second:B; Thi)", "", context)
    check("$inmulti(%foo%,First:A; Second:B; Third:C)", "1", context)
    # Passing the default "; " separator explicitly changes nothing
    check("$inmulti(%foo%,Second:B,; )", "", context)
    check("$inmulti(%foo%,irst:A; Second:B; Thi,; )", "", context)
    check("$inmulti(%foo%,First:A; Second:B; Third:C,; )", "1", context)
    # A ":" separator splits the value differently
    check("$inmulti(%foo%,First:A,:)", "", context)
    check("$inmulti(%foo%,Second:B,:)", "", context)
    check("$inmulti(%foo%,Third:C,:)", "", context)
    check("$inmulti(%foo%,First,:)", "1", context)
    check("$inmulti(%foo%,A; Second,:)", "1", context)
    check("$inmulti(%foo%,B; Third,:)", "1", context)
    check("$inmulti(%foo%,C,:)", "1", context)

    # Multi-value tag: three distinct stored values.
    context["foo"] = ["First:A", "Second:B", "Third:C"]
    # $in substring behaviour, for comparison
    check("$in(%foo%,Second:B)", "1", context)
    check("$in(%foo%,irst:A; Second:B; Thi)", "1", context)
    check("$in(%foo%,First:A; Second:B; Third:C)", "1", context)
    # $inmulti: each stored value is tested on its own
    check("$inmulti(%foo%,Second:B)", "1", context)
    check("$inmulti(%foo%,irst:A; Second:B; Thi)", "", context)
    check("$inmulti(%foo%,First:A; Second:B; Third:C)", "", context)
    # Explicit default separator again mirrors the base results
    check("$inmulti(%foo%,Second:B,; )", "1", context)
    check("$inmulti(%foo%,irst:A; Second:B; Thi,; )", "", context)
    check("$inmulti(%foo%,First:A; Second:B; Third:C,; )", "", context)
    # ":" separator re-splits the joined value
    check("$inmulti(%foo%,First:A,:)", "", context)
    check("$inmulti(%foo%,Second:B,:)", "", context)
    check("$inmulti(%foo%,Third:C,:)", "", context)
    check("$inmulti(%foo%,First,:)", "1", context)
    check("$inmulti(%foo%,A; Second,:)", "1", context)
    check("$inmulti(%foo%,B; Third,:)", "1", context)
    check("$inmulti(%foo%,C,:)", "1", context)
def itunes_grouping_metadata(self):
    """Return a Metadata object carrying the fixed grouping/work sample tags."""
    sample_tags = {'grouping': 'The Grouping', 'work': 'The Work'}
    m = Metadata()
    for tag, value in sample_tags.items():
        m[tag] = value
    return m
def test_cmd_get(self):
    """$get returns a single value as-is and joins multi-values with '; '."""
    ctx = Metadata()
    check = self.assertScriptResultEquals
    ctx["test"] = "aaa"
    check("$get(test)", "aaa", ctx)
    ctx["test2"] = ["multi", "valued"]
    check("$get(test2)", "multi; valued", ctx)
def _file_save_image(filename, image):
    """Attach *image* to the file at *filename* and persist it through its
    format handler."""
    tagfile = picard.formats.open_(filename)
    meta = Metadata()
    meta.append_image(image)
    tagfile._save(filename, meta)
class Album(DataObject, Item):
    """A MusicBrainz release being loaded, matched and displayed by the tagger.

    Owns the album-level Metadata, the list of Track objects, and a special
    "Unmatched Files" cluster for files not (yet) matched to a track.
    Loading is asynchronous: load() issues a web-service request,
    _release_request_finished() receives the response, and _parse_release()
    plus _finalize_loading() build the metadata and track list.
    """

    # Qt signal for release-group load completion.
    # NOTE(review): no emit() is visible in this chunk — confirm where it fires.
    release_group_loaded = QtCore.pyqtSignal()

    def __init__(self, album_id, discid=None):
        """Create an (unloaded) album for *album_id*; optionally seed a disc id."""
        DataObject.__init__(self, album_id)
        self.metadata = Metadata()          # current album metadata shown to the user
        self.orig_metadata = Metadata()     # snapshot of metadata as loaded
        self.tracks = []
        self.loaded = False
        self.load_task = None               # pending web-service task, if any
        self.release_group = None
        self._files = 0                     # number of files matched to tracks
        self._requests = 0                  # outstanding async requests refcount
        self._tracks_loaded = False
        self._discids = set()
        if discid:
            self._discids.add(discid)
        self._after_load_callbacks = []
        self.unmatched_files = Cluster(_("Unmatched Files"), special=True, related_album=self, hide_if_empty=True)
        self.errors = []
        self.status = None                  # status text shown in the 'title' column while loading/failed
        self._album_artists = []
        self.update_metadata_images_enabled = True

    def __repr__(self):
        return '<Album %s %r>' % (self.id, self.metadata["album"])

    def iterfiles(self, save=False):
        """Yield all files of this album; skip unmatched files when *save* is true."""
        for track in self.tracks:
            for file in track.iterfiles():
                yield file
        if not save:
            for file in self.unmatched_files.iterfiles():
                yield file

    def enable_update_metadata_images(self, enabled):
        # Toggle to batch image updates; see update_metadata_images().
        self.update_metadata_images_enabled = enabled

    def append_album_artist(self, album_artist_id):
        """Append artist id to the list of album artists
        and return an AlbumArtist instance"""
        album_artist = AlbumArtist(album_artist_id)
        self._album_artists.append(album_artist)
        return album_artist

    def add_discid(self, discid):
        """Register a disc id and propagate it to tracks whose medium contains it."""
        if not discid:
            return
        self._discids.add(discid)
        for track in self.tracks:
            medium_discids = track.metadata.getall('~musicbrainz_discids')
            track_discids = list(self._discids.intersection(medium_discids))
            if track_discids:
                track.metadata['musicbrainz_discid'] = track_discids
                track.update()
                for file in track.linked_files:
                    file.metadata['musicbrainz_discid'] = track_discids
                    file.update()

    def get_album_artists(self):
        """Returns the list of album artists (as AlbumArtist objects)"""
        return self._album_artists

    def _parse_release(self, release_node):
        """Parse a release JSON node into self._new_metadata.

        Returns False when the release id redirected to an already-loaded
        album (this album is then removed), True otherwise.
        """
        log.debug("Loading release %r ...", self.id)
        self._tracks_loaded = False
        release_id = release_node['id']
        if release_id != self.id:
            # The requested MBID was merged/redirected to another release.
            self.tagger.mbid_redirects[self.id] = release_id
            album = self.tagger.albums.get(release_id)
            if album:
                log.debug("Release %r already loaded", release_id)
                album.match_files(self.unmatched_files.files)
                album.update()
                self.tagger.remove_album(self)
                return False
            else:
                # Re-key this album under the canonical release id.
                del self.tagger.albums[self.id]
                self.tagger.albums[release_id] = self
                self.id = release_id

        # Make the release artist nodes available, since they may
        # contain supplementary data (aliases, tags, genres, ratings)
        # which aren't present in the release group, track, or
        # recording artist nodes. We can copy them into those places
        # wherever the IDs match, so that the data is shared and
        # available for use in mbjson.py and external plugins.
        self._release_artist_nodes = _create_artist_node_dict(release_node)

        # Get release metadata
        m = self._new_metadata
        m.length = 0

        rg_node = release_node['release-group']
        rg = self.release_group = self.tagger.get_release_group_by_id(rg_node['id'])
        rg.loaded_albums.add(self.id)
        rg.refcount += 1

        _copy_artist_nodes(self._release_artist_nodes, rg_node)
        release_group_to_metadata(rg_node, rg.metadata, rg)
        m.copy(rg.metadata)
        release_to_metadata(release_node, m, album=self)

        # Custom VA name
        if m['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID:
            m['albumartistsort'] = m['albumartist'] = config.setting['va_name']

        # Convert Unicode punctuation
        if config.setting['convert_punctuation']:
            m.apply_func(asciipunct)

        m['totaldiscs'] = len(release_node['media'])

        # Add album to collections
        add_release_to_user_collections(release_node)

        # Run album metadata plugins
        try:
            run_album_metadata_processors(self, m, release_node)
        except BaseException:
            # Plugins are third-party code: record the error but keep loading.
            self.error_append(traceback.format_exc())

        self._release_node = release_node
        return True

    def _release_request_finished(self, document, http, error):
        """Web-service callback: parse the release response or record the error."""
        if self.load_task is None:
            # Loading was cancelled (stop_loading); ignore the late response.
            return
        self.load_task = None
        parsed = False
        try:
            if error:
                self.error_append(http.errorString())
                # Fix for broken NAT releases
                if error == QtNetwork.QNetworkReply.ContentNotFoundError:
                    nats = False
                    nat_name = config.setting["nat_name"]
                    files = list(self.unmatched_files.files)
                    for file in files:
                        recordingid = file.metadata["musicbrainz_recordingid"]
                        if mbid_validate(recordingid) and file.metadata["album"] == nat_name:
                            nats = True
                            self.tagger.move_file_to_nat(file, recordingid)
                            self.tagger.nats.update()
                    if nats and not self.get_num_unmatched_files():
                        # Every file was a NAT track: the 404 is expected, not an error.
                        self.tagger.remove_album(self)
                        error = False
            else:
                try:
                    parsed = self._parse_release(document)
                except Exception:
                    error = True
                    self.error_append(traceback.format_exc())
        finally:
            self._requests -= 1
            if parsed or error:
                self._finalize_loading(error)
        # does http need to be set to None to free the memory used by the network response?
        # http://qt-project.org/doc/qt-5/qnetworkaccessmanager.html says:
        # After the request has finished, it is the responsibility of the user
        # to delete the QNetworkReply object at an appropriate time.
        # Do not directly delete it inside the slot connected to finished().
        # You can use the deleteLater() function.

    def error_append(self, msg):
        """Log *msg* and keep it for display in the album's error list."""
        log.error(msg)
        self.errors.append(msg)

    def _finalize_loading(self, error):
        """Finish loading once all requests are done: build tracks, run scripts.

        Safe to call multiple times; does nothing while requests are pending.
        On *error*, discards the partially built state and shows a status.
        """
        if error:
            self.metadata.clear()
            self.status = _("[could not load album %s]") % self.id
            del self._new_metadata
            del self._new_tracks
            self.update()
            return

        if self._requests > 0:
            # More responses still outbound; the last one re-enters here.
            return

        if not self._tracks_loaded:
            artists = set()
            all_media = []
            absolutetracknumber = 0  # track counter across all media
            va = self._new_metadata['musicbrainz_albumartistid'] == VARIOUS_ARTISTS_ID

            djmix_ars = {}
            if hasattr(self._new_metadata, "_djmix_ars"):
                # Optional attribute, presumably set by a plugin — maps
                # disc number to DJ-mixer artists. TODO confirm producer.
                djmix_ars = self._new_metadata._djmix_ars

            for medium_node in self._release_node['media']:
                mm = Metadata()
                mm.copy(self._new_metadata)
                medium_to_metadata(medium_node, mm)
                discpregap = False
                format = medium_node.get('format')  # NOTE: shadows builtin 'format'
                if format:
                    all_media.append(format)

                for dj in djmix_ars.get(mm["discnumber"], []):
                    mm.add("djmixer", dj)

                if 'discs' in medium_node:
                    discids = [disc.get('id') for disc in medium_node['discs']]
                    mm['~musicbrainz_discids'] = discids
                    mm['musicbrainz_discid'] = list(self._discids.intersection(discids))

                if "pregap" in medium_node:
                    discpregap = True
                    absolutetracknumber += 1
                    track = self._finalize_loading_track(medium_node['pregap'], mm, artists, va, absolutetracknumber, discpregap)
                    track.metadata['~pregap'] = "1"

                track_count = medium_node['track-count']
                if track_count:
                    tracklist_node = medium_node['tracks']
                    for track_node in tracklist_node:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)

                if "data-tracks" in medium_node:
                    for track_node in medium_node['data-tracks']:
                        absolutetracknumber += 1
                        track = self._finalize_loading_track(track_node, mm, artists, va, absolutetracknumber, discpregap)
                        track.metadata['~datatrack'] = "1"

            totalalbumtracks = absolutetracknumber
            self._new_metadata['~totalalbumtracks'] = totalalbumtracks
            # Generate a list of unique media, but keep order of first appearance
            self._new_metadata['media'] = " / ".join(list(OrderedDict.fromkeys(all_media)))

            for track in self._new_tracks:
                track.metadata["~totalalbumtracks"] = totalalbumtracks
                if len(artists) > 1:
                    track.metadata["~multiartist"] = "1"
            del self._release_node
            del self._release_artist_nodes
            self._tracks_loaded = True

        if not self._requests:
            # Suspend image updates while scripts rewrite track metadata.
            self.enable_update_metadata_images(False)
            # Prepare parser for user's script
            for s_name, s_text in enabled_tagger_scripts_texts():
                parser = ScriptParser()
                for track in self._new_tracks:
                    # Run tagger script for each track
                    try:
                        parser.eval(s_text, track.metadata)
                    except ScriptError:
                        log.exception("Failed to run tagger script %s on track", s_name)
                    track.metadata.strip_whitespace()
                # Run tagger script for the album itself
                try:
                    parser.eval(s_text, self._new_metadata)
                except ScriptError:
                    log.exception("Failed to run tagger script %s on album", s_name)
                self._new_metadata.strip_whitespace()

            # Move previously matched files back to the unmatched cluster so
            # they can be re-matched against the freshly built track list.
            for track in self.tracks:
                track.metadata_images_changed.connect(self.update_metadata_images)
                for file in list(track.linked_files):
                    file.move(self.unmatched_files)
            self.metadata = self._new_metadata
            self.orig_metadata.copy(self.metadata)
            self.tracks = self._new_tracks
            del self._new_metadata
            del self._new_tracks
            self.loaded = True
            self.status = None
            self.match_files(self.unmatched_files.files)
            self.enable_update_metadata_images(True)
            self.update()
            self.tagger.window.set_statusbar_message(
                N_('Album %(id)s loaded: %(artist)s - %(album)s'),
                {
                    'id': self.id,
                    'artist': self.metadata['albumartist'],
                    'album': self.metadata['album']
                },
                timeout=3000)
            for func in self._after_load_callbacks:
                func()
            self._after_load_callbacks = []
            if self.item.isSelected():
                self.tagger.window.refresh_metadatabox()

    def _finalize_loading_track(self, track_node, metadata, artists, va, absolutetracknumber, discpregap):
        """Create a Track from *track_node*, fill its metadata and return it.

        Mutates *artists* (adds the track artist) and accumulates the track
        length into self._new_metadata.
        """
        # As noted in `_parse_release` above, the release artist nodes
        # may contain supplementary data that isn't present in track
        # artist nodes. Similarly, the track artists may contain
        # information which the recording artists don't. Copy this
        # information across to wherever the artist IDs match.
        _copy_artist_nodes(self._release_artist_nodes, track_node)
        _copy_artist_nodes(self._release_artist_nodes, track_node['recording'])
        _copy_artist_nodes(_create_artist_node_dict(track_node), track_node['recording'])

        track = Track(track_node['recording']['id'], self)
        self._new_tracks.append(track)

        # Get track metadata
        tm = track.metadata
        tm.copy(metadata)
        track_to_metadata(track_node, track)
        tm["~absolutetracknumber"] = absolutetracknumber
        track.orig_metadata.copy(tm)
        track._customize_metadata()

        self._new_metadata.length += tm.length
        artists.add(tm["artist"])
        if va:
            tm["compilation"] = "1"
        else:
            del tm["compilation"]
        if discpregap:
            tm["~discpregap"] = "1"

        # Run track metadata plugins
        try:
            run_track_metadata_processors(self, tm, track_node, self._release_node)
        except BaseException:
            self.error_append(traceback.format_exc())

        return track

    def load(self, priority=False, refresh=False):
        """Start (re)loading this album from the MusicBrainz web service."""
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message(
            N_('Loading album %(id)s ...'), {'id': self.id})
        self.loaded = False
        self.status = _("[loading album information]")
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.genres.clear()
        self.metadata.clear()
        self.genres.clear()
        self.update()
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        self.errors = []
        require_authentication = False
        # Base set of sub-resources requested from the /release endpoint.
        inc = [
            'release-groups', 'media', 'discids', 'recordings',
            'artist-credits', 'artists', 'aliases', 'labels', 'isrcs',
            'collections'
        ]
        if self.tagger.webservice.oauth_manager.is_authorized():
            require_authentication = True
            inc += ['user-collections']
        if config.setting['release_ars'] or config.setting['track_ars']:
            inc += [
                'artist-rels', 'release-rels', 'url-rels', 'recording-rels',
                'work-rels'
            ]
            if config.setting['track_ars']:
                inc += ['recording-level-rels', 'work-level-rels']
        # Genre-related inc params may themselves require a login.
        require_authentication = self.set_genre_inc_params(inc) or require_authentication
        if config.setting['enable_ratings']:
            require_authentication = True
            inc += ['user-ratings']
        self.load_task = self.tagger.mb_api.get_release_by_id(
            self.id,
            self._release_request_finished,
            inc=inc,
            mblogin=require_authentication,
            priority=priority,
            refresh=refresh)

    def run_when_loaded(self, func):
        """Call *func* now if loaded, otherwise after loading completes."""
        if self.loaded:
            func()
        else:
            self._after_load_callbacks.append(func)

    def stop_loading(self):
        """Cancel the pending web-service request, if any."""
        if self.load_task:
            self.tagger.webservice.remove_task(self.load_task)
            self.load_task = None

    def update(self, update_tracks=True):
        # Refresh the UI item for this album (and optionally its tracks).
        if self.item:
            self.item.update(update_tracks)

    def _add_file(self, track, file):
        """Bookkeeping when *file* gets linked to one of this album's tracks."""
        self._files += 1
        self.update(update_tracks=False)
        add_metadata_images(self, [file])
        file.metadata_images_changed.connect(self.update_metadata_images)

    def _remove_file(self, track, file):
        """Bookkeeping when *file* gets unlinked from one of this album's tracks."""
        self._files -= 1
        self.update(update_tracks=False)
        file.metadata_images_changed.disconnect(self.update_metadata_images)
        remove_metadata_images(self, [file])

    def _match_files(self, files, recordingid=None, threshold=0):
        """Match files to tracks on this album, based on metadata similarity or recordingid.

        Generator yielding (file, target) pairs, where target is a Track or
        the unmatched-files cluster when similarity is below *threshold*.
        """
        # Lazily built lookup: (recordingid[, tracknumber[, discnumber]]) -> Track.
        tracks_cache = defaultdict(lambda: None)

        def build_tracks_cache():
            for track in self.tracks:
                tm_recordingid = track.orig_metadata['musicbrainz_recordingid']
                tm_tracknumber = track.orig_metadata['tracknumber']
                tm_discnumber = track.orig_metadata['discnumber']
                for tup in (
                        (tm_recordingid, tm_tracknumber, tm_discnumber),
                        (tm_recordingid, tm_tracknumber),
                        (tm_recordingid, )):
                    tracks_cache[tup] = track

        SimMatchAlbum = namedtuple('SimMatchAlbum', 'similarity track')

        for file in list(files):
            if file.state == File.REMOVED:
                continue
            # if we have a recordingid to match against, use that in priority
            recid = recordingid or file.metadata['musicbrainz_recordingid']
            if recid and mbid_validate(recid):
                if not tracks_cache:
                    build_tracks_cache()
                tracknumber = file.metadata['tracknumber']
                discnumber = file.metadata['discnumber']
                # Most-specific key first, then fall back to looser keys.
                track = (tracks_cache[(recid, tracknumber, discnumber)]
                         or tracks_cache[(recid, tracknumber)]
                         or tracks_cache[(recid, )])
                if track:
                    yield (file, track)
                    continue

            # try to match by similarity
            def candidates():
                for track in self.tracks:
                    yield SimMatchAlbum(
                        similarity=track.metadata.compare(file.orig_metadata),
                        track=track)

            # Keep the UI responsive during the O(files * tracks) comparison.
            QtCore.QCoreApplication.processEvents()
            no_match = SimMatchAlbum(similarity=-1, track=self.unmatched_files)
            best_match = find_best_match(candidates, no_match)

            if best_match.similarity < threshold:
                yield (file, no_match.track)
            else:
                yield (file, best_match.result.track)

    def match_files(self, files, recordingid=None):
        """Match and move files to tracks on this album, based on metadata
        similarity or recordingid."""
        moves = self._match_files(
            files,
            recordingid=recordingid,
            threshold=config.setting['track_matching_threshold'])
        for file, target in moves:
            file.move(target)

    def can_save(self):
        return self._files > 0

    def can_remove(self):
        return True

    def can_edit_tags(self):
        return True

    def can_analyze(self):
        return False

    def can_autotag(self):
        return False

    def can_refresh(self):
        return True

    def can_view_info(self):
        return (self.loaded and (self.metadata.images or self.orig_metadata.images)) or self.errors

    def is_album_like(self):
        return True

    def get_num_matched_tracks(self):
        """Number of tracks that have at least one linked file."""
        num = 0
        for track in self.tracks:
            if track.is_linked():
                num += 1
        return num

    def get_num_unmatched_files(self):
        return len(self.unmatched_files.files)

    def get_num_total_files(self):
        return self._files + len(self.unmatched_files.files)

    def is_complete(self):
        """True when every track is complete and no files are unmatched."""
        if not self.tracks:
            return False
        for track in self.tracks:
            if not track.is_complete():
                return False
        if self.get_num_unmatched_files():
            return False
        else:
            return True

    def is_modified(self):
        """True when any linked file has unsaved changes."""
        if self.tracks:
            for track in self.tracks:
                for file in track.linked_files:
                    if not file.is_saved():
                        return True
        return False

    def get_num_unsaved_files(self):
        count = 0
        for track in self.tracks:
            for file in track.linked_files:
                if not file.is_saved():
                    count += 1
        return count

    def column(self, column):
        """Return the display text for the given UI *column*."""
        if column == 'title':
            if self.status is not None:
                title = self.status
            else:
                title = self.metadata['album']
            if self.tracks:
                linked_tracks = 0
                for track in self.tracks:
                    if track.is_linked():
                        linked_tracks += 1
                # \u200E (LTR mark) keeps the counts readable in RTL locales.
                text = '%s\u200E (%d/%d' % (title, linked_tracks, len(self.tracks))
                unmatched = self.get_num_unmatched_files()
                if unmatched:
                    text += '; %d?' % (unmatched, )
                unsaved = self.get_num_unsaved_files()
                if unsaved:
                    text += '; %d*' % (unsaved, )
                # CoverArt.set_metadata uses the orig_metadata.images if metadata.images is empty
                # in order to show existing cover art if there's no cover art for a release. So
                # we do the same here in order to show the number of images consistently.
                if self.metadata.images:
                    metadata = self.metadata
                else:
                    metadata = self.orig_metadata
                number_of_images = len(metadata.images)
                if getattr(metadata, 'has_common_images', True):
                    text += ngettext("; %i image", "; %i images",
                                     number_of_images) % number_of_images
                else:
                    text += ngettext("; %i image not in all tracks",
                                     "; %i different images among tracks",
                                     number_of_images) % number_of_images
                return text + ')'
            else:
                return title
        elif column == '~length':
            length = self.metadata.length
            if length:
                return format_time(length)
            else:
                return ''
        elif column == 'artist':
            return self.metadata['albumartist']
        elif column == 'tracknumber':
            return self.metadata['~totalalbumtracks']
        elif column == 'discnumber':
            return self.metadata['totaldiscs']
        else:
            return self.metadata[column]

    def switch_release_version(self, mbid):
        """Switch this album to another release (*mbid*) of the same release group."""
        if mbid == self.id:
            return
        for file in list(self.iterfiles(True)):
            file.move(self.unmatched_files)
        album = self.tagger.albums.get(mbid)
        if album:
            # Target version already loaded: hand our files over and go away.
            album.match_files(self.unmatched_files.files)
            album.update()
            self.tagger.remove_album(self)
        else:
            # Re-key this album under the new id and reload it.
            del self.tagger.albums[self.id]
            self.release_group.loaded_albums.discard(self.id)
            self.id = mbid
            self.tagger.albums[mbid] = self
            self.load(priority=True, refresh=True)

    def update_metadata_images(self):
        # No-op while updates are suspended (see enable_update_metadata_images).
        if not self.update_metadata_images_enabled:
            return
        update_metadata_images(self)
        self.update(False)

    def keep_original_images(self):
        """Restore original cover art on all tracks and unmatched files."""
        self.enable_update_metadata_images(False)
        for track in self.tracks:
            track.keep_original_images()
        for file in list(self.unmatched_files.files):
            file.keep_original_images()
        self.enable_update_metadata_images(True)
        self.update_metadata_images()
def _cover_metadata(self):
    """Build a Metadata fixture holding a mix of typed and untyped cover images."""
    imgdata = self.jpegdata
    # One spec per image; keys are TagCoverArtImage keyword arguments.
    image_specs = [
        {'file': 'a', 'tag': 'a', 'data': imgdata + b'a',
         'support_types': True, 'types': [u'booklet', u'front']},
        {'file': 'b', 'tag': 'b', 'data': imgdata + b'b',
         'support_types': True, 'types': [u'back']},
        {'file': 'c', 'tag': 'c', 'data': imgdata + b'c',
         'support_types': True, 'types': [u'front']},
        {'file': 'd', 'tag': 'd', 'data': imgdata + b'd'},
        {'file': 'e', 'tag': 'e', 'data': imgdata + b'e', 'is_front': False},
        {'file': 'f', 'tag': 'f', 'data': imgdata + b'f', 'types': [u'front']},
        {'file': 'g', 'tag': 'g', 'data': imgdata + b'g',
         'types': [u'back'], 'is_front': True},
    ]
    metadata = Metadata()
    for spec in image_specs:
        metadata.append_image(TagCoverArtImage(**spec))
    return metadata
def test_license_single_non_url(self):
    """A non-URL license value must round-trip via a TXXX:LICENSE frame."""
    metadata = Metadata({'license': 'foo'})
    reloaded = save_and_load_metadata(self.filename, metadata)
    self.assertEqual(metadata['license'], reloaded['license'])
    raw = load_raw(self.filename)
    self.assertEqual(metadata['license'], raw['TXXX:LICENSE'])
def test_can_open_and_save(self):
    """Saving and reloading empty metadata must not raise."""
    save_and_load_metadata(self.filename, Metadata())
def parse_artists(self, artists):
    """Convert artist nodes into Metadata entries appended to search_results."""
    for artist_node in artists:
        result = Metadata()
        artist_to_metadata(artist_node, result)
        # Preserve the search score alongside the artist metadata.
        result['score'] = artist_node['score']
        self.search_results.append(result)
def test_cmd_set_multi_valued(self):
    """$set flattens a multi-value into a single '; '-joined value."""
    ctx = Metadata()
    ctx["source"] = ["multi", "valued"]
    self.parser.eval("$set(test,%source%)", ctx)
    # list has only a single value
    self.assertEqual(ctx.getall("test"), ["multi; valued"])