def write_advisory(self, lib, opts, args):
    self.config.set_args(opts)
    for item in lib.items(u'advisory:1..'):
        mf = mediafile.MediaFile(item.path)
        if item.advisory != mf.itunesadvisory:
            fakeitem = new_item(item)
            fakeitem.advisory = mf.itunesadvisory
            show_model_changes(item, fakeitem)
            if not opts.pretend:
                mf.itunesadvisory = item.advisory
                mf.save()

def spotify_explicit_track(self, lib, track, item):
    if 'advisory' not in item:
        item['advisory'] = 1
        if show_model_changes(item):
            item.store()
    if item.album and item._cached_album:
        album = item._cached_album
        if 'albumadvisory' not in album:
            album['albumadvisory'] = 1
            if show_model_changes(album):
                album.store()

def find_work(self, item, force):
    """Finds the parent work of a recording and populates the tags
    accordingly.

    The parent work is found recursively, by finding the direct parent
    repeatedly until there are no more links in the chain. We return the
    final, topmost work in the chain. Namely, the tags parentwork,
    parentwork_disambig, mb_parentworkid, parent_composer,
    parent_composer_sort and work_date are populated.
    """
    if not item.mb_workid:
        self._log.info(
            'No work for {}, add one at '
            'https://musicbrainz.org/recording/{}',
            item, item.mb_trackid)
        return

    hasparent = hasattr(item, 'parentwork')
    if force or not hasparent:
        try:
            work_info, work_date = find_parentwork_info(item.mb_workid)
        except musicbrainzngs.musicbrainz.WebServiceError as e:
            self._log.debug("error fetching work: {}", e)
            return
        parent_info = self.get_info(item, work_info)
        if 'parent_composer' in parent_info:
            self._log.debug("Work fetched: {} - {}",
                            parent_info['parentwork'],
                            parent_info['parent_composer'])
        else:
            self._log.debug("Work fetched: {} - no parent composer",
                            parent_info['parentwork'])
    elif hasparent:
        self._log.debug("{}: Work present, skipping", item)
        return

    # apply all non-null values to the item
    for key, value in parent_info.items():
        if value:
            item[key] = value
    if work_date:
        item['work_date'] = work_date

    ui.show_model_changes(item, fields=[
        'parentwork', 'parentwork_disambig', 'mb_parentworkid',
        'parent_composer', 'parent_composer_sort', 'work_date'
    ])

def write_items(lib, query, pretend):
    """Write tag information from the database to the respective files
    in the filesystem.
    """
    items, albums = _do_query(lib, query, False, False)

    for item in items:
        # Item deleted?
        if not os.path.exists(syspath(item.path)):
            log.info(u'missing file: {0}'.format(
                util.displayable_path(item.path)
            ))
            continue

        # Get an Item object reflecting the "clean" (on-disk) state.
        try:
            clean_item = library.Item.from_path(item.path)
        except Exception as exc:
            log.error(u'error reading {0}: {1}'.format(
                displayable_path(item.path), exc
            ))
            continue

        # Check for and display changes.
        changed = ui.show_model_changes(item, clean_item,
                                        library.ITEM_KEYS_WRITABLE,
                                        always=True)
        if changed and not pretend:
            try:
                item.write()
            except library.FileOperationError as exc:
                log.error(exc)

def func(self, lib, opts, args):
    """Command handler for the metasync function.
    """
    pretend = opts.pretend
    source = opts.source
    query = ui.decargs(args)

    sources = {}
    for player in source:
        __import__('beetsplug.metasync', fromlist=[str(player)])
        module = 'beetsplug.metasync.' + player
        if module not in modules.keys():
            log.error(u'Unknown metadata source \'' + player + '\'')
            continue
        classes = inspect.getmembers(modules[module], inspect.isclass)
        for entry in classes:
            if entry[0].lower() == player:
                sources[player] = entry[1]()
            else:
                continue

    for item in lib.items(query):
        for player in sources.values():
            player.get_data(item)

        changed = ui.show_model_changes(item)
        if changed and not pretend:
            item.store()

def write_items(lib, query, pretend):
    """Write tag information from the database to the respective files
    in the filesystem.
    """
    items, albums = _do_query(lib, query, False, False)

    for item in items:
        # Item deleted?
        if not os.path.exists(syspath(item.path)):
            log.info(u'missing file: {0}'.format(
                util.displayable_path(item.path)
            ))
            continue

        # Get an Item object reflecting the "clean" (on-disk) state.
        try:
            clean_item = library.Item.from_path(item.path)
        except Exception as exc:
            log.error(u'error reading {0}: {1}'.format(
                displayable_path(item.path), exc
            ))
            continue

        # Check for and display changes.
        changed = ui.show_model_changes(item, clean_item,
                                        library.ITEM_KEYS_META,
                                        always=True)
        if changed and not pretend:
            try:
                item.write()
            except Exception as exc:
                log.error(u'could not write {0}: {1}'.format(
                    util.displayable_path(item.path), exc
                ))
                continue

def albums(self, lib, query, move, pretend, write):
    """Retrieve and apply info from the autotagger for albums matched by
    query and their items.
    """
    # Process matching albums.
    for album in lib.albums(query):
        # Do we have a valid Beatport album?
        items = self.get_album_tracks(album)
        if not items:
            continue

        # Get the Beatport album information.
        albuminfo = self.beatport_plugin.album_for_id(album.mb_albumid)
        if not albuminfo:
            self._log.info(
                'Release ID {} not found for album {}',
                album.mb_albumid,
                album,
            )
            continue

        beatport_trackid_to_trackinfo = {
            track.track_id: track for track in albuminfo.tracks
        }
        library_trackid_to_item = {
            int(item.mb_trackid): item for item in items
        }
        item_to_trackinfo = {
            item: beatport_trackid_to_trackinfo[track_id]
            for track_id, item in library_trackid_to_item.items()
        }

        self._log.info('applying changes to {}', album)
        with lib.transaction():
            autotag.apply_metadata(albuminfo, item_to_trackinfo)
            changed = False
            # Find any changed item to apply Beatport changes to album.
            any_changed_item = items[0]
            for item in items:
                item_changed = ui.show_model_changes(item)
                changed |= item_changed
                if item_changed:
                    any_changed_item = item
                    apply_item_changes(lib, item, move, pretend, write)

            if pretend or not changed:
                continue

            # Update album structure to reflect an item in it.
            for key in library.Album.item_keys:
                album[key] = any_changed_item[key]
            album.store()

            # Move album art (and any inconsistent items).
            if move and lib.directory in util.ancestry(items[0].path):
                self._log.debug('moving album {}', album)
                album.move()

def modify_items(lib, mods, dels, query, write, move, album, confirm):
    """Modifies matching items according to key=value assignments."""
    # Parse key=value specifications into a dictionary.
    model_cls = library.Album if album else library.Item
    fsets = {}
    for mod in mods:
        key, value = mod.split('=', 1)
        fsets[key] = model_cls._parse(key, value)

    # Get the items to modify.
    items, albums = _do_query(lib, query, album, False)
    objs = albums if album else items

    # Apply changes *temporarily*, preview them, and collect modified
    # objects.
    print_('Modifying %i %ss.' % (len(objs), 'album' if album else 'item'))
    changed = set()
    for obj in objs:
        for field, value in fsets.iteritems():
            obj[field] = value
        for field in dels:
            del obj[field]
        if ui.show_model_changes(obj):
            changed.add(obj)

    # Still something to do?
    if not changed:
        print_('No changes to make.')
        return

    # Confirm action.
    if confirm:
        extra = ' and write tags' if write else ''
        if not ui.input_yn('Really modify%s (Y/n)?' % extra):
            return

    # Apply changes to database.
    with lib.transaction():
        for obj in changed:
            if move:
                cur_path = obj.path
                if lib.directory in ancestry(cur_path):  # In library?
                    log.debug('moving object %s' % cur_path)
                    obj.move()
            obj.store()

    # Apply tags if requested.
    if write:
        if album:
            changed_items = itertools.chain(*(a.items() for a in changed))
        else:
            changed_items = changed
        for item in changed_items:
            try:
                item.write()
            except library.FileOperationError as exc:
                log.error(exc)

def modify_command(self, lib, opts, args):
    self.handle_common_args(opts, args)
    sole_tracks = set(
        item.id
        for item, is_sole_track in self.list_sole_tracks(lib)
        if is_sole_track
    )
    for item in lib.items():
        sole_track = item.id in sole_tracks
        existing_sole_track = item.get("sole_track", False)
        if existing_sole_track != sole_track:
            if not sole_track:
                del item["sole_track"]
            else:
                item["sole_track"] = True
            ui.show_model_changes(item, fields=("sole_track",))
            if not opts.pretend:
                item.store()

def mbsync_albums(lib, query, move, pretend, write):
    """Retrieve and apply info from the autotagger for albums matched by
    query and their items.
    """
    # Process matching albums.
    for a in lib.albums(query):
        if not a.mb_albumid:
            log.info(u'Skipping album {0}: has no mb_albumid'.format(a.id))
            continue

        items = list(a.items())

        # Get the MusicBrainz album information.
        album_info = hooks.album_for_mbid(a.mb_albumid)
        if not album_info:
            log.info(u'Release ID not found: {0}'.format(a.mb_albumid))
            continue

        # Construct a track mapping according to MBIDs. This should work
        # for albums that have missing or extra tracks.
        mapping = {}
        for item in items:
            for track_info in album_info.tracks:
                if item.mb_trackid == track_info.track_id:
                    mapping[item] = track_info
                    break

        # Apply.
        with lib.transaction():
            autotag.apply_metadata(album_info, mapping)
            changed = False
            for item in items:
                item_changed = ui.show_model_changes(item)
                changed |= item_changed
                if item_changed:
                    apply_item_changes(lib, item, move, pretend, write)

            if not changed:
                # No change to any item.
                continue

            if not pretend:
                # Update album structure to reflect an item in it.
                for key in library.Album.item_keys:
                    a[key] = items[0][key]
                a.store()

                # Move album art (and any inconsistent items).
                if move and lib.directory in util.ancestry(items[0].path):
                    log.debug(u'moving album {0}'.format(a.id))
                    a.move()

def show_changes(self, lib, task, match=None):
    if match is None:
        match = task.match
    changes = False

    if task.is_album:
        newmapping = {new_item(item): track_info
                      for item, track_info in match.mapping.items()}
        autotag.apply_metadata(match.info, newmapping)

        # olditems[0].get_album() isn't working, create our own to compare
        olditems = list(match.mapping.keys())
        oldvalues = dict((key, olditems[0][key])
                         for key in self.album_fields)
        oldalbum = library.Album(lib, **oldvalues)

        newitems = list(newmapping.keys())
        values = dict((key, newitems[0][key]) for key in self.album_fields)
        album = library.Album(lib, **values)

        compare_fields = self.get_fields(self.album_fields, oldvalues)
        album_changes = show_model_changes(album, oldalbum, compare_fields)
        if album_changes:
            changes = True

        new_by_info = {track_info: item
                       for item, track_info in newmapping.items()}
        for item, track_info in match.mapping.items():
            newitem = new_by_info[track_info]
            compare_fields = self.get_fields(self.nonalbum_fields, item)
            item_changes = show_model_changes(newitem, item, compare_fields)
            if item_changes:
                changes = True
    else:
        fakeitem = new_item(task.item)
        autotag.apply_item_metadata(fakeitem, match.info)
        compare_fields = self.get_fields(self.all_fields, task.item)
        changes = show_model_changes(fakeitem, task.item, compare_fields)

    return changes

def func(self, lib, opts, args):
    """Command handler for the metasync function.
    """
    pretend = opts.pretend
    query = ui.decargs(args)

    sources = []
    for source in opts.sources:
        sources.extend(source.split(','))
    sources = sources or self.config['source'].as_str_seq()

    meta_source_instances = {}
    items = lib.items(query)

    # Avoid needlessly instantiating meta sources (can be expensive)
    if not items:
        self._log.info(u'No items found matching query')
        return

    # Instantiate the meta sources
    for player in sources:
        try:
            cls = META_SOURCES[player]
        except KeyError:
            self._log.error(u'Unknown metadata source \'{0}\''.format(
                player))
            # Skip unknown sources so a stale or undefined `cls` is never
            # instantiated below.
            continue

        try:
            meta_source_instances[player] = cls(self.config, self._log)
        except (ImportError, ConfigValueError) as e:
            self._log.error(u'Failed to instantiate metadata source '
                            u'\'{0}\': {1}'.format(player, e))

    # Avoid needlessly iterating over items
    if not meta_source_instances:
        self._log.error(u'No valid metadata sources found')
        return

    # Sync the items with all of the meta sources
    for item in items:
        for meta_source in meta_source_instances.values():
            meta_source.sync_from_source(item)

        changed = ui.show_model_changes(item)
        if changed and not pretend:
            item.store()

def edit(lib, opts, args):
    editor = os.environ.get("EDITOR")
    if not editor:
        raise Exception("you must set EDITOR in your environment")
    editor_command = shlex.split(editor)

    items = _do_query(lib, args)
    if len(items) < 1:
        raise Exception("query didn't match any items")

    temp_file = tempfile.NamedTemporaryFile()
    encoding = locale.getpreferredencoding()
    StreamWriter = codecs.getwriter(encoding)
    _write_analysis(items, StreamWriter(temp_file), True)
    temp_file.flush()

    editor_command.append(temp_file.name)
    subprocess.check_call(editor_command)
    temp_file.seek(0)

    all_fields = _get_fields(True)
    new_values = {}
    for line in codecs.iterdecode(temp_file, encoding):
        line = line.strip()
        if line.startswith("#"):
            continue
        parts = re.split(r"\s*:\s*", line, 1)
        if len(parts) != 2:
            raise Exception("can't understand %r" % (line,))
        key, new_value_str = parts
        if key not in all_fields:
            raise Exception("unknown field %r in %r" % (key, line))
        elif not new_value_str:
            # We use None instead of an empty string everywhere because
            # some fields (e.g. the "date" field, a DateField) choke
            # on "".
            new_values[key] = None
        elif new_value_str.lower() != KEEP.lower():
            new_values[key] = library.Item._parse(key, new_value_str)

    with lib.transaction():
        for item in items:
            for key, value in new_values.iteritems():
                item[key] = value
            changed = ui.show_model_changes(item)
            if changed and not opts.dry_run:
                item.store()

    if not opts.dry_run:
        print "writing tags"
        for item in items:
            item.try_write()

def _print_and_apply_changes(lib, item, old_data, move, pretend, write):
    """Apply changes to an Item and preview them in the console. Return
    a boolean indicating whether any changes were made.
    """
    changed = ui.show_model_changes(item)
    if not changed:
        return False

    # If we're just pretending, then don't move or save.
    if not pretend:
        # Move the item if it's in the library.
        if move and lib.directory in util.ancestry(item.path):
            item.move(with_album=False)

        if write and not item.try_write():
            return False
        item.store()

    return True

def _print_and_apply_changes(lib, item, old_data, move, pretend, write):
    """Apply changes to an Item and preview them in the console. Return
    a boolean indicating whether any changes were made.
    """
    changed = ui.show_model_changes(item)
    if not changed:
        return False

    # If we're just pretending, then don't move or save.
    if not pretend:
        # Move the item if it's in the library.
        if move and lib.directory in util.ancestry(item.path):
            item.move(with_album=False)

        if write:
            try:
                item.write()
            except Exception as exc:
                log.error(u'could not sync {0}: {1}'.format(
                    util.displayable_path(item.path), exc))
                return False
        item.store()

    return True

def albums(self, lib, query, move, pretend, write):
    """Retrieve and apply info from the autotagger for albums matched by
    query and their items.
    """
    # Process matching albums.
    for a in lib.albums(query):
        album_formatted = format(a)
        if not a.mb_albumid:
            self._log.info('Skipping album with no mb_albumid: {0}',
                           album_formatted)
            continue

        items = list(a.items())

        # Do we have a valid MusicBrainz album ID?
        if not re.match(MBID_REGEX, a.mb_albumid):
            self._log.info('Skipping album with invalid mb_albumid: {0}',
                           album_formatted)
            continue

        # Get the MusicBrainz album information.
        album_info = hooks.album_for_mbid(a.mb_albumid)
        if not album_info:
            self._log.info('Release ID {0} not found for album {1}',
                           a.mb_albumid, album_formatted)
            continue

        # Map release track and recording MBIDs to their information.
        # Recordings can appear multiple times on a release, so each MBID
        # maps to a list of TrackInfo objects.
        releasetrack_index = {}
        track_index = defaultdict(list)
        for track_info in album_info.tracks:
            releasetrack_index[track_info.release_track_id] = track_info
            track_index[track_info.track_id].append(track_info)

        # Construct a track mapping according to MBIDs (release track
        # MBIDs first, if available, and recording MBIDs otherwise). This
        # should work for albums that have missing or extra tracks.
        mapping = {}
        for item in items:
            if item.mb_releasetrackid and \
                    item.mb_releasetrackid in releasetrack_index:
                mapping[item] = releasetrack_index[item.mb_releasetrackid]
            else:
                candidates = track_index[item.mb_trackid]
                if len(candidates) == 1:
                    mapping[item] = candidates[0]
                else:
                    # If there are multiple copies of a recording, they
                    # are disambiguated using their disc and track number.
                    for c in candidates:
                        if (c.medium_index == item.track and
                                c.medium == item.disc):
                            mapping[item] = c
                            break

        # Apply.
        self._log.debug('applying changes to {}', album_formatted)
        with lib.transaction():
            autotag.apply_metadata(album_info, mapping)
            changed = False
            # Find any changed item to apply MusicBrainz changes to album.
            any_changed_item = items[0]
            for item in items:
                item_changed = ui.show_model_changes(item)
                changed |= item_changed
                if item_changed:
                    any_changed_item = item
                    apply_item_changes(lib, item, move, pretend, write)

            if not changed:
                # No change to any item.
                continue

            if not pretend:
                # Update album structure to reflect an item in it.
                for key in library.Album.item_keys:
                    a[key] = any_changed_item[key]
                a.store()

                # Move album art (and any inconsistent items).
                if move and lib.directory in util.ancestry(items[0].path):
                    self._log.debug('moving album {0}', album_formatted)
                    a.move()

def edit_objects(self, objs, fields):
    """Dump a set of Model objects to a file as text, ask the user to
    edit it, and apply any changes to the objects.

    Return a boolean indicating whether the edit succeeded.
    """
    # Get the content to edit as raw data structures.
    old_data = [flatten(o, fields) for o in objs]

    # Set up a temporary file with the initial data for editing.
    if six.PY2:
        new = NamedTemporaryFile(mode='w', suffix='.yaml', delete=False)
    else:
        new = NamedTemporaryFile(mode='w', suffix='.yaml', delete=False,
                                 encoding='utf-8')
    old_str = dump(old_data)
    new.write(old_str)
    if six.PY2:
        old_str = old_str.decode('utf-8')
    new.close()

    # Loop until we have parseable data and the user confirms.
    try:
        while True:
            # Ask the user to edit the data.
            edit(new.name, self._log)

            # Read the data back after editing and check whether anything
            # changed.
            with codecs.open(new.name, encoding='utf-8') as f:
                new_str = f.read()
            if new_str == old_str:
                ui.print_(u"No changes; aborting.")
                return False

            # Parse the updated data.
            try:
                new_data = load(new_str)
            except ParseError as e:
                ui.print_(u"Could not read data: {}".format(e))
                if ui.input_yn(u"Edit again to fix? (Y/n)", True):
                    continue
                else:
                    return False

            # Show the changes.
            # If the objects are not on the DB yet, we need a copy of
            # their original state for show_model_changes.
            objs_old = [obj.copy() if obj.id < 0 else None
                        for obj in objs]
            self.apply_data(objs, old_data, new_data)
            changed = False
            for obj, obj_old in zip(objs, objs_old):
                changed |= ui.show_model_changes(obj, obj_old)
            if not changed:
                ui.print_(u'No changes to apply.')
                return False

            # Confirm the changes.
            choice = ui.input_options(
                (u'continue Editing', u'apply', u'cancel')
            )
            if choice == u'a':  # Apply.
                return True
            elif choice == u'c':  # Cancel.
                return False
            elif choice == u'e':  # Keep editing.
                # Reset the temporary changes to the objects. If we have
                # a copy from above, use that, else reload from the
                # database.
                objs = [(old_obj or obj)
                        for old_obj, obj in zip(objs_old, objs)]
                for obj in objs:
                    if not obj.id < 0:
                        obj.load()
                continue

    # Remove the temporary file before returning.
    finally:
        os.remove(new.name)

def update_items(lib, query, album, move, pretend):
    """For all the items matched by the query, update the library to
    reflect the item's embedded tags.
    """
    with lib.transaction():
        items, _ = _do_query(lib, query, album)

        # Walk through the items and pick up their changes.
        affected_albums = set()
        for item in items:
            # Item deleted?
            if not os.path.exists(syspath(item.path)):
                ui.print_obj(item, lib)
                ui.print_(ui.colorize('red', u' deleted'))
                if not pretend:
                    item.remove(True)
                affected_albums.add(item.album_id)
                continue

            # Did the item change since last checked?
            if item.current_mtime() <= item.mtime:
                log.debug(u'skipping %s because mtime is up to date (%i)' %
                          (displayable_path(item.path), item.mtime))
                continue

            # Read new data.
            try:
                item.read()
            except Exception as exc:
                log.error(u'error reading {0}: {1}'.format(
                    displayable_path(item.path), exc))
                continue

            # Special-case album artist when it matches track artist.
            # (Hacky but necessary for preserving album-level metadata for
            # non-autotagged imports.)
            if not item.albumartist:
                old_item = lib.get_item(item.id)
                if old_item.albumartist == old_item.artist == item.artist:
                    item.albumartist = old_item.albumartist
                    item._dirty.discard('albumartist')

            # Check for and display changes.
            changed = ui.show_model_changes(item,
                                            fields=library.ITEM_KEYS_META)

            # Save changes.
            if not pretend:
                if changed:
                    # Move the item if it's in the library.
                    if move and lib.directory in ancestry(item.path):
                        item.move()

                    item.store()
                    affected_albums.add(item.album_id)
                else:
                    # The file's mtime was different, but there were no
                    # changes to the metadata. Store the new mtime, which
                    # is set in the call to read(), so we don't check
                    # this again in the future.
                    item.store()

        # Skip album changes while pretending.
        if pretend:
            return

        # Modify affected albums to reflect changes in their items.
        for album_id in affected_albums:
            if album_id is None:  # Singletons.
                continue
            album = lib.get_album(album_id)
            if not album:  # Empty albums have already been removed.
                log.debug('emptied album %i' % album_id)
                continue
            first_item = album.items().get()

            # Update album structure to reflect an item in it.
            for key in library.ALBUM_KEYS_ITEM:
                album[key] = first_item[key]
            album.store()

            # Move album art (and any inconsistent items).
            if move and lib.directory in ancestry(first_item.path):
                log.debug('moving album %i' % album_id)
                album.move()

def mbsync_albums(lib, query, move, pretend, write):
    """Retrieve and apply info from the autotagger for albums matched by
    query and their items.
    """
    # Process matching albums.
    for a in lib.albums(query):
        if not a.mb_albumid:
            log.info(u'Skipping album {0}: has no mb_albumid'.format(a.id))
            continue

        items = list(a.items())

        # Get the MusicBrainz album information.
        album_info = hooks.album_for_mbid(a.mb_albumid)
        if not album_info:
            log.info(u'Release ID not found: {0}'.format(a.mb_albumid))
            continue

        # Map recording MBIDs to their information. Recordings can appear
        # multiple times on a release, so each MBID maps to a list of
        # TrackInfo objects.
        track_index = defaultdict(list)
        for track_info in album_info.tracks:
            track_index[track_info.track_id].append(track_info)

        # Construct a track mapping according to MBIDs. This should work
        # for albums that have missing or extra tracks. If there are
        # multiple copies of a recording, they are disambiguated using
        # their disc and track number.
        mapping = {}
        for item in items:
            candidates = track_index[item.mb_trackid]
            if len(candidates) == 1:
                mapping[item] = candidates[0]
            else:
                for c in candidates:
                    if (c.medium_index == item.track and
                            c.medium == item.disc):
                        mapping[item] = c
                        break

        # Apply.
        with lib.transaction():
            autotag.apply_metadata(album_info, mapping)
            changed = False
            for item in items:
                item_changed = ui.show_model_changes(item)
                changed |= item_changed
                if item_changed:
                    apply_item_changes(lib, item, move, pretend, write)

            if not changed:
                # No change to any item.
                continue

            if not pretend:
                # Update album structure to reflect an item in it.
                for key in library.Album.item_keys:
                    a[key] = items[0][key]
                a.store()

                # Move album art (and any inconsistent items).
                if move and lib.directory in util.ancestry(items[0].path):
                    log.debug(u'moving album {0}'.format(a.id))
                    a.move()

def _show(self, **kwargs):
    change = ui.show_model_changes(self.a, self.b, **kwargs)
    out = self.io.getoutput()
    return change, out

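Several of the call sites above (the advisory writer, the preview helper, the edit plugin, and this test shim) pass an explicit "old" model and an explicit field list to show_model_changes instead of relying on the item's own dirty state. The following is a minimal sketch of that two-argument form, not taken from any plugin here: the helper name preview_genre_change and the choice of the genre field are hypothetical, while the show_model_changes(new, old, fields=...) call shape and Model.copy() are taken from the examples above.

def preview_genre_change(item, new_genre):
    """Hypothetical helper: diff a proposed state against a snapshot.

    Sketch of the two-argument show_model_changes(new, old, fields=...)
    form used by the call sites above; only that call shape is assumed.
    """
    # Keep an untouched copy of the current state to diff against
    # (copy() on a model object, as in the edit_objects variant above).
    old_item = item.copy()

    # Apply the proposed value to the working object only; nothing is
    # persisted here.
    item.genre = new_genre

    # Print the per-field diff, restricted to the field we care about,
    # and report whether anything actually differs.
    return ui.show_model_changes(item, old_item, fields=['genre'])
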
def edit_objects(self, objs, fields):
    """Dump a set of Model objects to a file as text, ask the user to
    edit it, and apply any changes to the objects.

    Return a boolean indicating whether the edit succeeded.
    """
    # Get the content to edit as raw data structures.
    old_data = [flatten(o, fields) for o in objs]

    # Set up a temporary file with the initial data for editing.
    new = NamedTemporaryFile(suffix=".yaml", delete=False)
    old_str = dump(old_data)
    new.write(old_str)
    new.close()

    # Loop until we have parseable data and the user confirms.
    try:
        while True:
            # Ask the user to edit the data.
            edit(new.name)

            # Read the data back after editing and check whether anything
            # changed.
            with open(new.name) as f:
                new_str = f.read()
            if new_str == old_str:
                ui.print_("No changes; aborting.")
                return False

            # Parse the updated data.
            try:
                new_data = load(new_str)
            except ParseError as e:
                ui.print_("Could not read data: {}".format(e))
                if ui.input_yn("Edit again to fix? (Y/n)", True):
                    continue
                else:
                    return False

            # Show the changes.
            self.apply_data(objs, old_data, new_data)
            changed = False
            for obj in objs:
                changed |= ui.show_model_changes(obj)
            if not changed:
                ui.print_("No changes to apply.")
                return False

            # Confirm the changes.
            choice = ui.input_options(("continue Editing", "apply",
                                       "cancel"))
            if choice == "a":  # Apply.
                return True
            elif choice == "c":  # Cancel.
                return False
            elif choice == "e":  # Keep editing.
                # Reset the temporary changes to the objects.
                for obj in objs:
                    obj.read()
                continue

    # Remove the temporary file before returning.
    finally:
        os.remove(new.name)

def edit_objects(self, objs, fields):
    """Dump a set of Model objects to a file as text, ask the user to
    edit it, and apply any changes to the objects.

    Return a boolean indicating whether the edit succeeded.
    """
    # Get the content to edit as raw data structures.
    old_data = [flatten(o, fields) for o in objs]

    # Set up a temporary file with the initial data for editing.
    new = NamedTemporaryFile(suffix='.yaml', delete=False)
    old_str = dump(old_data)
    new.write(old_str)
    new.close()

    # Loop until we have parseable data and the user confirms.
    try:
        while True:
            # Ask the user to edit the data.
            edit(new.name)

            # Read the data back after editing and check whether anything
            # changed.
            with open(new.name) as f:
                new_str = f.read()
            if new_str == old_str:
                ui.print_("No changes; aborting.")
                return False

            # Parse the updated data.
            try:
                new_data = load(new_str)
            except ParseError as e:
                ui.print_("Could not read data: {}".format(e))
                if ui.input_yn("Edit again to fix? (Y/n)", True):
                    continue
                else:
                    return False

            # Show the changes.
            self.apply_data(objs, old_data, new_data)
            changed = False
            for obj in objs:
                changed |= ui.show_model_changes(obj)
            if not changed:
                ui.print_('No changes to apply.')
                return False

            # Confirm the changes.
            choice = ui.input_options(
                ('continue Editing', 'apply', 'cancel'))
            if choice == 'a':  # Apply.
                return True
            elif choice == 'c':  # Cancel.
                return False
            elif choice == 'e':  # Keep editing.
                # Reset the temporary changes to the objects.
                for obj in objs:
                    obj.read()
                continue

    # Remove the temporary file before returning.
    finally:
        os.remove(new.name)

def edit_objects(self, objs, fields):
    """Dump a set of Model objects to a file as text, ask the user to
    edit it, and apply any changes to the objects.

    Return a boolean indicating whether the edit succeeded.
    """
    # Get the content to edit as raw data structures.
    old_data = [flatten(o, fields) for o in objs]

    # Set up a temporary file with the initial data for editing.
    if six.PY2:
        new = NamedTemporaryFile(mode='w', suffix='.yaml', delete=False)
    else:
        new = NamedTemporaryFile(mode='w', suffix='.yaml', delete=False,
                                 encoding='utf-8')
    old_str = dump(old_data)
    new.write(old_str)
    if six.PY2:
        old_str = old_str.decode('utf-8')
    new.close()

    # Loop until we have parseable data and the user confirms.
    try:
        while True:
            # Ask the user to edit the data.
            edit(new.name, self._log)

            # Read the data back after editing and check whether anything
            # changed.
            with codecs.open(new.name, encoding='utf-8') as f:
                new_str = f.read()
            if new_str == old_str:
                ui.print_(u"No changes; aborting.")
                return False

            # Parse the updated data.
            try:
                new_data = load(new_str)
            except ParseError as e:
                ui.print_(u"Could not read data: {}".format(e))
                if ui.input_yn(u"Edit again to fix? (Y/n)", True):
                    continue
                else:
                    return False

            # Show the changes.
            # If the objects are not on the DB yet, we need a copy of
            # their original state for show_model_changes.
            objs_old = [
                deepcopy(obj) if not obj._db else None
                for obj in objs
            ]
            self.apply_data(objs, old_data, new_data)
            changed = False
            for obj, obj_old in zip(objs, objs_old):
                changed |= ui.show_model_changes(obj, obj_old)
            if not changed:
                ui.print_(u'No changes to apply.')
                return False

            # Confirm the changes.
            choice = ui.input_options(
                (u'continue Editing', u'apply', u'cancel'))
            if choice == u'a':  # Apply.
                return True
            elif choice == u'c':  # Cancel.
                return False
            elif choice == u'e':  # Keep editing.
                # Reset the temporary changes to the objects.
                for obj in objs:
                    obj.read()
                continue

    # Remove the temporary file before returning.
    finally:
        os.remove(new.name)

def albums(self, lib, query, move, pretend, write):
    """Retrieve and apply info from the autotagger for albums matched by
    query and their items.
    """
    # Process matching albums.
    for a in lib.albums(query):
        album_formatted = format(a)
        if not a.mb_albumid:
            self._log.info(u'Skipping album with no mb_albumid: {0}',
                           album_formatted)
            continue

        items = list(a.items())

        # Get the MusicBrainz album information.
        album_info = hooks.album_for_mbid(a.mb_albumid)
        if not album_info:
            self._log.info(u'Release ID {0} not found for album {1}',
                           a.mb_albumid, album_formatted)
            continue

        # Map release track and recording MBIDs to their information.
        # Recordings can appear multiple times on a release, so each MBID
        # maps to a list of TrackInfo objects.
        releasetrack_index = dict()
        track_index = defaultdict(list)
        for track_info in album_info.tracks:
            releasetrack_index[track_info.release_track_id] = track_info
            track_index[track_info.track_id].append(track_info)

        # Construct a track mapping according to MBIDs (release track
        # MBIDs first, if available, and recording MBIDs otherwise). This
        # should work for albums that have missing or extra tracks.
        mapping = {}
        for item in items:
            if item.mb_releasetrackid and \
                    item.mb_releasetrackid in releasetrack_index:
                mapping[item] = releasetrack_index[item.mb_releasetrackid]
            else:
                candidates = track_index[item.mb_trackid]
                if len(candidates) == 1:
                    mapping[item] = candidates[0]
                else:
                    # If there are multiple copies of a recording, they
                    # are disambiguated using their disc and track number.
                    for c in candidates:
                        if (c.medium_index == item.track and
                                c.medium == item.disc):
                            mapping[item] = c
                            break

        # Apply.
        self._log.debug(u'applying changes to {}', album_formatted)
        with lib.transaction():
            autotag.apply_metadata(album_info, mapping)
            changed = False
            # Find any changed item to apply MusicBrainz changes to album.
            any_changed_item = items[0]
            for item in items:
                item_changed = ui.show_model_changes(item)
                changed |= item_changed
                if item_changed:
                    any_changed_item = item
                    apply_item_changes(lib, item, move, pretend, write)

            if not changed:
                # No change to any item.
                continue

            if not pretend:
                # Update album structure to reflect an item in it.
                for key in library.Album.item_keys:
                    a[key] = any_changed_item[key]
                a.store()

                # Move album art (and any inconsistent items).
                if move and lib.directory in util.ancestry(items[0].path):
                    self._log.debug(u'moving album {0}', album_formatted)
                    a.move()

def mbsync_albums(lib, query, move, pretend, write):
    """Retrieve and apply info from the autotagger for albums matched by
    query and their items.
    """
    # Process matching albums.
    for a in lib.albums(query):
        if not a.mb_albumid:
            log.info(u'Skipping album {0}: has no mb_albumid', a.id)
            continue

        items = list(a.items())

        # Get the MusicBrainz album information.
        album_info = hooks.album_for_mbid(a.mb_albumid)
        if not album_info:
            log.info(u'Release ID not found: {0}', a.mb_albumid)
            continue

        # Map recording MBIDs to their information. Recordings can appear
        # multiple times on a release, so each MBID maps to a list of
        # TrackInfo objects.
        track_index = defaultdict(list)
        for track_info in album_info.tracks:
            track_index[track_info.track_id].append(track_info)

        # Construct a track mapping according to MBIDs. This should work
        # for albums that have missing or extra tracks. If there are
        # multiple copies of a recording, they are disambiguated using
        # their disc and track number.
        mapping = {}
        for item in items:
            candidates = track_index[item.mb_trackid]
            if len(candidates) == 1:
                mapping[item] = candidates[0]
            else:
                for c in candidates:
                    if c.medium_index == item.track and \
                            c.medium == item.disc:
                        mapping[item] = c
                        break

        # Apply.
        with lib.transaction():
            autotag.apply_metadata(album_info, mapping)
            changed = False
            for item in items:
                item_changed = ui.show_model_changes(item)
                changed |= item_changed
                if item_changed:
                    apply_item_changes(lib, item, move, pretend, write)

            if not changed:
                # No change to any item.
                continue

            if not pretend:
                # Update album structure to reflect an item in it.
                for key in library.Album.item_keys:
                    a[key] = items[0][key]
                a.store()

                # Move album art (and any inconsistent items).
                if move and lib.directory in util.ancestry(items[0].path):
                    log.debug(u'moving album {0}', a.id)
                    a.move()

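Across nearly every call site above the same pattern recurs: mutate a model's fields in memory, preview the delta with ui.show_model_changes, and call store() only when something actually changed and the command is not running in pretend mode. Below is a minimal sketch of that pattern on its own; the command handler name tag_cleanup and the flexible field example_field are hypothetical, while ui.decargs, lib.items, ui.show_model_changes, opts.pretend, and Item.store are taken directly from the examples above.

def tag_cleanup(self, lib, opts, args):
    """Hypothetical command handler: preview and apply one field change.

    A sketch of the preview-then-store pattern used throughout the call
    sites above, not code from any of those plugins.
    """
    query = ui.decargs(args)
    for item in lib.items(query):
        # Mutate the in-memory model only; nothing is persisted yet.
        item['example_field'] = 1  # hypothetical flexible attribute

        # show_model_changes() prints the changed fields and returns
        # whether anything differs from the stored state.
        changed = ui.show_model_changes(item)

        # Persist to the database only for real changes on real runs.
        if changed and not opts.pretend:
            item.store()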