def __init__(self):
    """Register event listeners, set config defaults, and build the
    set of fields to zero from either the blacklist (`fields`) or the
    whitelist (`keep_fields`) configuration.
    """
    super(ZeroPlugin, self).__init__()

    # Listeners.
    self.register_listener("write", self.write_event)
    self.register_listener("import_task_choice",
                           self.import_task_choice_event)

    self.config.add({"fields": [], "keep_fields": [],
                     "update_database": False})

    self.patterns = {}   # Per-field patterns, filled by `set_pattern`.
    self.warned = False

    # We'll only handle `fields` or `keep_fields`, but not both.
    if self.config["fields"] and self.config["keep_fields"]:
        # `Logger.warn` is a deprecated alias; use `warning`.
        self._log.warning("cannot blacklist and whitelist at the same time")

    # Blacklist mode.
    if self.config["fields"]:
        self.validate_config("fields")
        for field in self.config["fields"].as_str_seq():
            self.set_pattern(field)

    # Whitelist mode.
    elif self.config["keep_fields"]:
        self.validate_config("keep_fields")

        for field in MediaFile.fields():
            if field in self.config["keep_fields"].as_str_seq():
                continue
            self.set_pattern(field)

        # These fields should always be preserved.
        for key in ("id", "path", "album_id"):
            if key in self.patterns:
                del self.patterns[key]
def __init__(self):
    """Register event listeners and read the per-field zeroing
    patterns from the `fields` configuration list.
    """
    super(ZeroPlugin, self).__init__()

    # Listeners.
    self.register_listener('write', self.write_event)
    self.register_listener('import_task_choice',
                           self.import_task_choice_event)

    self.config.add({
        'fields': [],
    })

    self.patterns = {}
    self.warned = False

    for field in self.config['fields'].as_str_seq():
        if field in ('id', 'path', 'album_id'):
            # `Logger.warn` is a deprecated alias; use `warning`.
            log.warning(u'[zero] field \'{0}\' ignored, zeroing '
                        u'it would be dangerous'.format(field))
            continue
        if field not in MediaFile.fields():
            log.error(u'[zero] invalid field: {0}'.format(field))
            continue

        try:
            self.patterns[field] = self.config[field].as_str_seq()
        except confit.NotFoundError:
            # Matches everything
            self.patterns[field] = True
def __init__(self):
    """Register event listeners and read the per-field zeroing
    patterns from the `fields` configuration list.
    """
    super(ZeroPlugin, self).__init__()

    # Listeners.
    self.register_listener('write', self.write_event)
    self.register_listener('import_task_choice',
                           self.import_task_choice_event)

    self.config.add({
        'fields': [],
        'update_database': False,
    })

    self.patterns = {}
    self.warned = False

    for field in self.config['fields'].as_str_seq():
        if field in ('id', 'path', 'album_id'):
            # `Logger.warn` is a deprecated alias; use `warning`.
            self._log.warning(u'field \'{0}\' ignored, zeroing '
                              u'it would be dangerous', field)
            continue
        if field not in MediaFile.fields():
            self._log.error(u'invalid field: {0}', field)
            continue

        try:
            self.patterns[field] = self.config[field].as_str_seq()
        except confit.NotFoundError:
            # Matches everything
            self.patterns[field] = True
def validate_config(self, mode):
    """Check whether fields in the configuration are valid.

    `mode` should either be "fields" or "keep_fields", indicating
    the section of the configuration to validate. Problems are
    logged; no exception is raised.
    """
    for field in self.config[mode].as_str_seq():
        if field not in MediaFile.fields():
            self._log.error("invalid field: {0}", field)
            continue
        if mode == "fields" and field in ("id", "path", "album_id"):
            # `Logger.warn` is a deprecated alias; use `warning`.
            self._log.warning("field '{0}' ignored, zeroing "
                              "it would be dangerous", field)
            continue
def validate_config(self, mode):
    """Check whether fields in the configuration are valid.

    `mode` should either be "fields" or "keep_fields", indicating
    the section of the configuration to validate. Problems are
    logged; no exception is raised.
    """
    for field in self.config[mode].as_str_seq():
        if field not in MediaFile.fields():
            self._log.error(u'invalid field: {0}', field)
            continue
        if mode == 'fields' and field in ('id', 'path', 'album_id'):
            # `Logger.warn` is a deprecated alias; use `warning`.
            self._log.warning(u'field \'{0}\' ignored, zeroing '
                              u'it would be dangerous', field)
            continue
def __setitem__(self, key, value):
    """Set the item's value for a standard field or a flexattr.
    """
    # Path values need normalizing: encode unicode strings and
    # materialize read buffers into bytes.
    if key == 'path':
        if isinstance(value, buffer):
            value = bytes(value)
        elif isinstance(value, unicode):
            value = bytestring_path(value)

    is_tag_field = key in MediaFile.fields()
    if is_tag_field:
        self.mtime = 0  # Reset mtime on dirty.

    super(Item, self).__setitem__(key, value)
def __setitem__(self, key, value):
    """Set the item's value for a standard field or a flexattr."""
    if key == 'path':
        # Encode unicode paths and read buffers.
        if isinstance(value, unicode):
            value = bytestring_path(value)
        elif isinstance(value, buffer):
            value = bytes(value)

    if key in MediaFile.fields():
        # Reset mtime on dirty.
        self.mtime = 0

    super(Item, self).__setitem__(key, value)
def _set_pattern(self, field):
    """Populate `self.fields_to_progs` for a given field.
    Do some sanity checks then compile the regexes.
    """
    # Guard clauses: unknown or protected fields are reported and
    # never compiled.
    if field not in MediaFile.fields():
        self._log.error(u'invalid field: {0}', field)
        return
    if field in ('id', 'path', 'album_id'):
        self._log.warning(u'field \'{0}\' ignored, zeroing '
                          u'it would be dangerous', field)
        return

    try:
        raw_patterns = self.config[field].as_str_seq()
    except confit.NotFoundError:
        # Matches everything
        self.fields_to_progs[field] = []
        return

    for raw in raw_patterns:
        compiled = re.compile(raw, re.IGNORECASE)
        self.fields_to_progs.setdefault(field, []).append(compiled)
def _set_pattern(self, field):
    """Populate `self.fields_to_progs` for a given field.
    Do some sanity checks then compile the regexes.
    """
    if field not in MediaFile.fields():
        self._log.error(u'invalid field: {0}', field)
    elif field in ('id', 'path', 'album_id'):
        self._log.warning(
            u'field \'{0}\' ignored, zeroing '
            u'it would be dangerous', field)
    else:
        try:
            for expr in self.config[field].as_str_seq():
                # Case-insensitive match against the field's value.
                compiled = re.compile(expr, re.IGNORECASE)
                self.fields_to_progs.setdefault(field, []).append(compiled)
        except confit.NotFoundError:
            # Matches everything
            self.fields_to_progs[field] = []
def __init__(self):
    """Wire up event listeners and build the zeroing pattern table
    from either the blacklist (`fields`) or whitelist (`keep_fields`).
    """
    super(ZeroPlugin, self).__init__()

    # Listeners.
    self.register_listener('write', self.write_event)
    self.register_listener('import_task_choice',
                           self.import_task_choice_event)

    self.config.add({
        'fields': [],
        'keep_fields': [],
        'update_database': False,
    })

    self.patterns = {}
    self.warned = False

    blacklist = self.config['fields']
    whitelist = self.config['keep_fields']

    # We'll only handle `fields` or `keep_fields`, but not both.
    if blacklist and whitelist:
        self._log.warning(
            u'cannot blacklist and whitelist at the same time'
        )

    if blacklist:
        # Blacklist mode.
        self.validate_config('fields')
        for field in blacklist.as_str_seq():
            self.set_pattern(field)
    elif whitelist:
        # Whitelist mode.
        self.validate_config('keep_fields')

        kept = whitelist.as_str_seq()
        for field in MediaFile.fields():
            if field not in kept:
                self.set_pattern(field)

        # These fields should always be preserved.
        for key in ('id', 'path', 'album_id'):
            self.patterns.pop(key, None)
def __init__(self):
    """Initialize listeners, configuration defaults, and the table of
    fields to zero.
    """
    super(ZeroPlugin, self).__init__()

    self.patterns = {}
    self.warned = False

    # Listeners.
    self.register_listener('write', self.write_event)
    self.register_listener('import_task_choice',
                           self.import_task_choice_event)

    self.config.add({
        'fields': [],
        'keep_fields': [],
        'update_database': False,
    })

    # We'll only handle `fields` or `keep_fields`, but not both.
    if self.config['fields'] and self.config['keep_fields']:
        self._log.warning(
            u'cannot blacklist and whitelist at the same time')

    if self.config['fields']:
        # Blacklist mode.
        self.validate_config('fields')
        for name in self.config['fields'].as_str_seq():
            self.set_pattern(name)
    elif self.config['keep_fields']:
        # Whitelist mode.
        self.validate_config('keep_fields')

        for name in MediaFile.fields():
            if name in self.config['keep_fields'].as_str_seq():
                continue
            self.set_pattern(name)

        # These fields should always be preserved.
        for protected in ('id', 'path', 'album_id'):
            if protected in self.patterns:
                del self.patterns[protected]
def __init__(self):
    """Read the bulk of the config into `self.fields_to_progs`.

    After construction, `fields_to_progs` contains all the fields
    that should be zeroed as keys and maps each of those to a list
    of compiled regexes (progs) as values. A field is zeroed if its
    value matches one of the associated progs. If progs is empty,
    then the associated field is always zeroed.
    """
    # NOTE: the paragraph above previously sat mid-function as a bare
    # string literal -- a no-op expression, not documentation. It now
    # lives in the real docstring.
    super(ZeroPlugin, self).__init__()

    self.register_listener('write', self.write_event)
    self.register_listener('import_task_choice',
                           self.import_task_choice_event)

    self.config.add({
        'auto': True,
        'fields': [],
        'keep_fields': [],
        'update_database': False,
    })

    self.fields_to_progs = {}
    self.warned = False

    if self.config['fields'] and self.config['keep_fields']:
        self._log.warning(
            u'cannot blacklist and whitelist at the same time'
        )
    # Blacklist mode.
    elif self.config['fields']:
        for field in self.config['fields'].as_str_seq():
            self._set_pattern(field)
    # Whitelist mode.
    elif self.config['keep_fields']:
        for field in MediaFile.fields():
            if (field not in self.config['keep_fields'].as_str_seq() and
                    # These fields should always be preserved.
                    field not in ('id', 'path', 'album_id')):
                self._set_pattern(field)
def __init__(self):
    """Read the bulk of the config into `self.fields_to_progs`.

    After construction, `fields_to_progs` contains all the fields
    that should be zeroed as keys and maps each of those to a list
    of compiled regexes (progs) as values. A field is zeroed if its
    value matches one of the associated progs. If progs is empty,
    then the associated field is always zeroed.
    """
    # NOTE: the paragraph above previously sat mid-function as a bare
    # string literal -- a no-op expression, not documentation. It now
    # lives in the real docstring.
    super(ZeroPlugin, self).__init__()

    self.register_listener('write', self.write_event)
    self.register_listener('import_task_choice',
                           self.import_task_choice_event)

    self.config.add({
        'auto': True,
        'fields': [],
        'keep_fields': [],
        'update_database': False,
    })

    self.fields_to_progs = {}
    self.warned = False

    if self.config['fields'] and self.config['keep_fields']:
        self._log.warning(
            u'cannot blacklist and whitelist at the same time')
    # Blacklist mode.
    elif self.config['fields']:
        for field in self.config['fields'].as_str_seq():
            self._set_pattern(field)
    # Whitelist mode.
    elif self.config['keep_fields']:
        for field in MediaFile.fields():
            if (field not in self.config['keep_fields'].as_str_seq() and
                    # These fields should always be preserved.
                    field not in ('id', 'path', 'album_id')):
                self._set_pattern(field)
def test_known_fields(self):
    """`MediaFile.fields()` reports exactly the expected field names."""
    expected = list(ReadWriteTestBase.tag_fields)
    expected += ['encoder', 'images', 'genres', 'albumtype']
    self.assertItemsEqual(MediaFile.fields(), expected)
# MPD-compatible protocol error codes.
ERROR_PLAYER_SYNC = 55
ERROR_EXIST = 56

# Volume range accepted by the protocol.
VOLUME_MIN = 0
VOLUME_MAX = 100

SAFE_COMMANDS = (
    # Commands that are available when unauthenticated.
    u"close", u"commands", u"notcommands", u"password", u"ping",
)

# Fixed Item fields that are also writable MediaFile tag fields.
ITEM_KEYS_WRITABLE = set(MediaFile.fields()).intersection(Item._fields.keys())


# Loggers.
log = logging.getLogger("beets.bpd")
global_log = logging.getLogger("beets")


# Gstreamer import error.
class NoGstreamerError(Exception):
    pass


# Error-handling, exceptions, parameter parsing.
class BPDError(Exception):
class Item(LibModel):
    """A single track in the library database, stored in the `items`
    table and (usually) backed by a media file on disk.
    """

    _table = 'items'
    _flex_table = 'item_attributes'

    # Fixed (typed) database fields for an item.
    _fields = {
        'id': types.PRIMARY_ID,
        'path': PathType(),
        'album_id': types.FOREIGN_ID,
        'title': types.STRING,
        'artist': types.STRING,
        'artist_sort': types.STRING,
        'artist_credit': types.STRING,
        'album': types.STRING,
        'albumartist': types.STRING,
        'albumartist_sort': types.STRING,
        'albumartist_credit': types.STRING,
        'genre': types.STRING,
        'composer': types.STRING,
        'grouping': types.STRING,
        'year': types.PaddedInt(4),
        'month': types.PaddedInt(2),
        'day': types.PaddedInt(2),
        'track': types.PaddedInt(2),
        'tracktotal': types.PaddedInt(2),
        'disc': types.PaddedInt(2),
        'disctotal': types.PaddedInt(2),
        'lyrics': types.STRING,
        'comments': types.STRING,
        'bpm': types.INTEGER,
        'comp': types.BOOLEAN,
        'mb_trackid': types.STRING,
        'mb_albumid': types.STRING,
        'mb_artistid': types.STRING,
        'mb_albumartistid': types.STRING,
        'albumtype': types.STRING,
        'label': types.STRING,
        'acoustid_fingerprint': types.STRING,
        'acoustid_id': types.STRING,
        'mb_releasegroupid': types.STRING,
        'asin': types.STRING,
        'catalognum': types.STRING,
        'script': types.STRING,
        'language': types.STRING,
        'country': types.STRING,
        'albumstatus': types.STRING,
        'media': types.STRING,
        'albumdisambig': types.STRING,
        'disctitle': types.STRING,
        'encoder': types.STRING,
        'rg_track_gain': types.NULL_FLOAT,
        'rg_track_peak': types.NULL_FLOAT,
        'rg_album_gain': types.NULL_FLOAT,
        'rg_album_peak': types.NULL_FLOAT,
        'original_year': types.PaddedInt(4),
        'original_month': types.PaddedInt(2),
        'original_day': types.PaddedInt(2),
        'initial_key': MusicalKey(),
        'length': DurationType(),
        'bitrate': types.ScaledInt(1000, u'kbps'),
        'format': types.STRING,
        'samplerate': types.ScaledInt(1000, u'kHz'),
        'bitdepth': types.INTEGER,
        'channels': types.INTEGER,
        'mtime': DateType(),
        'added': DateType(),
    }

    _search_fields = ('artist', 'title', 'comments',
                      'album', 'albumartist', 'genre')

    _types = {
        'data_source': types.STRING,
    }

    _media_fields = set(MediaFile.readable_fields()) \
        .intersection(_fields.keys())
    """Set of item fields that are backed by `MediaFile` fields.

    Any kind of field (fixed, flexible, and computed) may be a media
    field. Only these fields are read from disk in `read` and written in
    `write`.
    """

    _media_tag_fields = set(MediaFile.fields()).intersection(_fields.keys())
    """Set of item fields that are backed by *writable* `MediaFile` tag
    fields.

    This excludes fields that represent audio data, such as `bitrate` or
    `length`.
    """

    _formatter = FormattedItemMapping

    _sorts = {'artist': SmartArtistSort}

    _format_config_key = 'format_item'

    @classmethod
    def _getters(cls):
        getters = plugins.item_field_getters()
        getters['singleton'] = lambda i: i.album_id is None
        getters['filesize'] = Item.try_filesize  # In bytes.
        return getters

    @classmethod
    def from_path(cls, path):
        """Creates a new item from the media file at the specified path.
        """
        # Initiate with values that aren't read from files.
        i = cls(album_id=None)
        i.read(path)
        i.mtime = i.current_mtime()  # Initial mtime.
        return i

    def __setitem__(self, key, value):
        """Set the item's value for a standard field or a flexattr.
        """
        # Encode unicode paths and read buffers.
        if key == 'path':
            if isinstance(value, unicode):
                value = bytestring_path(value)
            elif isinstance(value, buffer):
                value = bytes(value)

        if key in MediaFile.fields():
            self.mtime = 0  # Reset mtime on dirty.

        super(Item, self).__setitem__(key, value)

    def update(self, values):
        """Set all key/value pairs in the mapping. If mtime is
        specified, it is not reset (as it might otherwise be).
        """
        super(Item, self).update(values)
        if self.mtime == 0 and 'mtime' in values:
            self.mtime = values['mtime']

    def get_album(self):
        """Get the Album object that this item belongs to, if any, or
        None if the item is a singleton or is not associated with a
        library.
        """
        if not self._db:
            return None
        return self._db.get_album(self)

    # Interaction with file metadata.

    def read(self, read_path=None):
        """Read the metadata from the associated file.

        If `read_path` is specified, read metadata from that file
        instead. Updates all the properties in `_media_fields`
        from the media file.

        Raises a `ReadError` if the file could not be read.
        """
        if read_path is None:
            read_path = self.path
        else:
            read_path = normpath(read_path)
        try:
            mediafile = MediaFile(syspath(read_path))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(read_path, exc)

        for key in self._media_fields:
            value = getattr(mediafile, key)
            if isinstance(value, (int, long)):
                # Clamp values that will not fit in SQLite's signed
                # 64-bit integer storage.
                if value.bit_length() > 63:
                    value = 0
            self[key] = value

        # Database's mtime should now reflect the on-disk value.
        if read_path == self.path:
            self.mtime = self.current_mtime()

        self.path = read_path

    def write(self, path=None, tags=None):
        """Write the item's metadata to a media file.

        All fields in `_media_fields` are written to disk according
        to the values on this object.

        `path` is the path of the mediafile to write the data to. It
        defaults to the item's path.

        `tags` is a dictionary of additional metadata the should be
        written to the file. (These tags need not be in `_media_fields`.)

        Can raise either a `ReadError` or a `WriteError`.
        """
        if path is None:
            path = self.path
        else:
            path = normpath(path)

        # Get the data to write to the file.
        item_tags = dict(self)
        item_tags = {k: v for k, v in item_tags.items()
                     if k in self._media_fields}  # Only write media fields.
        if tags is not None:
            item_tags.update(tags)
        plugins.send('write', item=self, path=path, tags=item_tags)

        # Open the file.
        try:
            mediafile = MediaFile(syspath(path),
                                  id3v23=beets.config['id3v23'].get(bool))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(self.path, exc)

        # Write the tags to the file.
        mediafile.update(item_tags)
        try:
            mediafile.save()
        except (OSError, IOError, MutagenError) as exc:
            raise WriteError(self.path, exc)

        # The file has a new mtime.
        if path == self.path:
            self.mtime = self.current_mtime()
        plugins.send('after_write', item=self, path=path)

    def try_write(self, path=None, tags=None):
        """Calls `write()` but catches and logs `FileOperationError`
        exceptions.

        Returns `False` an exception was caught and `True` otherwise.
        """
        try:
            self.write(path, tags)
            return True
        except FileOperationError as exc:
            log.error(u"{0}", exc)
            return False

    def try_sync(self, write, move, with_album=True):
        """Synchronize the item with the database and, possibly, updates its
        tags on disk and its path (by moving the file).

        `write` indicates whether to write new tags into the file.
        Similarly, `move` controls whether the path should be updated. In the
        latter case, files are *only* moved when they are inside their
        library's directory (if any).

        Similar to calling :meth:`write`, :meth:`move`, and :meth:`store`
        (conditionally).
        """
        if write:
            self.try_write()
        if move:
            # Check whether this file is inside the library directory.
            if self._db and self._db.directory in util.ancestry(self.path):
                log.debug(u'moving {0} to synchronize path',
                          util.displayable_path(self.path))
                self.move(with_album=with_album)
        self.store()

    # Files themselves.

    def move_file(self, dest, copy=False, link=False):
        """Moves or copies the item's file, updating the path value if
        the move succeeds. If a file exists at ``dest``, then it is
        slightly modified to be unique.
        """
        if not util.samefile(self.path, dest):
            dest = util.unique_path(dest)
        if copy:
            util.copy(self.path, dest)
            plugins.send("item_copied", item=self, source=self.path,
                         destination=dest)
        elif link:
            util.link(self.path, dest)
            plugins.send("item_linked", item=self, source=self.path,
                         destination=dest)
        else:
            plugins.send("before_item_moved", item=self, source=self.path,
                         destination=dest)
            util.move(self.path, dest)
            plugins.send("item_moved", item=self, source=self.path,
                         destination=dest)

        # Either copying or moving succeeded, so update the stored path.
        self.path = dest

    def current_mtime(self):
        """Returns the current mtime of the file, rounded to the nearest
        integer.
        """
        return int(os.path.getmtime(syspath(self.path)))

    def try_filesize(self):
        """Get the size of the underlying file in bytes.

        If the file is missing, return 0 (and log a warning).
        """
        try:
            return os.path.getsize(syspath(self.path))
        except (OSError, Exception) as exc:
            log.warning(u'could not get filesize: {0}', exc)
            return 0

    # Model methods.

    def remove(self, delete=False, with_album=True):
        """Removes the item. If `delete`, then the associated file is
        removed from disk. If `with_album`, then the item's album (if
        any) is removed if it the item was the last in the album.
        """
        super(Item, self).remove()

        # Remove the album if it is empty.
        if with_album:
            album = self.get_album()
            if album and not album.items():
                album.remove(delete, False)

        # Send a 'item_removed' signal to plugins
        plugins.send('item_removed', item=self)

        # Delete the associated file.
        if delete:
            util.remove(self.path)
            util.prune_dirs(os.path.dirname(self.path), self._db.directory)

        self._db._memotable = {}

    def move(self, copy=False, link=False, basedir=None, with_album=True):
        """Move the item to its designated location within the library
        directory (provided by destination()). Subdirectories are
        created as needed. If the operation succeeds, the item's path
        field is updated to reflect the new location.

        If `copy` is true, moving the file is copied rather than moved.
        Similarly, `link` creates a symlink instead.

        basedir overrides the library base directory for the
        destination.

        If the item is in an album, the album is given an opportunity to
        move its art. (This can be disabled by passing with_album=False.)

        The item is stored to the database if it is in the database, so
        any dirty fields prior to the move() call will be written as a
        side effect. You probably want to call save() to commit the DB
        transaction.
        """
        self._check_db()
        dest = self.destination(basedir=basedir)

        # Create necessary ancestry for the move.
        util.mkdirall(dest)

        # Perform the move and store the change.
        old_path = self.path
        self.move_file(dest, copy, link)
        self.store()

        # If this item is in an album, move its art.
        if with_album:
            album = self.get_album()
            if album:
                album.move_art(copy)
                album.store()

        # Prune vacated directory.
        if not copy:
            util.prune_dirs(os.path.dirname(old_path), self._db.directory)

    # Templating.

    def destination(self, fragment=False, basedir=None, platform=None,
                    path_formats=None):
        """Returns the path in the library directory designated for the
        item (i.e., where the file ought to be). fragment makes this
        method return just the path fragment underneath the root library
        directory; the path is also returned as Unicode instead of
        encoded as a bytestring. basedir can override the library's base
        directory for the destination.
        """
        self._check_db()
        platform = platform or sys.platform
        basedir = basedir or self._db.directory
        path_formats = path_formats or self._db.path_formats

        # Use a path format based on a query, falling back on the
        # default.
        for query, path_format in path_formats:
            if query == PF_KEY_DEFAULT:
                continue
            query, _ = parse_query_string(query, type(self))
            if query.match(self):
                # The query matches the item! Use the corresponding path
                # format.
                break
        else:
            # No query matched; fall back to default.
            for query, path_format in path_formats:
                if query == PF_KEY_DEFAULT:
                    break
            else:
                assert False, u"no default path format"
        if isinstance(path_format, Template):
            subpath_tmpl = path_format
        else:
            subpath_tmpl = Template(path_format)

        # Evaluate the selected template.
        subpath = self.evaluate_template(subpath_tmpl, True)

        # Prepare path for output: normalize Unicode characters.
        if platform == 'darwin':
            subpath = unicodedata.normalize('NFD', subpath)
        else:
            subpath = unicodedata.normalize('NFC', subpath)

        if beets.config['asciify_paths']:
            subpath = unidecode(subpath)

        maxlen = beets.config['max_filename_length'].get(int)
        if not maxlen:
            # When zero, try to determine from filesystem.
            maxlen = util.max_filename_length(self._db.directory)

        subpath, fellback = util.legalize_path(
            subpath, self._db.replacements, maxlen,
            os.path.splitext(self.path)[1], fragment
        )
        if fellback:
            # Print an error message if legalization fell back to
            # default replacements because of the maximum length.
            log.warning(
                u'Fell back to default replacements when naming '
                u'file {}. Configure replacements to avoid lengthening '
                u'the filename.',
                subpath
            )

        if fragment:
            return subpath
        else:
            return normpath(os.path.join(basedir, subpath))
def test_fields_in_readable_fields(self):
    """Every entry of `MediaFile.fields()` must also be readable."""
    readable_fields = MediaFile.readable_fields()
    for writable_field in MediaFile.fields():
        self.assertIn(writable_field, readable_fields)
def test_known_fields(self):
    """All expected tag fields are reported by `MediaFile.fields()`."""
    fields = list(ReadWriteTestBase.tag_fields)
    for extra in ('encoder', 'images', 'genres', 'albumtype'):
        fields.append(extra)
    self.assertItemsEqual(MediaFile.fields(), fields)
def test_properties_from_fields(self):
    """Each declared field is exposed as an attribute on an open file."""
    mediafile = MediaFile(os.path.join(_common.RSRC, 'full.mp3'))
    for field_name in MediaFile.fields():
        self.assertTrue(hasattr(mediafile, field_name))
# MPD-compatible protocol error codes.
ERROR_PLAYER_SYNC = 55
ERROR_EXIST = 56

# Volume range accepted by the protocol.
VOLUME_MIN = 0
VOLUME_MAX = 100

SAFE_COMMANDS = (
    # Commands that are available when unauthenticated.
    u'close', u'commands', u'notcommands', u'password', u'ping',
)

# Fixed Item fields that are also writable MediaFile tag fields.
ITEM_KEYS_WRITABLE = set(MediaFile.fields()).intersection(Item._fields.keys())


# Loggers.
log = logging.getLogger('beets.bpd')
global_log = logging.getLogger('beets')


# Gstreamer import error.
class NoGstreamerError(Exception):
    pass


# Error-handling, exceptions, parameter parsing.
class BPDError(Exception):
def test_properties_from_fields(self):
    """Opening a real file yields an attribute for every field name."""
    full_path = os.path.join(_common.RSRC, 'full.mp3')
    opened = MediaFile(full_path)
    for name in MediaFile.fields():
        self.assertTrue(hasattr(opened, name))
def test_fields_in_readable_fields(self):
    """The writable field names form a subset of the readable ones."""
    all_readable = MediaFile.readable_fields()
    for name in MediaFile.fields():
        self.assertIn(name, all_readable)
# MPD-compatible protocol error codes.
ERROR_PLAYLIST_MAX = 51
ERROR_SYSTEM = 52
ERROR_PLAYLIST_LOAD = 53
ERROR_UPDATE_ALREADY = 54
ERROR_PLAYER_SYNC = 55
ERROR_EXIST = 56

# Volume range accepted by the protocol.
VOLUME_MIN = 0
VOLUME_MAX = 100

SAFE_COMMANDS = (
    # Commands that are available when unauthenticated.
    u'close', u'commands', u'notcommands', u'password', u'ping',
)

# Item keys that are also writable MediaFile tag fields.
ITEM_KEYS_WRITABLE = set(MediaFile.fields()).intersection(ITEM_KEYS)


# Loggers.
log = logging.getLogger('beets.bpd')
global_log = logging.getLogger('beets')


# Gstreamer import error.
class NoGstreamerError(Exception):
    pass


# Error-handling, exceptions, parameter parsing.
class BPDError(Exception):
    """An error that should be exposed to the client to the BPD server.