def __init__(self, path='library.blb',
             directory='~/Music',
             path_formats=((PF_KEY_DEFAULT,
                            '$artist/$album/$track $title'),),
             art_filename='cover',
             timeout=5.0,
             replacements=None,
             item_fields=ITEM_FIELDS,
             album_fields=ALBUM_FIELDS):
    if path == ':memory:':
        self.path = path
    else:
        self.path = bytestring_path(normpath(path))
    self.directory = bytestring_path(normpath(directory))
    self.path_formats = path_formats
    self.art_filename = bytestring_path(art_filename)
    self.replacements = replacements

    self.timeout = timeout
    self.conn = sqlite3.connect(self.path, timeout)
    self.conn.row_factory = sqlite3.Row
    # This way we can access our SELECT results like dictionaries.

    self._make_table('items', item_fields)
    self._make_table('albums', album_fields)
def configure(self, config):
    wl_filename = ui.config_val(config, 'lastgenre', 'whitelist', None)
    if not wl_filename:
        # No filename specified. Instead, use the whitelist that's included
        # with the plugin (inside the package).
        wl_filename = DEFAULT_WHITELIST
    wl_filename = normpath(wl_filename)

    # Read the whitelist file.
    whitelist = set()
    with open(wl_filename) as f:
        for line in f:
            line = line.decode('utf8').strip().lower()
            if line:
                whitelist.add(line)
    options['whitelist'] = whitelist

    # Read the genres tree for canonicalization if enabled.
    c14n_filename = ui.config_val(config, 'lastgenre', 'canonical', None)
    if c14n_filename is not None:
        c14n_filename = c14n_filename.strip()
        if not c14n_filename:
            c14n_filename = C14N_TREE
        c14n_filename = normpath(c14n_filename)

        from yaml import load
        genres_tree = load(open(c14n_filename, 'r'))
        branches = []
        flatten_tree(genres_tree, [], branches)
        options['branches'] = branches
        options['c14n'] = True
def __init__(self, path='library.blb',
             directory='~/Music',
             path_formats=None,
             art_filename='cover',
             timeout=5.0,
             item_fields=ITEM_FIELDS,
             album_fields=ALBUM_FIELDS):
    if path == ':memory:':
        self.path = path
    else:
        self.path = bytestring_path(normpath(path))
    self.directory = bytestring_path(normpath(directory))
    if path_formats is None:
        path_formats = {'default': '$artist/$album/$track $title'}
    elif isinstance(path_formats, basestring):
        path_formats = {'default': path_formats}
    self.path_formats = path_formats
    self.art_filename = bytestring_path(art_filename)

    self.timeout = timeout
    self.conn = sqlite3.connect(self.path, timeout)
    self.conn.row_factory = sqlite3.Row
    # This way we can access our SELECT results like dictionaries.

    self._make_table('items', item_fields)
    self._make_table('albums', album_fields)
def setup(self): """Setup plugin from config options """ if self.config['auto']: self.import_stages = [self.imported] self._genre_cache = {} # Read the whitelist file if enabled. self.whitelist = set() wl_filename = self.config['whitelist'].get() if wl_filename in (True, ''): # Indicates the default whitelist. wl_filename = WHITELIST if wl_filename: wl_filename = normpath(wl_filename) with open(wl_filename, b'r') as f: for line in f: line = line.decode('utf8').strip().lower() if line and not line.startswith(u'#'): self.whitelist.add(line) # Read the genres tree for canonicalization if enabled. self.c14n_branches = [] c14n_filename = self.config['canonical'].get() if c14n_filename in (True, ''): # Default tree. c14n_filename = C14N_TREE if c14n_filename: c14n_filename = normpath(c14n_filename) genres_tree = yaml.load(open(c14n_filename, 'r')) flatten_tree(genres_tree, [], self.c14n_branches)
def update_playlists(lib):
    ui.print_("Updating smart playlists...")
    playlists = config['smartplaylist']['playlists'].get(list)
    playlist_dir = config['smartplaylist']['playlist_dir'].as_filename()
    relative_to = config['smartplaylist']['relative_to'].get()
    if relative_to:
        relative_to = normpath(relative_to)

    for playlist in playlists:
        items = []
        items.extend(_items_for_query(lib, playlist, True))
        items.extend(_items_for_query(lib, playlist, False))

        m3us = {}
        basename = playlist['name'].encode('utf8')
        # As we allow tags in the m3u names, we'll need to iterate through
        # the items and generate the correct m3u file names.
        for item in items:
            m3u_name = item.evaluate_template(basename, True)
            if m3u_name not in m3us:
                m3us[m3u_name] = []
            item_path = item.path
            if relative_to:
                item_path = os.path.relpath(item.path, relative_to)
            if item_path not in m3us[m3u_name]:
                m3us[m3u_name].append(item_path)

        # Now iterate through the m3us that we need to generate.
        for m3u in m3us:
            m3u_path = normpath(os.path.join(playlist_dir, m3u))
            with open(syspath(m3u_path), 'w') as f:
                for path in m3us[m3u]:
                    f.write(path + '\n')

    ui.print_("... Done")
def setUp(self):
    super(PathQueryTest, self).setUp()

    # This is the item we'll try to match.
    self.i.path = util.normpath('/a/b/c.mp3')
    self.i.title = u'path item'
    self.i.album = u'path album'
    self.i.store()
    self.lib.add_album([self.i])

    # A second item for testing exclusion.
    i2 = _common.item()
    i2.path = util.normpath('/x/y/z.mp3')
    i2.title = 'another item'
    i2.album = 'another album'
    self.lib.add(i2)
    self.lib.add_album([i2])

    # Unadorned path queries with path separators in them are considered
    # path queries only when the path in question actually exists. So we
    # mock the existence check to return true.
    self.patcher_exists = patch('beets.library.os.path.exists')
    self.patcher_exists.start().return_value = True

    # We have to create the samefile function as it does not exist on
    # Windows and Python 2.7.
    self.patcher_samefile = patch('beets.library.os.path.samefile',
                                  create=True)
    self.patcher_samefile.start().return_value = True
def setup(self): """Setup plugin from config options """ if self.config["auto"]: self.import_stages = [self.imported] self._genre_cache = {} # Read the whitelist file if enabled. self.whitelist = set() wl_filename = self.config["whitelist"].get() if wl_filename in (True, ""): # Indicates the default whitelist. wl_filename = WHITELIST if wl_filename: wl_filename = normpath(wl_filename) with open(wl_filename, "rb") as f: for line in f: line = line.decode("utf8").strip().lower() if line and not line.startswith(u"#"): self.whitelist.add(line) # Read the genres tree for canonicalization if enabled. self.c14n_branches = [] c14n_filename = self.config["canonical"].get() if c14n_filename in (True, ""): # Default tree. c14n_filename = C14N_TREE if c14n_filename: c14n_filename = normpath(c14n_filename) genres_file = codecs.open(c14n_filename, "r", encoding="utf-8") genres_tree = yaml.load(genres_file) flatten_tree(genres_tree, [], self.c14n_branches)
def assert_equal_path(self, a, b):
    """Check that two paths are equal."""
    # The common case.
    if a == b:
        return

    self.assertEqual(util.normpath(a), util.normpath(b),
                     u'paths are not equal: {!r} and {!r}'.format(a, b))
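# Illustration (not part of the original test helper): the fallback comparison
# above succeeds when two spellings of a path normalize to the same canonical
# form. beets' util.normpath also expands '~' and returns bytes; this sketch
# only shows the standard-library part of that idea.
import os.path

a = '/a/b/../b/c.mp3'
b = '/a/b/c.mp3'
assert a != b                                       # raw strings differ
assert os.path.normpath(a) == os.path.normpath(b)   # canonical forms agree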
def __init__(self, path='library.blb',
             directory='~/Music',
             path_formats=((PF_KEY_DEFAULT,
                            '$artist/$album/$track $title'),),
             replacements=None):
    if path != ':memory:':
        self.path = bytestring_path(normpath(path))
    super(Library, self).__init__(path)

    self.directory = bytestring_path(normpath(directory))
    self.path_formats = path_formats
    self.replacements = replacements

    self._memotable = {}  # Used for template substitution performance.
def extract_func(lib, opts, args):
    if opts.outpath:
        art.extract_first(self._log, normpath(opts.outpath),
                          lib.items(decargs(args)))
    else:
        filename = bytestring_path(opts.filename or
                                   config["art_filename"].get())
        if os.path.dirname(filename) != "":
            self._log.error(u"Only specify a name rather than a path for -n")
            return
        for album in lib.albums(decargs(args)):
            artpath = normpath(os.path.join(album.path, filename))
            artpath = art.extract_first(self._log, artpath, album.items())
            if artpath and opts.associate:
                album.set_art(artpath)
                album.store()
def __init__(self, field, pattern, fast=True):
    super(PathQuery, self).__init__(field, pattern, fast)

    # Match the path as a single file.
    self.file_path = util.bytestring_path(util.normpath(pattern))
    # As a directory (prefix).
    self.dir_path = util.bytestring_path(os.path.join(self.file_path, ''))
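# Illustration (an assumption about intent, not beets' actual matching code):
# joining with an empty component guarantees a trailing separator, so a prefix
# comparison against dir_path cannot confuse '/a/bc' with the directory '/a/b'.
import os

dir_prefix = os.path.join('/a/b', '').replace(os.sep, '/')   # '/a/b/'
assert '/a/b/c.mp3'.startswith(dir_prefix)
assert not '/a/bc.mp3'.startswith(dir_prefix)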
def info(paths):
    # Set up fields to output.
    fields = []
    for name, _, _, mffield in library.ITEM_FIELDS:
        if mffield:
            fields.append(name)

    # Line format. (Use explicit field numbers; mixing automatic and manual
    # numbering in one format string raises a ValueError.)
    other_fields = ['album art']
    maxwidth = max(len(name) for name in fields + other_fields)
    lineformat = u'{{0:>{0}}}: {{1}}'.format(maxwidth)

    first = True
    for path in paths:
        if not first:
            ui.print_()

        path = util.normpath(path)
        ui.print_(path)
        try:
            mf = mediafile.MediaFile(path)
        except mediafile.UnreadableFileError:
            ui.print_('cannot read file')
            continue

        # Basic fields.
        for name in fields:
            ui.print_(lineformat.format(name, getattr(mf, name)))
        # Extra stuff.
        ui.print_(lineformat.format('album art', mf.art is not None))

        first = False
def write(self, path=None): """Write the item's metadata to a media file. ``path`` defaults to the item's path property. Can raise either a `ReadError` or a `WriteError`. """ if path is None: path = self.path else: path = normpath(path) try: f = MediaFile(syspath(path)) except (OSError, IOError) as exc: raise ReadError(self.path, exc) plugins.send('write', item=self, path=path) for key in ITEM_KEYS_WRITABLE: setattr(f, key, self[key]) try: f.save(id3v23=beets.config['id3v23'].get(bool)) except (OSError, IOError, MutagenError) as exc: raise WriteError(self.path, exc) # The file has a new mtime. self.mtime = self.current_mtime() plugins.send('after_write', item=self)
def noimport_files(self, lib, paths):
    # Check the user-specified directories.
    for path in paths:
        if not os.path.exists(syspath(normpath(path))):
            raise ui.UserError(u'no such file or directory: {0}'.format(
                displayable_path(path)))

    # Open the state file.
    state = importer._open_state()
    # Create the 'taghistory' set if it doesn't exist.
    if 'taghistory' not in state:
        state['taghistory'] = set()

    # For every path...
    for path in paths:
        added = 0
        # ...get the list of albums in that path...
        for dirs, paths_in_dir in importer.albums_in_dir(path):
            # ...check if they're not already in the 'taghistory' set...
            if tuple(dirs) not in state['taghistory']:
                # ...and add them.
                state['taghistory'].add(tuple(map(normpath, dirs)))
                added += 1

    # Save the state file.
    importer._save_state(state)
    log.info(u'Added {0} paths to the skip list', added)
def read(self, read_path=None): """Read the metadata from the associated file. If `read_path` is specified, read metadata from that file instead. Updates all the properties in `_media_fields` from the media file. Raises a `ReadError` if the file could not be read. """ if read_path is None: read_path = self.path else: read_path = normpath(read_path) try: mediafile = MediaFile(syspath(read_path)) except (OSError, IOError, UnreadableFileError) as exc: raise ReadError(read_path, exc) for key in self._media_fields: value = getattr(mediafile, key) if isinstance(value, (int, long)): if value.bit_length() > 63: value = 0 self[key] = value # Database's mtime should now reflect the on-disk value. if read_path == self.path: self.mtime = self.current_mtime() self.path = read_path
def __init__(self):
    super(ImportFeedsPlugin, self).__init__()

    self.config.add({
        'formats': [],
        'm3u_name': u'imported.m3u',
        'dir': None,
        'relative_to': None,
        'absolute_path': False,
    })

    feeds_dir = self.config['dir'].get()
    if feeds_dir:
        feeds_dir = os.path.expanduser(bytestring_path(feeds_dir))
        self.config['dir'] = feeds_dir
        if not os.path.exists(syspath(feeds_dir)):
            os.makedirs(syspath(feeds_dir))

    relative_to = self.config['relative_to'].get()
    if relative_to:
        self.config['relative_to'] = normpath(relative_to)
    else:
        self.config['relative_to'] = feeds_dir

    self.register_listener('library_opened', self.library_opened)
    self.register_listener('album_imported', self.album_imported)
    self.register_listener('item_imported', self.item_imported)
def read(self, read_path=None): """Read the metadata from the associated file. If `read_path` is specified, read metadata from that file instead. Updates all the properties in `_media_fields` from the media file. Raises a `ReadError` if the file could not be read. """ if read_path is None: read_path = self.path else: read_path = normpath(read_path) try: mediafile = MediaFile(syspath(read_path)) except (OSError, IOError, UnreadableFileError) as exc: raise ReadError(read_path, exc) for key in self._media_fields: value = getattr(mediafile, key) if isinstance(value, (int, long)): # Filter values wider than 64 bits (in signed representation). # SQLite cannot store them. py26: Post transition, we can use: # value.bit_length() > 63 if abs(value) >= 2 ** 63: value = 0 self[key] = value # Database's mtime should now reflect the on-disk value. if read_path == self.path: self.mtime = self.current_mtime() self.path = read_path
def write(self, path=None): """Write the item's metadata to a media file. All fields in `_media_fields` are written to disk according to the values on this object. Can raise either a `ReadError` or a `WriteError`. """ if path is None: path = self.path else: path = normpath(path) tags = dict(self) plugins.send('write', item=self, path=path, tags=tags) try: mediafile = MediaFile(syspath(path), id3v23=beets.config['id3v23'].get(bool)) except (OSError, IOError, UnreadableFileError) as exc: raise ReadError(self.path, exc) mediafile.update(tags) try: mediafile.save() except (OSError, IOError, MutagenError) as exc: raise WriteError(self.path, exc) # The file has a new mtime. if path == self.path: self.mtime = self.current_mtime() plugins.send('after_write', item=self, path=path)
def embed_func(lib, opts, args):
    if opts.file:
        imagepath = normpath(opts.file)
        if not os.path.isfile(syspath(imagepath)):
            raise ui.UserError(u'image file {0} not found'.format(
                displayable_path(imagepath)
            ))

        items = lib.items(decargs(args))

        # Confirm with user.
        if not opts.yes and not _confirm(items, not opts.file):
            return

        for item in items:
            art.embed_item(self._log, item, imagepath, maxwidth, None,
                           compare_threshold, ifempty)
    else:
        albums = lib.albums(decargs(args))

        # Confirm with user.
        if not opts.yes and not _confirm(albums, not opts.file):
            return

        for album in albums:
            art.embed_album(self._log, album, maxwidth, False,
                            compare_threshold, ifempty)
            self.remove_artfile(album)
def __init__(self):
    super(LastGenrePlugin, self).__init__()
    self.import_stages = [self.imported]

    self.config.add({
        'whitelist': os.path.join(os.path.dirname(__file__), 'genres.txt'),
        'fallback': None,
        'canonical': None,
    })

    # Read the whitelist file.
    wl_filename = self.config['whitelist'].as_filename()
    whitelist = set()
    with open(wl_filename) as f:
        for line in f:
            line = line.decode('utf8').strip().lower()
            if line:
                whitelist.add(line)
    options['whitelist'] = whitelist

    # Read the genres tree for canonicalization if enabled.
    c14n_filename = self.config['canonical'].get()
    if c14n_filename is not None:
        c14n_filename = c14n_filename.strip()
        if not c14n_filename:
            c14n_filename = C14N_TREE
        c14n_filename = normpath(c14n_filename)

        genres_tree = yaml.load(open(c14n_filename, 'r'))
        branches = []
        flatten_tree(genres_tree, [], branches)
        options['branches'] = branches
        options['c14n'] = True
def write(self, path=None): """Write the item's metadata to a media file. Updates the mediafile with properties from itself. Can raise either a `ReadError` or a `WriteError`. """ if path is None: path = self.path else: path = normpath(path) try: mediafile = MediaFile(path) except (OSError, IOError) as exc: raise ReadError(self.path, exc) plugins.send('write', item=self, path=path) try: mediafile.update(self, id3v23=beets.config['id3v23'].get(bool)) except (OSError, IOError, MutagenError) as exc: raise WriteError(self.path, exc) # The file has a new mtime. if path == self.path: self.mtime = self.current_mtime() plugins.send('after_write', item=self, path=path)
def read(self, read_path=None): """Read the metadata from the associated file. If read_path is specified, read metadata from that file instead. Raises a `ReadError` if the file could not be read. """ if read_path is None: read_path = self.path else: read_path = normpath(read_path) try: f = MediaFile(syspath(read_path)) except (OSError, IOError) as exc: raise ReadError(read_path, exc) for key in ITEM_KEYS_META: value = getattr(f, key) if isinstance(value, (int, long)): # Filter values wider than 64 bits (in signed # representation). SQLite cannot store them. # py26: Post transition, we can use: # value.bit_length() > 63 if abs(value) >= 2 ** 63: value = 0 setattr(self, key, value) # Database's mtime should now reflect the on-disk value. if read_path == self.path: self.mtime = self.current_mtime() self.path = read_path
def update_playlists(self, lib): self._log.info(u"Updating {0} smart playlists...", len(self._matched_playlists)) playlist_dir = self.config['playlist_dir'].as_filename() playlist_dir = bytestring_path(playlist_dir) relative_to = self.config['relative_to'].get() if relative_to: relative_to = normpath(relative_to) # Maps playlist filenames to lists of track filenames. m3us = {} for playlist in self._matched_playlists: name, (query, q_sort), (album_query, a_q_sort) = playlist self._log.debug(u"Creating playlist {0}", name) items = [] if query: items.extend(lib.items(query, q_sort)) if album_query: for album in lib.albums(album_query, a_q_sort): items.extend(album.items()) # As we allow tags in the m3u names, we'll need to iterate through # the items and generate the correct m3u file names. for item in items: m3u_name = item.evaluate_template(name, True) m3u_name = sanitize_path(m3u_name, lib.replacements) if m3u_name not in m3us: m3us[m3u_name] = [] item_path = item.path if relative_to: item_path = os.path.relpath(item.path, relative_to) if item_path not in m3us[m3u_name]: m3us[m3u_name].append(item_path) # Write all of the accumulated track lists to files. for m3u in m3us: m3u_path = normpath(os.path.join(playlist_dir, bytestring_path(m3u))) mkdirall(m3u_path) with open(syspath(m3u_path), 'wb') as f: for path in m3us[m3u]: f.write(path + b'\n') self._log.info(u"{0} playlists updated", len(self._matched_playlists))
def move_func(lib, config, opts, args):
    dest = opts.dest
    if dest is not None:
        dest = normpath(dest)
        if not os.path.isdir(dest):
            raise ui.UserError('no such directory: %s' % dest)

    move_items(lib, dest, decargs(args), opts.copy, opts.album)
def _build_m3u_filename(basename): """Builds unique m3u filename by appending given basename to current date.""" basename = re.sub(r"[\s,'\"]", '_', basename) date = datetime.datetime.now().strftime("%Y%m%d_%Hh%M") path = normpath(os.path.join(_feeds_dir, date+'_'+basename+'.m3u')) return path
def embed_func(lib, opts, args):
    if opts.file:
        imagepath = normpath(opts.file)
        for item in lib.items(decargs(args)):
            embed_item(item, imagepath, maxwidth)
    else:
        for album in lib.albums(decargs(args)):
            embed_album(album, maxwidth)
def is_path_query(cls, query_part):
    """Try to guess whether a unicode query part is a path query.

    Condition: separator precedes colon and the file exists.
    """
    colon = query_part.find(':')
    if colon != -1:
        query_part = query_part[:colon]
    return (os.sep in query_part and
            os.path.exists(syspath(normpath(query_part))))
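# Illustration (a standalone sketch, not the classmethod above): a bare query
# term is treated as a path query only when a path separator appears before any
# colon and the named path actually exists on disk.
import os

def looks_like_path(query_part):
    colon = query_part.find(':')
    if colon != -1:
        query_part = query_part[:colon]
    return os.sep in query_part and os.path.exists(query_part)

print(looks_like_path('/tmp'))            # True on most POSIX systems
print(looks_like_path('artist:Beatles'))  # False: no separator before the colon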
def export_func(lib, config, opts, args):
    dest = opts.dest
    if dest is None:
        raise ui.UserError('the destination directory must be specified')

    dest = normpath(dest)
    if not os.path.isdir(dest):
        raise ui.UserError('no such directory: %s' % dest)

    export_items(lib, dest, decargs(args), opts.format, opts.album)
def embed_func(lib, opts, args): if opts.file: imagepath = normpath(opts.file) if not os.path.isfile(syspath(imagepath)): raise ui.UserError(u"image file {0} not found".format(displayable_path(imagepath))) for item in lib.items(decargs(args)): art.embed_item(self._log, item, imagepath, maxwidth, None, compare_threshold, ifempty) else: for album in lib.albums(decargs(args)): art.embed_album(self._log, album, maxwidth, False, compare_threshold, ifempty) self.remove_artfile(album)
def test_get_item(self):
    item_path = util.normpath('/foo/bar.flac')
    item = Item(title=u'title', path=item_path, id=1)
    item.add(self.lib)

    log = Mock()
    mpdstats = MPDStats(self.lib, log)

    self.assertEqual(str(mpdstats.get_item(item_path)), str(item))
    self.assertIsNone(mpdstats.get_item('/some/non-existing/path'))
    self.assertIn(u'item not found:', log.info.call_args[0][0])
def extract_func(lib, opts, args):
    outpath = normpath(opts.outpath or config['art_filename'].get())
    for item in lib.items(decargs(args)):
        if self.extract(outpath, item):
            return
def test_link_changes_path(self):
    self.i.move(link=True)
    self.assertEqual(self.i.path, util.normpath(self.dest))
def test_get_destination_path_for_training(self):
    tmpdir = self.create_temp_dir()
    tmpdir_slashed = "{}/".format(tmpdir)
    temp_sub_dir = os.path.join(tmpdir, "music")
    os.mkdir(temp_sub_dir)

    cfg = {
        "targets": {
            "MPD-no-device-root": {
                "alias": "I have no device_root",
                "device_path": "music",
            },
            "MPD-non-existent": {
                "device_root": "/this/does/not/exist/i/hope",
                "device_path": "music",
            },
            "MPD1": {"device_root": tmpdir, "device_path": "music"},
            "MPD2": {"device_root": tmpdir_slashed, "device_path": "music"},
            "MPD3": {"device_root": tmpdir, "device_path": "/music"},
            "MPD4": {"device_root": tmpdir_slashed, "device_path": "/music"},
            "MPD5": {"device_root": tmpdir_slashed, "device_path": "/music/"},
        },
        "trainings": {
            "T0-no-target": {"alias": "I have no target"},
            "T0-no-device-root": {"target": "MPD-no-device-root"},
            "T0-non-existent": {"target": "MPD-non-existent"},
            "T1": {"target": "MPD1"},
            "T2": {"target": "MPD2"},
            "T3": {"target": "MPD3"},
            "T4": {"target": "MPD4"},
            "T5": {"target": "MPD5"},
        },
    }
    config = get_plugin_configuration(cfg)

    # No target.
    training = config["trainings"]["T0-no-target"]
    path = common.get_destination_path_for_training(training)
    self.assertIsNone(path)

    # No device_root in target.
    training = config["trainings"]["T0-no-device-root"]
    path = common.get_destination_path_for_training(training)
    self.assertIsNone(path)

    # Non-existent device_root in target.
    training = config["trainings"]["T0-non-existent"]
    path = common.get_destination_path_for_training(training)
    self.assertIsNone(path)

    # No separators between root and path.
    training = config["trainings"]["T1"]
    expected = os.path.realpath(
        util.normpath(os.path.join(tmpdir, "music")).decode())
    path = common.get_destination_path_for_training(training)
    self.assertEqual(expected, path)

    # Final slash on device_root.
    training = config["trainings"]["T2"]
    expected = os.path.realpath(
        util.normpath(os.path.join(tmpdir, "music")).decode())
    path = common.get_destination_path_for_training(training)
    self.assertEqual(expected, path)

    # Leading slash on device_path.
    training = config["trainings"]["T3"]
    expected = os.path.realpath(
        util.normpath(os.path.join(tmpdir, "music")).decode())
    path = common.get_destination_path_for_training(training)
    self.assertEqual(expected, path)

    # Final slash on device_root and leading slash on device_path.
    training = config["trainings"]["T4"]
    expected = os.path.realpath(
        util.normpath(os.path.join(tmpdir, "music")).decode())
    path = common.get_destination_path_for_training(training)
    self.assertEqual(expected, path)

    # Slashes all over.
    training = config["trainings"]["T5"]
    expected = os.path.realpath(
        util.normpath(os.path.join(tmpdir, "music")).decode())
    path = common.get_destination_path_for_training(training)
    self.assertEqual(expected, path)
def parse(self, string):
    return normpath(bytestring_path(string))
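# Illustration (assumes beets is importable; the exact normalization rules live
# in beets.util): parse() stores user input in the canonical on-disk form used
# throughout the library, i.e. a normalized, absolute bytes path.
from beets.util import bytestring_path, normpath

print(normpath(bytestring_path(u'~/Music/../Music/song.mp3')))
# Expected to resolve to something like b'/home/<user>/Music/song.mp3'.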
def extract_func(lib, opts, args):
    outpath = normpath(opts.outpath or 'cover')
    extract(lib, outpath, decargs(args))
def destination(self, fragment=False, basedir=None, platform=None,
                path_formats=None):
    """Returns the path in the library directory designated for the
    item (i.e., where the file ought to be).

    fragment makes this method return just the path fragment
    underneath the root library directory; the path is also returned
    as Unicode instead of encoded as a bytestring.

    basedir can override the library's base directory for the
    destination.
    """
    self._check_db()
    platform = platform or sys.platform
    basedir = basedir or self._db.directory
    path_formats = path_formats or self._db.path_formats

    # Use a path format based on a query, falling back on the
    # default.
    for query, path_format in path_formats:
        if query == PF_KEY_DEFAULT:
            continue
        query, _ = parse_query_string(query, type(self))
        if query.match(self):
            # The query matches the item! Use the corresponding path
            # format.
            break
    else:
        # No query matched; fall back to default.
        for query, path_format in path_formats:
            if query == PF_KEY_DEFAULT:
                break
        else:
            assert False, "no default path format"
    if isinstance(path_format, Template):
        subpath_tmpl = path_format
    else:
        subpath_tmpl = Template(path_format)

    # Evaluate the selected template.
    subpath = self.evaluate_template(subpath_tmpl, True)

    # Prepare path for output: normalize Unicode characters.
    if platform == 'darwin':
        subpath = unicodedata.normalize('NFD', subpath)
    else:
        subpath = unicodedata.normalize('NFC', subpath)

    if beets.config['asciify_paths']:
        subpath = unidecode(subpath)

    # Truncate components and remove forbidden characters.
    subpath = util.sanitize_path(subpath, self._db.replacements)

    # Encode for the filesystem.
    if not fragment:
        subpath = bytestring_path(subpath)

    # Preserve extension.
    _, extension = os.path.splitext(self.path)
    if fragment:
        # Outputting Unicode.
        extension = extension.decode('utf8', 'ignore')
    subpath += extension.lower()

    # Truncate too-long components.
    maxlen = beets.config['max_filename_length'].get(int)
    if not maxlen:
        # When zero, try to determine from filesystem.
        maxlen = util.max_filename_length(self._db.directory)
    subpath = util.truncate_path(subpath, maxlen)

    if fragment:
        return subpath
    else:
        return normpath(os.path.join(basedir, subpath))
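# Illustration of the path_formats structure destination() iterates over: a
# sequence of (query, template) pairs, where the entry keyed by PF_KEY_DEFAULT
# (the string 'default' in beets) is the fallback used when no query matches.
# The queries and templates below are made up for the example.
path_formats = [
    (u'albumtype:soundtrack', u'Soundtracks/$album/$track $title'),
    (u'comp:true', u'Compilations/$album/$track $title'),
    (u'default', u'$albumartist/$album/$track $title'),
]
# destination() scans the non-default pairs first and only falls back to the
# 'default' template when none of their queries match the item.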