Code example #1
class Item(LibModel):
    _table = 'items'
    _flex_table = 'item_attributes'
    _fields = {
        'id': types.PRIMARY_ID,
        'path': PathType(),
        'album_id': types.FOREIGN_ID,
        'title': types.STRING,
        'artist': types.STRING,
        'artist_sort': types.STRING,
        'artist_credit': types.STRING,
        'album': types.STRING,
        'albumartist': types.STRING,
        'albumartist_sort': types.STRING,
        'albumartist_credit': types.STRING,
        'genre': types.STRING,
        'composer': types.STRING,
        'grouping': types.STRING,
        'year': types.PaddedInt(4),
        'month': types.PaddedInt(2),
        'day': types.PaddedInt(2),
        'track': types.PaddedInt(2),
        'tracktotal': types.PaddedInt(2),
        'disc': types.PaddedInt(2),
        'disctotal': types.PaddedInt(2),
        'lyrics': types.STRING,
        'comments': types.STRING,
        'bpm': types.INTEGER,
        'comp': types.BOOLEAN,
        'mb_trackid': types.STRING,
        'mb_albumid': types.STRING,
        'mb_artistid': types.STRING,
        'mb_albumartistid': types.STRING,
        'albumtype': types.STRING,
        'label': types.STRING,
        'acoustid_fingerprint': types.STRING,
        'acoustid_id': types.STRING,
        'mb_releasegroupid': types.STRING,
        'asin': types.STRING,
        'catalognum': types.STRING,
        'script': types.STRING,
        'language': types.STRING,
        'country': types.STRING,
        'albumstatus': types.STRING,
        'media': types.STRING,
        'albumdisambig': types.STRING,
        'disctitle': types.STRING,
        'encoder': types.STRING,
        'rg_track_gain': types.NULL_FLOAT,
        'rg_track_peak': types.NULL_FLOAT,
        'rg_album_gain': types.NULL_FLOAT,
        'rg_album_peak': types.NULL_FLOAT,
        'original_year': types.PaddedInt(4),
        'original_month': types.PaddedInt(2),
        'original_day': types.PaddedInt(2),
        'initial_key': MusicalKey(),
        'length': DurationType(),
        'bitrate': types.ScaledInt(1000, u'kbps'),
        'format': types.STRING,
        'samplerate': types.ScaledInt(1000, u'kHz'),
        'bitdepth': types.INTEGER,
        'channels': types.INTEGER,
        'mtime': DateType(),
        'added': DateType(),
    }

    _search_fields = ('artist', 'title', 'comments', 'album', 'albumartist',
                      'genre')

    _types = {
        'data_source': types.STRING,
    }

    _media_fields = set(MediaFile.readable_fields()) \
        .intersection(_fields.keys())
    """Set of item fields that are backed by `MediaFile` fields.

    Any kind of field (fixed, flexible, and computed) may be a media
    field. Only these fields are read from disk in `read` and written in
    `write`.
    """

    _media_tag_fields = set(MediaFile.fields()).intersection(_fields.keys())
    """Set of item fields that are backed by *writable* `MediaFile` tag
    fields.

    This excludes fields that represent audio data, such as `bitrate` or
    `length`.
    """

    _formatter = FormattedItemMapping

    _sorts = {'artist': SmartArtistSort}

    _format_config_key = 'format_item'

    @classmethod
    def _getters(cls):
        getters = plugins.item_field_getters()
        getters['singleton'] = lambda i: i.album_id is None
        getters['filesize'] = Item.try_filesize  # In bytes.
        return getters

    @classmethod
    def from_path(cls, path):
        """Creates a new item from the media file at the specified path.
        """
        # Initialize with values that aren't read from files.
        i = cls(album_id=None)
        i.read(path)
        i.mtime = i.current_mtime()  # Initial mtime.
        return i

    def __setitem__(self, key, value):
        """Set the item's value for a standard field or a flexattr.
        """
        # Encode unicode paths and read buffers.
        if key == 'path':
            if isinstance(value, unicode):
                value = bytestring_path(value)
            elif isinstance(value, buffer):
                value = bytes(value)

        if key in MediaFile.fields():
            self.mtime = 0  # Reset mtime on dirty.

        super(Item, self).__setitem__(key, value)

    def update(self, values):
        """Set all key/value pairs in the mapping. If mtime is
        specified, it is not reset (as it might otherwise be).
        """
        super(Item, self).update(values)
        if self.mtime == 0 and 'mtime' in values:
            self.mtime = values['mtime']

    def get_album(self):
        """Get the Album object that this item belongs to, if any, or
        None if the item is a singleton or is not associated with a
        library.
        """
        if not self._db:
            return None
        return self._db.get_album(self)

    # Interaction with file metadata.

    def read(self, read_path=None):
        """Read the metadata from the associated file.

        If `read_path` is specified, read metadata from that file
        instead. Updates all the properties in `_media_fields`
        from the media file.

        Raises a `ReadError` if the file could not be read.
        """
        if read_path is None:
            read_path = self.path
        else:
            read_path = normpath(read_path)
        try:
            mediafile = MediaFile(syspath(read_path))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(read_path, exc)

        for key in self._media_fields:
            value = getattr(mediafile, key)
            if isinstance(value, (int, long)):
                if value.bit_length() > 63:
                    value = 0
            self[key] = value

        # Database's mtime should now reflect the on-disk value.
        if read_path == self.path:
            self.mtime = self.current_mtime()

        self.path = read_path

    def write(self, path=None, tags=None):
        """Write the item's metadata to a media file.

        All fields in `_media_fields` are written to disk according to
        the values on this object.

        `path` is the path of the mediafile to write the data to. It
        defaults to the item's path.

        `tags` is a dictionary of additional metadata that should be
        written to the file. (These tags need not be in `_media_fields`.)

        Can raise either a `ReadError` or a `WriteError`.
        """
        if path is None:
            path = self.path
        else:
            path = normpath(path)

        # Get the data to write to the file.
        item_tags = dict(self)
        item_tags = {
            k: v
            for k, v in item_tags.items() if k in self._media_fields
        }  # Only write media fields.
        if tags is not None:
            item_tags.update(tags)
        plugins.send('write', item=self, path=path, tags=item_tags)

        # Open the file.
        try:
            mediafile = MediaFile(syspath(path),
                                  id3v23=beets.config['id3v23'].get(bool))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(self.path, exc)

        # Write the tags to the file.
        mediafile.update(item_tags)
        try:
            mediafile.save()
        except (OSError, IOError, MutagenError) as exc:
            raise WriteError(self.path, exc)

        # The file has a new mtime.
        if path == self.path:
            self.mtime = self.current_mtime()
        plugins.send('after_write', item=self, path=path)

    def try_write(self, path=None, tags=None):
        """Calls `write()` but catches and logs `FileOperationError`
        exceptions.

        Returns `False` if an exception was caught and `True` otherwise.
        """
        try:
            self.write(path, tags)
            return True
        except FileOperationError as exc:
            log.error(u"{0}", exc)
            return False

    def try_sync(self, write, move, with_album=True):
        """Synchronize the item with the database and, possibly, updates its
        tags on disk and its path (by moving the file).

        `write` indicates whether to write new tags into the file. Similarly,
        `move` controls whether the path should be updated. In the
        latter case, files are *only* moved when they are inside their
        library's directory (if any).

        Similar to calling :meth:`write`, :meth:`move`, and :meth:`store`
        (conditionally).
        """
        if write:
            self.try_write()
        if move:
            # Check whether this file is inside the library directory.
            if self._db and self._db.directory in util.ancestry(self.path):
                log.debug(u'moving {0} to synchronize path',
                          util.displayable_path(self.path))
                self.move(with_album=with_album)
        self.store()

    # Files themselves.

    def move_file(self, dest, copy=False, link=False):
        """Moves or copies the item's file, updating the path value if
        the move succeeds. If a file exists at ``dest``, then it is
        slightly modified to be unique.
        """
        if not util.samefile(self.path, dest):
            dest = util.unique_path(dest)
        if copy:
            util.copy(self.path, dest)
            plugins.send("item_copied",
                         item=self,
                         source=self.path,
                         destination=dest)
        elif link:
            util.link(self.path, dest)
            plugins.send("item_linked",
                         item=self,
                         source=self.path,
                         destination=dest)
        else:
            plugins.send("before_item_moved",
                         item=self,
                         source=self.path,
                         destination=dest)
            util.move(self.path, dest)
            plugins.send("item_moved",
                         item=self,
                         source=self.path,
                         destination=dest)

        # Either copying or moving succeeded, so update the stored path.
        self.path = dest

    def current_mtime(self):
        """Returns the current mtime of the file, rounded to the nearest
        integer.
        """
        return int(os.path.getmtime(syspath(self.path)))

    def try_filesize(self):
        """Get the size of the underlying file in bytes.

        If the file is missing, return 0 (and log a warning).
        """
        try:
            return os.path.getsize(syspath(self.path))
        except (OSError, Exception) as exc:
            log.warning(u'could not get filesize: {0}', exc)
            return 0

    # Model methods.

    def remove(self, delete=False, with_album=True):
        """Removes the item. If `delete`, then the associated file is
        removed from disk. If `with_album`, then the item's album (if
        any) is removed if the item was the last in the album.
        """
        super(Item, self).remove()

        # Remove the album if it is empty.
        if with_album:
            album = self.get_album()
            if album and not album.items():
                album.remove(delete, False)

        # Send an 'item_removed' signal to plugins
        plugins.send('item_removed', item=self)

        # Delete the associated file.
        if delete:
            util.remove(self.path)
            util.prune_dirs(os.path.dirname(self.path), self._db.directory)

        self._db._memotable = {}

    def move(self, copy=False, link=False, basedir=None, with_album=True):
        """Move the item to its designated location within the library
        directory (provided by destination()). Subdirectories are
        created as needed. If the operation succeeds, the item's path
        field is updated to reflect the new location.

        If `copy` is true, the file is copied rather than moved.
        Similarly, if `link` is true, a symlink is created instead.

        basedir overrides the library base directory for the
        destination.

        If the item is in an album, the album is given an opportunity to
        move its art. (This can be disabled by passing
        with_album=False.)

        The item is stored to the database if it is in the database, so
        any dirty fields prior to the move() call will be written as a
        side effect. You probably want to call save() to commit the DB
        transaction.
        """
        self._check_db()
        dest = self.destination(basedir=basedir)

        # Create necessary ancestry for the move.
        util.mkdirall(dest)

        # Perform the move and store the change.
        old_path = self.path
        self.move_file(dest, copy, link)
        self.store()

        # If this item is in an album, move its art.
        if with_album:
            album = self.get_album()
            if album:
                album.move_art(copy)
                album.store()

        # Prune vacated directory.
        if not copy:
            util.prune_dirs(os.path.dirname(old_path), self._db.directory)

    # Templating.

    def destination(self,
                    fragment=False,
                    basedir=None,
                    platform=None,
                    path_formats=None):
        """Returns the path in the library directory designated for the
        item (i.e., where the file ought to be). fragment makes this
        method return just the path fragment underneath the root library
        directory; the path is also returned as Unicode instead of
        encoded as a bytestring. basedir can override the library's base
        directory for the destination.
        """
        self._check_db()
        platform = platform or sys.platform
        basedir = basedir or self._db.directory
        path_formats = path_formats or self._db.path_formats

        # Use a path format based on a query, falling back on the
        # default.
        for query, path_format in path_formats:
            if query == PF_KEY_DEFAULT:
                continue
            query, _ = parse_query_string(query, type(self))
            if query.match(self):
                # The query matches the item! Use the corresponding path
                # format.
                break
        else:
            # No query matched; fall back to default.
            for query, path_format in path_formats:
                if query == PF_KEY_DEFAULT:
                    break
            else:
                assert False, u"no default path format"
        if isinstance(path_format, Template):
            subpath_tmpl = path_format
        else:
            subpath_tmpl = Template(path_format)

        # Evaluate the selected template.
        subpath = self.evaluate_template(subpath_tmpl, True)

        # Prepare path for output: normalize Unicode characters.
        if platform == 'darwin':
            subpath = unicodedata.normalize('NFD', subpath)
        else:
            subpath = unicodedata.normalize('NFC', subpath)

        if beets.config['asciify_paths']:
            subpath = unidecode(subpath)

        maxlen = beets.config['max_filename_length'].get(int)
        if not maxlen:
            # When zero, try to determine from filesystem.
            maxlen = util.max_filename_length(self._db.directory)

        subpath, fellback = util.legalize_path(subpath, self._db.replacements,
                                               maxlen,
                                               os.path.splitext(self.path)[1],
                                               fragment)
        if fellback:
            # Print an error message if legalization fell back to
            # default replacements because of the maximum length.
            log.warning(
                u'Fell back to default replacements when naming '
                u'file {}. Configure replacements to avoid lengthening '
                u'the filename.', subpath)

        if fragment:
            return subpath
        else:
            return normpath(os.path.join(basedir, subpath))
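A minimal usage sketch of this Item class follows, assuming the Library class from the same beets.library module; the database path, music directory, and media file path below are placeholders rather than values taken from the code above.

from beets.library import Library, Item

# Hypothetical locations; point these at a real database and media file.
lib = Library('/tmp/musiclibrary.db', directory='/tmp/Music')

# Build an Item by reading tags from an on-disk media file, then attach
# it to the library database.
item = Item.from_path('/tmp/song.mp3')
lib.add(item)

# Extra tags (not necessarily in _media_fields) may be passed to write().
item.write(tags={'comments': u'illustrative extra tag'})

# Move the file to its destination under the library directory; move()
# also stores any dirty fields to the database.
item.move()

For routine updates, try_sync(write=True, move=True) bundles the same write, move, and store steps, moving only files that already live inside the library directory.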
Code example #2
File: test_mediafile.py, project: Cornellio/beets
 def test_fields_in_readable_fields(self):
     readable = MediaFile.readable_fields()
     for field in MediaFile.fields():
         self.assertIn(field, readable)
Code example #3
File: test_mediafile.py, project: Cornellio/beets
 def test_properties_from_readable_fields(self):
     path = os.path.join(_common.RSRC, 'full.mp3')
     mediafile = MediaFile(path)
     for field in MediaFile.readable_fields():
         self.assertTrue(hasattr(mediafile, field))
Code example #4
File: test_mediafile.py, project: tux-00/beets
 def test_fields_in_readable_fields(self):
     readable = MediaFile.readable_fields()
     for field in MediaFile.fields():
         self.assertIn(field, readable)
Code example #5
File: test_mediafile.py, project: tux-00/beets
 def test_properties_from_readable_fields(self):
     path = os.path.join(_common.RSRC, 'full.mp3')
     mediafile = MediaFile(path)
     for field in MediaFile.readable_fields():
         self.assertTrue(hasattr(mediafile, field))
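The four test snippets above assert the same relationship: every writable tag field from MediaFile.fields() also appears in MediaFile.readable_fields(), and each readable field is exposed as an attribute on an opened file. A standalone sketch of that check, assuming MediaFile is importable from beets.mediafile and using a placeholder file path:

from beets.mediafile import MediaFile

writable = set(MediaFile.fields())
readable = set(MediaFile.readable_fields())
assert writable <= readable       # every writable tag field is also readable

mf = MediaFile('/tmp/full.mp3')   # hypothetical audio file
missing = [f for f in readable if not hasattr(mf, f)]
assert not missing                # each readable field is an attribute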
Code example #6
File: library.py, project: zammottomate/beets
class Item(LibModel):
    _table = 'items'
    _flex_table = 'item_attributes'
    _fields = {
        'id':       types.PRIMARY_ID,
        'path':     PathType(),
        'album_id': types.FOREIGN_ID,

        'title':                types.STRING,
        'artist':               types.STRING,
        'artist_sort':          types.STRING,
        'artist_credit':        types.STRING,
        'album':                types.STRING,
        'albumartist':          types.STRING,
        'albumartist_sort':     types.STRING,
        'albumartist_credit':   types.STRING,
        'genre':                types.STRING,
        'composer':             types.STRING,
        'grouping':             types.STRING,
        'year':                 types.PaddedInt(4),
        'month':                types.PaddedInt(2),
        'day':                  types.PaddedInt(2),
        'track':                types.PaddedInt(2),
        'tracktotal':           types.PaddedInt(2),
        'disc':                 types.PaddedInt(2),
        'disctotal':            types.PaddedInt(2),
        'lyrics':               types.STRING,
        'comments':             types.STRING,
        'bpm':                  types.INTEGER,
        'comp':                 types.BOOLEAN,
        'mb_trackid':           types.STRING,
        'mb_albumid':           types.STRING,
        'mb_artistid':          types.STRING,
        'mb_albumartistid':     types.STRING,
        'albumtype':            types.STRING,
        'label':                types.STRING,
        'acoustid_fingerprint': types.STRING,
        'acoustid_id':          types.STRING,
        'mb_releasegroupid':    types.STRING,
        'asin':                 types.STRING,
        'catalognum':           types.STRING,
        'script':               types.STRING,
        'language':             types.STRING,
        'country':              types.STRING,
        'albumstatus':          types.STRING,
        'media':                types.STRING,
        'albumdisambig':        types.STRING,
        'disctitle':            types.STRING,
        'encoder':              types.STRING,
        'rg_track_gain':        types.NULL_FLOAT,
        'rg_track_peak':        types.NULL_FLOAT,
        'rg_album_gain':        types.NULL_FLOAT,
        'rg_album_peak':        types.NULL_FLOAT,
        'original_year':        types.PaddedInt(4),
        'original_month':       types.PaddedInt(2),
        'original_day':         types.PaddedInt(2),
        'initial_key':          MusicalKey(),

        'length':      types.FLOAT,
        'bitrate':     types.ScaledInt(1000, u'kbps'),
        'format':      types.STRING,
        'samplerate':  types.ScaledInt(1000, u'kHz'),
        'bitdepth':    types.INTEGER,
        'channels':    types.INTEGER,
        'mtime':       DateType(),
        'added':       DateType(),
    }

    _search_fields = ('artist', 'title', 'comments',
                      'album', 'albumartist', 'genre')

    _media_fields = set(MediaFile.readable_fields()) \
        .intersection(_fields.keys())
    """Set of item fields that are backed by `MediaFile` fields.

    Any kind of field (fixed, flexible, and computed) may be a media
    field. Only these fields are read from disk in `read` and written in
    `write`.
    """

    _formatter = FormattedItemMapping

    @classmethod
    def _getters(cls):
        getters = plugins.item_field_getters()
        getters['singleton'] = lambda i: i.album_id is None
        return getters

    @classmethod
    def from_path(cls, path):
        """Creates a new item from the media file at the specified path.
        """
        # Initialize with values that aren't read from files.
        i = cls(album_id=None)
        i.read(path)
        i.mtime = i.current_mtime()  # Initial mtime.
        return i

    def __setitem__(self, key, value):
        """Set the item's value for a standard field or a flexattr.
        """
        # Encode unicode paths and read buffers.
        if key == 'path':
            if isinstance(value, unicode):
                value = bytestring_path(value)
            elif isinstance(value, buffer):
                value = str(value)

        if key in MediaFile.fields():
            self.mtime = 0  # Reset mtime on dirty.

        super(Item, self).__setitem__(key, value)

    def update(self, values):
        """Set all key/value pairs in the mapping. If mtime is
        specified, it is not reset (as it might otherwise be).
        """
        super(Item, self).update(values)
        if self.mtime == 0 and 'mtime' in values:
            self.mtime = values['mtime']

    def get_album(self):
        """Get the Album object that this item belongs to, if any, or
        None if the item is a singleton or is not associated with a
        library.
        """
        if not self._db:
            return None
        return self._db.get_album(self)

    # Interaction with file metadata.

    def read(self, read_path=None):
        """Read the metadata from the associated file.

        If `read_path` is specified, read metadata from that file
        instead. Updates all the properties in `_media_fields`
        from the media file.

        Raises a `ReadError` if the file could not be read.
        """
        if read_path is None:
            read_path = self.path
        else:
            read_path = normpath(read_path)
        try:
            mediafile = MediaFile(syspath(read_path))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(read_path, exc)

        for key in self._media_fields:
            value = getattr(mediafile, key)
            if isinstance(value, (int, long)):
                # Filter values wider than 64 bits (in signed representation).
                # SQLite cannot store them. py26: Post transition, we can use:
                # value.bit_length() > 63
                if abs(value) >= 2 ** 63:
                    value = 0
            self[key] = value

        # Database's mtime should now reflect the on-disk value.
        if read_path == self.path:
            self.mtime = self.current_mtime()

        self.path = read_path

    def write(self, path=None):
        """Write the item's metadata to a media file.

        All fields in `_media_fields` are written to disk according to
        the values on this object.

        Can raise either a `ReadError` or a `WriteError`.
        """
        if path is None:
            path = self.path
        else:
            path = normpath(path)

        plugins.send('write', item=self, path=path)

        try:
            mediafile = MediaFile(syspath(path),
                                  id3v23=beets.config['id3v23'].get(bool))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(self.path, exc)

        mediafile.update(self)
        try:
            mediafile.save()
        except (OSError, IOError, MutagenError) as exc:
            raise WriteError(self.path, exc)

        # The file has a new mtime.
        if path == self.path:
            self.mtime = self.current_mtime()
        plugins.send('after_write', item=self, path=path)

    def try_write(self, path=None):
        """Calls `write()` but catches and logs `FileOperationError`
        exceptions.

        Returns `False` if an exception was caught and `True` otherwise.
        """
        try:
            self.write(path)
            return True
        except FileOperationError as exc:
            log.error(exc)
            return False

    # Files themselves.

    def move_file(self, dest, copy=False):
        """Moves or copies the item's file, updating the path value if
        the move succeeds. If a file exists at ``dest``, then it is
        slightly modified to be unique.
        """
        if not util.samefile(self.path, dest):
            dest = util.unique_path(dest)
        if copy:
            util.copy(self.path, dest)
            plugins.send("item_copied", item=self, source=self.path,
                         destination=dest)
        else:
            plugins.send("before_item_moved", item=self, source=self.path,
                         destination=dest)
            util.move(self.path, dest)
            plugins.send("item_moved", item=self, source=self.path,
                         destination=dest)

        # Either copying or moving succeeded, so update the stored path.
        self.path = dest

    def current_mtime(self):
        """Returns the current mtime of the file, rounded to the nearest
        integer.
        """
        return int(os.path.getmtime(syspath(self.path)))

    # Model methods.

    def remove(self, delete=False, with_album=True):
        """Removes the item. If `delete`, then the associated file is
        removed from disk. If `with_album`, then the item's album (if
        any) is removed if the item was the last in the album.
        """
        super(Item, self).remove()

        # Remove the album if it is empty.
        if with_album:
            album = self.get_album()
            if album and not album.items():
                album.remove(delete, False)

        # Send an 'item_removed' signal to plugins
        plugins.send('item_removed', item=self)

        # Delete the associated file.
        if delete:
            util.remove(self.path)
            util.prune_dirs(os.path.dirname(self.path), self._db.directory)

        self._db._memotable = {}

    def move(self, copy=False, basedir=None, with_album=True):
        """Move the item to its designated location within the library
        directory (provided by destination()). Subdirectories are
        created as needed. If the operation succeeds, the item's path
        field is updated to reflect the new location.

        If copy is True, the file is copied rather than moved.

        basedir overrides the library base directory for the
        destination.

        If the item is in an album, the album is given an opportunity to
        move its art. (This can be disabled by passing
        with_album=False.)

        The item is stored to the database if it is in the database, so
        any dirty fields prior to the move() call will be written as a
        side effect. You probably want to call save() to commit the DB
        transaction.
        """
        self._check_db()
        dest = self.destination(basedir=basedir)

        # Create necessary ancestry for the move.
        util.mkdirall(dest)

        # Perform the move and store the change.
        old_path = self.path
        self.move_file(dest, copy)
        self.store()

        # If this item is in an album, move its art.
        if with_album:
            album = self.get_album()
            if album:
                album.move_art(copy)
                album.store()

        # Prune vacated directory.
        if not copy:
            util.prune_dirs(os.path.dirname(old_path), self._db.directory)

    # Templating.

    def destination(self, fragment=False, basedir=None, platform=None,
                    path_formats=None):
        """Returns the path in the library directory designated for the
        item (i.e., where the file ought to be). fragment makes this
        method return just the path fragment underneath the root library
        directory; the path is also returned as Unicode instead of
        encoded as a bytestring. basedir can override the library's base
        directory for the destination.
        """
        self._check_db()
        platform = platform or sys.platform
        basedir = basedir or self._db.directory
        path_formats = path_formats or self._db.path_formats

        # Use a path format based on a query, falling back on the
        # default.
        for query, path_format in path_formats:
            if query == PF_KEY_DEFAULT:
                continue
            (query, _) = get_query_sort(query, type(self))
            if query.match(self):
                # The query matches the item! Use the corresponding path
                # format.
                break
        else:
            # No query matched; fall back to default.
            for query, path_format in path_formats:
                if query == PF_KEY_DEFAULT:
                    break
            else:
                assert False, "no default path format"
        if isinstance(path_format, Template):
            subpath_tmpl = path_format
        else:
            subpath_tmpl = Template(path_format)

        # Evaluate the selected template.
        subpath = self.evaluate_template(subpath_tmpl, True)

        # Prepare path for output: normalize Unicode characters.
        if platform == 'darwin':
            subpath = unicodedata.normalize('NFD', subpath)
        else:
            subpath = unicodedata.normalize('NFC', subpath)

        if beets.config['asciify_paths']:
            subpath = unidecode(subpath)

        # Truncate components and remove forbidden characters.
        subpath = util.sanitize_path(subpath, self._db.replacements)

        # Encode for the filesystem.
        if not fragment:
            subpath = bytestring_path(subpath)

        # Preserve extension.
        _, extension = os.path.splitext(self.path)
        if fragment:
            # Outputting Unicode.
            extension = extension.decode('utf8', 'ignore')
        subpath += extension.lower()

        # Truncate too-long components.
        maxlen = beets.config['max_filename_length'].get(int)
        if not maxlen:
            # When zero, try to determine from filesystem.
            maxlen = util.max_filename_length(self._db.directory)
        subpath = util.truncate_path(subpath, maxlen)

        if fragment:
            return subpath
        else:
            return normpath(os.path.join(basedir, subpath))
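To illustrate the query/default fallback in destination(), here is a hedged sketch of how path formats are typically handed to the library; the query, templates, and paths are illustrative only, and 'default' is the value of PF_KEY_DEFAULT referenced in the code above.

from beets.library import Library

# Illustrative path formats: the first entry whose query matches the item
# wins; the 'default' entry is the fallback when no query matches.
path_formats = [
    ('comp:true', 'Compilations/$album/$track $title'),
    ('default', '$albumartist/$album/$track $title'),
]

lib = Library('/tmp/musiclibrary.db', directory='/tmp/Music',
              path_formats=path_formats)

item = lib.items().get()   # first item in the library, or None
if item is not None:
    # Unicode path fragment beneath the library directory.
    print(item.destination(fragment=True))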