Example #1
0
File: trackdb.py  Project: twa022/exaile
    def __init__(self, name="", location="", pickle_attrs=None, loadfirst=False):
        """
            Sets up the trackDB.

            :param name: name of this TrackDB
            :param location: path to load/save the DB; if set, the DB is
                loaded immediately and periodic saving is scheduled
            :param pickle_attrs: extra attribute names to persist; copied
                internally so the caller's list is never mutated
            :param loadfirst: if True, refuse to initialize when tracks
                have already been loaded elsewhere
        """

        # ensure that the DB is always loaded before any tracks are,
        # otherwise internal values are not loaded and may be lost/corrupted
        if loadfirst and Track._get_track_count() != 0:
            raise RuntimeError(
                ("Internal error! %d tracks already loaded, " +
                 "TrackDB must be loaded first!") % Track._get_track_count())

        self.name = name
        self.location = location
        self._dirty = False
        self.tracks = {}  # key is always URI of the track
        # Copy the list: the old `pickle_attrs=[]` default was a mutable
        # default argument, and `+=` below grew that single shared list
        # (and any caller-supplied list) across instances.
        self.pickle_attrs = list(pickle_attrs) if pickle_attrs else []
        self.pickle_attrs += ['tracks', 'name', '_key']
        self._saving = False
        self._key = 0
        self._dbversion = 2.0
        self._dbminorversion = 0
        self._deleted_keys = []
        if location:
            self.load_from_location()
            self._timeout_save()
Example #2
0
 def __init__(self, name="", location="", pickle_attrs=None, loadfirst=False):
     """
         Sets up the trackDB.

         :param name: name of this TrackDB
         :param location: path to load/save the DB; if set, the DB is
             loaded immediately and periodic saving is scheduled
         :param pickle_attrs: extra attribute names to persist; copied
             internally so the caller's list is never mutated
         :param loadfirst: if True, refuse to initialize when tracks
             have already been loaded elsewhere
     """

     # ensure that the DB is always loaded before any tracks are,
     # otherwise internal values are not loaded and may be lost/corrupted
     if loadfirst and Track._get_track_count() != 0:
         raise RuntimeError(("Internal error! %d tracks already loaded, " +
             "TrackDB must be loaded first!") % Track._get_track_count())

     self.name = name
     self.location = location
     self._dirty = False
     self.tracks = {}  # key is always URI of the track
     # Copy the list: the old `pickle_attrs=[]` default was a mutable
     # default argument, and `+=` below grew that single shared list
     # (and any caller-supplied list) across instances.
     self.pickle_attrs = list(pickle_attrs) if pickle_attrs else []
     self.pickle_attrs += ['tracks', 'name', '_key']
     self._saving = False
     self._key = 0
     self._dbversion = 2.0
     self._dbminorversion = 0
     self._deleted_keys = []
     if location:
         self.load_from_location()
         self._timeout_save()
Example #3
0
File: util.py  Project: thiblahute/exaile
def get_tracks_from_uri(uri):
    """
        Returns all valid tracks located at uri

        :param uri: the uri to retrieve the tracks from
        :type uri: string
        :returns: the retrieved tracks
        :rtype: list of :class:`xl.trax.Track`
    """
    gloc = gio.File(uri)

    # Streaming-style schemes can't be probed cheaply (or at all), so wrap
    # the URI directly without any advanced checking.
    # TODO: move uri definition somewhere more common for easy reuse?
    if gloc.get_uri_scheme() in ('http', 'mms', 'cdda'):
        return [Track(uri)]

    try:
        info = gloc.query_info("standard::type")
        file_type = info.get_file_type()
    except gio.Error:  # E.g. cdda
        file_type = None

    # A plain file (or anything we couldn't classify) becomes one track.
    if file_type != gio.FILE_TYPE_DIRECTORY:
        return [Track(uri)]

    # TODO: refactor Library so we dont need the collection obj
    from xl.collection import Library, Collection
    scanned = Collection('scanner')
    library = Library(uri)
    library.set_collection(scanned)
    library.rescan()
    return scanned.get_tracks()
Example #4
0
File: trackdb.py  Project: lh77x7/exaile
    def load_from_location(self, location=None):
        """
        Restores :class:`TrackDB` state from the pickled representation
        stored at the specified location.

        :param location: the location to load the data from
        :type location: string
        :raises AttributeError: if no location is given and none is set
        :raises common.VersionError: if the DB was written by a newer Exaile
        """
        if not location:
            location = self.location
        if not location:
            raise AttributeError(
                _("You did not specify a location to load the db from")
            )

        logger.debug("Loading %s DB from %s.", self.name, location)

        pdata = common.open_shelf(location)

        if "_dbversion" in pdata:
            if int(pdata['_dbversion']) > int(self._dbversion):
                raise common.VersionError("DB was created on a newer Exaile version.")
            elif pdata['_dbversion'] < self._dbversion:
                logger.info("Upgrading DB format....")
                import shutil

                # keep a backup of the old-format DB before migrating in place
                shutil.copyfile(location, location + "-%s.bak" % pdata['_dbversion'])
                import xl.migrations.database as dbmig

                dbmig.handle_migration(
                    self, pdata, pdata['_dbversion'], self._dbversion
                )

        for attr in self.pickle_attrs:
            try:
                if 'tracks' == attr:
                    data = {}
                    # Materialize the key list up front: the loop below may
                    # delete duplicate entries from pdata, and mutating the
                    # shelf while lazily iterating its live keys() can raise
                    # RuntimeError or skip entries.
                    for k in [x for x in pdata.keys() if x.startswith("tracks-")]:
                        p = pdata[k]
                        tr = Track(_unpickles=p[0])
                        loc = tr.get_loc_for_io()
                        if loc not in data:
                            data[loc] = TrackHolder(tr, p[1], **p[2])
                        else:
                            logger.warning("Duplicate track found: %s", loc)
                            # presumably the second track was written because of an error,
                            # so use the first track found.
                            del pdata[k]

                    setattr(self, attr, data)
                else:
                    # fall back to the current value if the key is missing
                    setattr(self, attr, pdata.get(attr, getattr(self, attr)))
            except Exception:
                # FIXME: Do something about this
                logger.exception("Exception occurred while loading %s", location)

        pdata.close()

        self._dirty = False
Example #5
0
File: trackdb.py  Project: exaile/exaile
    def load_from_location(self, location=None):
        """
            Restores :class:`TrackDB` state from the pickled representation
            stored at the specified location.

            :param location: the location to load the data from
            :type location: string
            :raises AttributeError: if no location is given and none is set
            :raises common.VersionError: if the DB was written by a newer
                Exaile version
        """
        if not location:
            location = self.location
        if not location:
            raise AttributeError(
                _("You did not specify a location to load the db from")
            )

        logger.debug("Loading %s DB from %s.", self.name, location)

        pdata = common.open_shelf(location)

        if "_dbversion" in pdata:
            if int(pdata['_dbversion']) > int(self._dbversion):
                raise common.VersionError("DB was created on a newer Exaile version.")
            elif pdata['_dbversion'] < self._dbversion:
                logger.info("Upgrading DB format....")
                import shutil

                # keep a backup of the old-format DB before migrating in place
                shutil.copyfile(location, location + "-%s.bak" % pdata['_dbversion'])
                import xl.migrations.database as dbmig

                dbmig.handle_migration(
                    self, pdata, pdata['_dbversion'], self._dbversion
                )

        for attr in self.pickle_attrs:
            try:
                if 'tracks' == attr:
                    data = {}
                    # Materialize the key list up front: the loop below may
                    # delete duplicate entries from pdata, and mutating the
                    # shelf while lazily iterating its live keys() can raise
                    # RuntimeError or skip entries.
                    for k in [x for x in pdata.keys() if x.startswith("tracks-")]:
                        p = pdata[k]
                        tr = Track(_unpickles=p[0])
                        loc = tr.get_loc_for_io()
                        if loc not in data:
                            data[loc] = TrackHolder(tr, p[1], **p[2])
                        else:
                            logger.warning("Duplicate track found: %s", loc)
                            # presumably the second track was written because of an error,
                            # so use the first track found.
                            del pdata[k]

                    setattr(self, attr, data)
                else:
                    # fall back to the current value if the key is missing
                    setattr(self, attr, pdata.get(attr, getattr(self, attr)))
            except Exception:
                # FIXME: Do something about this
                logger.exception("Exception occurred while loading %s", location)

        pdata.close()

        self._dirty = False
Example #6
0
File: util.py  Project: twa022/exaile
def recursive_tracks_from_file(gfile: Gio.File) -> Iterable[Track]:
    """
        Get recursive tracks from Gio.File
        If it's a directory, expands
        Gets only valid tracks

        :param gfile: the file or directory to scan
        :returns: generator yielding valid :class:`Track` objects
    """
    ftype = gfile.query_info('standard::type', Gio.FileQueryInfoFlags.NONE,
                             None).get_file_type()
    if ftype == Gio.FileType.DIRECTORY:
        file_infos = gfile.enumerate_children('standard::name',
                                              Gio.FileQueryInfoFlags.NONE,
                                              None)
        files = (gfile.get_child(fi.get_name()) for fi in file_infos)
        for sub_gfile in files:
            # delegate to the recursive generator directly instead of
            # re-yielding its items one by one
            yield from recursive_tracks_from_file(sub_gfile)
    else:
        uri = gfile.get_uri()
        if is_valid_track(uri):
            yield Track(uri)
Example #7
0
    def load_from_location(self, location=None):
        """
            Restores :class:`TrackDB` state from the pickled representation
            stored at the specified location.

            :param location: the location to load the data from
            :type location: string
            :raises AttributeError: if no location is given and none is set
            :raises common.VersionError: if the DB was written by a newer
                Exaile version
        """
        if not location:
            location = self.location
        if not location:
            raise AttributeError(
                _("You did not specify a location to load the db from"))

        # lazy %-style args: formatting only happens if the level is enabled
        logger.debug("Loading %s DB from %s.", self.name, location)

        try:
            try:
                pdata = shelve.open(location,
                                    flag='c',
                                    protocol=common.PICKLE_PROTOCOL)
            except ImportError:
                import bsddb3  # ArchLinux disabled bsddb in python2, so we have to use the external module
                _db = bsddb3.hashopen(location, 'c')
                pdata = shelve.Shelf(_db, protocol=common.PICKLE_PROTOCOL)
            # `in` instead of dict.has_key() (removed in python3); the
            # exception is raised with call syntax, valid on py2 and py3
            # (the old `raise Cls, msg` comma form is a py3 SyntaxError)
            if "_dbversion" in pdata:
                if int(pdata['_dbversion']) > int(self._dbversion):
                    raise common.VersionError(
                        "DB was created on a newer Exaile version.")
                elif pdata['_dbversion'] < self._dbversion:
                    logger.info("Upgrading DB format....")
                    import shutil
                    # keep a backup of the old-format DB before migrating
                    shutil.copyfile(location,
                                    location + "-%s.bak" % pdata['_dbversion'])
                    import xl.migrations.database as dbmig
                    dbmig.handle_migration(self, pdata, pdata['_dbversion'],
                                           self._dbversion)

        except common.VersionError:
            raise
        except Exception:
            logger.error("Failed to open music DB.")
            common.log_exception(log=logger)
            return

        for attr in self.pickle_attrs:
            try:
                if 'tracks' == attr:
                    data = {}
                    # Materialize the key list up front: duplicate entries
                    # are deleted from pdata below, and mutating the shelf
                    # while lazily iterating its keys is unsafe.
                    for k in [x for x in pdata.keys()
                              if x.startswith("tracks-")]:
                        p = pdata[k]
                        tr = Track(_unpickles=p[0])
                        loc = tr.get_loc_for_io()
                        if loc not in data:
                            data[loc] = TrackHolder(tr, p[1], **p[2])
                        else:
                            logger.warning("Duplicate track found: %s", loc)
                            # presumably the second track was written because of an error,
                            # so use the first track found.
                            del pdata[k]

                    setattr(self, attr, data)
                else:
                    # .get with the current value as fallback: a missing key
                    # must not raise KeyError (matches the newer DB loaders)
                    setattr(self, attr, pdata.get(attr, getattr(self, attr)))
            except Exception:
                # FIXME: Do something about this
                # logger.warning, not the deprecated logger.warn alias
                logger.warning("Exception occurred while loading %s", location)
                common.log_exception(log=logger)

        pdata.close()

        self._dirty = False
Example #8
0
File: trackdb.py  Project: dangmai/exaile
    def load_from_location(self, location=None):
        """
            Restores :class:`TrackDB` state from the pickled representation
            stored at the specified location.

            :param location: the location to load the data from
            :type location: string
            :raises AttributeError: if no location is given and none is set
            :raises common.VersionError: if the DB was written by a newer
                Exaile version
        """
        if not location:
            location = self.location
        if not location:
            raise AttributeError(_("You did not specify a location to load the db from"))

        # lazy %-style args: formatting only happens if the level is enabled
        logger.debug("Loading %s DB from %s.", self.name, location)

        try:
            try:
                pdata = shelve.open(location, flag="c", protocol=common.PICKLE_PROTOCOL)
            except ImportError:
                import bsddb3  # ArchLinux disabled bsddb in python2, so we have to use the external module

                _db = bsddb3.hashopen(location, "c")
                pdata = shelve.Shelf(_db, protocol=common.PICKLE_PROTOCOL)
            if "_dbversion" in pdata:
                if int(pdata["_dbversion"]) > int(self._dbversion):
                    # call syntax, valid on py2 and py3 (the old
                    # `raise Cls, msg` comma form is a py3 SyntaxError)
                    raise common.VersionError("DB was created on a newer Exaile version.")
                elif pdata["_dbversion"] < self._dbversion:
                    logger.info("Upgrading DB format....")
                    import shutil

                    # keep a backup of the old-format DB before migrating
                    shutil.copyfile(location, location + "-%s.bak" % pdata["_dbversion"])
                    import xl.migrations.database as dbmig

                    dbmig.handle_migration(self, pdata, pdata["_dbversion"], self._dbversion)

        except common.VersionError:
            raise
        except Exception:
            logger.exception("Failed to open music DB.")
            return

        for attr in self.pickle_attrs:
            try:
                if "tracks" == attr:
                    data = {}
                    # Materialize the key list up front: duplicate entries
                    # are deleted from pdata below, and mutating the shelf
                    # while lazily iterating its keys is unsafe.
                    for k in [x for x in pdata.keys() if x.startswith("tracks-")]:
                        p = pdata[k]
                        tr = Track(_unpickles=p[0])
                        loc = tr.get_loc_for_io()
                        if loc not in data:
                            data[loc] = TrackHolder(tr, p[1], **p[2])
                        else:
                            logger.warning("Duplicate track found: %s", loc)
                            # presumably the second track was written because of an error,
                            # so use the first track found.
                            del pdata[k]

                    setattr(self, attr, data)
                else:
                    # fall back to the current value if the key is missing
                    setattr(self, attr, pdata.get(attr, getattr(self, attr)))
            except Exception:
                # FIXME: Do something about this
                logger.exception("Exception occurred while loading %s", location)

        pdata.close()

        self._dirty = False