def read_from_path(uri, track_type=Track):
    """
    Reads tags from a specified uri.

    :param uri: location of the file to read tags from
    :param track_type: class used to construct the returned track
    :returns: a track with tags filled in; the bare track if the
        extension is unsupported; None if reading failed
    """
    (path, ext) = os.path.splitext(uri.lower())
    ext = ext[1:]  # drop the leading dot

    tr = track_type(uri)
    if tr.type != 'device':
        tr.type = 'file'

    # renamed from 'format' to avoid shadowing the builtin
    fmt = formats.get(ext)
    if not fmt:
        return tr  # unsupported extension: return the track untagged

    try:
        fmt.fill_tag_from_path(tr)
    except HeaderNotFoundError:
        logger.debug("Possibly corrupt file: " + uri)
        return None
    except Exception:  # was a bare except; don't trap SystemExit/KeyboardInterrupt
        common.log_exception(log=logger)
        return None
    return tr
def sink_from_preset(player, preset):
    """
    Builds an AudioSink for the given player from a named preset.

    :param player: player object; its _name selects settings keys
    :param preset: preset name, or "custom" for a user-defined pipeline
    :returns: an AudioSink, or None if the preset is unknown, unset,
        or the sink fails to construct
    """
    if preset == "custom":
        # user-supplied gstreamer pipeline string
        pipe = settings.get_option("%s/custom_sink_pipe" % player._name, "")
        if not pipe:
            logger.error("No custom sink pipe set for %s" % player._name)
            return None
        name = _("Custom")
    else:
        d = SINK_PRESETS.get(preset, "")
        if not d:
            logger.error("Could not find sink preset %s for %s." %
                    (preset, player._name))
            return None
        name = d['name']
        pipe = d['pipe']
        if preset != 'auto':
            # NOTE(review): device/pipeargs handling reconstructed as
            # applying only to non-custom presets (d is unbound in the
            # custom branch) -- confirm against upstream
            dname = settings.get_option('%s/audiosink_device' % player._name)
            if dname:
                pipe += ' device=' + dname
        if 'pipeargs' in d:
            pipe += ' ' + d['pipeargs']

    try:
        sink = AudioSink(name, pipe, player)
    except Exception:
        common.log_exception(log=logger,
                message="Could not enable audiosink %s for %s." %
                (preset, player._name))
        return None
    return sink
def get_handler(self, udi):
    """
    Looks up the HAL device for the given udi and queries its
    capabilities over D-Bus.

    :param udi: HAL unique device identifier
    :returns: None when the capabilities cannot be read
    """
    dev_obj = self.bus.get_object("org.freedesktop.Hal", udi)
    device = dbus.Interface(dev_obj, "org.freedesktop.Hal.Device")
    try:
        capabilities = device.GetProperty("info.capabilities")
    except dbus.exceptions.DBusException, e:
        # A missing property is normal for some devices; anything else
        # is worth logging.
        if not e.get_dbus_name() == "org.freedesktop.Hal.NoSuchProperty":
            common.log_exception(logger)
        return None
    # NOTE(review): the visible chunk ends here -- the handler lookup
    # using 'capabilities' presumably continues past this view.
def reset_providers(self):
    """
    Rebuilds the sink table from the currently registered providers.

    Providers that fail to instantiate are logged and skipped.
    """
    self.sinks = {}
    for provider in self.get_providers():
        try:
            self.sinks[provider.name] = provider()
        except Exception:  # was a bare except; don't trap SystemExit/KeyboardInterrupt
            logger.warning("Could not create %s element for %s." %
                    (provider, self.get_name()))
            common.log_exception(log=logger)
def is_type(self, device, capabilities):
    """
    Scores how well a HAL device matches this handler.

    :returns: 10 if the device is a storage-protocol portable audio
        player, None otherwise
    """
    if "portable_audio_player" in capabilities:
        try:
            if "storage" in device.GetProperty(
                    "portable_audio_player.access_method.protocols"):
                return 10
        except dbus.exceptions.DBusException, e:
            # a missing property just means "not this type";
            # log anything unexpected
            if not e.get_dbus_name() == "org.freedesktop.Hal.NoSuchProperty":
                common.log_exception(log=logger)
def get_position(self):
    """
    Returns the current playback position in stream time.

    While paused the last cached position is returned instead of
    querying the pipeline.
    """
    if self.is_paused():
        return self.last_position
    try:
        self.last_position = self.dec.query_position(gst.FORMAT_TIME)[0]
    except gst.QueryError:
        # query can fail during state changes; fall back to 0
        common.log_exception(logger)
        self.last_position = 0
    return self.last_position
def _get_panel_type(self):
    """
    Loads the CD GUI panel class from the sibling _cdguipanel module.

    :returns: the CDPanel class, or the string 'flatplaylist' as a
        fallback if the panel cannot be imported
    """
    import imp
    try:
        _cdguipanel = imp.load_source("_cdguipanel",
                os.path.join(os.path.dirname(__file__), "_cdguipanel.py"))
        return _cdguipanel.CDPanel
    except Exception:  # was a bare except
        common.log_exception(log=logger,
                message="Could not import cd gui panel")
        return 'flatplaylist'
def execute(self, query, args=None):
    """
    Executes a query against this manager's cursor.

    :param query: SQL statement to run
    :param args: parameter sequence for the statement (defaults to [])
    """
    cur = self._cursor
    if not args:
        args = []
    try:
        cur.execute(query, args)
    except Exception:  # was a bare except; don't trap SystemExit/KeyboardInterrupt
        common.log_exception(log=logger)
def is_type(self, device, capabilities):
    """
    Scores how well a HAL device matches this handler.

    :returns: 10 if the device is a storage-protocol portable audio
        player, None otherwise
    """
    if "portable_audio_player" in capabilities:
        try:
            if "storage" in device.GetProperty(
                    "portable_audio_player.access_method.protocols"):
                return 10
        except dbus.exceptions.DBusException, e:
            # a missing property just means "not this type";
            # log anything unexpected
            if not e.get_dbus_name(
                    ) == "org.freedesktop.Hal.NoSuchProperty":
                common.log_exception(log=logger)
def _check_compilation(self, ccheck, compilations, tr): """ This is the hacky way to test to see if a particular track is a part of a compilation. Basically, if there is more than one track in a directory that has the same album but different artist, we assume that it's part of a compilation. :param ccheck: dictionary for internal use :param compilations: if a compilation is found, it'll be appended to this list :param tr: the track to check """ # check for compilations if not settings.get_option('collection/file_based_compilations', True): return def joiner(value): if not value or isinstance(value, basestring): return value else: try: return u"\u0000".join(value) except UnicodeDecodeError: return "\0".join(value) try: basedir = joiner(tr.get_tag_raw('__basedir')) album = joiner(tr.get_tag_raw('album')) artist = joiner(tr.get_tag_raw('artist')) except Exception: logger.warning("Error while checking for compilation: " + `tr`) return if not basedir or not album or not artist: return album = album.lower() artist = artist.lower() try: if not basedir in ccheck: ccheck[basedir] = {} if not album in ccheck[basedir]: ccheck[basedir][album] = deque() except TypeError: common.log_exception(log=logger) return if ccheck[basedir][album] and \ artist not in ccheck[basedir][album]: if not (basedir, album) in compilations: compilations.append((basedir, album)) logger.info("Compilation %(album)r detected in %(dir)r" % {'album': album, 'dir': basedir}) ccheck[basedir][album].append(artist)
def _check_compilation(self, ccheck, compilations, tr): """ This is the hacky way to test to see if a particular track is a part of a compilation. Basically, if there is more than one track in a directory that has the same album but different artist, we assume that it's part of a compilation. :param ccheck: dictionary for internal use :param compilations: if a compilation is found, it'll be appended to this list :param tr: the track to check """ # check for compilations if not settings.get_option('collection/file_based_compilations', True): return def joiner(value): if not value or isinstance(value, basestring): return value else: try: return u"\u0000".join(value) except UnicodeDecodeError: return "\0".join(value) try: basedir = joiner(tr.get_tag_raw('__basedir')) album = joiner(tr.get_tag_raw('album')) artist = joiner(tr.get_tag_raw('artist')) except Exception: logger.warning("Error while checking for compilation: " + `tr`) return if not basedir or not album or not artist: return album = album.lower() artist = artist.lower() try: if not basedir in ccheck: ccheck[basedir] = {} if not album in ccheck[basedir]: ccheck[basedir][album] = deque() except TypeError: common.log_exception(log=logger) return if ccheck[basedir][album] and \ artist not in ccheck[basedir][album]: if not (basedir, album) in compilations: compilations.append((basedir, album)) logger.debug("Compilation %(album)r detected in %(dir)r" % {'album': album, 'dir': basedir}) ccheck[basedir][album].append(artist)
def _get_panel_type(self):
    """
    Loads the CD GUI panel class from the sibling _cdguipanel module.

    :returns: the CDPanel class, or the string 'flatplaylist' as a
        fallback if the panel cannot be imported
    """
    import imp
    try:
        _cdguipanel = imp.load_source(
            "_cdguipanel",
            os.path.join(os.path.dirname(__file__), "_cdguipanel.py"))
        return _cdguipanel.CDPanel
    except Exception:  # was a bare except
        common.log_exception(log=logger,
                message="Could not import cd gui panel")
        return 'flatplaylist'
def disable(exaile):
    """
    Releases the grabbed multimedia keys and drops the D-Bus proxy.

    :returns: False if keys were previously grabbed (and have now been
        released), True if there was nothing to release
    """
    global GNOME_MMKEYS
    if GNOME_MMKEYS:
        try:
            GNOME_MMKEYS.ReleaseMediaPlayerKeys("Exaile")
        except Exception:  # was a bare except; release is best-effort
            common.log_exception()
        GNOME_MMKEYS = None
        return False
    GNOME_MMKEYS = None
    return True
def emit(self, event):
    """
    Emits an Event, calling any registered callbacks.

    event: the Event to emit [Event]
    """
    # only build log messages when logging is enabled and the event
    # type passes the configured filter
    emit_logmsg = self.use_logger and (not self.logger_filter or \
            re.search(self.logger_filter, event.type))

    with self.lock:
        callbacks = set()
        # gather callbacks registered for this exact type/object plus
        # the catch-all (_NONE) registrations
        for tcall in [_NONE, event.type]:
            for ocall in [_NONE, event.object]:
                try:
                    callbacks.update(self.callbacks[tcall][ocall])
                except KeyError:
                    pass

    # now call them
    # NOTE(review): reconstructed with callbacks invoked outside the
    # lock -- confirm against upstream
    for cb in callbacks:
        try:
            if not cb.valid:
                # stale weak reference: drop the registration
                try:
                    self.callbacks[event.type][event.object].remove(cb)
                except (KeyError, ValueError):
                    pass
            elif event.time >= cb.time:
                if emit_logmsg:
                    logger.debug("Attempting to call "
                            "%(function)s in response "
                            "to %(event)s." % {
                                'function': cb.wfunction(),
                                'event': event.type})
                cb.wfunction().__call__(event.type, event.object,
                        event.data, *cb.args, **cb.kwargs)
        except Exception:
            # something went wrong inside the function we're calling
            common.log_exception(logger,
                    message="Event callback exception caught!")

    if emit_logmsg:
        logger.debug("Sent '%(type)s' event from "
                "'%(object)s' with data '%(data)s'." % {
                    'type': event.type,
                    'object': repr(event.object),
                    'data': repr(event.data)})
def _migrate_old_settings(oldsettings):
    """
    Copies settings from the old-format settings object into the new
    settings store, applying per-entry type conversions.

    Each _SETTINGS_MAP entry is (type, section, old setting name, new
    option path, optional converter-function name).
    """
    for (t, section, oldsetting, newspot, func) in _SETTINGS_MAP:
        value = None
        try:
            # func is the *name* of an optional converter defined in
            # this module; resolve and apply it when present
            if func in globals():
                func = globals()[func]
                if callable(func):
                    value = func(section, oldsetting, oldsettings)
            # NOTE(review): a falsy converter result (0, '', False)
            # falls back to the raw old value -- confirm intended
            if not value:
                value = oldsettings.get(section, oldsetting)
            value = t(value)
            settings.set_option(newspot, value)
        except Exception:  # was a bare except; skip entries that fail to convert
            common.log_exception(log=logger)
def enable_plugin(self, pluginname):
    """
    Loads and enables the named plugin, then persists the enabled set.

    :param pluginname: name of the plugin to enable
    :raises Exception: re-raised if loading or enabling fails
    """
    try:
        plugin = self.load_plugin(pluginname)
        if not plugin:
            raise Exception("Error loading plugin")
        plugin.enable(self.exaile)
        if not inspect.ismodule(plugin):
            # class-style (non-module) plugins need extra registration
            self.__enable_new_plugin(plugin)
        self.enabled_plugins[pluginname] = plugin
        logger.debug("Loaded plugin %s" % pluginname)
        self.save_enabled()
    except Exception, e:
        traceback.print_exc()
        logger.warning("Unable to enable plugin %s" % pluginname)
        common.log_exception(logger)
        raise e
def write_tags(self):
    """
    Writes tags to the file for this Track.

    Returns False if unsuccessful, and a Format object from
    `xl.metadata` otherwise.
    """
    try:
        f = metadata.get_format(self.get_loc_for_io())
        if f is None:
            return False  # not a supported type
        f.write_tags(self.__tags)
        return f
    except Exception:  # was a bare except; don't trap SystemExit/KeyboardInterrupt
        common.log_exception()
        return False
def disable_plugin(self, pluginname):
    """
    Disables a currently enabled plugin and persists the enabled set.

    :param pluginname: name of the plugin to disable
    :returns: False if the plugin was not enabled
    :raises Exception: re-raised if the plugin's disable hook fails
    """
    try:
        plugin = self.enabled_plugins[pluginname]
        del self.enabled_plugins[pluginname]
    except KeyError:
        common.log_exception(logger,
                message="Plugin not found, possibly already disabled")
        return False
    try:
        plugin.disable(self.exaile)
        logger.debug("Unloaded plugin %s" % pluginname)
        self.save_enabled()
    except Exception, e:
        traceback.print_exc()
        logger.warning("Unable to fully disable plugin %s" % pluginname)
        common.log_exception(logger)
        raise e
def get_results(self, artist):
    """
    Queries the Last.fm web service for artists similar to the given
    artist.

    :returns: list of (match score, artist name) tuples, or [] on
        network error
    """
    ar = urllib.quote_plus(artist.encode('utf-8'))
    url = 'http://ws.audioscrobbler.com/2.0/?method=artist.getsimilar&artist=%s&api_key=' + API_KEY
    try:
        f = urllib.urlopen(url % ar).read()
    except IOError:
        common.log_exception()
        return []
    retlist = []
    xml = ETree.fromstring(f)
    for e in xml.getiterator('artist'):
        retlist.append((float(e.find('match').text),
                e.find('name').text))
    return retlist
def get_results(self, artist):
    """
    Queries the Last.fm web service for artists similar to the given
    artist.

    :returns: list of (match score, artist name) tuples, or [] on
        network error
    """
    ar = urllib.quote_plus(artist.encode('utf-8'))
    url = 'http://ws.audioscrobbler.com/2.0/?method=artist.getsimilar&artist=%s&api_key=' + API_KEY
    try:
        f = urllib.urlopen(url % ar).read()
    except IOError:
        common.log_exception()
        return []
    retlist = []
    xml = ETree.fromstring(f)
    for e in xml.getiterator('artist'):
        retlist.append((float(e.find('match').text),
                e.find('name').text))
    return retlist
def emit(self, event):
    """
    Emits an Event, calling any registered callbacks.

    event: the Event to emit [Event]
    """
    # only build log messages when logging is enabled and the event
    # type passes the configured filter
    emit_logmsg = self.use_logger and (not self.logger_filter or \
            re.search(self.logger_filter, event.type))

    with self.lock:
        callbacks = set()
        # gather callbacks registered for this exact type/object plus
        # the catch-all (_NONE) registrations
        for tcall in [_NONE, event.type]:
            for ocall in [_NONE, event.object]:
                try:
                    callbacks.update(self.callbacks[tcall][ocall])
                except KeyError:
                    pass

    # now call them
    # NOTE(review): reconstructed with callbacks invoked outside the
    # lock -- confirm against upstream
    for cb in callbacks:
        try:
            if not cb.valid:
                # stale weak reference: drop the registration
                try:
                    self.callbacks[event.type][event.object].remove(cb)
                except (KeyError, ValueError):
                    pass
            elif event.time >= cb.time:
                if emit_logmsg:
                    logger.debug("Attempting to call "
                            "%(function)s in response "
                            "to %(event)s." % {
                                'function': cb.wfunction(),
                                'event': event.type})
                cb.wfunction().__call__(event.type, event.object,
                        event.data, *cb.args, **cb.kwargs)
        except Exception:
            # something went wrong inside the function we're calling
            common.log_exception(logger,
                    message="Event callback exception caught!")

    if emit_logmsg:
        logger.debug("Sent '%(type)s' event from "
                "'%(object)s' with data '%(data)s'." % {
                    'type': event.type,
                    'object': repr(event.object),
                    'data': repr(event.data)})
def submit_to_scrobbler(self, track, time_started, time_played):
    """
    Submits a played track to AudioScrobbler.

    Only submits when a scrobbler session is active and track/timing
    information is available; failures are logged, never raised.
    """
    if scrobbler.SESSION_ID and track and time_started and time_played:
        try:
            scrobbler.submit(
                track.get_tag_raw("artist", join=True),
                track.get_tag_raw("title", join=True),
                int(time_started),
                "P",
                "",
                int(track.get_tag_raw("__length")),
                track.get_tag_raw("album", join=True),
                track.split_numerical(track.get_tag_raw("tracknumber"))[0] or 0,
                autoflush=True,
            )
        except Exception:  # was a bare except
            common.log_exception()
            logger.warning("AS: Failed to submit track")
def submit_to_scrobbler(self, track, time_started, time_played):
    """
    Submits a played track to AudioScrobbler.

    Only submits when a scrobbler session is active and track/timing
    information is available; failures are logged, never raised.
    """
    if scrobbler.SESSION_ID and track and time_started and time_played:
        try:
            scrobbler.submit(
                track.get_tag_raw('artist', join=True),
                track.get_tag_raw('title', join=True),
                int(time_started),
                'P',
                '',
                int(track.get_tag_raw('__length')),
                track.get_tag_raw('album', join=True),
                track.split_numerical(track.get_tag_raw('tracknumber'))[0] or 0,
                autoflush=True,
            )
        except Exception:  # was a bare except
            common.log_exception()
            logger.warning("AS: Failed to submit track")
def rescan_libraries(self, startup_only=False, force_update=False):
    """
    Rescans all libraries associated with this Collection

    :param startup_only: only scan libraries flagged for startup
        scanning (ignored when force_update is set)
    :param force_update: rescan tracks even if they appear unchanged
    :raises Exception: if a scan is already in progress
    """
    if self._scanning:
        raise Exception("Collection is already being scanned")
    if len(self.libraries) == 0:
        event.log_event('scan_progress_update', self, 100)
        return  # no libraries, no need to scan :)

    self._scanning = True
    self._scan_stopped = False

    self.file_count = -1  # negative means we dont know it yet
    self.__count_files()

    scan_interval = 20

    for library in self.libraries.itervalues():
        if not force_update and startup_only and not (
                library.monitored and library.startup_scan):
            continue
        event.add_callback(self._progress_update, 'tracks_scanned',
                library)
        library.rescan(notify_interval=scan_interval,
                force_update=force_update)
        event.remove_callback(self._progress_update, 'tracks_scanned',
                library)
        self._running_total_count += self._running_count
        if self._scan_stopped:
            break
    else:
        # didnt break: all libraries finished, persist the db
        try:
            self.save_to_location()
        except AttributeError:
            common.log_exception(log=logger)

    event.log_event('scan_progress_update', self, 100)
    self._running_total_count = 0
    self._running_count = 0
    self._scanning = False
    self.file_count = -1
def rescan_libraries(self, startup_only=False):
    """
    Rescans all libraries associated with this Collection

    :param startup_only: only scan libraries flagged for startup
        scanning
    :raises Exception: if a scan is already in progress
    """
    if self._scanning:
        raise Exception("Collection is already being scanned")
    if len(self.libraries) == 0:
        event.log_event('scan_progress_update', self, 100)
        return  # no libraries, no need to scan :)

    self._scanning = True
    self._scan_stopped = False

    self.file_count = -1  # negative means we dont know it yet
    self.__count_files()

    scan_interval = 20

    for library in self.libraries.itervalues():
        if startup_only and not (
                library.monitored and library.startup_scan):
            continue
        event.add_callback(self._progress_update, 'tracks_scanned',
                library)
        library.rescan(notify_interval=scan_interval)
        event.remove_callback(self._progress_update, 'tracks_scanned',
                library)
        self._running_total_count += self._running_count
        if self._scan_stopped:
            break
    else:
        # didnt break: all libraries finished, persist the db
        try:
            self.save_to_location()
        except AttributeError:
            common.log_exception(log=logger)

    event.log_event('scan_progress_update', self, 100)
    self._running_total_count = 0
    self._running_count = 0
    self._scanning = False
    self.file_count = -1
def connect(self):
    """
    Connects to the HAL daemon over the system D-Bus and registers all
    known device providers.

    Failure is non-fatal: device autodetection is simply disabled.
    """
    try:
        self.bus = dbus.SystemBus()
        hal_obj = self.bus.get_object('org.freedesktop.Hal',
                '/org/freedesktop/Hal/Manager')
        self.hal = dbus.Interface(hal_obj, 'org.freedesktop.Hal.Manager')
        logger.debug("HAL Providers: %s" % repr(self.get_providers()))
        for p in self.get_providers():
            try:
                self.on_provider_added(p)
            except:
                # one broken provider should not block the others
                logger.warning("Failed to load HAL devices for %s" % p.name)
                common.log_exception(logger)
        self.setup_device_events()
        logger.debug("Connected to HAL")
        event.log_event("hal_connected", self, None)
    except:
        # best-effort: HAL may simply be unavailable on this system
        logger.warning("Failed to connect to HAL, " \
                "autodetection of devices will be disabled.")
def initialize(self, username, password, server):
    """
    Logs in to an AudioScrobbler server and hooks playback events.

    Tries a plain-password login first, then falls back to a hashed
    password; on total failure the connection attempt is abandoned.
    """
    try:
        logger.info("Attempting to connect to AudioScrobbler (%s)" % server)
        scrobbler.login(username, password, hashpw=False, post_url=server)
    except:
        try:
            # retry with a hashed password
            scrobbler.login(username, password, hashpw=True, post_url=server)
        except:
            self.connecting = False
            common.log_exception()
            return
    logger.info("Connected to AudioScrobbler")

    event.add_callback(self.on_play, "playback_track_start",
            player.PLAYER)
    event.add_callback(self.on_stop, "playback_track_end",
            player.PLAYER)
    self.connected = True
    self.connecting = False
def connect(self):
    """
    Connects to the device backend and starts listening for device
    add/remove notifications.

    :returns: True on success, False if the backend is unavailable
    """
    assert self._state == 'init'
    logger.debug("Connecting to %s", self.name)
    try:
        self.obj = self._connect()
        logger.info("Connected to %s", self.name)
        event.log_event("hal_connected", self, None)
    except Exception:
        logger.warning("Failed to connect to %s, " \
                "autodetection of devices will be disabled.", self.name)
        common.log_exception()
        return False
    self._state = 'addremove'
    logger.debug("%s: state = addremove", self.name)
    # register devices that are already present
    self._add_all(self.obj)
    self._state = 'listening'
    logger.debug("%s: state = listening", self.name)
    return True
def read_tags(self):
    """
    Reads tags from the file for this Track.

    Returns False if unsuccessful, and a Format object from
    `xl.metadata` otherwise.
    """
    try:
        f = metadata.get_format(self.get_loc_for_io())
        if f is None:
            self._scan_valid = False
            return False  # not a supported type
        ntags = f.read_all()
        for k, v in ntags.iteritems():
            self.set_tag_raw(k, v)

        # remove tags that have been deleted in the file, while
        # taking into account that the db may have tags not
        # supported by the file's tag format.
        if f.others:
            supported_tags = [ t for t in self.list_tags() \
                    if not t.startswith("__") ]
        else:
            supported_tags = f.tag_mapping.keys()
        for tag in supported_tags:
            if tag not in ntags.keys():
                self.set_tag_raw(tag, None)

        # fill out file specific items
        gloc = gio.File(self.get_loc_for_io())
        mtime = gloc.query_info("time::modified").get_modification_time()
        self.set_tag_raw('__modified', mtime)
        # TODO: this probably breaks on non-local files
        path = gloc.get_parent().get_path()
        self.set_tag_raw('__basedir', path)
        self._dirty = True
        self._scan_valid = True
        return f
    except Exception:
        self._scan_valid = False
        common.log_exception()
        return False
def read_tags(self):
    """
    Reads tags from the file for this Track.

    Returns False if unsuccessful, and a Format object from
    `xl.metadata` otherwise.
    """
    try:
        f = metadata.get_format(self.get_loc_for_io())
        if f is None:
            self._scan_valid = False
            return False  # not a supported type
        ntags = f.read_all()
        for k, v in ntags.iteritems():
            self.set_tag_raw(k, v)

        # remove tags that have been deleted in the file, while
        # taking into account that the db may have tags not
        # supported by the file's tag format.
        if f.others:
            supported_tags = [ t for t in self.list_tags() \
                    if not t.startswith("__") ]
        else:
            supported_tags = f.tag_mapping.keys()
        for tag in supported_tags:
            if tag not in ntags.keys():
                self.set_tag_raw(tag, None)

        # fill out file specific items
        gloc = gio.File(self.get_loc_for_io())
        mtime = gloc.query_info("time::modified").get_modification_time()
        self.set_tag_raw('__modified', mtime)
        # TODO: this probably breaks on non-local files
        path = gloc.get_parent().get_path()
        self.set_tag_raw('__basedir', path)
        self._dirty = True
        self._scan_valid = True
        return f
    except:
        self._scan_valid = False
        common.log_exception()
        return False
def reset_providers(self):
    """
    Rebuilds the element chain from the registered audio-plugin
    providers, resolving index collisions by bumping a colliding
    provider to the next free slot (and warning about the collision).
    """
    self.elements = {}

    dups = {}

    for provider in self.get_providers():
        idx = provider.index
        if idx in self.elements:
            # remember which providers wanted the same index
            dup = dups.setdefault(idx, [self.elements[idx].name])
            dup.append(provider.name)
            while idx in self.elements:
                idx += 1
        try:
            self.elements[idx] = provider(self.player)
        except:
            logger.warning("Could not create %s element for %s." % \
                    (provider, self.get_name()) )
            common.log_exception(log=logger)

    #self.setup_elements()

    for k, v in dups.iteritems():
        logger.warning("Audio plugins %s are sharing index %s "
                "(may have unpredictable output!)", v, k)
def select(self, query, args=None):
    """
    Runs a select and returns all rows.  This is only for small
    select operations.  If you want to do a large select, use
    DBManager.realcursor()

    :param query: SQL to execute
    :param args: parameter sequence for the query
    :returns: list of result rows
    """
    if args is None:  # was args=[]: avoid a shared mutable default
        args = []
    db = self._get_from_pool()
    cur = db.cursor()
    cur.execute(query, args)

    rows = []
    while True:
        try:
            row = cur.fetchone()
            if not row:
                break
            rows.append(row)
        except Exception:  # was a bare except
            common.log_exception(log=logger)
            # bugfix: bail out instead of retrying forever if the
            # cursor keeps raising (original could loop infinitely)
            break
    cur.close()

    return rows
def _enable(eventname, player, nothing):
    """
    Grabs the desktop multimedia keys over D-Bus and wires key presses
    to the plugin callback.

    Tries the MATE settings daemon first, then the Gnome >= 2.22
    interface, then the legacy Gnome interface.

    :returns: True if the keys were grabbed, False otherwise
    """
    global GNOME_MMKEYS

    def on_gnome_mmkey(app, key):
        # only react to presses addressed to us
        if app == "Exaile":
            callback(key)

    try:
        bus = dbus.SessionBus()
        try:
            # Mate desktop
            obj = bus.get_object('org.mate.SettingsDaemon',
                    '/org/mate/SettingsDaemon/MediaKeys')
            GNOME_MMKEYS = dbus.Interface(obj,
                    'org.mate.SettingsDaemon.MediaKeys')
        except dbus.DBusException:
            try:
                # New method (for Gnome 2.22.x)
                obj = bus.get_object('org.gnome.SettingsDaemon',
                        '/org/gnome/SettingsDaemon/MediaKeys')
                GNOME_MMKEYS = dbus.Interface(obj,
                        'org.gnome.SettingsDaemon.MediaKeys')
            except dbus.DBusException:
                try:
                    # Old method
                    obj = bus.get_object('org.gnome.SettingsDaemon',
                            '/org/gnome/SettingsDaemon')
                    GNOME_MMKEYS = dbus.Interface(obj,
                            'org.gnome.SettingsDaemon')
                except dbus.DBusException:
                    # Make sure we bail out completely here
                    raise
    except dbus.DBusException:
        disable(None)  # Disconnect if we failed to load completely
        GNOME_MMKEYS = None
        common.log_exception(logger)
        return False
    else:
        GNOME_MMKEYS.GrabMediaPlayerKeys("Exaile", 0)
        GNOME_MMKEYS.connect_to_signal('MediaPlayerKeyPressed',
                on_gnome_mmkey)
        return True
def reset_providers(self):
    """
    Rebuilds the element chain from the registered audio-plugin
    providers, resolving index collisions by bumping a colliding
    provider to the next free slot (and warning about the collision).
    """
    self.elements = {}

    dups = {}

    for provider in self.get_providers():
        idx = provider.index
        if idx in self.elements:
            # remember which providers wanted the same index
            dup = dups.setdefault(idx, [self.elements[idx].name])
            dup.append(provider.name)
            while idx in self.elements:
                idx += 1
        try:
            self.elements[idx] = provider(self.player)
        except:
            logger.warning("Could not create %s element for %s." % \
                    (provider, self.get_name()) )
            common.log_exception(log=logger)

    #self.setup_elements()

    for k, v in dups.iteritems():
        logger.warning("Audio plugins %s are sharing index %s "
                "(may have unpredictable output!)", v, k)
def unlink_stream(self, stream):
    """
    Detaches a stream from the adder and tears it down, emitting the
    playback_track_end event for its track.

    :returns: True on success (or if the stream was already gone),
        False on unexpected failure
    """
    try:
        current = stream.get_track()
        pad = stream.get_static_pad("src").get_peer()
        stream.unlink(self.adder)
        try:
            self.adder.release_request_pad(pad)
        except TypeError:
            pass
        # defer the state change to the glib main loop
        gobject.idle_add(stream.set_state, gst.STATE_NULL)
        try:
            self.pipe.remove(stream)
        except gst.RemoveError:
            logger.debug("Failed to remove stream %s"%stream)
        if stream in self.streams:
            # keep the slot, drop the reference
            self.streams[self.streams.index(stream)] = None
        event.log_event("playback_track_end", self, current)
        return True
    except AttributeError:
        # stream was already torn down
        return True
    except:
        common.log_exception(log=logger)
        return False
def initialize(self, username, password, server):
    """
    Logs in to an AudioScrobbler server and hooks playback events.

    Tries a plain-password login first, then falls back to a hashed
    password; on total failure the connection attempt is abandoned.
    """
    try:
        logger.info("Attempting to connect to AudioScrobbler (%s)" % server)
        scrobbler.login(username, password, hashpw=False, post_url=server)
    except:
        try:
            # retry with a hashed password
            scrobbler.login(username, password, hashpw=True, post_url=server)
        except:
            self.connecting = False
            common.log_exception()
            return
    logger.info("Connected to AudioScrobbler")

    event.add_callback(self.on_play, 'playback_track_start',
            player.PLAYER)
    event.add_callback(self.on_stop, 'playback_track_end',
            player.PLAYER)
    self.connected = True
    self.connecting = False
def load_tracks(db, current=None):
    """
    Loads all tracks currently stored in the database

    Also repopulates the module-level PATHS/ARTISTS/RADIO/PLAYLISTS
    and ALBUMS lookup tables from the database.

    :param db: database manager to read from
    :param current: unused in this view -- TODO confirm
    :returns: a TrackData containing all non-blacklisted tracks
    """
    global ALBUMS
    items = ('PATHS', 'ARTISTS', 'RADIO', 'PLAYLISTS')
    # reset the module-level lookup tables
    for item in items:
        globals()[item] = dict()
    ALBUMS = {}

    added = dict()
    tracks = TrackData()
    for row in db.select(""" SELECT paths.name, title, artists.name, albums.name, disc_id, tracks.genre, track, length, bitrate, year, modified, user_rating, rating, blacklisted, time_added, encoding, plays FROM tracks, paths, artists, albums WHERE ( paths.id=tracks.path AND artists.id = tracks.artist AND albums.id = tracks.album ) AND blacklisted=0 ORDER BY THE_CUTTER(artists.name), LOWER(albums.name), disc_id, track, title """):
        t = oldtrack.Track(*row)
        path, ext = os.path.splitext(row[0].lower().encode('utf-8'))
        t.type = "file"

        if already_added(t, added):
            continue
        tracks.append(t)

    cur = db.cursor(new=True)
    # rebuild the name -> id maps for each lookup table
    for item in items:
        cur.execute("SELECT id, name FROM %s" % item.lower())
        while True:
            try:
                row = cur.fetchone()
                if not row:
                    break
                globals()[item][row[1]] = row[0]
            except:
                common.log_exception()

    cur.execute("SELECT artist, name, id FROM albums")
    while True:
        try:
            row = cur.fetchone()
            if not row:
                break
            # keyed by "<artist id> - <album name>"
            ALBUMS["%d - %s" % (row[0], row[1])] = row[2]
        except:
            common.log_exception()

    cur.close()
    db._close_thread()
    return tracks
def rescan(self, notify_interval=None, force_update=False):
    """
    Rescan the associated folder and add the contained files
    to the Collection

    :param notify_interval: emit a 'tracks_scanned' progress event
        every this many files (None disables progress events)
    :param force_update: rescan files even if they appear unchanged
    """
    # TODO: use gio's cancellable support

    if self.collection is None:
        return True

    if self.scanning:
        return
    logger.info("Scanning library: %s", self.location)
    self.scanning = True
    db = self.collection
    libloc = gio.File(self.location)

    count = 0
    dirtracks = deque()
    compilations = deque()
    ccheck = {}
    for fil in common.walk(libloc):
        count += 1
        type = fil.query_info("standard::type").get_file_type()
        if type == gio.FILE_TYPE_DIRECTORY:
            # finishing a directory: run compilation detection on the
            # tracks gathered so far, then reset the per-dir state
            if dirtracks:
                for tr in dirtracks:
                    self._check_compilation(ccheck, compilations, tr)
                for (basedir, album) in compilations:
                    base = basedir.replace('"', '\\"')
                    alb = album.replace('"', '\\"')
                    items = [tr for tr in dirtracks
                             if tr.get_tag_raw('__basedir') == base and
                             # FIXME: this is ugly
                             alb in "".join(
                                 tr.get_tag_raw('album') or []).lower()]
                    for item in items:
                        item.set_tag_raw('__compilation', (basedir, album))
            dirtracks = deque()
            compilations = deque()
            ccheck = {}
        elif type == gio.FILE_TYPE_REGULAR:
            tr = self.update_track(fil, force_update=force_update)
            if not tr:
                continue

            if dirtracks is not None:
                dirtracks.append(tr)
                # do this so that if we have, say, a 4000-song folder
                # we dont get bogged down trying to keep track of them
                # for compilation detection. Most albums have far fewer
                # than 110 tracks anyway, so it is unlikely that this
                # restriction will affect the heuristic's accuracy.
                # 110 was chosen to accomodate "top 100"-style
                # compilations.
                if len(dirtracks) > 110:
                    logger.info("Too many files, skipping "
                            "compilation detection heuristic.")
                    dirtracks = None

        if self.collection and self.collection._scan_stopped:
            self.scanning = False
            logger.info("Scan canceled")
            return

        # progress update
        if notify_interval is not None and count % notify_interval == 0:
            event.log_event('tracks_scanned', self, count)

    # final progress update
    if notify_interval is not None:
        event.log_event('tracks_scanned', self, count)

    # drop tracks whose files have vanished from this library
    removals = deque()
    for tr in self.collection.tracks.itervalues():
        tr = tr._track
        loc = tr.get_loc_for_io()
        if not loc:
            continue
        gloc = gio.File(loc)
        try:
            if not gloc.has_prefix(libloc):
                continue
        except UnicodeDecodeError:
            common.log_exception(log=logger)
            continue
        if not gloc.query_exists():
            removals.append(tr)

    for tr in removals:
        logger.debug(u"Removing %s"%unicode(tr))
        self.collection.remove(tr)

    logger.info("Scan completed: %s", self.location)
    self.scanning = False
def save_to_location(self, location=None):
    """
    Saves a pickled representation of this :class:`TrackDB` to the
    specified location.

    :param location: the location to save the data to
    :type location: string
    """
    # the db is dirty if it, or any contained track, is dirty
    if not self._dirty:
        for track in self.tracks.itervalues():
            if track._track._dirty:
                self._dirty = True
                break

    if not self._dirty:
        return

    if not location:
        location = self.location
    if not location:
        raise AttributeError(
                _("You did not specify a location to save the db"))

    if self._saving:
        return
    self._saving = True

    logger.debug("Saving %s DB to %s." % (self.name, location))

    try:
        try:
            pdata = shelve.open(location, flag='c',
                    protocol=common.PICKLE_PROTOCOL)
        except ImportError:
            # bsddb may be unavailable; use the external bsddb3 module
            import bsddb3
            _db = bsddb3.hashopen(location, 'c')
            pdata = shelve.Shelf(_db, protocol=common.PICKLE_PROTOCOL)
        if pdata.get('_dbversion', self._dbversion) > self._dbversion:
            raise common.VersionError("DB was created on a newer Exaile.")
    except Exception:
        logger.error("Failed to open music DB for writing.")
        common.log_exception(log=logger)
        # bugfix: clear the flag, otherwise every future save would be
        # silently skipped after a single open failure
        self._saving = False
        return

    for attr in self.pickle_attrs:
        # bad hack to allow saving of lists/dicts of Tracks
        if 'tracks' == attr:
            for k, track in self.tracks.iteritems():
                key = "tracks-%s" % track._key
                if track._track._dirty or key not in pdata:
                    pdata[key] = (track._track._pickles(),
                            track._key,
                            deepcopy(track._attrs))
        else:
            pdata[attr] = deepcopy(getattr(self, attr))

    pdata['_dbversion'] = self._dbversion

    # purge entries for tracks removed since the last save
    for key in self._deleted_keys:
        key = "tracks-%s" % key
        if key in pdata:
            del pdata[key]

    pdata.sync()
    pdata.close()

    for track in self.tracks.itervalues():
        track._track._dirty = False

    self._dirty = False
    self._saving = False
def load_from_location(self, location=None):
    """
    Restores :class:`TrackDB` state from the pickled representation
    stored at the specified location.

    :param location: the location to load the data from
    :type location: string
    """
    if not location:
        location = self.location
    if not location:
        raise AttributeError(
                _("You did not specify a location to load the db from"))

    logger.debug("Loading %s DB from %s." % (self.name, location))

    try:
        try:
            pdata = shelve.open(location, flag='c',
                    protocol=common.PICKLE_PROTOCOL)
        except ImportError:
            # ArchLinux disabled bsddb in python2, so we have to use
            # the external module
            import bsddb3
            _db = bsddb3.hashopen(location, 'c')
            pdata = shelve.Shelf(_db, protocol=common.PICKLE_PROTOCOL)
        if pdata.has_key("_dbversion"):
            if int(pdata['_dbversion']) > int(self._dbversion):
                raise common.VersionError, \
                        "DB was created on a newer Exaile version."
            elif pdata['_dbversion'] < self._dbversion:
                # older on-disk format: back it up, then migrate
                logger.info("Upgrading DB format....")
                import shutil
                shutil.copyfile(location,
                        location + "-%s.bak" % pdata['_dbversion'])
                import xl.migrations.database as dbmig
                dbmig.handle_migration(self, pdata,
                        pdata['_dbversion'], self._dbversion)
    except common.VersionError:
        raise
    except Exception:
        logger.error("Failed to open music DB.")
        common.log_exception(log=logger)
        return

    for attr in self.pickle_attrs:
        try:
            if 'tracks' == attr:
                data = {}
                for k in (x for x in pdata.keys() \
                        if x.startswith("tracks-")):
                    p = pdata[k]
                    tr = Track(_unpickles=p[0])
                    loc = tr.get_loc_for_io()
                    if loc not in data:
                        data[loc] = TrackHolder(tr, p[1], **p[2])
                    else:
                        logger.warning("Duplicate track found: %s" % loc)
                        # presumably the second track was written
                        # because of an error, so use the first track
                        # found.
                        del pdata[k]
                setattr(self, attr, data)
            else:
                setattr(self, attr, pdata[attr])
        except Exception:
            # FIXME: Do something about this
            logger.warn("Exception occurred while loading %s" % location)
            common.log_exception(log=logger)

    pdata.close()

    self._dirty = False
def save_to_location(self, location=None):
    """
    Saves a pickled representation of this :class:`TrackDB` to the
    specified location.

    :param location: the location to save the data to
    :type location: string
    """
    # the db is dirty if it, or any contained track, is dirty
    if not self._dirty:
        for track in self.tracks.itervalues():
            if track._track._dirty:
                self._dirty = True
                break

    if not self._dirty:
        return

    if not location:
        location = self.location
    if not location:
        raise AttributeError(
                _("You did not specify a location to save the db"))

    if self._saving:
        return
    self._saving = True

    logger.debug("Saving %s DB to %s." % (self.name, location))

    try:
        try:
            pdata = shelve.open(location, flag='c',
                    protocol=common.PICKLE_PROTOCOL)
        except ImportError:
            # bsddb may be unavailable; use the external bsddb3 module
            import bsddb3
            _db = bsddb3.hashopen(location, 'c')
            pdata = shelve.Shelf(_db, protocol=common.PICKLE_PROTOCOL)
        if pdata.get('_dbversion', self._dbversion) > self._dbversion:
            raise common.VersionError("DB was created on a newer Exaile.")
    except Exception:
        logger.error("Failed to open music DB for writing.")
        common.log_exception(log=logger)
        # bugfix: clear the flag, otherwise every future save would be
        # silently skipped after a single open failure
        self._saving = False
        return

    for attr in self.pickle_attrs:
        # bad hack to allow saving of lists/dicts of Tracks
        if 'tracks' == attr:
            for k, track in self.tracks.iteritems():
                key = "tracks-%s" % track._key
                if track._track._dirty or key not in pdata:
                    pdata[key] = (track._track._pickles(),
                            track._key,
                            deepcopy(track._attrs))
        else:
            pdata[attr] = deepcopy(getattr(self, attr))

    pdata['_dbversion'] = self._dbversion

    # purge entries for tracks removed since the last save
    for key in self._deleted_keys:
        key = "tracks-%s" % key
        if key in pdata:
            del pdata[key]

    pdata.sync()
    pdata.close()

    for track in self.tracks.itervalues():
        track._track._dirty = False

    self._dirty = False
    self._saving = False
def rescan(self, notify_interval=None):
    """
        Rescan the associated folder and add the contained files
        to the Collection

        :param notify_interval: if not None, fire a 'tracks_scanned'
            event every *notify_interval* files (and once at the end)
    """
    # TODO: use gio's cancellable support

    if self.collection is None:
        return True

    if self.scanning:
        return

    logger.info("Scanning library: %s", self.location)
    self.scanning = True
    db = self.collection
    libloc = gio.File(self.location)

    count = 0
    # per-directory state for the compilation-detection heuristic;
    # reset every time the walk enters a new directory
    dirtracks = deque()
    compilations = deque()
    ccheck = {}
    for fil in common.walk(libloc):
        count += 1
        type = fil.query_info("standard::type").get_file_type()
        if type == gio.FILE_TYPE_DIRECTORY:
            if dirtracks:
                for tr in dirtracks:
                    self._check_compilation(ccheck, compilations, tr)
                for (basedir, album) in compilations:
                    # NOTE(review): this escaping looks like a leftover
                    # from a quoted search-query comparison — 'base' is
                    # compared against the UNescaped __basedir tag, so
                    # paths containing '"' would never match. Confirm
                    # against _check_compilation before changing.
                    base = basedir.replace('"', '\\"')
                    alb = album.replace('"', '\\"')
                    items = [ tr for tr in dirtracks if \
                            tr.get_tag_raw('__basedir') == base and \
                            # FIXME: this is ugly
                            alb in "".join(
                                tr.get_tag_raw('album') or []).lower() ]
                    for item in items:
                        item.set_tag_raw('__compilation', (basedir, album))
            dirtracks = deque()
            compilations = deque()
            ccheck = {}
        elif type == gio.FILE_TYPE_REGULAR:
            tr = self.update_track(fil)
            if not tr:
                continue

            # dirtracks is None once the current directory has been
            # declared too large for compilation detection
            if dirtracks is not None:
                dirtracks.append(tr)
                # do this so that if we have, say, a 4000-song folder
                # we dont get bogged down trying to keep track of them
                # for compilation detection. Most albums have far fewer
                # than 110 tracks anyway, so it is unlikely that this
                # restriction will affect the heuristic's accuracy.
                # 110 was chosen to accomodate "top 100"-style
                # compilations.
                if len(dirtracks) > 110:
                    logger.info("Too many files, skipping "
                            "compilation detection heuristic.")
                    dirtracks = None

        # cooperative cancellation, checked once per walked file
        if self.collection and self.collection._scan_stopped:
            self.scanning = False
            logger.info("Scan canceled")
            return

        # progress update
        if notify_interval is not None and count % notify_interval == 0:
            event.log_event('tracks_scanned', self, count)

    # final progress update
    if notify_interval is not None:
        event.log_event('tracks_scanned', self, count)

    # prune tracks that live under this library but no longer exist
    removals = deque()
    for tr in self.collection.tracks.itervalues():
        tr = tr._track
        loc = tr.get_loc_for_io()
        if not loc:
            continue
        gloc = gio.File(loc)
        try:
            if not gloc.has_prefix(libloc):
                continue
        except UnicodeDecodeError:
            common.log_exception(log=logger)
            continue
        if not gloc.query_exists():
            removals.append(tr)

    for tr in removals:
        logger.debug(u"Removing %s"%unicode(tr))
        self.collection.remove(tr)

    logger.info("Scan completed: %s", self.location)
    self.scanning = False
class Track(object):
    """
        Represents a single track.

        Track objects are interned per-uri: constructing a Track for a
        uri that already has a live Track returns the existing instance
        (see __new__).
    """
    # save a little memory this way
    __slots__ = ["__tags", "_scan_valid",
            "_dirty", "__weakref__", "_init"]
    # this is used to enforce the one-track-per-uri rule; weak values so
    # tracks can still be garbage-collected when nothing else holds them
    __tracksdict = weakref.WeakValueDictionary()
    # store a copy of the settings values here - much faster (0.25 cpu
    # seconds) (see _the_cuts_cb)
    __the_cuts = settings.get_option('collection/strip_list', [])

    def __new__(cls, *args, **kwargs):
        """
            override the construction of new Track objects so that
            if there is already a Track for a given uri, we just return
            that Track instance instead of creating a new one.
        """
        # subclassing interferes with the one-track-per-uri scheme and
        # with save and restore of tracks, so we disallow it.
        if cls != Track:
            raise TypeError, "Track cannot be subclassed!"

        uri = None
        if len(args) > 0:
            uri = args[0]
        else:
            uri = kwargs.get("uri")

        # Restore uri from pickled state if possible.  This means that
        # if a given Track is in more than one TrackDB, the first
        # TrackDB to get loaded takes precedence, and any data in the
        # second TrackDB is consequently ignored. Thus if at all
        # possible, Tracks should NOT be persisted in more than one
        # TrackDB at a time.
        unpickles = None
        if uri is None:
            # positional signature is (uri, scan, _unpickles)
            if len(args) > 2:
                unpickles = args[2]
            else:
                unpickles = kwargs.get("_unpickles")
            if unpickles is not None:
                uri = unpickles.get("__loc")

        if uri is not None:
            # normalize to a canonical gio uri so equivalent spellings
            # of the same location share one Track
            uri = gio.File(uri).get_uri()
            try:
                tr = cls.__tracksdict[uri]
                # tell __init__ to skip re-initialization of the
                # reused instance
                tr._init = False

                # if the track *does* happen to be pickled in more than one
                # place, then we need to preserve any internal tags that aren't
                # persisted to disk.
                #
                # See https://bugs.launchpad.net/exaile/+bug/1054637
                if unpickles is None:
                    if len(args) > 2:
                        unpickles = args[2]
                    else:
                        unpickles = kwargs.get("_unpickles")

                if unpickles is not None:
                    for tag, values in unpickles.iteritems():
                        tags = tr.list_tags()
                        # only copy internal ("__"-prefixed) tags that
                        # the existing instance doesn't already carry
                        if tag.startswith('__') and tag not in tags:
                            tr.set_tag_raw(tag, values)

            except KeyError:
                # no live Track for this uri yet - make and register one
                tr = object.__new__(cls)
                cls.__tracksdict[uri] = tr
                tr._init = True
            return tr
        else:
            # this should always fail in __init__, and will never be
            # called in well-formed code.
            tr = object.__new__(cls)
            tr._init = True
            return tr

    def __init__(self, uri=None, scan=True, _unpickles=None):
        """
            :param uri:  The path to the track.
            :param scan: Whether to try to read tags from the given uri.
                  Use only if the tags need to be set by a
                  different source.

            :param _unpickles: used internally to restore from a pickled
                state. not for normal use.
            :raises ValueError: if neither uri nor _unpickles is given
        """
        # don't re-init if its a reused track. see __new__
        if self._init == False:
            return

        self.__tags = {}
        self._scan_valid = None # whether our last tag read attempt worked
        self._dirty = False
        if _unpickles:
            self._unpickles(_unpickles)
            self.__register()
        elif uri:
            self.set_loc(uri)
            if scan:
                self.read_tags()
        else:
            raise ValueError, "Cannot create a Track from nothing"

    def __register(self):
        """
            Register this instance into the global registry of Track
            objects.
        """
        self.__tracksdict[self.__tags['__loc']] = self

    def __unregister(self):
        """
            Unregister this instance from the global registry of
            Track objects.
        """
        try:
            del self.__tracksdict[self.__tags['__loc']]
        except KeyError:
            pass

    def set_loc(self, loc):
        """
            Sets the location.

            :param loc: the location, as either a uri or a file path.
        """
        # re-key the interning registry under the new canonical uri
        self.__unregister()
        gloc = gio.File(loc)
        self.__tags['__loc'] = gloc.get_uri()
        self.__register()
        event.log_event('track_tags_changed', self, '__loc')

    def exists(self):
        """
            Returns whether the file exists
            This can be very slow, use with caution!
        """
        return gio.File(self.get_loc_for_io()).query_exists()

    def get_loc_for_io(self):
        """
            Gets the location as a full uri.

            Safe for IO operations via gio, not suitable for display to
            users as it may be in non-utf-8 encodings.
        """
        return self.__tags['__loc']

    def local_file_name(self):
        """
            If the file is accessible on the local filesystem, returns a
            standard path to it (e.g. "/home/foo/bar"), otherwise,
            returns None.

            If a path is returned, it is safe to use for IO operations.
            Existence of a path does *not* guarantee file existence.
        """
        # NOTE(review): this raises unconditionally, making the return
        # below unreachable — presumably an intentional hard
        # deprecation; confirm no callers still rely on this method.
        raise DeprecationWarning('get_local_path() is '
            'preferred over local_file_name()')
        return self.get_local_path()

    def get_local_path(self):
        """
            If the file is accessible on a local filesystem, retrieves
            the full path to it, otherwise nothing.

            :returns: the file path or None
            :rtype: string or NoneType
        """
        return gio.File(self.get_loc_for_io()).get_path()

    def get_basename(self):
        """
            Returns the base name of a resource
        """
        gfile = gio.File(self.get_loc_for_io())
        return gfile.get_basename()

    def get_type(self):
        """
            Get the URI schema the file uses, e.g. file, http, smb.
        """
        return gio.File(self.get_loc_for_io()).get_uri_scheme()

    def write_tags(self):
        """
            Writes tags to the file for this Track.

            Returns False if unsuccessful, and a Format object from
            `xl.metadata` otherwise.
        """
        try:
            f = metadata.get_format(self.get_loc_for_io())
            if f is None:
                return False # not a supported type
            f.write_tags(self.__tags)
            return f
        except IOError, e:
            # error writing to the file, probably
            logger.warning("Could not write tags to file: %s" % e)
            return False
        except Exception, e:
            common.log_exception(logger)
            logger.warning(
                "Unknown exception: Could not write tags to file: %s" % e)
            return False
def __init(self):
    """
        Initializes Exaile.

        Runs the full startup sequence: settings, locale logging,
        optional splash, 0.2.14 migration, settings migrations, plugins,
        collection, player/queue, playlists, devices (HAL/UDisks),
        radio, and - when StartGui is set - the GUI, finally restoring
        player state or opening uris passed on the command line.
    """
    # pylint: disable-msg=W0201
    logger.info("Loading Exaile %s on Python %s..." % (__version__,
        platform.python_version()))

    logger.info("Loading settings...")
    try:
        from xl import settings
    except common.VersionError:
        # settings file written by a newer Exaile - cannot continue
        common.log_exception(log=logger)
        sys.exit(1)

    logger.debug("Settings loaded from %s" % settings.location)

    # display locale information if available
    try:
        import locale
        lc, enc = locale.getlocale()
        if enc is not None:
            logger.info("Using %s %s locale" % (lc, enc))
        else:
            logger.info("Using unknown locale")
    except:
        pass

    splash = None

    if self.options.StartGui:
        from xl import settings

        if settings.get_option('gui/use_splash', True):
            from xlgui.widgets.info import Splash

            splash = Splash()
            splash.show()

    firstrun = settings.get_option("general/first_run", True)

    # one-time import of data from Exaile 0.2.14, forced or on first run
    if not self.options.NoImport and \
            (firstrun or self.options.ForceImport):
        try:
            sys.path.insert(0, xdg.get_data_path("migrations"))
            import migration_200907100931 as migrator
            del sys.path[0]
            migrator.migrate(force=self.options.ForceImport)
            del migrator
        except:
            common.log_exception(log=logger,
                    message=_("Failed to migrate from 0.2.14"))

    # Migrate old rating options
    from xl.migrations.settings import rating
    rating.migrate()

    # Migrate builtin OSD to plugin
    from xl.migrations.settings import osd
    osd.migrate()

    # Initialize plugin manager
    from xl import plugins
    self.plugins = plugins.PluginsManager(self)

    if not self.options.SafeMode:
        logger.info("Loading plugins...")
        self.plugins.load_enabled()
    else:
        logger.info("Safe mode enabled, not loading plugins.")

    # Initialize the collection
    logger.info("Loading collection...")
    from xl import collection
    try:
        self.collection = collection.Collection("Collection",
                location=os.path.join(xdg.get_data_dir(), 'music.db'))
    except common.VersionError:
        # music DB written by a newer Exaile - cannot continue
        common.log_exception(log=logger)
        sys.exit(1)

    from xl import event
    # Set up the player and playback queue
    from xl import player
    event.log_event("player_loaded", player.PLAYER, None)

    # Initialize playlist manager
    from xl import playlist
    self.playlists = playlist.PlaylistManager()
    self.smart_playlists = playlist.PlaylistManager('smart_playlists',
        playlist.SmartPlaylist)
    if firstrun:
        self._add_default_playlists()
    event.log_event("playlists_loaded", self, None)

    # Initialize dynamic playlist support
    from xl import dynamic
    dynamic.MANAGER.collection = self.collection

    # Initialize device manager
    logger.info("Loading devices...")
    from xl import devices
    self.devices = devices.DeviceManager()
    event.log_event("device_manager_ready", self, None)

    # Initialize dynamic device discovery interface
    # -> if initialized and connected, then the object is not None
    # fallback chain: UDisks2 -> UDisks -> HAL
    self.udisks2 = None
    self.udisks = None
    self.hal = None

    if self.options.Hal:
        from xl import hal

        udisks2 = hal.UDisks2(self.devices)
        if udisks2.connect():
            self.udisks2 = udisks2
        else:
            udisks = hal.UDisks(self.devices)
            if udisks.connect():
                self.udisks = udisks
            else:
                self.hal = hal.HAL(self.devices)
                self.hal.connect()
    else:
        self.hal = None

    # Radio Manager
    from xl import radio
    self.stations = playlist.PlaylistManager('radio_stations')
    self.radio = radio.RadioManager()

    self.gui = None
    # Setup GUI
    if self.options.StartGui:
        logger.info("Loading interface...")

        import xlgui
        self.gui = xlgui.Main(self)
        self.gui.main.window.show_all()
        event.log_event("gui_loaded", self, None)

        if splash is not None:
            splash.destroy()

    restore = True

    if self.gui:
        # Find out if the user just passed in a list of songs
        # TODO: find a better place to put this
        # using arg[2:] because arg[1:] will include --startgui
        args = [ gio.File(arg).get_uri() for arg in self.args ]
        if len(args) > 0:
            # explicit uris on the command line override state restore
            restore = False
            self.gui.open_uri(args[0], play=True)
        for arg in args[1:]:
            self.gui.open_uri(arg)

        # kick off autoscan of libraries
        # -> don't do it in command line mode, since that isn't expected
        self.gui.rescan_collection_with_progress(True)

    if restore:
        player.QUEUE._restore_player_state(
                os.path.join(xdg.get_data_dir(), 'player.state'))

    if firstrun:
        settings.set_option("general/first_run", False)

    self.loading = False
    Exaile._exaile = self
    event.log_event("exaile_loaded", self, None)
def __init(self):
    """
        Initializes Exaile.

        NOTE(review): this appears to be a duplicate (reformatted) of an
        earlier ``__init`` definition in this file - if both really live
        in the same class, the later one silently shadows the earlier;
        confirm and remove one of them.

        Runs the full startup sequence: settings, locale logging,
        optional splash, 0.2.14 migration, settings migrations, plugins,
        collection, player/queue, playlists, devices (HAL/UDisks),
        radio, and - when StartGui is set - the GUI, finally restoring
        player state or opening uris passed on the command line.
    """
    # pylint: disable-msg=W0201
    logger.info("Loading Exaile %s on Python %s..." % (__version__,
        platform.python_version()))

    logger.info("Loading settings...")
    try:
        from xl import settings
    except common.VersionError:
        # settings file written by a newer Exaile - cannot continue
        common.log_exception(log=logger)
        sys.exit(1)

    logger.debug("Settings loaded from %s" % settings.location)

    # display locale information if available
    try:
        import locale
        lc, enc = locale.getlocale()
        if enc is not None:
            logger.info("Using %s %s locale" % (lc, enc))
        else:
            logger.info("Using unknown locale")
    except:
        pass

    splash = None

    if self.options.StartGui:
        from xl import settings

        if settings.get_option('gui/use_splash', True):
            from xlgui.widgets.info import Splash

            splash = Splash()
            splash.show()

    firstrun = settings.get_option("general/first_run", True)

    # one-time import of data from Exaile 0.2.14, forced or on first run
    if not self.options.NoImport and \
            (firstrun or self.options.ForceImport):
        try:
            sys.path.insert(0, xdg.get_data_path("migrations"))
            import migration_200907100931 as migrator
            del sys.path[0]
            migrator.migrate(force=self.options.ForceImport)
            del migrator
        except:
            common.log_exception(
                log=logger,
                message=_("Failed to migrate from 0.2.14"))

    # Migrate old rating options
    from xl.migrations.settings import rating
    rating.migrate()

    # Migrate builtin OSD to plugin
    from xl.migrations.settings import osd
    osd.migrate()

    # Initialize plugin manager
    from xl import plugins
    self.plugins = plugins.PluginsManager(self)

    if not self.options.SafeMode:
        logger.info("Loading plugins...")
        self.plugins.load_enabled()
    else:
        logger.info("Safe mode enabled, not loading plugins.")

    # Initialize the collection
    logger.info("Loading collection...")
    from xl import collection
    try:
        self.collection = collection.Collection("Collection",
                location=os.path.join(
                    xdg.get_data_dir(), 'music.db'))
    except common.VersionError:
        # music DB written by a newer Exaile - cannot continue
        common.log_exception(log=logger)
        sys.exit(1)

    from xl import event
    # Set up the player and playback queue
    from xl import player
    event.log_event("player_loaded", player.PLAYER, None)

    # Initialize playlist manager
    from xl import playlist
    self.playlists = playlist.PlaylistManager()
    self.smart_playlists = playlist.PlaylistManager(
        'smart_playlists', playlist.SmartPlaylist)
    if firstrun:
        self._add_default_playlists()
    event.log_event("playlists_loaded", self, None)

    # Initialize dynamic playlist support
    from xl import dynamic
    dynamic.MANAGER.collection = self.collection

    # Initialize device manager
    logger.info("Loading devices...")
    from xl import devices
    self.devices = devices.DeviceManager()
    event.log_event("device_manager_ready", self, None)

    # Initialize dynamic device discovery interface
    # -> if initialized and connected, then the object is not None
    # fallback chain: UDisks2 -> UDisks -> HAL
    self.udisks2 = None
    self.udisks = None
    self.hal = None

    if self.options.Hal:
        from xl import hal

        udisks2 = hal.UDisks2(self.devices)
        if udisks2.connect():
            self.udisks2 = udisks2
        else:
            udisks = hal.UDisks(self.devices)
            if udisks.connect():
                self.udisks = udisks
            else:
                self.hal = hal.HAL(self.devices)
                self.hal.connect()
    else:
        self.hal = None

    # Radio Manager
    from xl import radio
    self.stations = playlist.PlaylistManager('radio_stations')
    self.radio = radio.RadioManager()

    self.gui = None
    # Setup GUI
    if self.options.StartGui:
        logger.info("Loading interface...")

        import xlgui
        self.gui = xlgui.Main(self)
        self.gui.main.window.show_all()
        event.log_event("gui_loaded", self, None)

        if splash is not None:
            splash.destroy()

    restore = True

    if self.gui:
        # Find out if the user just passed in a list of songs
        # TODO: find a better place to put this
        # using arg[2:] because arg[1:] will include --startgui
        args = [gio.File(arg).get_uri() for arg in self.args]
        if len(args) > 0:
            # explicit uris on the command line override state restore
            restore = False
            self.gui.open_uri(args[0], play=True)
        for arg in args[1:]:
            self.gui.open_uri(arg)

        # kick off autoscan of libraries
        # -> don't do it in command line mode, since that isn't expected
        self.gui.rescan_collection_with_progress(True)

    if restore:
        player.QUEUE._restore_player_state(
            os.path.join(xdg.get_data_dir(), 'player.state'))

    if firstrun:
        settings.set_option("general/first_run", False)

    self.loading = False
    Exaile._exaile = self
    event.log_event("exaile_loaded", self, None)