def job(self):
    '''Walk the watched directories, yield each supported file found, then add new or
    modified files to the library.'''
    dirs = self.dirs
    added = []
    db_uris = set(MediaDB.get_all_uris())
    alldirs = [utils.get_path_from_uri(each_dir) for each_dir in dirs]
    for mdir in alldirs:
        for dirpath, dirs, names in os.walk(mdir):
            # Prune hidden directories in place so os.walk() does not descend into them.
            dirs[:] = [each_dir for each_dir in dirs if each_dir[0] != "."]
            # Queue the targets of symlinked directories so they get scanned as well.
            for each_dir in dirs:
                full_path_dir = os.path.join(dirpath, each_dir)
                if os.path.islink(full_path_dir):
                    alldirs.append(os.path.realpath(full_path_dir))
            valid_files = []
            for name in names:
                full_path_file = os.path.join(dirpath, name)
                if name[0] != "." and common.file_is_supported(full_path_file):
                    valid_files.append(full_path_file)
                    yield full_path_file
            valid_files = set(valid_files)
            for each_file in valid_files:
                real_file = os.path.realpath(each_file)
                uri = utils.get_uri_from_path(real_file)
                # Add files that are not in the database yet, or whose ctime is newer
                # than the stored one.
                if uri not in db_uris:
                    added.append(uri)
                elif os.path.getctime(real_file) > MediaDB.get_song(uri).get("#ctime"):
                    added.append(uri)
    added = set(added)
    for uri in added:
        self.add_to_library(uri)
        yield utils.get_path_from_uri(uri)

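# Illustrative sketch (not part of the player code): a standalone version of the directory
# walk used by job() above -- hidden directories are pruned in place so os.walk() never
# descends into them, and the targets of symlinked directories are queued so they get
# scanned too. walk_media_dirs() and its is_supported callback are hypothetical names,
# standing in for the scanner and common.file_is_supported().
import os

def walk_media_dirs(roots, is_supported):
    """Yield supported, non-hidden files under roots, following symlinked directories."""
    pending = [os.path.realpath(root) for root in roots]
    seen = set()
    while pending:
        root = pending.pop(0)
        if root in seen:
            continue
        seen.add(root)
        for dirpath, dirnames, filenames in os.walk(root):
            # Assigning to the slice prunes the walk in place.
            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
            for d in dirnames:
                full = os.path.join(dirpath, d)
                if os.path.islink(full):
                    pending.append(os.path.realpath(full))
            for name in filenames:
                if name.startswith("."):
                    continue
                path = os.path.join(dirpath, name)
                if is_supported(path):
                    yield path

# Example: list every .mp3/.ogg file under ~/Music.
# for path in walk_media_dirs([os.path.expanduser("~/Music")],
#                             lambda p: p.lower().endswith((".mp3", ".ogg"))):
#     print path
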
def add_uris(self, uris, pos=None, sort=True):
    if uris is None:
        return
    if not isinstance(uris, (tuple, list, set)):
        uris = [uris]
    uris = [utils.get_uri_from_path(uri) for uri in uris]
    utils.ThreadLoad(self.load_taginfo, uris, pos, sort).start()

def choose_file_and_convert(self):
    filename = WinFile(False).run()
    if filename and common.file_is_supported(filename):
        tags = {"uri": utils.get_uri_from_path(filename)}
        s = Song()
        s.init_from_dict(tags)
        s.read_from_file()
        AttributesUI([s]).show_window()

def choose_file_and_convert(self):
    filename = WinFile(False).run()
    if filename and common.file_is_supported(filename):
        tags = {"uri": utils.get_uri_from_path(filename)}
        s = Song()
        s.init_from_dict(tags)
        s.set_type("local")
        s.read_from_file()
        AttributesUI([s]).show_window()

def get_tags(self):
    '''Build one tag dictionary per CUE track.'''
    tag_list = []
    for key, track in self.tracks.items():
        other_tags = {}
        # Each track gets a distinct pseudo-URI: the track number is spliced into the
        # audio file name, while "real_uri" keeps pointing at the actual playable file.
        audio_file_prefix, audio_file_ext = os.path.splitext(self.audio_file)
        audio_file_bad = "%s%d%s" % (audio_file_prefix, key, audio_file_ext)
        other_tags["album"] = self.attribs["TITLE"]
        other_tags["uri"] = utils.get_uri_from_path(audio_file_bad)
        other_tags["real_uri"] = utils.get_uri_from_path(self.audio_file)
        other_tags["song_type"] = "cue"
        other_tags["artist"] = track.attribs["PERFORMER"]
        other_tags["title"] = track.attribs["TITLE"]
        other_tags["track"] = key
        other_tags["#duration"] = self.get_track_length(key)
        other_tags["seek"] = track.indexes[1] / 1000
        other_tags["#size"] = os.path.getsize(self.audio_file)
        other_tags["#mtime"] = os.path.getmtime(self.audio_file)
        other_tags["#ctime"] = os.path.getctime(self.audio_file)
        tag_list.append(other_tags)
    return tag_list

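# Illustrative sketch (not part of the player code): the pseudo-URI trick used by
# get_tags() above, in isolation. track_pseudo_path() is a hypothetical helper name.
import os

def track_pseudo_path(audio_file, track_no):
    """Splice the track number into the audio file name so every CUE track gets a distinct key."""
    prefix, ext = os.path.splitext(audio_file)
    return "%s%d%s" % (prefix, track_no, ext)

# track_pseudo_path("/music/album.flac", 3) -> "/music/album3.flac"
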
def job(self):
    '''Walk the watched directories, yield each supported file found, then hand the new
    or modified URIs to the callback on the GLib main loop.'''
    dirs = self.dirs
    added = []
    db_uris = set(MediaDB.get_all_uris())
    alldirs = [utils.get_path_from_uri(each_dir) for each_dir in dirs]
    for mdir in alldirs:
        for dirpath, dirs, names in os.walk(mdir):
            # Prune hidden directories in place so os.walk() does not descend into them.
            dirs[:] = [each_dir for each_dir in dirs if each_dir[0] != "."]
            # Queue the targets of symlinked directories so they get scanned as well.
            for each_dir in dirs:
                full_path_dir = os.path.join(dirpath, each_dir)
                if os.path.islink(full_path_dir):
                    alldirs.append(os.path.realpath(full_path_dir))
            valid_files = []
            for name in names:
                full_path_file = os.path.join(dirpath, name)
                if name[0] != "." and common.file_is_supported(full_path_file):
                    valid_files.append(full_path_file)
                    yield full_path_file
            valid_files = set(valid_files)
            for each_file in valid_files:
                real_file = os.path.realpath(each_file)
                uri = utils.get_uri_from_path(real_file)
                if uri not in db_uris:
                    added.append(uri)
                elif os.path.getctime(real_file) > MediaDB.get_song(uri).get("#ctime"):
                    added.append(uri)
    added = set(added)
    # for uri in added:
    #     self.__get_or_create_song(uri)
    #     end = time.time()
    #     if (end - start) * 1000 > 1000:
    #         self.callback(self.add_song_cache, self.pos, self.sort)
    #         self.pos += len(self.add_song_cache)
    #         del self.add_song_cache[:]
    #         start = time.time()
    #     else:
    #         end = time.time()
    #     yield utils.get_path_from_uri(uri)
    # if self.add_song_cache:
    if added:
        gobject.idle_add(self.callback, added, self.pos, self.sort)

def finish_job(self):
    self.emit("end")
    self.__set_status_icon("success")
    self.set_progress_ratio(1.0)
    try:
        gobject.source_remove(self.__updater_id)
    except:
        pass
    if self.trans_data["to_playlist"]:
        tags = {"uri": utils.get_uri_from_path(self.output_path)}
        song = MediaDB.get_or_create_song(tags, "local", read_from_file=True)
        if song:
            Dispatcher.add_songs([song])

def add_file(self, filename=None, play=False):
    if filename is None:
        uri = WinFile().run()
    else:
        uri = utils.get_uri_from_path(filename)
    if uri and common.file_is_supported(utils.get_path_from_uri(uri)):
        try:
            songs = MediaDB.get_songs_by_uri(uri)
        except:
            pass
        else:
            self.add_songs(songs, play=play)

def add_file(self, filename=None, play=False):
    if filename is None:
        uri = WinFile().run()
    else:
        uri = utils.get_uri_from_path(filename)
    if uri and common.file_is_supported(utils.get_path_from_uri(uri)):
        tags = {"uri": uri}
        try:
            song = MediaDB.get_or_create_song(tags, "local", read_from_file=True)
        except:
            pass
        else:
            self.add_songs(song, play=play)

def get_cover(self, song, try_web=True):
    '''Return a local cover image path for song, falling back to the default cover.'''
    default_image_path = self.default_cover
    album = self.get_cover_search_str(song)
    image_path = get_cache_file("cover/%s.jpg" % album)
    image_path_disable = get_cache_file("cover/%s.jpg.#disable#" % album)

    # Nothing to search for, or the cover is already known to be unavailable.
    if ((not song.get_str("title") and not song.get_str("album"))
            or os.path.exists(image_path_disable)
            or image_path in self.COVER_TO_SKIP):
        return default_image_path

    # Cover already cached: make sure it is still a readable image.
    if os.path.exists(image_path):
        try:
            gtk.gdk.pixbuf_new_from_file_at_size(image_path, COVER_SIZE["x"], COVER_SIZE["y"])
        except gobject.GError:
            os.unlink(image_path)
        else:
            return image_path

    # Retrieve the cover embedded in the ID3 tag.
    if song.get_scheme() == "file" and song.get_ext() in [".mp3", ".tta"]:
        found = False
        fp = None
        try:
            fp = open(image_path, "wb+")
            tag = ID3(song.get_path())
            for frame in tag.getall("APIC"):
                found = True
                fp.write(frame.data)
            fp.flush()
            fp.seek(0, 0)
        except:
            pass
        finally:
            if fp:
                fp.close()
        if found and self.cleanup_cover(song, image_path):
            return image_path

    # Search the directory of the file for a matching image.
    if song.get("uri") is not None and song.get_scheme() == "file":
        song_dir = song.get_dir()
        if os.path.exists(song_dir):
            list_file = os.listdir(song_dir)
            for pattern in COVER_PATTERNS:
                matches = fnmatch.filter(list_file, pattern)
                if matches:
                    # Prefer shorter names, then alphabetical order.
                    matches = sorted(matches, lambda a, b: (len(a) - len(b)) * 10 + cmp(a, b))
                    if self.cleanup_cover(song, song_dir + "/" + matches[0], image_path):
                        return image_path

    # Finally, try the network unless we are offline.
    if not config.getboolean("setting", "offline") and try_web and is_network_connected():
        try:
            ret = False
            # Try the cover URL stored in the tag first.
            if song.get("album_cover_url"):
                ret = utils.download(song.get("album_cover_url"),
                                     utils.get_uri_from_path(image_path))
            if ret and self.cleanup_cover(song, image_path):
                return image_path
            cover_img_url = multi_query_artist_engine(album)
            if cover_img_url:
                ret = utils.download(cover_img_url, image_path)
                if ret and self.cleanup_cover(song, image_path):
                    return image_path
        except:
            pass

    # No cover found.
    self.remove_cover(song)
    if try_web:
        self.logdebug("cover not found %s (web: %s)", image_path, try_web)
    return default_image_path

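# Illustrative sketch (not part of the player code): the "cover embedded in the ID3 tag"
# branch of get_cover() above writes raw APIC frame data to a cache file. Assuming the ID3
# class used there is mutagen.id3.ID3, the same idea in isolation looks like this;
# extract_embedded_cover() and dest_path are hypothetical names.
from mutagen.id3 import ID3, ID3NoHeaderError

def extract_embedded_cover(mp3_path, dest_path):
    """Write the first embedded APIC picture of mp3_path to dest_path; return True on success."""
    try:
        tag = ID3(mp3_path)
    except ID3NoHeaderError:
        return False
    frames = tag.getall("APIC")
    if not frames:
        return False
    # Prefer the front-cover picture type (3) when several pictures are embedded.
    frames.sort(key=lambda frame: 0 if frame.type == 3 else 1)
    with open(dest_path, "wb") as fp:
        fp.write(frames[0].data)
    return True

# Example: extract_embedded_cover("/music/song.mp3", "/tmp/cover.jpg")
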
def write_tags(self):
    tags = deepcopy(self.raw_song)
    tags["uri"] = utils.get_uri_from_path(self.output_path)
    s = Song()
    s.init_from_dict(tags)
    s.write_to_file()

def __read_from_remote_file(self):
    '''Load song information from a remote file through a GStreamer pipeline.'''
    GST_IDS = {"title": "title",
               "genre": "genre",
               "artist": "artist",
               "album": "album",
               "bitrate": "#bitrate",
               "track-number": "#track"}
    is_finalize = False
    # Mutable flag so the nested message handler can set it (Python 2 has no nonlocal).
    is_tagged = [False]

    def unknown_type(*param):
        raise Exception("W:Song:GstTag:Gst decoder: unknown type")

    def finalize(pipeline):
        state_ret = pipeline.set_state(gst.STATE_NULL)
        if state_ret != gst.STATE_CHANGE_SUCCESS:
            print "Failed change to null"

    def message(bus, message, pipeline):
        if message.type == gst.MESSAGE_EOS:
            finalize(pipeline)
        elif message.type == gst.MESSAGE_TAG:
            taglist = message.parse_tag()
            for key in taglist.keys():
                if key in GST_IDS:
                    if key == "bitrate":
                        value = int(taglist[key] / 100)
                    elif isinstance(taglist[key], long):
                        value = int(taglist[key])
                    else:
                        value = taglist[key]
                    self[GST_IDS[key]] = utils.fix_charset(value)
                    print key, ":", utils.fix_charset(value)
                    is_tagged[0] = True
        elif message.type == gst.MESSAGE_ERROR:
            err, debug = message.parse_error()
            finalize(pipeline)
            raise Exception("W:Song:GstTag:Decoder error: %s\n%s" % (err, debug))

    try:
        try:
            url = utils.get_uri_from_path(self.get("uri").encode("utf-8"))
            pipeline = gst.parse_launch("gnomevfssrc location=" + url +
                                        " ! decodebin name=decoder ! fakesink")
        except gobject.GError:
            raise Exception("W:Song:GstTag:Failed to build pipeline to read metadata of %s"
                            % self.get("uri"))

        decoder = pipeline.get_by_name("decoder")
        decoder.connect("unknown_type", unknown_type)
        bus = pipeline.get_bus()
        bus.connect("message", message, pipeline)
        bus.add_signal_watch()

        state_ret = pipeline.set_state(gst.STATE_PAUSED)
        timeout = 10
        while state_ret == gst.STATE_CHANGE_ASYNC and not is_finalize and timeout > 0:
            state_ret, _state, _pending_state = pipeline.get_state(1 * gst.SECOND)
            timeout -= 1

        if state_ret != gst.STATE_CHANGE_SUCCESS:
            finalize(pipeline)
            print "W:Song:GstTag:Failed Read Media"
        else:
            if not is_tagged[0]:
                bus.poll(gst.MESSAGE_TAG, 5 * gst.SECOND)
            try:
                query = gst.query_new_duration(gst.FORMAT_TIME)
                if pipeline.query(query):
                    total = query.parse_duration()[1]
                else:
                    total = 0
            except gst.QueryError:
                total = 0
            total //= gst.MSECOND
            self["#duration"] = total

            if not is_tagged[0]:
                print "W:Song:GstTag: Media found but no tag found"
            finalize(pipeline)
    except Exception, e:
        print "W: Error while loading (" + self.get("uri") + ")\nTraceback :", e
        self.last_error = "Error while reading" + ": " + self.get_filename()
        return False