def load_extension(self):
    """Load and initialize the gPodder extension module.

    No-op when the module is already loaded or the extension does not
    target the currently running UI.
    """
    if self.module is not None:
        logger.info('Module already loaded.')
        return
    if not self.metadata.for_current_ui:
        logger.info('Not loading "%s" (only_for = "%s")',
                    self.name, self.metadata.only_for)
        return

    mod_name, mod_ext = os.path.splitext(os.path.basename(self.filename))
    with open(self.filename, 'r') as source:
        try:
            loaded = imp.load_module(mod_name, source, self.filename,
                                     (mod_ext, 'r', imp.PY_SOURCE))
        finally:
            # The import may leave a .pyc next to the source file;
            # remove it so the extensions directory stays clean.
            util.delete_file(self.filename + 'c')

    self.default_config = getattr(loaded, 'DefaultConfig', {})
    if self.default_config:
        self.manager.core.config.register_defaults({
            'extensions': {
                self.name: self.default_config,
            }
        })
    self.config = getattr(self.manager.core.config.extensions, self.name)

    self.module = loaded.gPodderExtension(self)
    logger.info('Module loaded: %s', self.filename)
def requestImage(self, id, size, requestedSize):
    """Return a scaled cover image for the given provider id, with caching.

    The id encodes "cover_file|cover_url|podcast_url|podcast_title".
    """
    key = (id, requestedSize)
    if key in self._cache:
        return self._cache[key]

    cover_file, cover_url, podcast_url, podcast_title = id.split('|')

    def fetch_cover():
        # Last argument True = allow downloading when not cached on disk
        return self.downloader.get_cover(cover_file, cover_url,
                podcast_url, podcast_title, None, None, True)

    filename = fetch_cover()
    image = QImage()
    if not image.load(filename):
        if filename.startswith(cover_file):
            # Unreadable file in our cache location: delete and retry once
            logger.info('Deleting broken cover art: %s', filename)
            util.delete_file(filename)
            image.load(fetch_cover())

    if not image.isNull():
        self._cache[key] = image.scaled(requestedSize,
                Qt.KeepAspectRatioByExpanding, Qt.SmoothTransformation)

    # Raises KeyError when the cover could not be loaded at all
    return self._cache[key]
def run(self):
    """Download this episode in the current thread (legacy code path).

    Registers with the global download status manager, fetches the
    episode into a temporary file, moves it into place and records it
    on the channel.  Cancellation and I/O errors are reported but not
    re-raised.
    """
    self.download_id = services.download_status_manager.reserve_download_id()
    services.download_status_manager.register_download_id(self.download_id, self)

    # Initial status update
    services.download_status_manager.update_status(self.download_id,
            episode=self.episode.title, url=self.episode.url,
            speed=self.speed, progress=self.progress)

    # Serialize downloads through the status manager's semaphore
    acquired = services.download_status_manager.s_acquire()
    try:
        try:
            if self.cancelled:
                return

            # Remove any stale temporary file before starting
            util.delete_file(self.tempname)
            self.downloader.retrieve(self.episode.url, self.tempname,
                    reporthook=self.status_updated)
            # Download succeeded - move into the final location
            shutil.move(self.tempname, self.filename)
            self.channel.addDownloadedItem(self.episode)
            services.download_status_manager.download_completed(self.download_id)
        finally:
            # Always unregister, even on cancel/error
            services.download_status_manager.remove_download_id(self.download_id)
            services.download_status_manager.s_release(acquired)
    except DownloadCancelledException:
        log('Download has been cancelled: %s', self.episode.title, traceback=None, sender=self)
    except IOError, ioe:
        # Python 2 only "except E, name" syntax
        if self.notification is not None:
            title = ioe.strerror
            message = _('An error happened while trying to download <b>%s</b>.') % (saxutils.escape(self.episode.title),)
            self.notification(message, title)
        log('Error "%s" while downloading "%s": %s', ioe.strerror, self.episode.title, ioe.filename, sender=self)
def load_extension(self):
    """Load and initialize the gPodder extension module.

    Skips loading when the module is already present or the extension
    is not available for the current UI.
    """
    if self.module is not None:
        logger.info('Module already loaded.')
        return
    if not self.metadata.available_for_current_ui:
        logger.info('Not loading "%s" (only_for = "%s")',
                    self.name, self.metadata.only_for)
        return

    name, ext = os.path.splitext(os.path.basename(self.filename))
    handle = open(self.filename, 'r')
    try:
        loaded_module = imp.load_module(name, handle, self.filename,
                                        (ext, 'r', imp.PY_SOURCE))
    finally:
        # Drop the .pyc byproduct of the import, then release the handle
        util.delete_file(self.filename + 'c')
        handle.close()

    self.default_config = getattr(loaded_module, 'DefaultConfig', {})
    if self.default_config:
        self.manager.core.config.register_defaults(
                {'extensions': {self.name: self.default_config}})
    self.config = getattr(self.manager.core.config.extensions, self.name)

    self.module = loaded_module.gPodderExtension(self)
    logger.info('Module loaded: %s', self.filename)
def delete_from_disk(self):
    """Remove the downloaded file (if any) and mark the episode deleted."""
    path = self.local_filename(create=False, check_only=True)
    if path is not None:
        # Give extensions a chance to react before the file disappears
        gpodder.user_extensions.on_episode_delete(self, path)
        util.delete_file(path)
    self.set_state(gpodder.STATE_DELETED)
def run(self):
    """Synchronize this episode to the device (worker entry point).

    Returns False when the task was cancelled or not queued; otherwise
    delegates to device.add_track() and records failure state.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == SyncTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        return False

    # We only start this download if its status is "queued"
    if self.status != SyncTask.QUEUED:
        return False

    # We are synching this file right now
    self.status = SyncTask.DOWNLOADING
    self._notification_shown = False

    try:
        logger.info('Starting SyncTask')
        self.device.add_track(self.episode, reporthook=self.status_updated)
    except Exception, e:
        # Python 2 only "except E, name" syntax
        self.status = SyncTask.FAILED
        logger.error('Sync failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e),)
def requestImage(self, id, size, requestedSize):
    """QML image provider hook: return a scaled cover image for `id`.

    `id` encodes four URL-quoted fields separated by '|':
    cover_file|cover_url|podcast_url|podcast_title.
    Results are cached per (id, requestedSize).
    """
    key = (id, requestedSize)
    if key in self._cache:
        return self._cache[key]

    # Fields are URL-quoted so '|' inside titles/URLs cannot break the
    # split (urllib.unquote is the Python 2 API)
    cover_file, cover_url, podcast_url, podcast_title = (
            urllib.unquote(x) for x in id.split('|'))

    def get_filename():
        # Final True = download the cover if it is not cached on disk
        return self.downloader.get_cover(cover_file, cover_url,
                podcast_url, podcast_title, None, None, True)

    filename = get_filename()
    image = QImage()
    if not image.load(filename):
        if filename.startswith(cover_file):
            # Broken file in our cache location - delete and re-fetch once
            logger.info('Deleting broken cover art: %s', filename)
            util.delete_file(filename)
            image.load(get_filename())

    if not image.isNull():
        self._cache[key] = image.scaled(requestedSize,
                Qt.KeepAspectRatioByExpanding, Qt.SmoothTransformation)

    # Raises KeyError when the image could not be loaded at all
    return self._cache[key]
def delete_partial_files(self):
    """Delete the partial download file and any youtube-dl side files."""
    # youtube-dl creates .partial.* files for adaptive formats
    for leftover in [self.tempname] + glob.glob('%s.*' % self.tempname):
        util.delete_file(leftover)
def run(self):
    """Synchronize this episode to the device (worker entry point).

    Returns False when the task was cancelled or not queued; otherwise
    delegates to device.add_track() and records failure state.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == SyncTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        return False

    # We only start this download if its status is "queued"
    if self.status != SyncTask.QUEUED:
        return False

    # We are synching this file right now
    self.status = SyncTask.DOWNLOADING
    self._notification_shown = False

    try:
        logger.info('Starting SyncTask')
        self.device.add_track(self.episode, reporthook=self.status_updated)
    except Exception, e:
        # Python 2 only "except E, name" syntax
        self.status = SyncTask.FAILED
        # NOTE(review): message says 'Download failed' although this is
        # a sync task - the sibling implementation logs 'Sync failed'
        logger.error('Download failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e), )
def delete(self):
    """Remove the episode's downloaded file and persist the deleted state."""
    path = self.local_filename(create=False, check_only=True)
    if path is not None:
        util.delete_file(path)
    # Update the model state and write it back to the database
    self.state = gpodder.STATE_DELETED
    self.is_new = False
    self.save()
def remove_cover(self, channel):
    """Delete the channel's cached cover file.

    A fresh cover is downloaded the next time the channel cover is
    requested; listeners are informed via the "cover-removed" signal.
    """
    util.delete_file(channel.cover_file)
    self.notify("cover-removed", channel.url)
def remove_downloaded(self):
    """Delete this podcast's playlist file and its download directory."""
    playlist = self.get_playlist_filename()
    if os.path.exists(playlist):
        util.delete_file(playlist)
    # Second argument True = ignore errors (best-effort removal)
    shutil.rmtree(self.save_dir, True)
def remove_cover(self, channel):
    """Drop the cached cover art for a channel.

    The next request for the channel cover triggers a fresh download;
    a 'cover-removed' notification is emitted for listeners.
    """
    util.delete_file(channel.cover_file)
    self.notify('cover-removed', channel.url)
def remove_track(self, track):
    """Delete a synced track's file and prune its now-empty folder.

    Directory removal is best-effort and only attempted when the
    one-folder-per-podcast layout is enabled.
    """
    self.notify('status', _('Removing %s') % track.title)
    util.delete_file(track.filename)
    directory = os.path.dirname(track.filename)
    if self.directory_is_empty(directory) and self._config.one_folder_per_podcast:
        try:
            os.rmdir(directory)
        except OSError:
            # was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; os.rmdir only raises OSError
            logger.error('Cannot remove %s', directory)
def remove_track(self, track):
    """Delete a synced track's file and prune its now-empty folder.

    Directory removal is best-effort and only attempted when channel
    subfolders are enabled for filesystem sync.
    """
    self.notify('status', _('Removing %s') % track.title)
    util.delete_file(track.filename)
    directory = os.path.dirname(track.filename)
    if self.directory_is_empty(directory) and self._config.fssync_channel_subfolders:
        try:
            os.rmdir(directory)
        except OSError:
            # was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt; os.rmdir only raises OSError
            log('Cannot remove %s', directory, sender=self)
def run(self): # Speed calculation (re-)starts here self.__start_time = 0 self.__start_blocks = 0 # If the download has already been cancelled, skip it if self.status == DownloadTask.CANCELLED: util.delete_file(self.tempname) self.progress = 0.0 self.speed = 0.0 return False # We only start this download if its status is "queued" if self.status != DownloadTask.QUEUED: return False # We are downloading this file right now self.status = DownloadTask.DOWNLOADING self._notification_shown = False try: # Resolve URL and start downloading the episode fmt_ids = youtube.get_fmt_ids(self._config.youtube) url = youtube.get_real_download_url(self.__episode.url, fmt_ids) url = vimeo.get_real_download_url(url, self._config.vimeo.fileformat) url = escapist_videos.get_real_download_url(url) url = url.strip() downloader = DownloadURLOpener(self.__episode.channel) # HTTP Status codes for which we retry the download retry_codes = (408, 418, 504, 598, 599) max_retries = max(0, self._config.auto.retries) # Retry the download on timeout (bug 1013) for retry in range(max_retries + 1): if retry > 0: logger.info('Retrying download of %s (%d)', url, retry) time.sleep(1) try: headers, real_url = downloader.retrieve_resume( url, self.tempname, reporthook=self.status_updated) # If we arrive here, the download was successful break except urllib.ContentTooShortError, ctse: if retry < max_retries: logger.info('Content too short: %s - will retry.', url) continue raise except socket.timeout, tmout: if retry < max_retries: logger.info('Socket timeout: %s - will retry.', url) continue raise
def run(self): # Speed calculation (re-)starts here self.__start_time = 0 self.__start_blocks = 0 # If the download has already been cancelled, skip it if self.status == DownloadTask.CANCELLED: util.delete_file(self.tempname) self.progress = 0.0 self.speed = 0.0 return False # We only start this download if its status is "queued" if self.status != DownloadTask.QUEUED: return False # We are downloading this file right now self.status = DownloadTask.DOWNLOADING self._notification_shown = False try: # Resolve URL and start downloading the episode fmt_ids = youtube.get_fmt_ids(self._config.youtube) url = youtube.get_real_download_url(self.__episode.url, fmt_ids) url = vimeo.get_real_download_url(url, self._config.vimeo.fileformat) url = escapist_videos.get_real_download_url(url) url = url.strip() downloader = DownloadURLOpener(self.__episode.channel) # HTTP Status codes for which we retry the download retry_codes = (408, 418, 504, 598, 599) max_retries = max(0, self._config.auto.retries) # Retry the download on timeout (bug 1013) for retry in range(max_retries + 1): if retry > 0: logger.info('Retrying download of %s (%d)', url, retry) time.sleep(1) try: headers, real_url = downloader.retrieve_resume(url, self.tempname, reporthook=self.status_updated) # If we arrive here, the download was successful break except urllib.ContentTooShortError, ctse: if retry < max_retries: logger.info('Content too short: %s - will retry.', url) continue raise except socket.timeout, tmout: if retry < max_retries: logger.info('Socket timeout: %s - will retry.', url) continue raise
def remove_track_gpod(self, track):
    """Remove a track from the iPod database and delete its file on disk."""
    filename = gpod.itdb_filename_on_ipod(track)
    try:
        gpod.itdb_playlist_remove_track(self.podcasts_playlist, track)
    except Exception:
        # Track may not be in the podcasts playlist - best-effort removal.
        # (was a bare "except:", which also swallowed KeyboardInterrupt)
        logger.info('Track %s not in playlist', track.title)
    gpod.itdb_track_unlink(track)
    util.delete_file(filename)
def convert():
    # Concatenate the listed videos with ffmpeg (stream copy, no re-encode)
    cmd = ['ffmpeg', '-f', 'concat', '-nostdin', '-y',
           '-i', list_filename, '-c', 'copy', out_filename]
    result = subprocess.Popen(cmd).wait()
    util.delete_file(list_filename)
    # UI updates must run on the main thread
    util.idle_add(lambda: indicator.on_finished())
    message = (_('Videos successfully converted') if result == 0
               else _('Error converting videos'))
    util.idle_add(lambda: self.gpodder.show_message(
        message, _('Concatenation result'), important=True))
def convert():
    # Concatenate the listed videos with ffmpeg (stream copy, no re-encode)
    cmd = ['ffmpeg', '-f', 'concat', '-nostdin', '-y',
           '-i', list_filename, '-c', 'copy', out_filename]
    result = util.Popen(cmd, close_fds=True).wait()
    util.delete_file(list_filename)
    # UI updates must run on the main thread
    util.idle_add(lambda: indicator.on_finished())
    message = (_('Videos successfully converted') if result == 0
               else _('Error converting videos'))
    util.idle_add(lambda: self.gpodder.show_message(
        message, _('Concatenation result'), important=True))
def clean_up_downloads(delete_partial=False):
    """Clean up temporary files left behind by old gPodder versions.

    delete_partial - If True, also delete in-progress downloads
    """
    patterns = ['%s/*/.tmp-*' % gpodder.downloads]
    if delete_partial:
        patterns.append('%s/*/*.partial' % gpodder.downloads)
    for pattern in patterns:
        for leftover in glob.glob(pattern):
            util.delete_file(leftover)
def remove_track(self, track):
    """Remove a synced track from the iPod playlist, database and disk."""
    self.notify('status', _('Removing %s') % track.title)
    track = track.libgpodtrack
    filename = gpod.itdb_filename_on_ipod(track)
    try:
        gpod.itdb_playlist_remove_track(self.podcasts_playlist, track)
    except Exception:
        # Track may not be in the podcasts playlist - best-effort removal.
        # (was a bare "except:", which also swallowed KeyboardInterrupt)
        log('Track %s not in playlist', track.title, sender=self)
    gpod.itdb_track_unlink(track)
    util.delete_file(filename)
def update_m3u_playlist(self):
    """Rewrite (or remove) the channel's M3U playlist file.

    The playlist mirrors the currently downloaded episodes, sorted by
    publication date; with no downloaded episodes it is deleted.
    """
    m3u_filename = self.get_playlist_filename()
    downloaded_episodes = self.get_downloaded_episodes()
    if not downloaded_episodes:
        log('No episodes - removing %s', m3u_filename, sender=self)
        util.delete_file(m3u_filename)
        return

    log('Writing playlist to %s', m3u_filename, sender=self)
    util.write_m3u_playlist(m3u_filename,
            PodcastEpisode.sort_by_pubdate(downloaded_episodes))
def delete_episode_by_url(self, url):
    """Delete the downloaded file of the episode with the given URL.

    Marks the episode as deleted in the database and refreshes the
    channel's M3U playlist afterwards.
    """
    episode = self.db.load_episode(url, factory=self.episode_factory)
    if episode is not None:
        filename = episode.local_filename(create=False)
        if filename is not None:
            util.delete_file(filename)
        else:
            log('Cannot delete episode: %s (I have no filename!)', episode.title, sender=self)
        episode.set_state(gpodder.STATE_DELETED)
    # Keep the playlist consistent with what is on disk
    self.update_m3u_playlist()
def channels_to_model(channels):
    """Build a gtk.ListStore for the channel list view.

    Columns: 0 URL, 1 title, 2 markup description, 3 pill pixbuf,
    4 pango weight, 5 cover pixbuf, 6 parse error text.
    """
    new_model = gtk.ListStore(str, str, str, gtk.gdk.Pixbuf, int, gtk.gdk.Pixbuf, str)

    for channel in channels:
        (count_available, count_downloaded, count_new, count_unplayed) = channel.get_episode_stats()

        new_iter = new_model.append()
        new_model.set(new_iter, 0, channel.url)
        new_model.set(new_iter, 1, channel.title)

        # Title plus first description line, escaped for Pango markup
        title_markup = saxutils.escape(channel.title)
        description_markup = saxutils.escape(util.get_first_line(channel.description))
        description = '%s\n<small>%s</small>' % (title_markup, description_markup)
        if channel.parse_error is not None:
            # Highlight channels whose feed failed to parse (red text)
            description = '<span foreground="#ff0000">%s</span>' % description
            new_model.set(new_iter, 6, channel.parse_error)
        else:
            new_model.set(new_iter, 6, '')

        new_model.set(new_iter, 2, description)

        if count_unplayed > 0 or count_downloaded > 0:
            new_model.set(new_iter, 3, draw.draw_pill_pixbuf(str(count_unplayed), str(count_downloaded)))

        # Bold title for channels with new episodes
        if count_new > 0:
            new_model.set(new_iter, 4, pango.WEIGHT_BOLD)
        else:
            new_model.set(new_iter, 4, pango.WEIGHT_NORMAL)

        channel_cover_found = False
        if os.path.exists(channel.cover_file) and os.path.getsize(channel.cover_file) > 0:
            try:
                new_model.set(new_iter, 5, gtk.gdk.pixbuf_new_from_file_at_size(channel.cover_file, 32, 32))
                channel_cover_found = True
            except:
                # Unreadable image data - drop the cached cover file
                exctype, value = sys.exc_info()[:2]
                log('Could not convert icon file "%s", error was "%s"', channel.cover_file, value)
                util.delete_file(channel.cover_file)

        if not channel_cover_found:
            # Fall back to the themed "globe" icon
            iconsize = gtk.icon_size_from_name('channel-icon')
            if not iconsize:
                iconsize = gtk.icon_size_register('channel-icon', 32, 32)
            icon_theme = gtk.icon_theme_get_default()
            globe_icon_name = 'applications-internet'
            try:
                new_model.set(new_iter, 5, icon_theme.load_icon(globe_icon_name, iconsize, 0))
            except:
                log('Cannot load "%s" icon (using an old or incomplete icon theme?)', globe_icon_name)
                new_model.set(new_iter, 5, None)

    return new_model
def delete_episode_by_url(self, url):
    """Delete the downloaded file(s) of the episode with the given URL.

    Updates the saved list of downloaded episodes and the M3U playlist
    while holding the global lock.  The lock is released in a finally
    block so an exception in any helper no longer leaks it (previously
    release() was skipped on error, deadlocking later callers).
    """
    global_lock.acquire()
    try:
        downloaded_episodes = self.load_downloaded_episodes()
        for episode in self.get_all_episodes():
            if episode.url == url:
                util.delete_file(episode.local_filename())
                if episode in downloaded_episodes:
                    downloaded_episodes.remove(episode)
        self.save_downloaded_episodes(downloaded_episodes)
        self.update_m3u_playlist(downloaded_episodes)
    finally:
        global_lock.release()
def find_partial_downloads(channels, start_progress_callback, progress_callback, finish_progress_callback):
    """Find partial downloads and match them with episodes

    channels - A list of all model.PodcastChannel objects
    start_progress_callback - A callback(count) when partial files are searched
    progress_callback - A callback(title, progress) when an episode was found
    finish_progress_callback - A callback(resumable_episodes) when finished
    """
    # Look for partial file downloads
    partial_files = glob.glob(os.path.join(gpodder.downloads, '*', '*.partial'))
    count = len(partial_files)
    resumable_episodes = []
    if count:
        start_progress_callback(count)
        # Final filenames the partial files would produce; a set makes
        # the per-episode membership test O(1) instead of O(n)
        candidates = set(f[:-len('.partial')] for f in partial_files)
        found = 0
        for channel in channels:
            for episode in channel.get_all_episodes():
                filename = episode.local_filename(create=False, check_only=True)
                if filename in candidates:
                    found += 1
                    progress_callback(episode.title, float(found) / count)
                    candidates.remove(filename)
                    partial_files.remove(filename + '.partial')
                    if os.path.exists(filename):
                        # The file has already been downloaded;
                        # remove the leftover partial file
                        util.delete_file(filename + '.partial')
                    else:
                        resumable_episodes.append(episode)
                    if not candidates:
                        break
            if not candidates:
                break
        # Remaining partial files belong to no known episode
        for f in partial_files:
            # logger.warn is a deprecated alias of logger.warning
            logger.warning('Partial file without episode: %s', f)
            util.delete_file(f)
        finish_progress_callback(resumable_episodes)
    else:
        # Nothing to resume - just clean up stale temporary files
        clean_up_downloads(True)
def delete_download(self):
    """Delete the episode's media file and all of its cover art files.

    Marks the episode as deleted / not new and saves the model.
    """
    filename = self.local_filename(create=False, check_only=True)
    if filename is not None:
        util.delete_file(filename)

    art_filename = self.art_file
    if art_filename is not None:
        # Check every known cover extension against the unmodified base
        # name.  (The previous code appended each matched extension to
        # the base variable, so only the first existing cover file was
        # ever deleted and later extensions were checked against a
        # corrupted path.)
        for extension in self.podcast.model.core.cover_downloader.EXTENSIONS:
            candidate = art_filename + extension
            if os.path.exists(candidate):
                util.delete_file(candidate)

    self.state = gpodder.STATE_DELETED
    self.is_new = False
    self.save()
def find_partial_downloads(directory, channels, start_progress_callback, progress_callback, finish_progress_callback):
    """Find partial downloads and match them with episodes

    directory - Download directory
    channels - A list of all model.PodcastChannel objects
    start_progress_callback - A callback(count) when partial files are searched
    progress_callback - A callback(title, progress) when an episode was found
    finish_progress_callback - A callback(resumable_episodes) when finished
    """
    # Look for partial file downloads
    partial_files = glob.glob(os.path.join(directory, '*', '*.partial'))
    count = len(partial_files)
    resumable_episodes = []
    if count:
        start_progress_callback(count)
        # Final filenames the partial files would produce; a set makes
        # the per-episode membership test O(1) instead of O(n)
        candidates = set(f[:-len('.partial')] for f in partial_files)
        found = 0
        for channel in channels:
            for episode in channel.episodes:
                filename = episode.local_filename(create=False, check_only=True)
                if filename in candidates:
                    found += 1
                    progress_callback(episode.title, float(found) / count)
                    candidates.remove(filename)
                    partial_files.remove(filename + '.partial')
                    if os.path.exists(filename):
                        # The file has already been downloaded;
                        # remove the leftover partial file
                        util.delete_file(filename + '.partial')
                    else:
                        resumable_episodes.append(episode)
                    if not candidates:
                        break
            if not candidates:
                break
        # Remaining partial files belong to no known episode
        for f in partial_files:
            # logger.warn is a deprecated alias of logger.warning
            logger.warning('Partial file without episode: %s', f)
            util.delete_file(f)
        finish_progress_callback(resumable_episodes)
    else:
        clean_up_downloads(directory, True)
def find_partial_downloads(channels, start_progress_callback, progress_callback, finish_progress_callback): """Find partial downloads and match them with episodes channels - A list of all model.PodcastChannel objects start_progress_callback - A callback(count) when partial files are searched progress_callback - A callback(title, progress) when an episode was found finish_progress_callback - A callback(resumable_episodes) when finished """ # Look for partial file downloads, ignoring .partial.* files created by youtube-dl partial_files = glob.glob(os.path.join(gpodder.downloads, '*', '*.partial')) count = len(partial_files) resumable_episodes = [] start_progress_callback(count) if count: candidates = [f[:-len('.partial')] for f in partial_files] found = 0 for channel in channels: for episode in channel.get_all_episodes(): filename = episode.local_filename(create=False, check_only=True) if filename in candidates: found += 1 progress_callback(episode.title, found / count) candidates.remove(filename) partial_files.remove(filename + '.partial') if os.path.exists(filename): # The file has already been downloaded; # remove the leftover partial file util.delete_file(filename + '.partial') else: resumable_episodes.append(episode) if not candidates: break if not candidates: break for f in partial_files: logger.warning('Partial file without episode: %s', f) util.delete_file(f) # never delete partial: either we can't clean them up because we offer to # resume download or there are none to delete in the first place. clean_up_downloads(delete_partial=False) finish_progress_callback(resumable_episodes)
def save(self, filename=None):
    """Atomically write the JSON configuration to disk.

    Writes to "<filename>.tmp" first, then renames over the target so
    readers never see a half-written file.  The temporary file is
    removed and the exception re-raised if anything goes wrong.
    """
    if filename is None:
        filename = self.__filename

    logger.info('Flushing settings to disk')

    try:
        # "with" guarantees the handle is closed even if write() fails
        # (the previous open/close pair leaked the handle on error)
        with open(filename + '.tmp', 'wt') as fp:
            fp.write(repr(self.__json_config))
        util.atomic_rename(filename + '.tmp', filename)
    except Exception:
        # was a bare "except:" - narrowed, but still logged and re-raised
        logger.error('Cannot write settings to %s', filename)
        util.delete_file(filename + '.tmp')
        raise

    self.__save_thread = None
def save(self, filename=None):
    """Atomically write the JSON configuration to disk.

    Writes to "<filename>.tmp" first, then renames over the target so
    readers never see a half-written file.  The temporary file is
    removed and the exception re-raised if anything goes wrong.
    """
    if filename is None:
        filename = self.__filename

    logger.info("Flushing settings to disk")

    try:
        # "with" guarantees the handle is closed even if write() fails
        # (the previous open/close pair leaked the handle on error)
        with open(filename + ".tmp", "wt") as fp:
            fp.write(repr(self.__json_config))
        util.atomic_rename(filename + ".tmp", filename)
    except Exception:
        # was a bare "except:" - narrowed, but still logged and re-raised
        logger.error("Cannot write settings to %s", filename)
        util.delete_file(filename + ".tmp")
        raise

    self.__save_thread = None
def image_download_thread(self, url, callback_pixbuf=None, callback_status=None, callback_finished=None, cover_file=None):
    """Download (or read cached) cover art and report back via callbacks.

    Runs in a worker thread; all callbacks are dispatched to the main
    loop with util.idle_add.  When cover_file is given the image is
    cached there and read back from disk.
    """
    if callback_status is not None:
        util.idle_add(callback_status, _('Downloading podcast cover...'))

    pixbuf = gtk.gdk.PixbufLoader()

    if cover_file is None:
        # No cache file: stream the image straight into the loader
        log('Downloading %s', url)
        pixbuf.write(urllib.urlopen(url).read())

    if cover_file is not None and not os.path.exists(cover_file):
        # Populate the on-disk cache first
        log('Downloading cover to %s', cover_file)
        cachefile = open(cover_file, "w")
        cachefile.write(urllib.urlopen(url).read())
        cachefile.close()

    if cover_file is not None:
        log('Reading cover from %s', cover_file)
        try:
            pixbuf.write(open(cover_file, "r").read())
        except:
            # Probably a data error, delete temp file
            log('Data error while reading pixbuf. Deleting %s', cover_file, sender=self)
            util.delete_file(cover_file)

    try:
        pixbuf.close()
    except:
        # data error, delete temp file
        util.delete_file(cover_file)

    # Covers are scaled down to at most MAX_SIZE pixels per side
    MAX_SIZE = 400
    if callback_pixbuf is not None:
        pb = pixbuf.get_pixbuf()
        if pb:
            if pb.get_width() > MAX_SIZE:
                factor = MAX_SIZE*1.0/pb.get_width()
                pb = pb.scale_simple(int(pb.get_width()*factor), int(pb.get_height()*factor), gtk.gdk.INTERP_BILINEAR)
            if pb.get_height() > MAX_SIZE:
                factor = MAX_SIZE*1.0/pb.get_height()
                pb = pb.scale_simple(int(pb.get_width()*factor), int(pb.get_height()*factor), gtk.gdk.INTERP_BILINEAR)
            util.idle_add(callback_pixbuf, pb)

    if callback_status is not None:
        util.idle_add(callback_status, '')
    if callback_finished is not None:
        util.idle_add(callback_finished)
def __init__(self, filename, debug=False):
    """Open (and, on first use, migrate) the minidb-backed database."""
    self.filename = filename + '.minidb'
    # A missing .minidb file means the old JSON DB still needs importing
    need_migration = not os.path.exists(self.filename)

    self.db = minidb.Store(self.filename, debug=debug, smartupdate=True)
    for model_class in (model.PodcastEpisode, model.PodcastChannel):
        self.db.register(model_class)

    if not need_migration:
        return

    try:
        MigrateJSONDBToMiniDB(self).migrate()
    except Exception as e:
        # A failed migration leaves an unusable store behind; drop it
        # and exit so the next start can retry from scratch.
        logger.fatal('Could not migrate database: %s', e, exc_info=True)
        self.db.close()
        self.db = None
        util.delete_file(self.filename)
        sys.exit(1)
def status_updated(self, count, blockSize, totalSize):
    """urllib reporthook: update progress/speed and honour cancellation.

    Raises DownloadCancelledException when the task was cancelled so
    the surrounding retrieve() call aborts.
    """
    if totalSize:
        self.progress = 100.0*float(count*blockSize)/float(totalSize)
        # We see a different "total size" while downloading,
        # so correct the total size variable in the thread
        if totalSize != self.total_size:
            log('Correcting file size for %s from %d to %d while downloading.', self.url, self.total_size, totalSize, sender=self)
            self.total_size = totalSize
    else:
        # Unknown content length: show the bar as full
        self.progress = 100.0

    self.calculate_speed(count, blockSize)

    # Rate-limit status-manager updates to MAX_UPDATES_PER_SEC
    if self.last_update < time.time() - (1.0 / self.MAX_UPDATES_PER_SEC):
        services.download_status_manager.update_status(self.download_id, speed=self.speed, progress=self.progress)
        self.last_update = time.time()

    if self.cancelled:
        util.delete_file(self.tempname)
        raise DownloadCancelledException()
def run(self):
    """Synchronize this episode to the device (worker entry point).

    Returns True on success, False when cancelled, not runnable or
    failed.  On success the task is marked DONE, the total size is
    corrected if unknown, and extensions are notified.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == SyncTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        return False

    # We only start this download if its status is "downloading"
    if self.status != SyncTask.DOWNLOADING:
        return False

    # We are synching this file right now
    self.status = SyncTask.DOWNLOADING
    self._notification_shown = False

    try:
        logger.info('Starting SyncTask')
        self.device.add_track(self.episode, reporthook=self.status_updated)
    except Exception as e:
        self.status = SyncTask.FAILED
        logger.error('Sync failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e),)

    # Still DOWNLOADING here means add_track() did not raise
    if self.status == SyncTask.DOWNLOADING:
        # Everything went well - we're done
        self.status = SyncTask.DONE
        if self.total_size <= 0:
            self.total_size = util.calculate_size(self.filename)
            logger.info('Total size updated to %d', self.total_size)
        self.progress = 1.0
        # NOTE(review): uses self.__episode here but self.episode above -
        # confirm both attributes exist on this class
        gpodder.user_extensions.on_episode_synced(self.device, self.__episode)
        return True

    self.speed = 0.0
    # We finished, but not successfully (at least not really)
    return False
def clean_up_downloads(self, delete_partial=False):
    """Remove temporary download files and prune empty download folders.

    delete_partial - also remove .tmp-* in-progress files
    """
    # Clean up temporary files left behind by old gPodder versions
    if delete_partial:
        temporary_files = glob.glob('%s/*/.tmp-*' % (self.downloaddir,))
        for tempfile in temporary_files:
            util.delete_file(tempfile)

    # Clean up empty download folders
    download_dirs = glob.glob('%s/*' % (self.downloaddir,))
    for ddir in download_dirs:
        if os.path.isdir(ddir):
            globr = glob.glob('%s/*' % (ddir,))
            # Keep the BitTorrent target directory even when empty
            if not globr and ddir != self.config.bittorrent_dir:
                log('Stale download directory found: %s', os.path.basename(ddir))
                try:
                    os.rmdir(ddir)
                    log('Successfully removed %s.', ddir)
                except:
                    log('Could not remove %s.', ddir)
def run(self):
    """Synchronize this episode to the device (worker entry point).

    Returns True on success, False when cancelled, not runnable or
    failed.  On success the task is marked DONE, the total size is
    corrected if unknown, and extensions are notified.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == SyncTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        return False

    # We only start this download if its status is "downloading"
    if self.status != SyncTask.DOWNLOADING:
        return False

    # We are synching this file right now
    self.status = SyncTask.DOWNLOADING
    self._notification_shown = False

    try:
        logger.info('Starting SyncTask')
        self.device.add_track(self.episode, reporthook=self.status_updated)
    except Exception as e:
        self.status = SyncTask.FAILED
        logger.error('Sync failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e),)

    # Still DOWNLOADING here means add_track() did not raise
    if self.status == SyncTask.DOWNLOADING:
        # Everything went well - we're done
        self.status = SyncTask.DONE
        if self.total_size <= 0:
            self.total_size = util.calculate_size(self.filename)
            logger.info('Total size updated to %d', self.total_size)
        self.progress = 1.0
        # NOTE(review): uses self.__episode here but self.episode above -
        # confirm both attributes exist on this class
        gpodder.user_extensions.on_episode_synced(self.device, self.__episode)
        return True

    self.speed = 0.0
    # We finished, but not successfully (at least not really)
    return False
def save(self, filename=None):
    """Atomically write the JSON configuration to disk.

    Data is written to "<filename>.tmp" and renamed over the target;
    the temporary file is removed again (and the exception re-raised)
    if anything fails.
    """
    if filename is None:
        filename = self.__filename

    logger.info('Flushing settings to disk')

    try:
        # revoke unix group/world permissions (this has no effect under windows)
        umask = os.umask(0o077)
        with open(filename + '.tmp', 'wt') as fp:
            fp.write(repr(self.__json_config))
        util.atomic_rename(filename + '.tmp', filename)
    except:
        logger.error('Cannot write settings to %s', filename)
        util.delete_file(filename + '.tmp')
        raise
    finally:
        # Restore the caller's umask in all cases
        os.umask(umask)

    self.__save_thread = None
def __get_cover(self, channel, url, async_mode=False, avoid_downloading=False):
    """Fetch a channel cover pixbuf, repairing broken local cover files.

    In async mode the result is announced via the 'cover-available'
    signal; otherwise a (channel.url, pixbuf) tuple is returned.
    pixbuf is None when no usable cover could be obtained.
    """
    def get_filename():
        return self.downloader.get_cover(channel.cover_file,
                url or channel.cover_url, channel.url, channel.title,
                channel.auth_username, channel.auth_password,
                not avoid_downloading)

    if url is not None:
        # An explicit URL forces a re-download of the local cover file
        filename = get_filename()
        if filename.startswith(channel.cover_file):
            logger.info('Replacing cover: %s', filename)
            util.delete_file(filename)

    filename = get_filename()
    pixbuf = None
    try:
        pixbuf = GdkPixbuf.Pixbuf.new_from_file(filename)
    except Exception:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning('Cannot load cover art', exc_info=True)

    if pixbuf is None and filename.startswith(channel.cover_file):
        # The local file is unreadable - delete it and try once more
        logger.info('Deleting broken cover: %s', filename)
        util.delete_file(filename)
        filename = get_filename()
        try:
            pixbuf = GdkPixbuf.Pixbuf.new_from_file(filename)
        except Exception:
            logger.warning('Corrupt cover art on server, deleting', exc_info=True)
            util.delete_file(filename)

    if async_mode:
        self.notify('cover-available', channel, pixbuf)
    else:
        return (channel.url, pixbuf)
def __get_cover(self, channel, url, async_mode=False, avoid_downloading=False):
    """Fetch a channel cover pixbuf, repairing broken local cover files.

    In async mode the result is announced via the 'cover-available'
    signal; otherwise a (channel.url, pixbuf) tuple is returned.
    """
    def fetch():
        return self.downloader.get_cover(channel.cover_file,
                url or channel.cover_url, channel.url, channel.title,
                channel.auth_username, channel.auth_password,
                not avoid_downloading)

    if url is not None:
        # An explicit URL forces a re-download of the local cover file
        filename = fetch()
        if filename.startswith(channel.cover_file):
            logger.info('Replacing cover: %s', filename)
            util.delete_file(filename)

    filename = fetch()
    pixbuf = None
    try:
        pixbuf = GdkPixbuf.Pixbuf.new_from_file(filename)
    except Exception as e:
        logger.warning('Cannot load cover art', exc_info=True)

    if pixbuf is None and filename.startswith(channel.cover_file):
        # The local file is unreadable - delete it and retry once
        logger.info('Deleting broken cover: %s', filename)
        util.delete_file(filename)
        filename = fetch()
        try:
            pixbuf = GdkPixbuf.Pixbuf.new_from_file(filename)
        except Exception as e:
            logger.warning('Corrupt cover art on server, deleting', exc_info=True)
            util.delete_file(filename)

    if async_mode:
        self.notify('cover-available', channel, pixbuf)
    else:
        return (channel.url, pixbuf)
def update_m3u_playlist(self):
    """Write (or remove) the channel's M3U playlist of downloaded episodes.

    Paths inside the playlist are made relative to the playlist's own
    directory where possible.
    """
    m3u_filename = self.get_playlist_filename()
    downloaded_episodes = self.get_downloaded_episodes()
    if not downloaded_episodes:
        log('No episodes - removing %s', m3u_filename, sender=self)
        util.delete_file(m3u_filename)
        return

    log('Writing playlist to %s', m3u_filename, sender=self)
    f = open(m3u_filename, 'w')
    f.write('#EXTM3U\n')
    for episode in PodcastEpisode.sort_by_pubdate(downloaded_episodes):
        if episode.was_downloaded(and_exists=True):
            filename = episode.local_filename(create=False)
            assert filename is not None
            # Use a path relative to the playlist directory when the
            # episode file lives below it
            if os.path.dirname(filename).startswith(os.path.dirname(m3u_filename)):
                filename = filename[len(os.path.dirname(m3u_filename)+os.sep):]
            f.write('#EXTINF:0,'+self.title+' - '+episode.title+' ('+episode.cute_pubdate()+')\n')
            f.write(filename+'\n')
    f.close()
self.request_cover(channel, custom_url) def __get_cover(self, channel, url, async=False, avoid_downloading=False): def get_filename(): return self.downloader.get_cover(channel.cover_file, url or channel.cover_url, channel.url, channel.title, channel.auth_username, channel.auth_password, not avoid_downloading) if url is not None: filename = get_filename() if filename.startswith(channel.cover_file): logger.info('Replacing cover: %s', filename) util.delete_file(filename) filename = get_filename() pixbuf = None try: pixbuf = gtk.gdk.pixbuf_new_from_file(filename) except Exception, e: logger.warn('Cannot load cover art', exc_info=True) if filename.startswith(channel.cover_file): logger.info('Deleting broken cover: %s', filename) util.delete_file(filename) filename = get_filename() pixbuf = gtk.gdk.pixbuf_new_from_file(filename) if async:
def run(self): # Speed calculation (re-)starts here self.__start_time = 0 self.__start_blocks = 0 # If the download has already been cancelled, skip it if self.status == DownloadTask.CANCELLED: util.delete_file(self.tempname) self.progress = 0.0 self.speed = 0.0 return False # We only start this download if its status is "queued" if self.status != DownloadTask.QUEUED: return False # We are downloading this file right now self.status = DownloadTask.DOWNLOADING self._notification_shown = False try: # Resolve URL and start downloading the episode url = youtube.get_real_download_url(self.__episode.url, \ self._config.youtube_preferred_fmt_id) downloader = DownloadURLOpener(self.__episode.channel) headers, real_url = downloader.retrieve_resume(url, \ self.tempname, reporthook=self.status_updated) new_mimetype = headers.get('content-type', self.__episode.mimetype) old_mimetype = self.__episode.mimetype _basename, ext = os.path.splitext(self.filename) if new_mimetype != old_mimetype or util.wrong_extension(ext): log('Correcting mime type: %s => %s', old_mimetype, new_mimetype, sender=self) old_extension = self.__episode.extension() self.__episode.mimetype = new_mimetype new_extension = self.__episode.extension() # If the desired filename extension changed due to the new # mimetype, we force an update of the local filename to fix the # extension. 
if old_extension != new_extension or util.wrong_extension(ext): self.filename = self.__episode.local_filename( create=True, force_update=True) # TODO: Check if "real_url" is different from "url" and if it is, # see if we can get a better episode filename out of it # Look at the Content-disposition header; use if if available disposition_filename = get_header_param(headers, \ 'filename', 'content-disposition') # Some servers do send the content-disposition header, but provide # an empty filename, resulting in an empty string here (bug 1440) if disposition_filename is not None and disposition_filename != '': # The server specifies a download filename - try to use it disposition_filename = os.path.basename(disposition_filename) self.filename = self.__episode.local_filename(create=True, \ force_update=True, template=disposition_filename) new_mimetype, encoding = mimetypes.guess_type(self.filename) if new_mimetype is not None: log('Using content-disposition mimetype: %s', new_mimetype, sender=self) self.__episode.set_mimetype(new_mimetype, commit=True) shutil.move(self.tempname, self.filename) # Model- and database-related updates after a download has finished self.__episode.on_downloaded(self.filename) # If a user command has been defined, execute the command setting some environment variables if len(self._config.cmd_download_complete) > 0: os.environ["GPODDER_EPISODE_URL"] = self.__episode.url or '' os.environ[ "GPODDER_EPISODE_TITLE"] = self.__episode.title or '' os.environ["GPODDER_EPISODE_FILENAME"] = self.filename or '' os.environ["GPODDER_EPISODE_PUBDATE"] = str( int(self.__episode.pubDate)) os.environ["GPODDER_EPISODE_LINK"] = self.__episode.link or '' os.environ[ "GPODDER_EPISODE_DESC"] = self.__episode.description or '' os.environ[ "GPODDER_CHANNEL_TITLE"] = self.__episode.channel.title or '' util.run_external_command(self._config.cmd_download_complete) except DownloadCancelledException: log('Download has been cancelled/paused: %s', self, sender=self) if 
self.status == DownloadTask.CANCELLED: util.delete_file(self.tempname) self.progress = 0.0 self.speed = 0.0 except urllib.ContentTooShortError, ctse: self.status = DownloadTask.FAILED self.error_message = _('Missing content from server')
def removed_from_list(self):
    """Hook called when this task is removed from the download list.

    Only a finished (DONE) download keeps its file; any other status
    means the temporary file is a leftover and gets deleted.
    """
    finished = (self.status == self.DONE)
    if not finished:
        util.delete_file(self.tempname)
def run(self): # Speed calculation (re-)starts here self.__start_time = 0 self.__start_blocks = 0 # If the download has already been cancelled, skip it if self.status == DownloadTask.CANCELLED: util.delete_file(self.tempname) self.progress = 0.0 self.speed = 0.0 return False # We only start this download if its status is "queued" if self.status != DownloadTask.QUEUED: return False # We are downloading this file right now self.status = DownloadTask.DOWNLOADING self._notification_shown = False try: # Resolve URL and start downloading the episode fmt_ids = youtube.get_fmt_ids(self._config.youtube) url = youtube.get_real_download_url(self.__episode.url, fmt_ids) url = vimeo.get_real_download_url(url, self._config.vimeo.fileformat) url = escapist_videos.get_real_download_url(url) # We should have properly-escaped characters in the URL, but sometimes # this is not true -- take any characters that are not in ASCII and # convert them to UTF-8 and then percent-encode the UTF-8 string data # Example: https://github.com/gpodder/gpodder/issues/232 url_chars = [] for char in url: if ord(char) <= 31 or ord(char) >= 127: for char in urllib.quote(char.encode('utf-8')): url_chars.append(char.decode('utf-8')) else: url_chars.append(char) url = u''.join(url_chars) url = url.strip() downloader = DownloadURLOpener(self.__episode.channel) # HTTP Status codes for which we retry the download retry_codes = (408, 418, 504, 598, 599) max_retries = max(0, self._config.auto.retries) # Retry the download on timeout (bug 1013) for retry in range(max_retries + 1): if retry > 0: logger.info('Retrying download of %s (%d)', url, retry) time.sleep(1) try: headers, real_url = downloader.retrieve_resume( url, self.tempname, reporthook=self.status_updated) # If we arrive here, the download was successful break except urllib.ContentTooShortError, ctse: if retry < max_retries: logger.info('Content too short: %s - will retry.', url) continue raise except socket.timeout, tmout: if retry < max_retries: 
logger.info('Socket timeout: %s - will retry.', url) continue raise
def add_track(self, episode, reporthook=None):
    """Copy a downloaded episode onto the iPod via libgpod.

    Returns True when the track is (or already was) on the device,
    False when the transfer was skipped or aborted (not enough free
    space, or an unsupported .ogg file).

    episode: the downloaded episode to transfer
    reporthook: optional progress callback (count, blocksize, total)
    """
    self.notify('status', _('Adding %s') % episode.title)
    tracklist = gpod.sw_get_playlist_tracks(self.podcasts_playlist)
    podcasturls = [track.podcasturl for track in tracklist]

    if episode.url in podcasturls:
        # Mark as played on iPod if played locally (and set podcast flags)
        self.set_podcast_flags(tracklist[podcasturls.index(episode.url)], episode)
        return True

    original_filename = episode.local_filename(create=False)
    # The file has to exist, if we ought to transfer it, and therefore,
    # local_filename(create=False) must never return None as filename
    assert original_filename is not None
    local_filename = original_filename

    if util.calculate_size(original_filename) > self.get_free_space():
        logger.error('Not enough space on %s, sync aborted...', self.mountpoint)
        d = {'episode': episode.title, 'mountpoint': self.mountpoint}
        message = _('Error copying %(episode)s: Not enough free space on %(mountpoint)s')
        self.errors.append(message % d)
        self.cancelled = True
        return False

    local_filename = episode.local_filename(create=False)

    (fn, extension) = os.path.splitext(local_filename)
    if extension.lower().endswith('ogg'):
        # libgpod cannot play Ogg Vorbis files on the device
        logger.error('Cannot copy .ogg files to iPod.')
        return False

    track = gpod.itdb_track_new()

    # Add release time to track if episode.published has a valid value
    if episode.published > 0:
        try:
            # libgpod >= 0.5.x uses a new timestamp format
            track.time_released = gpod.itdb_time_host_to_mac(int(episode.published))
        except Exception:
            # old (pre-0.5.x) libgpod versions expect mactime, so
            # we're going to manually build a good mactime timestamp here :)
            #
            # + 2082844800 for unixtime => mactime (1970 => 1904)
            track.time_released = int(episode.published + 2082844800)

    track.title = str(episode.title)
    track.album = str(episode.channel.title)
    track.artist = str(episode.channel.title)
    track.description = str(util.remove_html_tags(episode.description))

    track.podcasturl = str(episode.url)
    track.podcastrss = str(episode.channel.url)

    track.tracklen = get_track_length(local_filename)
    track.size = os.path.getsize(local_filename)

    if episode.file_type() == 'audio':
        track.filetype = 'mp3'
        track.mediatype = 0x00000004
    elif episode.file_type() == 'video':
        track.filetype = 'm4v'
        track.mediatype = 0x00000006

    self.set_podcast_flags(track, episode)

    gpod.itdb_track_add(self.itdb, track, -1)
    gpod.itdb_playlist_add_track(self.master_playlist, track, -1)
    gpod.itdb_playlist_add_track(self.podcasts_playlist, track, -1)
    gpod.itdb_cp_track_to_ipod(track, str(local_filename), None)

    # BUG FIX: reporthook defaults to None but was called unconditionally,
    # raising TypeError for every caller that relied on the default
    if reporthook is not None:
        reporthook(episode.file_size, 1, episode.file_size)

    # If the file has been converted, delete the temporary file here
    if local_filename != original_filename:
        util.delete_file(local_filename)

    return True
def run(self):
    """Execute this download task; return True on success, False otherwise.

    Resolves the real media URL (YouTube/Vimeo/Escapist), downloads with
    resume support and limited retries, fixes up mime type and filename
    (redirects, Content-disposition), then moves the temporary file into
    place and updates the episode model. All errors are mapped to
    status FAILED with a translated error_message.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == DownloadTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        return False

    # We only start this download if its status is "downloading"
    if self.status != DownloadTask.DOWNLOADING:
        return False

    # We are downloading this file right now
    self.status = DownloadTask.DOWNLOADING
    self._notification_shown = False

    try:
        # Resolve URL and start downloading the episode
        fmt_ids = youtube.get_fmt_ids(self._config.youtube)
        url = youtube.get_real_download_url(self.__episode.url, fmt_ids)
        url = vimeo.get_real_download_url(url, self._config.vimeo.fileformat)
        url = escapist_videos.get_real_download_url(url)
        url = url.strip()

        # Properly escapes Unicode characters in the URL path section
        # TODO: Explore if this should also handle the domain
        # Based on: http://stackoverflow.com/a/18269491/1072626
        # In response to issue: https://github.com/gpodder/gpodder/issues/232
        def iri_to_url(url):
            url = urllib.parse.urlsplit(url)
            url = list(url)
            # First unquote to avoid escaping quoted content
            url[2] = urllib.parse.unquote(url[2])
            url[2] = urllib.parse.quote(url[2])
            url = urllib.parse.urlunsplit(url)
            return url

        url = iri_to_url(url)

        downloader = DownloadURLOpener(self.__episode.channel)

        # HTTP Status codes for which we retry the download
        retry_codes = (408, 418, 504, 598, 599)
        max_retries = max(0, self._config.auto.retries)

        # Retry the download on timeout (bug 1013)
        for retry in range(max_retries + 1):
            if retry > 0:
                logger.info('Retrying download of %s (%d)', url, retry)
                time.sleep(1)

            try:
                headers, real_url = downloader.retrieve_resume(
                    url, self.tempname, reporthook=self.status_updated)
                # If we arrive here, the download was successful
                break
            except urllib.error.ContentTooShortError:
                if retry < max_retries:
                    logger.info('Content too short: %s - will retry.', url)
                    continue
                raise
            except socket.timeout:
                if retry < max_retries:
                    logger.info('Socket timeout: %s - will retry.', url)
                    continue
                raise
            except gPodderDownloadHTTPError as http:
                if retry < max_retries and http.error_code in retry_codes:
                    logger.info('HTTP error %d: %s - will retry.',
                            http.error_code, url)
                    continue
                raise

        new_mimetype = headers.get('content-type', self.__episode.mime_type)
        old_mimetype = self.__episode.mime_type
        _basename, ext = os.path.splitext(self.filename)
        if new_mimetype != old_mimetype or util.wrong_extension(ext):
            logger.info('Updating mime type: %s => %s',
                    old_mimetype, new_mimetype)
            old_extension = self.__episode.extension()
            self.__episode.mime_type = new_mimetype
            new_extension = self.__episode.extension()

            # If the desired filename extension changed due to the new
            # mimetype, we force an update of the local filename to fix the
            # extension.
            if old_extension != new_extension or util.wrong_extension(ext):
                self.filename = self.__episode.local_filename(
                        create=True, force_update=True)

        # In some cases, the redirect of a URL causes the real filename to
        # be revealed in the final URL (e.g. http://gpodder.org/bug/1423)
        if real_url != url and not util.is_known_redirecter(real_url):
            realname, realext = util.filename_from_url(real_url)

            # Only update from redirect if the redirected-to filename has
            # a proper extension (this is needed for e.g. YouTube)
            if not util.wrong_extension(realext):
                real_filename = ''.join((realname, realext))
                self.filename = self.__episode.local_filename(
                        create=True, force_update=True,
                        template=real_filename)
                logger.info('Download was redirected (%s). New filename: %s',
                        real_url, os.path.basename(self.filename))

        # Look at the Content-disposition header; use it if available
        disposition_filename = get_header_param(
                headers, 'filename', 'content-disposition')

        if disposition_filename is not None:
            # BUG FIX: the original called str.decode('ascii'), which does
            # not exist on Python 3 str - the bare except then *always*
            # discarded the disposition filename. encode() performs the
            # intended ASCII check.
            try:
                disposition_filename.encode('ascii')
            except UnicodeEncodeError:
                logger.warning('Content-disposition header contains '
                        'non-ASCII characters - ignoring')
                disposition_filename = None

        # Some servers do send the content-disposition header, but provide
        # an empty filename, resulting in an empty string here (bug 1440)
        if disposition_filename is not None and disposition_filename != '':
            # The server specifies a download filename - try to use it
            disposition_filename = os.path.basename(disposition_filename)
            self.filename = self.__episode.local_filename(create=True,
                    force_update=True, template=disposition_filename)
            new_mimetype, encoding = mimetypes.guess_type(self.filename)
            if new_mimetype is not None:
                logger.info('Using content-disposition mimetype: %s',
                        new_mimetype)
                self.__episode.mime_type = new_mimetype

        # Re-evaluate filename and tempname to take care of podcast renames
        # while downloads are running (which will change both file names)
        self.filename = self.__episode.local_filename(create=False)
        self.tempname = os.path.join(os.path.dirname(self.filename),
                os.path.basename(self.tempname))
        shutil.move(self.tempname, self.filename)

        # Model- and database-related updates after a download has finished
        self.__episode.on_downloaded(self.filename)
    except DownloadCancelledException:
        logger.info('Download has been cancelled/paused: %s', self)
        if self.status == DownloadTask.CANCELLED:
            util.delete_file(self.tempname)
            self.progress = 0.0
            self.speed = 0.0
    except urllib.error.ContentTooShortError:
        self.status = DownloadTask.FAILED
        self.error_message = _('Missing content from server')
    except IOError as ioe:
        logger.error('%s while downloading "%s": %s', ioe.strerror,
                self.__episode.title, ioe.filename, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'error': ioe.strerror, 'filename': ioe.filename}
        self.error_message = _('I/O Error: %(error)s: %(filename)s') % d
    except gPodderDownloadHTTPError as gdhe:
        logger.error('HTTP %s while downloading "%s": %s', gdhe.error_code,
                self.__episode.title, gdhe.error_message, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'code': gdhe.error_code, 'message': gdhe.error_message}
        self.error_message = _('HTTP Error %(code)s: %(message)s') % d
    except Exception as e:
        self.status = DownloadTask.FAILED
        logger.error('Download failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e),)

    if self.status == DownloadTask.DOWNLOADING:
        # Everything went well - we're done
        self.status = DownloadTask.DONE
        if self.total_size <= 0:
            self.total_size = util.calculate_size(self.filename)
            logger.info('Total size updated to %d', self.total_size)
        self.progress = 1.0
        gpodder.user_extensions.on_episode_downloaded(self.__episode)
        return True

    self.speed = 0.0
    # We finished, but not successfully (at least not really)
    return False
def run(self):
    """Execute this download task; return True on success, False otherwise.

    Either delegates to a registered custom downloader or resolves the
    URL and downloads with resume support and limited retries, then
    fixes up mime type and filename (redirects, Content-disposition),
    moves the temporary file into place and updates the episode model.
    Errors are mapped to status FAILED with a translated error_message.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == DownloadTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        return False

    # We only start this download if its status is "downloading"
    if self.status != DownloadTask.DOWNLOADING:
        return False

    # We are downloading this file right now
    self.status = DownloadTask.DOWNLOADING
    self._notification_shown = False

    # Restore a reference to this task in the episode
    # when running a recycled task following a pause or failed
    # see #649
    if not self.episode.download_task:
        self.episode.download_task = self

    try:
        # A registered extension may take over the whole download
        custom_downloader = registry.custom_downloader.resolve(
                self._config, None, self.episode)
        url = self.__episode.url
        if custom_downloader:
            logger.info('Downloading %s with %s', url, custom_downloader)
            headers, real_url = custom_downloader.retrieve_resume(
                    self.tempname, reporthook=self.status_updated)
        else:
            # Resolve URL and start downloading the episode
            res = registry.download_url.resolve(self._config, None,
                    self.episode)
            if res:
                url = res

            if url == self.__episode.url:
                # don't modify custom urls (#635 - vimeo breaks if * is unescaped)
                url = url.strip()
                url = util.iri_to_url(url)

            logger.info("Downloading %s", url)
            downloader = DownloadURLOpener(self.__episode.channel)

            # HTTP Status codes for which we retry the download
            retry_codes = (408, 418, 504, 598, 599)
            max_retries = max(0, self._config.auto.retries)

            # Retry the download on timeout (bug 1013)
            for retry in range(max_retries + 1):
                if retry > 0:
                    logger.info('Retrying download of %s (%d)', url, retry)
                    time.sleep(1)

                try:
                    headers, real_url = downloader.retrieve_resume(
                            url, self.tempname,
                            reporthook=self.status_updated)
                    # If we arrive here, the download was successful
                    break
                except urllib.error.ContentTooShortError as ctse:
                    if retry < max_retries:
                        logger.info('Content too short: %s - will retry.',
                                url)
                        continue
                    raise
                except socket.timeout as tmout:
                    if retry < max_retries:
                        logger.info('Socket timeout: %s - will retry.', url)
                        continue
                    raise
                except gPodderDownloadHTTPError as http:
                    # Only retry for the whitelisted HTTP status codes
                    if retry < max_retries and http.error_code in retry_codes:
                        logger.info('HTTP error %d: %s - will retry.',
                                http.error_code, url)
                        continue
                    raise

        new_mimetype = headers.get('content-type', self.__episode.mime_type)
        old_mimetype = self.__episode.mime_type
        _basename, ext = os.path.splitext(self.filename)
        if new_mimetype != old_mimetype or util.wrong_extension(ext):
            logger.info('Updating mime type: %s => %s',
                    old_mimetype, new_mimetype)
            old_extension = self.__episode.extension()
            self.__episode.mime_type = new_mimetype
            new_extension = self.__episode.extension()

            # If the desired filename extension changed due to the new
            # mimetype, we force an update of the local filename to fix the
            # extension.
            if old_extension != new_extension or util.wrong_extension(ext):
                self.filename = self.__episode.local_filename(
                        create=True, force_update=True)

        # In some cases, the redirect of a URL causes the real filename to
        # be revealed in the final URL (e.g. http://gpodder.org/bug/1423)
        if real_url != url and not util.is_known_redirecter(real_url):
            realname, realext = util.filename_from_url(real_url)

            # Only update from redirect if the redirected-to filename has
            # a proper extension (this is needed for e.g. YouTube)
            if not util.wrong_extension(realext):
                real_filename = ''.join((realname, realext))
                self.filename = self.__episode.local_filename(
                        create=True, force_update=True,
                        template=real_filename)
                logger.info('Download was redirected (%s). New filename: %s',
                        real_url, os.path.basename(self.filename))

        # Look at the Content-disposition header; use it if available
        disposition_filename = get_header_param(
                headers, 'filename', 'content-disposition')

        # Some servers do send the content-disposition header, but provide
        # an empty filename, resulting in an empty string here (bug 1440)
        if disposition_filename is not None and disposition_filename != '':
            # The server specifies a download filename - try to use it
            # filename_from_url to remove query string; see #591
            fn, ext = util.filename_from_url(disposition_filename)
            logger.debug("converting disposition filename '%s' to local filename '%s%s'",
                    disposition_filename, fn, ext)
            disposition_filename = fn + ext
            self.filename = self.__episode.local_filename(create=True,
                    force_update=True, template=disposition_filename)
            new_mimetype, encoding = mimetypes.guess_type(self.filename)
            if new_mimetype is not None:
                logger.info('Using content-disposition mimetype: %s',
                        new_mimetype)
                self.__episode.mime_type = new_mimetype

        # Re-evaluate filename and tempname to take care of podcast renames
        # while downloads are running (which will change both file names)
        self.filename = self.__episode.local_filename(create=False)
        self.tempname = os.path.join(os.path.dirname(self.filename),
                os.path.basename(self.tempname))
        shutil.move(self.tempname, self.filename)

        # Model- and database-related updates after a download has finished
        self.__episode.on_downloaded(self.filename)
    except DownloadCancelledException:
        logger.info('Download has been cancelled/paused: %s', self)
        if self.status == DownloadTask.CANCELLED:
            util.delete_file(self.tempname)
            self.progress = 0.0
            self.speed = 0.0
    except urllib.error.ContentTooShortError as ctse:
        self.status = DownloadTask.FAILED
        self.error_message = _('Missing content from server')
    except IOError as ioe:
        logger.error('%s while downloading "%s": %s', ioe.strerror,
                self.__episode.title, ioe.filename, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'error': ioe.strerror, 'filename': ioe.filename}
        self.error_message = _('I/O Error: %(error)s: %(filename)s') % d
    except gPodderDownloadHTTPError as gdhe:
        logger.error('HTTP %s while downloading "%s": %s', gdhe.error_code,
                self.__episode.title, gdhe.error_message, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'code': gdhe.error_code, 'message': gdhe.error_message}
        self.error_message = _('HTTP Error %(code)s: %(message)s') % d
    except Exception as e:
        self.status = DownloadTask.FAILED
        logger.error('Download failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e), )

    if self.status == DownloadTask.DOWNLOADING:
        # Everything went well - we're done
        self.status = DownloadTask.DONE
        if self.total_size <= 0:
            self.total_size = util.calculate_size(self.filename)
            logger.info('Total size updated to %d', self.total_size)
        self.progress = 1.0
        gpodder.user_extensions.on_episode_downloaded(self.__episode)
        return True

    self.speed = 0.0
    # We finished, but not successfully (at least not really)
    return False
def run(self):
    """Execute this download task; return True on success, False otherwise.

    Picks a downloader (task-specific, registry-resolved, or the
    default), downloads with resume support, fixes up mime type and
    filename (redirects, Content-disposition), moves the temporary
    file into place and updates the episode model. Errors are mapped
    to status FAILED with a translated error_message.
    """
    # Speed calculation (re-)starts here
    self.__start_time = 0
    self.__start_blocks = 0

    # If the download has already been cancelled, skip it
    if self.status == DownloadTask.CANCELLED:
        util.delete_file(self.tempname)
        self.progress = 0.0
        self.speed = 0.0
        self.recycle()
        return False

    # We only start this download if its status is "downloading"
    if self.status != DownloadTask.DOWNLOADING:
        return False

    # We are downloading this file right now
    self.status = DownloadTask.DOWNLOADING
    self._notification_shown = False

    # Restore a reference to this task in the episode
    # when running a recycled task following a pause or failed
    # see #649
    if not self.episode.download_task:
        self.episode.download_task = self

    url = self.__episode.url
    try:
        if url == '':
            raise DownloadNoURLException()

        # Prefer a downloader pinned to this task, then any registered
        # custom downloader, then the default HTTP downloader
        if self.downloader:
            downloader = self.downloader.custom_downloader(
                    self._config, self.episode)
        else:
            downloader = registry.custom_downloader.resolve(
                    self._config, None, self.episode)

        if downloader:
            logger.info('Downloading %s with %s', url, downloader)
        else:
            downloader = DefaultDownloader.custom_downloader(
                    self._config, self.episode)

        headers, real_url = downloader.retrieve_resume(
                self.tempname, self.status_updated)

        new_mimetype = headers.get('content-type', self.__episode.mime_type)
        old_mimetype = self.__episode.mime_type
        _basename, ext = os.path.splitext(self.filename)
        if new_mimetype != old_mimetype or util.wrong_extension(ext):
            logger.info('Updating mime type: %s => %s',
                    old_mimetype, new_mimetype)
            old_extension = self.__episode.extension()
            self.__episode.mime_type = new_mimetype
            # don't call local_filename because we'll get the old download name
            new_extension = self.__episode.extension(
                    may_call_local_filename=False)

            # If the desired filename extension changed due to the new
            # mimetype, we force an update of the local filename to fix the
            # extension.
            if old_extension != new_extension or util.wrong_extension(ext):
                self.filename = self.__episode.local_filename(
                        create=True, force_update=True)

        # In some cases, the redirect of a URL causes the real filename to
        # be revealed in the final URL (e.g. http://gpodder.org/bug/1423)
        if real_url != url and not util.is_known_redirecter(real_url):
            realname, realext = util.filename_from_url(real_url)

            # Only update from redirect if the redirected-to filename has
            # a proper extension (this is needed for e.g. YouTube)
            if not util.wrong_extension(realext):
                real_filename = ''.join((realname, realext))
                self.filename = self.__episode.local_filename(
                        create=True, force_update=True,
                        template=real_filename)
                logger.info('Download was redirected (%s). New filename: %s',
                        real_url, os.path.basename(self.filename))

        # Look at the Content-disposition header; use it if available
        disposition_filename = util.get_header_param(
                headers, 'filename', 'content-disposition')

        # Some servers do send the content-disposition header, but provide
        # an empty filename, resulting in an empty string here (bug 1440)
        if disposition_filename is not None and disposition_filename != '':
            # The server specifies a download filename - try to use it
            # filename_from_url to remove query string; see #591
            fn, ext = util.filename_from_url(disposition_filename)
            logger.debug("converting disposition filename '%s' to local filename '%s%s'",
                    disposition_filename, fn, ext)
            disposition_filename = fn + ext
            self.filename = self.__episode.local_filename(create=True,
                    force_update=True, template=disposition_filename)
            new_mimetype, encoding = mimetypes.guess_type(self.filename)
            if new_mimetype is not None:
                logger.info('Using content-disposition mimetype: %s',
                        new_mimetype)
                self.__episode.mime_type = new_mimetype

        # Re-evaluate filename and tempname to take care of podcast renames
        # while downloads are running (which will change both file names)
        self.filename = self.__episode.local_filename(create=False)
        self.tempname = os.path.join(os.path.dirname(self.filename),
                os.path.basename(self.tempname))
        shutil.move(self.tempname, self.filename)

        # Model- and database-related updates after a download has finished
        self.__episode.on_downloaded(self.filename)
    except DownloadCancelledException:
        logger.info('Download has been cancelled/paused: %s', self)
        if self.status == DownloadTask.CANCELLED:
            util.delete_file(self.tempname)
            self.progress = 0.0
            self.speed = 0.0
    except DownloadNoURLException:
        self.status = DownloadTask.FAILED
        self.error_message = _('Episode has no URL to download')
    except urllib.error.ContentTooShortError:
        self.status = DownloadTask.FAILED
        self.error_message = _('Missing content from server')
    except ConnectionError as ce:
        # special case request exception
        self.status = DownloadTask.FAILED
        logger.error('Download failed: %s', str(ce), exc_info=True)
        d = {'host': ce.args[0].pool.host, 'port': ce.args[0].pool.port}
        # BUG FIX: the original interpolated *inside* _() - the formatted
        # string can never match the translation catalog's msgid. Translate
        # the literal first, then apply the substitution (consistent with
        # the other handlers below).
        self.error_message = _(
                "Couldn't connect to server %(host)s:%(port)s") % d
    except RequestException as rexc:
        # renamed from 're' to avoid shadowing the stdlib re module
        # extract MaxRetryError to shorten the exception message
        if isinstance(rexc.args[0], MaxRetryError):
            rexc = rexc.args[0]
        logger.error('%s while downloading "%s"', str(rexc),
                self.__episode.title, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'error': str(rexc)}
        self.error_message = _('Request Error: %(error)s') % d
    except IOError as ioe:
        logger.error('%s while downloading "%s": %s', ioe.strerror,
                self.__episode.title, ioe.filename, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'error': ioe.strerror, 'filename': ioe.filename}
        self.error_message = _('I/O Error: %(error)s: %(filename)s') % d
    except gPodderDownloadHTTPError as gdhe:
        logger.error('HTTP %s while downloading "%s": %s', gdhe.error_code,
                self.__episode.title, gdhe.error_message, exc_info=True)
        self.status = DownloadTask.FAILED
        d = {'code': gdhe.error_code, 'message': gdhe.error_message}
        self.error_message = _('HTTP Error %(code)s: %(message)s') % d
    except Exception as e:
        self.status = DownloadTask.FAILED
        logger.error('Download failed: %s', str(e), exc_info=True)
        self.error_message = _('Error: %s') % (str(e),)

    if self.status == DownloadTask.DOWNLOADING:
        # Everything went well - we're done
        self.status = DownloadTask.DONE
        if self.total_size <= 0:
            self.total_size = util.calculate_size(self.filename)
            logger.info('Total size updated to %d', self.total_size)
        self.progress = 1.0
        gpodder.user_extensions.on_episode_downloaded(self.__episode)
        return True

    self.speed = 0.0
    # We finished, but not successfully (at least not really)
    return False
new_mimetype) self.__episode.mime_type = new_mimetype # Re-evaluate filename and tempname to take care of podcast renames # while downloads are running (which will change both file names) self.filename = self.__episode.local_filename(create=False) self.tempname = os.path.join(os.path.dirname(self.filename), os.path.basename(self.tempname)) shutil.move(self.tempname, self.filename) # Model- and database-related updates after a download has finished self.__episode.on_downloaded(self.filename) except DownloadCancelledException: logger.info('Download has been cancelled/paused: %s', self) if self.status == DownloadTask.CANCELLED: util.delete_file(self.tempname) self.progress = 0.0 self.speed = 0.0 except urllib.ContentTooShortError, ctse: self.status = DownloadTask.FAILED self.error_message = _('Missing content from server') except IOError, ioe: logger.error('%s while downloading "%s": %s', ioe.strerror, self.__episode.title, ioe.filename, exc_info=True) self.status = DownloadTask.FAILED d = {'error': ioe.strerror, 'filename': ioe.filename} self.error_message = _('I/O Error: %(error)s: %(filename)s') % d except gPodderDownloadHTTPError, gdhe: