def _add_default(self, default):
    """Add a default feed or channel folder to the database.

    :param default: either ``(folder_title, [(url, autodownload), ...])``
        describing a channel folder with member feeds, or
        ``(url, autodownload)`` describing a single feed.
    """
    # folder
    if isinstance(default, tuple) and isinstance(default[1], list):
        defaultFolder = default
        try:
            c_folder = folder.ChannelFolder.get_by_title(defaultFolder[0])
        except ObjectNotFoundError:
            # folder doesn't exist yet -- create it
            c_folder = folder.ChannelFolder(defaultFolder[0])
            c_folder.signal_change()
        for url, autodownload in defaultFolder[1]:
            logging.info("adding feed %s" % (url, ))
            # BUG FIX: look up the member feed by its own url; the
            # original passed default[0] (the folder title), which never
            # matches a feed, so existing feeds were re-created.
            d_feed = feed.lookup_feed(url)
            if d_feed is None:
                d_feed = feed.Feed(url, initiallyAutoDownloadable=autodownload)
                d_feed.set_folder(c_folder)
                d_feed.signal_change()
    # feed
    else:
        d_feed = feed.lookup_feed(default[0])
        if d_feed is None:
            logging.info("adding feed %s" % (default, ))
            d_feed = feed.Feed(default[0], initiallyAutoDownloadable=default[1])
            d_feed.signal_change()
def _add_default(self, default, section):
    """Add a default feed or channel folder, tagging it with a section.

    :param default: either ``(folder_title, [(url, autodownload), ...])``
        describing a channel folder with member feeds, or
        ``(url, autodownload)`` describing a single feed.
    :param section: section tag assigned to the folder/feed --
        NOTE(review): allowed values not visible here; presumably
        'audio'/'video', confirm against callers.
    """
    # folder
    if isinstance(default, tuple) and isinstance(default[1], list):
        defaultFolder = default
        try:
            c_folder = folder.ChannelFolder.get_by_title(defaultFolder[0])
        except ObjectNotFoundError:
            # folder doesn't exist yet -- create it in this section
            c_folder = folder.ChannelFolder(defaultFolder[0])
            c_folder.section = section
            c_folder.signal_change()
        for url, autodownload in defaultFolder[1]:
            logging.info("adding feed %s in section %s" % (url, section))
            # BUG FIX: look up the member feed by its own url; the
            # original passed default[0] (the folder title), which never
            # matches a feed, so existing feeds were re-created.
            d_feed = feed.lookup_feed(url)
            if d_feed is None:
                d_feed = feed.Feed(url, initiallyAutoDownloadable=autodownload)
                d_feed.set_folder(c_folder)
                d_feed.section = section
                d_feed.signal_change()
    # feed
    else:
        d_feed = feed.lookup_feed(default[0])
        if d_feed is None:
            logging.info("adding feed %s in section %s" % (default, section))
            d_feed = feed.Feed(default[0], initiallyAutoDownloadable=default[1])
            d_feed.section = section
            d_feed.signal_change()
def check_url_exists(url):
    """Check whether an item with this url is already downloaded.

    If the url matches an item in the manual feed, a dialog describing
    the item's download status is shown (and a paused/stopped/failed
    download is restarted).

    :param url: the url to check
    :returns: True if there is already an item for that url downloaded
        or a feed for it, and False otherwise.
    """
    for existing in feed.Feed.get_manual_feed().items:
        if existing.get_url() != url:
            continue
        state = None
        if existing.downloader is not None:
            state = existing.downloader.get_state()
        title = _("Download already exists")
        text1 = _("That URL is already an external download.")
        if state in ("paused", "stopped", "failed"):
            # restart the stalled download before telling the user
            existing.download()
            text2 = _(
                "%(appname)s will begin downloading it now.",
                {"appname": app.config.get(prefs.SHORT_APP_NAME)}
            )
        elif state == "downloading":
            text2 = _("It is downloading now.")
        else:
            text2 = _("It has already been downloaded.")
        dialogs.MessageBoxDialog(title, "%s %s" % (text1, text2)).run()
        return True
    # not in the manual feed; it may still be a subscribed feed
    return feed.lookup_feed(url) is not None
def filter_existing_feed_urls(urls):
    """Drop urls that Miro is already managing as feeds.

    :param urls: list of feed urls to filter
    :returns: list of those urls with no existing feed in Miro
    """
    return [candidate for candidate in urls
            if feed.lookup_feed(candidate) is None]
def get_sync_items(self):
    """
    Returns two lists of ItemInfos; one for items we need to sync, and
    one for items which have expired.

    :returns: tuple ``(infos, expired)`` -- a set of ItemInfos not yet
        on the device, and a set of device ItemInfos whose backing
        items no longer exist in the synced views.
    """
    sync = self.device.database[u'sync']
    views = []
    url_to_view = {}
    infos = set()
    expired = set()
    if sync.setdefault(u'podcasts', {}).get(u'enabled', False):
        # BUG FIX: read the 'all' flag only after the setdefault above
        # has ensured the 'podcasts' key exists; the original indexed
        # sync[u'podcasts'] first and raised KeyError on databases
        # without that key.
        sync_all_podcasts = sync[u'podcasts'].get(u'all', True)
        for url in sync[u'podcasts'].setdefault(u'items', []):
            feed_ = feed.lookup_feed(url)
            if feed_ is not None:
                if sync_all_podcasts:
                    view = feed_.downloaded_items
                else:
                    view = feed_.unwatched_items
                views.append(view)
                url_to_view[url] = view
    if sync.setdefault(u'playlists', {}).get(u'enabled', False):
        for name in sync[u'playlists'].setdefault(u'items', []):
            try:
                playlist_ = playlist.SavedPlaylist.get_by_title(name)
            except database.ObjectNotFoundError:
                # playlist was deleted/renamed; skip it
                continue
            views.append(item.Item.playlist_view(playlist_.id))
    for view in views:
        source = itemsource.DatabaseItemSource(view)
        try:
            # only sync items not already on the device
            infos.update(
                [info for info in source.fetch_all()
                 if not self.device.database.item_exists(info)])
        finally:
            source.unlink()
    for file_type in (u'audio', u'video'):
        for info in itemsource.DeviceItemSource(self.device,
                                                file_type).fetch_all():
            if (info.feed_url and info.file_url
                    and info.feed_url in url_to_view):
                # narrow the feed's view down to this exact file; if it
                # matches nothing, the item has expired from the feed
                view = url_to_view[info.feed_url]
                new_view = database.View(
                    view.fetcher,
                    view.where + (' AND (rd.origURL=? OR rd.url=? '
                                  'OR item.url=?)'),
                    view.values + (info.file_url, info.file_url,
                                   info.file_url),
                    view.order_by, view.joins, view.limit)
                if not new_view.count():
                    expired.add(info)
    return infos, expired
def handle_feed(self, feed_dict, parent_folder):
    """
    Feed subscriptions look like::

        {
        'type': 'feed',
        'url': URL of the RSS/Atom feed
        'title': name of the feed (optional),
        'section': one of ['audio', 'video'] (ignored if it's in a folder),
        'search_term': terms for which this feed is a search (optional),
        'auto_download': one of 'all', 'new', 'off' (optional),
        'expiry_time': one of 'system', 'never', an integer of hours
        (optional),
        }

    Returns True when a new feed was created, False when a feed for
    this url/search_term already exists.
    """
    url = feed_dict['url']
    search_term = feed_dict.get('search_term')
    if feed.lookup_feed(url, search_term) is not None:
        # already subscribed; nothing to do
        return False
    # a feed inside a folder inherits the folder's section
    if parent_folder:
        section = parent_folder.section
    else:
        section = self._get_section(feed_dict)
    f = feed.Feed(url, section=section, search_term=search_term)
    title = feed_dict.get('title')
    if not (title is None or title == ''):
        f.set_title(title)
    # NOTE(review): the docstring documents this key as 'auto_download'
    # but the code reads 'auto_download_mode' -- confirm which key
    # callers actually send.
    auto_download_mode = feed_dict.get('auto_download_mode')
    if auto_download_mode in ('all', 'new', 'off'):
        f.set_auto_download_mode(auto_download_mode)
    expiry_time = feed_dict.get('expiry_time')
    if expiry_time is not None and expiry_time != '':
        if expiry_time == 'system':
            f.set_expiration(u'system', 0)
        elif expiry_time == 'never':
            f.set_expiration(u'never', 0)
        else:
            f.set_expiration(u'feed', expiry_time)
    if parent_folder is not None:
        f.set_folder(parent_folder)
    return True
def handle_feed(self, feed_dict, parent_folder):
    """
    Feed subscriptions look like::

        {
        'type': 'feed',
        'url': URL of the RSS/Atom feed
        'title': name of the feed (optional),
        'search_term': terms for which this feed is a search (optional),
        'auto_download': one of 'all', 'new', 'off' (optional),
        'expiry_time': one of 'system', 'never', an integer of hours
        (optional),
        }

    Returns True when a new feed was created, False when a feed for
    this url/search_term already exists.
    """
    url = feed_dict['url']
    search_term = feed_dict.get('search_term')
    if feed.lookup_feed(url, search_term) is not None:
        # already subscribed; nothing to do
        return False
    f = feed.Feed(url, search_term=search_term)
    title = feed_dict.get('title')
    if not (title is None or title == ''):
        f.set_title(title)
    # NOTE(review): the docstring documents this key as 'auto_download'
    # but the code reads 'auto_download_mode' -- confirm which key
    # callers actually send.
    auto_download_mode = feed_dict.get('auto_download_mode')
    if auto_download_mode in ('all', 'new', 'off'):
        f.set_auto_download_mode(auto_download_mode)
    expiry_time = feed_dict.get('expiry_time')
    if expiry_time is not None and expiry_time != '':
        if expiry_time == 'system':
            f.set_expiration(u'system', 0)
        elif expiry_time == 'never':
            f.set_expiration(u'never', 0)
        else:
            f.set_expiration(u'feed', expiry_time)
    if parent_folder is not None:
        f.set_folder(parent_folder)
    return True
def check_url_exists(url):
    """Check whether an item with this url is already downloaded.

    If the (escape-normalized) url matches an item in the manual feed,
    a dialog describing the item's download status is shown (and a
    paused/stopped/failed download is restarted).

    :param url: the url to check
    :returns: True if there is already an item for that url downloaded
        or a feed for it, and False otherwise.
    """
    # item urls have the + sign escaped
    escaped_url = url.replace('+', '%20')
    for existing in feed.Feed.get_manual_feed().items:
        if existing.get_url() != escaped_url:
            continue
        state = None
        if existing.downloader is not None:
            state = existing.downloader.get_state()
        title = _("Download already exists")
        text1 = _("That URL is already an external download.")
        if state in ('paused', 'stopped', 'failed'):
            # restart the stalled download before telling the user
            existing.download()
            text2 = _("%(appname)s will begin downloading it now.",
                      {"appname": app.config.get(prefs.SHORT_APP_NAME)})
        elif state == 'downloading':
            text2 = _("It is downloading now.")
        else:
            text2 = _("It has already been downloaded.")
        dialogs.MessageBoxDialog(title, "%s %s" % (text1, text2)).run()
        return True
    # not in the manual feed; it may still be a subscribed feed
    return feed.lookup_feed(url) is not None