def _add_default(self, default):
    """Add one default subscription from the theme.

    ``default`` is either:
      * a ``(folder_title, [(url, autodownload), ...])`` tuple describing a
        channel folder full of feeds, or
      * a ``(url, autodownload)`` tuple describing a single feed.

    Feeds that already exist in the database are left untouched.
    """
    # folder
    if isinstance(default, tuple) and isinstance(default[1], list):
        defaultFolder = default
        try:
            c_folder = folder.ChannelFolder.get_by_title(defaultFolder[0])
        except ObjectNotFoundError:
            c_folder = folder.ChannelFolder(defaultFolder[0])
            c_folder.signal_change()
        for url, autodownload in defaultFolder[1]:
            logging.info("adding feed %s" % (url, ))
            # BUGFIX: look up the feed by its own url.  The original code
            # passed default[0] -- the folder *title* -- so the duplicate
            # check never matched and feeds could be re-added every run.
            d_feed = feed.lookup_feed(url)
            if d_feed is None:
                d_feed = feed.Feed(url, initiallyAutoDownloadable=autodownload)
                d_feed.set_folder(c_folder)
                d_feed.signal_change()
    # feed
    else:
        d_feed = feed.lookup_feed(default[0])
        if d_feed is None:
            logging.info("adding feed %s" % (default, ))
            d_feed = feed.Feed(default[0],
                               initiallyAutoDownloadable=default[1])
            d_feed.signal_change()
def setUp(self):
    """Build the fixture: two feeds, two items on the first feed and
    one item on the second.  Creation order is kept stable because it
    determines database object ids.
    """
    MiroTestCase.setUp(self)
    make_values = item.FeedParserValues
    self.feed = feed.Feed(u"http://feed.org")
    self.i1 = item.Item(make_values({'title': u'item1'}),
                        feed_id=self.feed.id)
    self.i2 = item.Item(make_values({'title': u'item2'}),
                        feed_id=self.feed.id)
    self.feed2 = feed.Feed(u"http://feed.com")
    self.i3 = item.Item(make_values({'title': u'item3'}),
                        feed_id=self.feed2.id)
def test_manual_feed_checking(self):
    """The sanity checker accepts a single dtv:manualFeed but flags
    duplicates, pruning the extras from the list in place.
    """
    manual1 = feed.Feed(u"dtv:manualFeed")
    manual2 = feed.Feed(u"dtv:manualFeed")
    manual3 = feed.Feed(u"dtv:manualFeed")
    self.check_object_list_passes_test([manual1])
    self.check_object_list_fails_test([manual1, manual2])
    # check_sanity() should repair the list and report the error
    self.error_signal_okay = True
    objects = [manual1, manual2, manual3]
    databasesanity.check_sanity(objects)
    self.assertEquals(len(objects), 1)
    self.assertEquals(self.saw_error, True)
def test_phantom_feed_checking(self):
    """Items whose feed_id refers to a feed missing from the object
    list must fail the sanity test; including the feed fixes it.
    """
    parent = feed.Feed(u"http://feed.uk")
    orphan = item.Item(item.FeedParserValues({}), feed_id=parent.id)
    orphan_file = item.FileItem(self.make_temp_path('.txt'),
                                feed_id=parent.id)
    self.check_object_list_fails_test([orphan])
    # fixable: dropping both orphans yields an empty, sane list
    self.check_fix_if_possible([orphan, orphan_file], [])
    self.check_object_list_passes_test([orphan, parent])
    self.check_object_list_passes_test([])
def test_simple(self):
    """Round-trip one RSS feed through export and autodiscovery."""
    url = u"http://example.com/feed/"
    exported = feed.Feed(url)
    exported.finish_generate_feed(
        feed.RSSFeedImpl(url, exported, u"Foo feed"))
    data = self._get_export([exported], [])
    subscriptions = autodiscover.parse_content(data)
    self.assertEquals(subscriptions[0]["url"], url)
    self.assertEquals(subscriptions[0]["title"], u"Foo feed")
    self.assertEquals(subscriptions[0]["type"], "feed")
def setUp(self):
    """Create one instance of each object type stored in the database.

    Creation order is deliberate: the item must exist before its
    downloader, and ids depend on creation sequence.
    """
    StoreDatabaseTest.setUp(self)
    self.feed = feed.Feed(u"dtv:savedsearch/all?q=dogs")
    self.item = item.Item(item.FeedParserValues({'title': u'item1'}),
                          feed_id=self.feed.id)
    movie_url = u'http://example.com/1/item1/movie.mpeg'
    self.downloader = downloader.RemoteDownloader(movie_url, self.item)
    self.item.set_downloader(self.downloader)
    self.tab_order = tabs.TabOrder(u'channel')
    self.guide = guide.ChannelGuide(u'http://example.com/')
    self.theme_hist = theme.ThemeHistory()
    self.view_state = widgetstate.ViewState((u'testtype', u'testid', 0))
def setup_global_feed(url, *args, **kwargs):
    """Ensure exactly one Feed object exists for a built-in feed url.

    Creates the feed if it is missing.  If the database somehow holds
    more than one, the extras are removed and a StartupError is raised
    so the inconsistency is surfaced.
    """
    matches = feed.Feed.make_view('origURL=?', (url, ))
    match_count = matches.count()
    if match_count == 0:
        logging.info("Spawning global feed %s", url)
        feed.Feed(url, *args, **kwargs)
    elif match_count > 1:
        duplicates = list(matches)
        # keep the first object, drop the rest
        for extra in duplicates[1:]:
            extra.remove()
        raise StartupError("Database inconsistent",
                           "Too many db objects for %s" % url)
def add_feeds(urls, new_folder_name=None):
    """Adds a list of feeds that aren't already added to Miro to Miro.

    :param urls: list of urls to be added
    :param new_folder_name: if not None, the feeds will be added to
        this folder when created.
    """
    if not urls:
        return
    # NOTE: the folder is created even if every url turns out to
    # already exist, matching the original behavior.
    target_folder = None
    if new_folder_name is not None:
        target_folder = folder.ChannelFolder(new_folder_name)
    for url in filter_existing_feed_urls(urls):
        new_feed = feed.Feed(url)
        if target_folder is not None:
            new_feed.set_folder(target_folder)
def handle_feed(self, feed_dict, parent_folder):
    """Create a feed from a subscription dict, unless it already exists.

    Feed subscriptions look like::

        { 'type': 'feed',
          'url': URL of the RSS/Atom feed
          'title': name of the feed (optional),
          'search_term': terms for which this feed is a search (optional),
          'auto_download_mode': one of 'all', 'new', 'off' (optional),
          'expiry_time': one of 'system', 'never', an integer of hours
              (optional),
        }

    NOTE(review): the docstring previously named the key
    'auto_download', but the code has always read 'auto_download_mode';
    the docstring is corrected here -- confirm against the subscription
    parser that produces these dicts.

    :param feed_dict: subscription description as above
    :param parent_folder: ChannelFolder to file the new feed under, or None
    :returns: True if a new feed was created, False if it already existed
    """
    url = feed_dict['url']
    search_term = feed_dict.get('search_term')
    f = feed.lookup_feed(url, search_term)
    if f is not None:
        # already subscribed -- nothing to do
        return False
    f = feed.Feed(url, search_term=search_term)
    title = feed_dict.get('title')
    if title is not None and title != '':
        f.set_title(title)
    # membership test alone suffices; None is never in the tuple, so the
    # old redundant "is not None and" check is dropped
    auto_download_mode = feed_dict.get('auto_download_mode')
    if auto_download_mode in ('all', 'new', 'off'):
        f.set_auto_download_mode(auto_download_mode)
    expiry_time = feed_dict.get('expiry_time')
    if expiry_time is not None and expiry_time != '':
        if expiry_time == 'system':
            f.set_expiration(u'system', 0)
        elif expiry_time == 'never':
            f.set_expiration(u'never', 0)
        else:
            # any other value is an hour count applied per-feed
            f.set_expiration(u'feed', expiry_time)
    if parent_folder is not None:
        f.set_folder(parent_folder)
    return True
def setUp(self):
    """Create the fixture objects for the tab tests: two feeds, one
    channel folder, and the channel tab order.

    Creation order is significant -- database object ids follow it.
    """
    StoreDatabaseTest.setUp(self)
    self.f1 = feed.Feed(u"http://example.com/1")
    self.f2 = feed.Feed(u"http://example.com/2")
    self.folder = folder.ChannelFolder(u'test channel folder')
    self.tab_order = tabs.TabOrder(u'channel')
def setUp(self):
    """Create the fixture objects: one feed with a single item, plus a
    channel guide.  Runs on top of the event-loop test harness.
    """
    EventLoopTest.setUp(self)
    self.feed = feed.Feed(u'http://example.com/')
    self.item = item.Item(item.FeedParserValues({}), feed_id=self.feed.id)
    self.guide = guide.ChannelGuide(u'http://example.com/guide/')
def make_feed(self, url):
    """Create a Feed for *url*, letting pending idle callbacks run
    before handing it back.
    """
    created = feed.Feed(url)
    self.process_idles()
    return created