def testDifficultyOfFeedItems(self):
    """Articles downloaded from a feed carry a computed fk_difficulty."""
    feed = RSSFeedRule().feed1
    download_from_feed(feed, zeeguu.db.session, 3)
    downloaded = feed.get_articles(limit=2)
    assert len(downloaded) == 2
    # Difficulty must be set (truthy) on a freshly downloaded article.
    assert downloaded[0].fk_difficulty
def setUp(self):
    """Create two feeds and pre-download three articles into each."""
    super().setUp()

    def _downloaded(feed):
        # Pull a few items so the tests have articles to work with.
        download_from_feed(feed, self.db.session, 3)
        return feed

    # Instantiate and download sequentially to keep DB side-effect order.
    self.spiegel = _downloaded(RSSFeedRule().feed1)
    self.telegraaf = _downloaded(RSSFeedRule().feed2)
def test_get_feed_items_with_metrics(self):
    """API feed items expose title, summary, published date, and metrics."""
    download_from_feed(self.feed1, zeeguu.db.session, 3)
    endpoint = f"get_feed_items_with_metrics/{self.feed1.id}"
    items = self.json_from_api_get(endpoint)
    assert len(items) > 0
    first = items[0]
    # Every one of these fields must be present and non-empty.
    for key in ("title", "summary", "published", "metrics"):
        assert first[key]
def test_recommended(self):
    """The recommended endpoint returns the requested number of articles
    drawn from the feeds the user is registered for.
    """
    self.feed1 = RSSFeedRule().feed1
    self.feed2 = RSSFeedRule().feed2
    download_from_feed(self.feed1, zeeguu.db.session, 2)
    download_from_feed(self.feed2, zeeguu.db.session, 3)
    # Register the user for both feeds so their articles are eligible.
    RSSFeedRegistration.find_or_create(zeeguu.db.session, self.user, self.feed1)
    RSSFeedRegistration.find_or_create(zeeguu.db.session, self.user, self.feed2)
    # Plain string literal: the original used an f-string with no placeholders.
    feed_items = self.json_from_api_get("/user_articles/recommended/5")
    # Asking for 5 recommendations must yield exactly 5 items.
    assert len(feed_items) == 5
def testDownloadWithTopic(self):
    """Downloaded articles get tagged with topics whose localized keyword
    matches the feed content.
    """
    feed = RSSFeedRule().feed1
    session = zeeguu.db.session

    topic = Topic("Spiegel")
    session.add(topic)
    session.commit()

    # Localized keyword "spiegel" should match articles from this feed.
    localized = LocalizedTopic(topic, self.lan, "spiegelDE", "spiegel")
    session.add(localized)
    session.commit()

    download_from_feed(feed, session, 3)
    first_article = feed.get_articles(limit=2)[0]
    assert topic in first_article.topics
def testDownloadWithWords(self):
    """Title words of downloaded articles are indexed as ArticleWords,
    except words the downloader filters out (too short/long, numeric,
    or blacklisted).
    """
    feed = RSSFeedRule().feed1
    download_from_feed(feed, zeeguu.db.session, 3)
    article = feed.get_articles(limit=2)[0]

    # Try two words, as one might be filtered out
    word = strip_article_title_word(article.title.split()[0])
    article_word = ArticleWord.find_by_word(word)

    # Mirror the downloader's filtering rules for a single word.
    is_filtered = (word in ('www', '')
                   or word.isdigit()
                   or not 3 <= len(word) <= 25)
    if is_filtered:
        assert article_word is None
    else:
        assert article in article_word.articles
#!/usr/bin/env python
"""
Script that goes through all the feeds that are available in the
DB and retrieves the newest articles in order to populate the DB
with them.

The DB is populated by saving Article objects in the articles table.

Before this script checking whether there were new items in a given
feed was done while serving the request for items to read. That was
too slow.

To be called from a cron job.
"""
import zeeguu
from zeeguu.content_retriever.article_downloader import download_from_feed
from zeeguu.model import RSSFeed

session = zeeguu.db.session

for counter, feed in enumerate(RSSFeed.query.all(), start=1):
    # Print the title as a str: the old code encoded it to bytes first,
    # which makes Python 3's print() emit the b'...' repr instead of
    # readable text.
    print(f"{counter}: DOWNLOADING {feed.title}")
    download_from_feed(feed, session)
    print(f"{counter}: FINISHED DOWNLOADING {feed.title}")