def test_feed_model(self, get):
    """Behaviour of the ``Feed`` model"""
    # Serve a canned RSS 2.0 fixture instead of making a real HTTP fetch.
    get.return_value = responses(200, 'rss20.xml')
    feed = self.cat.feeds.create(name='RSS test', url='rss20.xml')
    feed.save()
    self.cat.delete_after = 'never'
    self.cat.save()
    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual('%s' % feed_from_db, 'RSS test')

    # get_absolute_url()
    self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url, use_etags=False)
    unique_feed = UniqueFeed.objects.get(url=feed.url)
    # Title/link come from the fixture's channel metadata.
    self.assertEqual(unique_feed.title, 'Sample Feed')
    self.assertEqual(unique_feed.link, 'http://example.org/')

    feed = Feed.objects.get(pk=feed.id)
    self.assertEqual(feed.entries.count(), 1)
    self.assertEqual(feed.entries.all()[0].title, 'First item title')
def test_feed_model(self, get):
    """Behaviour of the ``Feed`` model"""
    get.return_value = responses(200, "rss20.xml")
    feed = FeedFactory.create(name="RSS test", url="http://rss20.xml",
                              user__ttl=99999)
    feed.save()
    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual("%s" % feed_from_db, "RSS test")

    # get_absolute_url()
    self.assertEqual("/feed/%s/" % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url)
    # Feed metadata is kept in the redis job details, not on the model.
    data = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(data["title"], "Sample Feed")
    self.assertEqual(data["link"], "http://example.org/")

    feed = Feed.objects.get(pk=feed.id)
    # Entries are stored in the elasticsearch index.
    [entry] = es.manager.user(feed.user).fetch()["hits"]
    self.assertEqual(entry.title, "First item title")

    # favicon_img() renders markup only once a favicon is set.
    self.assertEqual(feed.favicon_img(), "")
    feed.favicon = "fav.png"
    self.assertEqual(feed.favicon_img(),
                     '<img src="/media/fav.png" width="16" height="16" />')
def test_errors(self, get):
    """Each HTTP error status is recorded in the job details."""
    codes = [400, 401, 403, 404, 500, 502, 503]

    def get_side_effect():
        # First call (triggered by FeedFactory.create) returns a 304,
        # then each loop iteration below consumes the next error code.
        yield responses(304)
        for code in codes:
            yield responses(code)

    get.side_effect = get_side_effect()
    feed = FeedFactory.create()
    self.assertEqual(len(get.call_args_list), 1)

    for code in codes:
        # NOTE(review): side_effect takes precedence over return_value on
        # a mock, so this assignment looks redundant — confirm before
        # removing.
        get.return_value = responses(code)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        self.assertEqual(feed.job_details.get('error'), None)
        self.assertEqual(feed.job_details['backoff_factor'], 1)
        feed.schedule()
        data = job_details(feed.url, connection=get_redis_connection())

        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['error'], code)
        self.assertEqual(data['backoff_factor'], 2)

        # Restore status for next iteration
        schedule_job(feed.url, backoff_factor=1, error=None, schedule_in=0)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertEqual(feed.job_details.get('error'), None)
def test_errors(self, get):
    """Every HTTP error code is stored and bumps the backoff factor."""
    get.return_value = responses(304)
    created = FeedFactory.create()

    for status in [400, 401, 403, 404, 500, 502, 503]:
        get.return_value = responses(status)
        unique = UniqueFeed.objects.get(url=created.url)
        self.assertFalse(unique.muted)
        self.assertEqual(unique.error, None)
        self.assertEqual(unique.backoff_factor, 1)
        unique.schedule()
        details = job_details(unique.url)

        update_feed(unique.url, backoff_factor=details['backoff_factor'])
        unique = UniqueFeed.objects.get(url=created.url)
        self.assertFalse(unique.muted)
        details = job_details(unique.url)
        self.assertEqual(details['error'], status)
        self.assertEqual(details['backoff_factor'], 2)

        # Reset the persisted state so the next status starts clean.
        unique.backoff_factor = 1
        unique.error = None
        unique.save(update_fields=['backoff_factor', 'error'])
        unique.schedule()
def test_ctype(self, get, head):
    """update_feed() copes with absent or null Content-Type headers."""
    head.side_effect = resolve_url
    expected_headers = {
        'User-Agent': USER_AGENT % '1 subscriber',
        'Accept': feedparser.ACCEPT_HEADER,
    }

    # No Content-Type header at all.
    get.return_value = responses(200, 'sw-all.xml', headers={})
    created = FeedFactory.create()
    update_feed(created.url)
    get.assert_called_with(created.url, headers=expected_headers,
                           timeout=10, auth=None)

    # Content-Type present but null.
    get.return_value = responses(200, 'sw-all.xml',
                                 headers={'Content-Type': None})
    update_feed(created.url)
    get.assert_called_with(created.url, headers=expected_headers,
                           timeout=10, auth=None)
def test_backoff(self, get):
    """Backoff factor grows by one per failed fetch, capped at 10."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    feed = UniqueFeed.objects.get(url=feed.url)
    detail = feed.job_details
    self.assertFalse('error' in detail)
    self.assertEqual(detail['backoff_factor'], 1)
    feed.schedule()
    data = job_details(feed.url, connection=get_redis_connection())

    # Repeated 502s: factor increments each round, never past 10.
    get.return_value = responses(502)
    for i in range(12):
        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['error'], 502)
        self.assertEqual(data['backoff_factor'], min(i + 2, 10))

    # Same progression when the request raises instead of returning 5xx.
    get.side_effect = RequestException
    feed = UniqueFeed.objects.get()
    patch_job(feed.url, error=None, backoff_factor=1)
    data = job_details(feed.url, connection=get_redis_connection())
    for i in range(12):
        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['error'], 'timeout')
        self.assertEqual(data['backoff_factor'], min(i + 2, 10))
def test_mark_as_read(self, get):
    """'Mark all as read' marks every entry and can be undone."""
    get.return_value = responses(304)
    user = UserFactory.create()
    feed = FeedFactory.create(category__user=user, user=user)
    url = reverse('feeds:unread')
    response = self.app.get(url, user=user)
    # No entries yet, so the action is not offered.
    self.assertNotContains(response, '"Mark all as read"')

    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    response = self.app.get(url, user=user)
    self.assertContains(response, '"Mark all as read"')

    form = response.forms['read']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, '30 entries have been marked as read')
    self.assertEqual(user.entries.filter(read=False).count(), 0)
    self.assertEqual(user.entries.filter(read=True).count(), 30)

    # The confirmation page offers an undo form restoring unread state.
    form = response.forms['undo']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "30 entries have been marked as unread")
    self.assertEqual(user.entries.filter(read=False).count(), 30)
    self.assertEqual(user.entries.filter(read=True).count(), 0)
def test_add_to_readitlaterlist(self, post, get):
    """Posting 'read_later' sends the entry to the Read It Later API."""
    data = {'action': 'read_later'}
    self.user.read_later = 'readitlater'
    self.user.read_later_credentials = json.dumps({'username': '******',
                                                   'password': '******'})
    self.user.save()

    get.return_value = responses(200, self.feed.url)
    update_feed(self.feed.url, use_etags=False)
    get.assert_called_with(
        'sw-all.xml',
        headers={'User-Agent': USER_AGENT % '1 subscriber',
                 'Accept': feedparser.ACCEPT_HEADER},
        timeout=10)

    url = reverse('feeds:item', args=[Entry.objects.all()[0].pk])
    response = self.client.get(url)
    self.assertContains(response, 'Add to Read it later')
    response = self.client.post(url, data)
    # Read it Later doesn't provide the article URL so we can't display a
    # useful link
    self.assertContains(response, "added to your reading list")
    post.assert_called_with(
        'https://readitlaterlist.com/v2/add',
        data={u'username': u'foo',
              'url': u'http://simonwillison.net/2010/Mar/12/re2/',
              'apikey': 'test read it later API key',
              u'password': u'bar',
              'title': (u'RE2: a principled approach to regular '
                        u'expression matching')},
    )
def test_backoff(self, get):
    """Backoff factor grows by one per failed update, capped at 10."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    feed = UniqueFeed.objects.get(url=feed.url)
    self.assertEqual(feed.error, None)
    self.assertEqual(feed.backoff_factor, 1)

    # Repeated 502s: factor increments each round, never past 10.
    get.return_value = responses(502)
    for i in range(12):
        update_feed(feed.url, backoff_factor=feed.backoff_factor)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        self.assertEqual(feed.error, '502')
        self.assertEqual(feed.backoff_factor, min(i + 2, 10))

    # Same progression when the request raises instead of returning 5xx.
    get.side_effect = RequestException
    feed = UniqueFeed.objects.get()
    feed.error = None
    feed.backoff_factor = 1
    feed.save()
    for i in range(12):
        update_feed(feed.url, backoff_factor=feed.backoff_factor)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        self.assertEqual(feed.error, 'timeout')
        self.assertEqual(feed.backoff_factor, min(i + 2, 10))
def test_feed_model(self, get):
    """Behaviour of the ``Feed`` model"""
    get.return_value = responses(200, 'rss20.xml')
    feed = FeedFactory.create(name='RSS test', url='rss20.xml')
    feed.save()
    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual('%s' % feed_from_db, 'RSS test')

    # get_absolute_url()
    self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url)
    # Feed metadata is stored in the redis job details.
    data = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(data['title'], 'Sample Feed')
    self.assertEqual(data['link'], 'http://example.org/')

    feed = Feed.objects.get(pk=feed.id)
    self.assertEqual(feed.entries.count(), 1)
    self.assertEqual(feed.entries.all()[0].title, 'First item title')

    # favicon_img() renders markup only once a favicon is set.
    self.assertEqual(feed.favicon_img(), '')
    feed.favicon = 'fav.png'
    self.assertEqual(feed.favicon_img(),
                     '<img src="/media/fav.png" width="16" height="16" />')
def test_feed_model(self, get, head):
    """Behaviour of the ``Feed`` model"""
    head.return_value = responses(200)
    get.return_value = responses(200, 'rss20.xml')
    feed = FeedFactory.create(name='RSS test', url='http://rss20.xml',
                              user__ttl=99999)
    feed.save()
    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual('%s' % feed_from_db, 'RSS test')

    # get_absolute_url()
    self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url)
    # Feed metadata lives in the redis job details.
    data = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(data['title'], 'Sample Feed')
    self.assertEqual(data['link'], 'http://example.org/')

    feed = Feed.objects.get(pk=feed.id)
    # Entries are stored in elasticsearch.
    [entry] = es.manager.user(feed.user).fetch()['hits']
    self.assertEqual(entry.title, 'First item title')

    # favicon_img() renders markup only once a favicon is set.
    self.assertEqual(feed.favicon_img(), '')
    feed.favicon = 'fav.png'
    self.assertEqual(feed.favicon_img(),
                     '<img src="/media/fav.png" width="16" height="16" />')
def test_task_timeout_handling(self, get):
    """A job timeout during the fetch bumps the backoff factor by one."""
    get.return_value = responses(304)
    created = FeedFactory.create()

    # Subsequent fetches raise instead of returning a response.
    get.side_effect = JobTimeoutException
    self.assertEqual(UniqueFeed.objects.get().backoff_factor, 1)
    update_feed(created.url)
    self.assertEqual(UniqueFeed.objects.get().backoff_factor, 2)
def test_mark_as_read(self, get):
    """Mark-all / undo / mark-page flows, for both count backends."""
    get.return_value = responses(304)
    user = UserFactory.create(ttl=99999)
    feed = FeedFactory.create(category__user=user, user=user)
    url = reverse('feeds:unread')
    response = self.app.get(url, user=user)
    # No entries yet, so the action is not offered.
    self.assertNotContains(response, '"Mark all as read"')

    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    response = self.app.get(url, user=user)
    self.assertContains(response, '"Mark all as read"')

    form = response.forms['read-all']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, '30 entries have been marked as read')

    # Counts come from elasticsearch or the ORM depending on the user.
    if user.es:
        counts = self.counts(user, read={'read': True},
                             unread={'read': False})
        unread = counts['unread']
        read = counts['read']
    else:
        unread = user.entries.filter(read=False).count()
        read = user.entries.filter(read=True).count()
    self.assertEqual(unread, 0)
    self.assertEqual(read, 30)

    form = response.forms['undo']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "30 entries have been marked as unread")

    if user.es:
        counts = self.counts(user, read={'read': True},
                             unread={'read': False})
        unread = counts['unread']
        read = counts['read']
    else:
        unread = user.entries.filter(read=False).count()
        read = user.entries.filter(read=True).count()
    self.assertEqual(unread, 30)
    self.assertEqual(read, 0)

    # Mark an explicit subset (5 entries) via the read-page form.
    form = response.forms['read-page']
    if user.es:
        some_entries = es.manager.user(user).only('_id').fetch(per_page=5)
        some_entries = [e.pk for e in some_entries['hits']]
    else:
        some_entries = user.entries.all()[:5].values_list('pk', flat=True)
    form['entries'] = json.dumps(list(some_entries))
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "5 entries have been marked as read")
def test_last_entry(self, get):
    """The oldest entry's detail page offers no 'Next' navigation link."""
    get.return_value = responses(200, self.feed.url)
    update_feed(self.feed.url, use_etags=False)

    oldest = self.user.entries.order_by('date')[0]
    detail_url = reverse('feeds:item', args=[oldest.id])
    page = self.client.get(detail_url)
    self.assertNotContains(page, 'Next →')
def test_entry_model(self, get):
    """Entries render their title via __unicode__."""
    get.return_value = responses(200, 'sw-all.xml')
    created = FeedFactory.create()
    update_feed(created.url)

    expected = 'RE2: a principled approach to regular expression matching'
    fetched = Entry.objects.get(title=expected)
    # __unicode__ renders the title.
    self.assertEqual('%s' % fetched, expected)
def test_add_to_instapaper(self, post, get, head):  # noqa
    """Submitting the read-later form bookmarks the entry on Instapaper."""
    head.side_effect = resolve_url
    post.return_value = responses(200, data=json.dumps([{
        'type': 'bookmark',
        'bookmark_id': 12345,
        'title': 'Some bookmark',
        'url': 'http://example.com/some-bookmark',
    }]))
    user = UserFactory.create(
        read_later='instapaper',
        read_later_credentials=json.dumps({
            'oauth_token': 'token',
            'oauth_token_secret': 'token secret',
        }),
    )
    get.return_value = responses(304)
    feed = FeedFactory.create(category__user=user, user=user)

    # Reset so assert_called_once_with below sees only the next fetch.
    get.reset_mock()
    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    get.assert_called_once_with(feed.url, headers={
        'User-Agent': USER_AGENT % '1 subscriber',
        'Accept': feedparser.ACCEPT_HEADER
    }, timeout=10, auth=None)

    entry_pk = es.manager.user(user).fetch()['hits'][0].pk
    url = reverse('feeds:item', args=[entry_pk])
    response = self.app.get(url, user=user)
    self.assertContains(response, "Add to Instapaper")

    form = response.forms['read-later']
    response = form.submit()
    self.assertEqual(len(post.call_args_list), 1)
    args, kwargs = post.call_args
    self.assertEqual(args,
                     ('https://www.instapaper.com/api/1/bookmarks/add', ))
    self.assertEqual(kwargs['data'],
                     {'url': 'http://simonwillison.net/2010/Mar/12/re2/'})

    # The bookmark URL is stored, so the button disappears afterwards.
    entry = es.entry(user, entry_pk)
    self.assertEqual(entry.read_later_url,
                     'https://www.instapaper.com/read/12345')
    response = self.app.get(url, user=user)
    self.assertNotContains(response, "Add to Instapaper")
def test_task_timeout_handling(self, get):
    """A fetch that hits the job timeout increments the backoff factor."""
    get.return_value = responses(304)
    created = FeedFactory.create()

    # Subsequent fetches raise instead of returning a response.
    get.side_effect = JobTimeoutException
    self.assertEqual(
        UniqueFeed.objects.get().job_details['backoff_factor'], 1)
    update_feed(created.url)
    details = job_details(created.url, connection=get_redis_connection())
    self.assertEqual(details['backoff_factor'], 2)
def test_too_many_requests(self, get):
    """An HTTP 429 reschedules the fetch roughly one minute later."""
    get.return_value = responses(304)
    created = FeedFactory.create()

    get.return_value = responses(429)
    update_feed(created.url, backoff_factor=1)
    details = job_details(created.url, connection=get_redis_connection())
    # retry in 1 min
    delay = (epoch_to_utc(details['schedule_at']) - timezone.now()).seconds
    self.assertTrue(58 < delay < 60)
def test_no_link(self, get):
    """Items lacking a <link> element never become entries."""
    get.return_value = responses(200, 'rss20.xml')
    created = FeedFactory.create()
    update_feed(created.url)
    self.assertEqual(Entry.objects.count(), 1)

    # Re-point the feed at a fixture whose items carry no <link>.
    get.return_value = responses(200, 'no-link.xml')
    created.url = 'no-link.xml'
    created.save(update_fields=['url'])
    update_feed(created.url)
    self.assertEqual(Entry.objects.count(), 1)
def test_etag_modified(self, get):
    """Stored ETag / modified values become conditional GET headers."""
    get.return_value = responses(304)
    created = FeedFactory.create()
    update_feed(created.url, etag='etag', modified='1234', subscribers=2)

    conditional_headers = {
        'User-Agent': USER_AGENT % '2 subscribers',
        'Accept': feedparser.ACCEPT_HEADER,
        'If-None-Match': 'etag',
        'If-Modified-Since': '1234',
    }
    get.assert_called_with(created.url, headers=conditional_headers,
                           timeout=10)
def test_backoff(self, get):
    """Repeated 502s raise the backoff factor one step per update, max 10."""
    get.return_value = responses(502)
    unique = UniqueFeed.objects.get(url=self.feed.url)
    self.assertEqual(unique.error, None)
    self.assertEqual(unique.backoff_factor, 1)

    for attempt in range(12):
        update_feed(self.feed.url, use_etags=False)
        unique = UniqueFeed.objects.get(url=self.feed.url)
        self.assertFalse(unique.muted)
        self.assertEqual(unique.error, '502')
        # Grows by one per failure but is capped at 10.
        self.assertEqual(unique.backoff_factor, min(attempt + 2, 10))
def test_multiple_objects(self, get):
    """Duplicates are removed at the next update"""
    get.return_value = responses(200, self.feed.url)
    update_feed(self.feed.url, use_etags=False)

    # Clone the first entry twice by clearing its pk and re-saving.
    duplicate = self.feed.entries.all()[0]
    duplicate.id = None
    duplicate.save()
    duplicate.id = None
    duplicate.save()
    self.assertEqual(self.feed.entries.count(), 32)

    # The next update prunes the copies back down to the fixture's 30.
    update_feed(self.feed.url, use_etags=False)
    self.assertEqual(self.feed.entries.count(), 30)
def test_etag_modified(self, get):
    """etag / last_modified arguments turn into conditional GET headers."""
    get.return_value = responses(304)
    created = FeedFactory.create()
    update_feed(created.url, etag='etag', last_modified='1234',
                subscribers=2)

    conditional_headers = {
        'User-Agent': USER_AGENT % '2 subscribers',
        'Accept': feedparser.ACCEPT_HEADER,
        'If-None-Match': 'etag',
        'If-Modified-Since': '1234',
    }
    get.assert_called_with(created.url, headers=conditional_headers,
                           timeout=10)
def test_no_link(self, get):
    """Items without a <link> element never produce entries."""
    get.return_value = responses(200, 'rss20.xml')
    self.feed.url = 'rss20.xml'
    self.feed.save()
    update_feed(self.feed.url, use_etags=False)
    self.assertEqual(Entry.objects.count(), 1)

    # Switch to a fixture whose item carries no <link>.
    get.return_value = responses(200, 'no-link.xml')
    self.feed.url = 'no-link.xml'
    self.feed.save()
    update_feed(self.feed.url, use_etags=False)
    self.assertEqual(Entry.objects.count(), 1)
def test_restore_backoff(self, get):
    """A successful fetch clears the stored error and resets the backoff."""
    get.return_value = responses(304)
    FeedFactory.create()

    broken = UniqueFeed.objects.get()
    broken.error = 'timeout'
    broken.backoff_factor = 5
    broken.save()

    update_feed(broken.url, error=broken.error,
                backoff_factor=broken.backoff_factor)
    details = job_details(broken.url, connection=get_redis_connection())
    self.assertEqual(details['backoff_factor'], 1)
    self.assertTrue('error' not in details)
def test_last_entry(self, get):
    """The oldest indexed entry's page shows no 'Next' link."""
    owner = UserFactory.create()
    get.return_value = responses(200, 'sw-all.xml')
    created = FeedFactory.create(category__user=owner, user=owner)

    # Updating must stay within a single SQL query.
    with self.assertNumQueries(1):
        update_feed(created.url)

    oldest = es.manager.user(owner).order_by(
        'timestamp').fetch()['hits'][0]
    detail_url = reverse('feeds:item', args=[oldest.pk])
    page = self.app.get(detail_url, user=owner)
    self.assertNotContains(page, 'Next →')
def test_too_many_requests(self, get):
    """HTTP 429 without Retry-After schedules a retry in about a minute."""
    get.return_value = responses(304)
    created = FeedFactory.create()

    get.return_value = responses(429)
    update_feed(created.url, backoff_factor=1)
    details = job_details(created.url, connection=get_redis_connection())
    # retry in 1 min
    wait = (epoch_to_utc(details['schedule_at']) - timezone.now()).seconds
    self.assertTrue(58 < wait < 60)
def test_last_entry(self, get):
    """Oldest entry page has no 'Next' link; unread_count stays in sync."""
    owner = UserFactory.create()
    get.return_value = responses(200, "sw-all.xml")
    created = FeedFactory.create(category__user=owner, user=owner)
    with self.assertNumQueries(2):
        update_feed(created.url)

    # The denormalized counter matches the actual unread count.
    self.assertEqual(Feed.objects.get().unread_count,
                     owner.entries.filter(read=False).count())

    oldest = owner.entries.order_by("date")[0]
    detail_url = reverse("feeds:item", args=[oldest.pk])
    page = self.app.get(detail_url, user=owner)
    self.assertNotContains(page, "Next →")
def test_restore_backoff(self, get):
    """A clean fetch resets the persisted backoff factor and error."""
    get.return_value = responses(304)
    FeedFactory.create()

    broken = UniqueFeed.objects.get()
    broken.error = 'timeout'
    broken.backoff_factor = 5
    broken.save()

    update_feed(broken.url, error=broken.error,
                backoff_factor=broken.backoff_factor)
    refreshed = UniqueFeed.objects.get()
    self.assertEqual(refreshed.backoff_factor, 1)
    self.assertEqual(refreshed.error, '')
def test_add_to_instapaper(self, post, get):  # noqa
    """Read-later form submits the entry URL to the Instapaper API."""
    post.return_value = responses(200, data=json.dumps([{
        'type': 'bookmark',
        'bookmark_id': 12345,
        'title': 'Some bookmark',
        'url': 'http://example.com/some-bookmark',
    }]))
    user = UserFactory.create(
        read_later='instapaper',
        read_later_credentials=json.dumps({
            'oauth_token': 'token',
            'oauth_token_secret': 'token secret',
        }),
    )
    get.return_value = responses(304)
    feed = FeedFactory.create(category__user=user, user=user)

    # Reset so assert_called_once_with sees only the next fetch.
    get.reset_mock()
    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    get.assert_called_once_with(
        feed.url,
        headers={'User-Agent': USER_AGENT % '1 subscriber',
                 'Accept': feedparser.ACCEPT_HEADER},
        timeout=10)

    # Entry storage backend depends on the user (elasticsearch or ORM).
    if user.es:
        entry_pk = es.manager.user(user).fetch()['hits'][0].pk
    else:
        entry_pk = Entry.objects.all()[0].pk
    url = reverse('feeds:item', args=[entry_pk])
    response = self.app.get(url, user=user)
    self.assertContains(response, "Add to Instapaper")

    form = response.forms['read-later']
    response = form.submit()
    self.assertEqual(len(post.call_args_list), 1)
    args, kwargs = post.call_args
    self.assertEqual(args,
                     ('https://www.instapaper.com/api/1/bookmarks/add',))
    self.assertEqual(kwargs['data'],
                     {'url': 'http://simonwillison.net/2010/Mar/12/re2/'})

    # The bookmark URL is stored, so the button disappears afterwards.
    if user.es:
        entry = es.entry(user, entry_pk)
    else:
        entry = Entry.objects.get(pk=entry_pk)
    self.assertEqual(entry.read_later_url,
                     'https://www.instapaper.com/read/12345')
    response = self.app.get(url, user=user)
    self.assertNotContains(response, "Add to Instapaper")
def test_last_entry(self, get):
    """Oldest entry shows no 'Next' link and unread_count is consistent."""
    owner = UserFactory.create()
    get.return_value = responses(200, 'sw-all.xml')
    created = FeedFactory.create(category__user=owner, user=owner)
    with self.assertNumQueries(2):
        update_feed(created.url)

    # The denormalized counter matches the actual unread count.
    self.assertEqual(Feed.objects.get().unread_count,
                     owner.entries.filter(read=False).count())

    oldest = owner.entries.order_by('date')[0]
    detail_url = reverse('feeds:item', args=[oldest.pk])
    page = self.app.get(detail_url, user=owner)
    self.assertNotContains(page, 'Next →')
def test_entry_model(self, get):
    """Entry rendering and link resolution (permalink wins over link)."""
    get.return_value = responses(200, self.feed.url)
    update_feed(self.feed.url, use_etags=False)

    expected = 'RE2: a principled approach to regular expression matching'
    item = Entry.objects.get(title=expected)

    # __unicode__ renders the title.
    self.assertEqual('%s' % item, expected)

    # get_link() falls back to the feed-provided link...
    self.assertEqual(item.get_link(), item.link)

    # ...unless a permalink has been set.
    item.permalink = 'http://example.com/some-url'
    self.assertEqual(item.get_link(), item.permalink)
def test_mark_as_read(self, get, head):
    """Mark-all / undo / mark-page flows against the ES-backed counts."""
    head.side_effect = resolve_url
    get.return_value = responses(304)
    user = UserFactory.create(ttl=99999)
    feed = FeedFactory.create(category__user=user, user=user)
    url = reverse('feeds:entries', args=['unread'])
    response = self.app.get(url, user=user)
    # No entries yet, so the action is not offered.
    self.assertNotContains(response, '"Mark all as read"')

    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    response = self.app.get(url, user=user)
    self.assertContains(response, '"Mark all as read"')

    form = response.forms['read-all']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, '30 entries have been marked as read')

    counts = self.counts(user, read={'read': True}, unread={'read': False})
    unread = counts['unread']
    read = counts['read']
    self.assertEqual(unread, 0)
    self.assertEqual(read, 30)

    form = response.forms['undo']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "30 entries have been marked as unread")

    counts = self.counts(user, read={'read': True}, unread={'read': False})
    unread = counts['unread']
    read = counts['read']
    self.assertEqual(unread, 30)
    self.assertEqual(read, 0)

    # Mark an explicit subset (5 entries) via the read-page form.
    form = response.forms['read-page']
    some_entries = es.manager.user(user).only('_id').fetch(per_page=5)
    some_entries = [e.pk for e in some_entries['hits']]
    form['entries'] = json.dumps(list(some_entries))
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "5 entries have been marked as read")
def test_mark_as_read(self, get):
    """'Mark all as read' appears once entries exist and marks all 30."""
    unread_url = reverse('feeds:unread')
    page = self.client.get(unread_url)
    self.assertNotContains(page, 'Mark all as read')

    get.return_value = responses(200, self.feed.url)
    update_feed(self.feed.url, use_etags=False)
    page = self.client.get(unread_url)
    self.assertContains(page, 'Mark all as read')

    page = self.client.post(unread_url, {'action': 'read'}, follow=True)
    self.assertEqual(len(page.redirect_chain), 1)
    self.assertContains(page, '30 entries have been marked as read')
def test_duplicate(self, get):
    """Adding an entry the user already has marks it as read"""
    get.return_value = responses(200, 'rss20.xml')

    # Creating a feed updates it immediately and assigns its entry.
    self.cat.feeds.create(url='http://exampleexample.com')

    # Updating a second feed carrying the same item adds a duplicate.
    update_feed(self.feed.url, use_etags=False)
    self.assertEqual(Entry.objects.count(), 2)

    # The fresh copy stays unread...
    Entry.objects.get(link="http://example.org/item/1", read=False)
    # ...while the pre-existing one was marked read.
    Entry.objects.get(link="http://example.org/item/1", read=True)
def test_no_link(self, get):
    """Items without links are excluded from the indexed entry counts."""
    get.return_value = responses(200, 'rss20.xml')
    owner = UserFactory.create(ttl=99999)
    created = FeedFactory.create(user=owner, category__user=owner)
    update_feed(created.url)
    self.assertEqual(self.counts(owner, all={})['all'], 1)

    # A fixture whose items have no <link> adds nothing new.
    get.return_value = responses(200, 'no-link.xml')
    created.url = 'http://no-link.xml'
    created.save(update_fields=['url'])
    update_feed(created.url)
    self.assertEqual(self.counts(owner, all={})['all'], 1)
def test_no_date_and_304(self, get):
    """If the feed does not have a date, we'll have to find one.
    Also, since we update it twice, the 2nd time it's a 304 response."""
    get.return_value = responses(200, 'no-date.xml')
    created = FeedFactory.create()

    # First pass indexes the fixture's entries.
    update_feed(created.url)
    first_pass = Feed.objects.get(pk=created.id)
    initial_count = first_pass.entries.count()

    # Second pass must not index the same content again.
    update_feed(first_pass.url)
    second_pass = Feed.objects.get(pk=first_pass.id)
    self.assertEqual(initial_count, second_pass.entries.count())
def test_too_many_requests_retry(self, get):
    """A 429 with Retry-After delays the retry and throttles the domain."""
    get.return_value = responses(304)
    feed = FeedFactory.create()

    get.return_value = responses(429, headers={'Retry-After': '3600'})
    update_feed(feed.url, backoff_factor=1)
    data = job_details(feed.url, connection=get_redis_connection())
    # Retry in 1 hour
    self.assertTrue(3590 < (epoch_to_utc(data['schedule_at']) -
                            timezone.now()).seconds < 3600)

    # Other requests to same domain
    get.reset_mock()
    get.assert_not_called()
    # While the domain is throttled no HTTP request is made at all.
    update_feed(feed.url, backoff_factor=1)
    get.assert_not_called()
def test_entry_model(self, get):
    """Entry string rendering, title sanitizing and tweet text."""
    get.return_value = responses(200, 'sw-all.xml')
    created = FeedFactory.create()
    update_feed(created.url)

    expected = 'RE2: a principled approach to regular expression matching'
    [item] = es.manager.user(created.user).fetch()['hits']
    # __unicode__ renders the title.
    self.assertEqual('%s' % item, expected)

    # Empty titles fall back to a placeholder.
    item.title = ''
    self.assertEqual(item.sanitized_title(), '(No title)')

    # Tweet text is title plus link.
    item.title = 'Foo'
    item.link = 'http://example.com/foo'
    self.assertEqual(item.tweet(), u'Foo — http://example.com/foo')
def test_entry_model(self, get):
    """String rendering, sanitized titles and tweet text with attribution."""
    get.return_value = responses(200, 'sw-all.xml')
    created = FeedFactory.create()
    update_feed(created.url)

    expected = 'RE2: a principled approach to regular expression matching'
    item = Entry.objects.get(title=expected)
    # __unicode__ renders the title.
    self.assertEqual('%s' % item, expected)

    # Empty titles fall back to a placeholder.
    item.title = ''
    self.assertEqual(item.sanitized_title(), '(No title)')

    # Tweets append the link and the @FeedHQ attribution.
    item.title = 'Foo'
    item.link = 'http://example.com/foo'
    self.assertEqual(item.tweet(),
                     u'Foo — http://example.com/foo via @FeedHQ')
def test_add_to_instapaper(self, Client, get):  # noqa
    """Read-later submit issues an OAuth POST to the Instapaper API."""
    client = Client.return_value
    r = Response({'status': 200})
    # The oauth client returns a (response, body) pair.
    client.request.return_value = [
        r,
        json.dumps([{'type': 'bookmark', 'bookmark_id': 12345,
                     'title': 'Some bookmark',
                     'url': 'http://example.com/some-bookmark'}])
    ]
    user = UserFactory.create(
        read_later='instapaper',
        read_later_credentials=json.dumps({
            'oauth_token': 'token',
            'oauth_token_secret': 'token secret',
        }),
    )
    get.return_value = responses(304)
    feed = FeedFactory.create(category__user=user, user=user)

    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    get.assert_called_with(
        feed.url,
        headers={'User-Agent': USER_AGENT % '1 subscriber',
                 'Accept': feedparser.ACCEPT_HEADER},
        timeout=10)

    entry_pk = Entry.objects.all()[0].pk
    url = reverse('feeds:item', args=[entry_pk])
    response = self.app.get(url, user=user)
    self.assertContains(response, "Add to Instapaper")

    form = response.forms['read-later']
    response = form.submit()
    # The entry URL is form-encoded into the OAuth request body.
    body = 'url=http%3A%2F%2Fsimonwillison.net%2F2010%2FMar%2F12%2Fre2%2F'
    client.request.assert_called_with(
        'https://www.instapaper.com/api/1/bookmarks/add',
        body=body,
        method='POST',
    )
    # The bookmark URL is stored, so the button disappears afterwards.
    self.assertEqual(Entry.objects.get(pk=entry_pk).read_later_url,
                     'https://www.instapaper.com/read/12345')
    response = self.app.get(url, user=user)
    self.assertNotContains(response, "Add to Instapaper")
def test_mark_as_read(self, get):
    """Mark-all / undo / mark-page flows with ORM-backed entries."""
    get.return_value = responses(304)
    user = UserFactory.create()
    feed = FeedFactory.create(category__user=user, user=user)
    url = reverse('feeds:unread')
    response = self.app.get(url, user=user)
    # No entries yet, so the action is not offered.
    self.assertNotContains(response, '"Mark all as read"')

    get.return_value = responses(200, 'sw-all.xml')
    update_feed(feed.url)
    response = self.app.get(url, user=user)
    self.assertContains(response, '"Mark all as read"')

    form = response.forms['read-all']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, '30 entries have been marked as read')
    self.assertEqual(user.entries.filter(read=False).count(), 0)
    self.assertEqual(user.entries.filter(read=True).count(), 30)

    form = response.forms['undo']
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "30 entries have been marked as unread")
    self.assertEqual(user.entries.filter(read=False).count(), 30)
    self.assertEqual(user.entries.filter(read=True).count(), 0)

    # Mark an explicit subset (5 entries) via the read-page form.
    form = response.forms['read-page']
    some_entries = user.entries.all()[:5].values_list('pk', flat=True)
    form['entries'] = json.dumps(list(some_entries))
    response = form.submit()
    self.assertRedirects(response, url)
    response = response.follow()
    self.assertContains(response, "5 entries have been marked as read")