Example #1
    def test_manage_feed(self, get):
        get.return_value = responses(304)
        user = UserFactory.create()
        url = reverse('feeds:manage')
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Manage feeds')

        FeedFactory.create(user=user, category=None)
        FeedFactory.create(user=user, category=None)
        FeedFactory.create(user=user, category=None)
        unique = UniqueFeed.objects.all()[0]
        schedule_job(unique.url, schedule_in=0, backoff_factor=10,
                     error=UniqueFeed.NOT_A_FEED,
                     connection=get_redis_connection())

        response = self.app.get(url, user=user)
        self.assertContains(response, 'Not a valid RSS/Atom feed')

        schedule_job(unique.url, schedule_in=0, error='blah',
                     connection=get_redis_connection())
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Error')

        unique.muted = True
        unique.save()
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Error')
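
The test methods on this page share scaffolding that the snippets never show: each one receives a mocked get (and sometimes head) argument and builds fake HTTP responses with a responses() helper. Below is a minimal sketch of what that scaffolding could look like, assuming requests.get is the patch target and that responses() simply fills in a requests.Response; the helper's signature and fixture handling are guesses based on how the tests call it, not taken from the project itself.

import unittest
from unittest import mock

from requests import Response


def responses(code, path=None, headers=None, url='http://example.com/feed'):
    """Hypothetical stand-in for the responses() helper used by the tests."""
    response = Response()
    response.status_code = code
    response.url = url
    if headers:
        response.headers.update(headers)
    if path is not None:
        # The tests pass fixture names such as 'rss20.xml'; assume plain files.
        with open(path, 'rb') as fixture:
            response._content = fixture.read()
    return response


@mock.patch('requests.get')  # assumed patch target, injected as each test's ``get``
class ExampleScaffolding(unittest.TestCase):
    def test_not_modified(self, get):
        get.return_value = responses(304)
        self.assertEqual(get.return_value.status_code, 304)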
Example #2
    def test_errors(self, get):
        codes = [400, 401, 403, 404, 500, 502, 503]

        def get_side_effect():
            yield responses(304)
            for code in codes:
                yield responses(code)

        get.side_effect = get_side_effect()
        feed = FeedFactory.create()
        self.assertEqual(len(get.call_args_list), 1)

        for code in codes:
            get.return_value = responses(code)
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            self.assertEqual(feed.job_details.get('error'), None)
            self.assertEqual(feed.job_details['backoff_factor'], 1)
            feed.schedule()
            data = job_details(feed.url, connection=get_redis_connection())

            update_feed(feed.url, backoff_factor=data['backoff_factor'])

            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            data = job_details(feed.url, connection=get_redis_connection())
            self.assertEqual(data['error'], code)
            self.assertEqual(data['backoff_factor'], 2)

            # Restore status for next iteration
            schedule_job(feed.url, backoff_factor=1, error=None, schedule_in=0)
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertEqual(feed.job_details.get('error'), None)
Example #3
    def test_manage_feed(self, get):
        get.return_value = responses(304)
        user = UserFactory.create()
        url = reverse('feeds:manage')
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Manage feeds')

        FeedFactory.create(user=user, category=None)
        FeedFactory.create(user=user, category=None)
        FeedFactory.create(user=user, category=None)
        unique = UniqueFeed.objects.all()[0]
        schedule_job(unique.url,
                     schedule_in=0,
                     backoff_factor=10,
                     error=UniqueFeed.NOT_A_FEED,
                     connection=get_redis_connection())

        response = self.app.get(url, user=user)
        self.assertContains(response, 'Not a valid RSS/Atom feed')

        schedule_job(unique.url,
                     schedule_in=0,
                     error='blah',
                     connection=get_redis_connection())
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Error')

        unique.muted = True
        unique.save()
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Error')
Example #4
    def test_errors(self, get):
        codes = [400, 401, 403, 404, 500, 502, 503]

        def get_side_effect():
            yield responses(304)
            for code in codes:
                yield responses(code)
        get.side_effect = get_side_effect()
        feed = FeedFactory.create()
        self.assertEqual(len(get.call_args_list), 1)

        for code in codes:
            get.return_value = responses(code)
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            self.assertEqual(feed.job_details.get('error'), None)
            self.assertEqual(feed.job_details['backoff_factor'], 1)
            feed.schedule()
            data = job_details(feed.url, connection=get_redis_connection())

            update_feed(feed.url, backoff_factor=data['backoff_factor'])

            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            data = job_details(feed.url, connection=get_redis_connection())
            self.assertEqual(data['error'], code)
            self.assertEqual(data['backoff_factor'], 2)

            # Restore status for next iteration
            schedule_job(feed.url, backoff_factor=1, error=None, schedule_in=0)
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertEqual(feed.job_details.get('error'), None)
Example #5
    def test_backoff(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()
        feed = UniqueFeed.objects.get(url=feed.url)
        detail = feed.job_details
        self.assertFalse('error' in detail)
        self.assertEqual(detail['backoff_factor'], 1)
        feed.schedule()
        data = job_details(feed.url, connection=get_redis_connection())

        get.return_value = responses(502)
        for i in range(12):
            update_feed(feed.url, backoff_factor=data['backoff_factor'])
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            data = job_details(feed.url, connection=get_redis_connection())
            self.assertEqual(data['error'], 502)
            self.assertEqual(data['backoff_factor'], min(i + 2, 10))

        get.side_effect = RequestException
        feed = UniqueFeed.objects.get()
        patch_job(feed.url, error=None, backoff_factor=1)
        data = job_details(feed.url, connection=get_redis_connection())

        for i in range(12):
            update_feed(feed.url, backoff_factor=data['backoff_factor'])
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            data = job_details(feed.url, connection=get_redis_connection())
            self.assertEqual(data['error'], 'timeout')
            self.assertEqual(data['backoff_factor'], min(i + 2, 10))
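
The assertions in test_backoff above pin down the retry rule without showing it: every failed update bumps the backoff factor by one, and the factor never grows past 10. A tiny sketch of that rule as implied by the assertions; the cap and the increment step are read off min(i + 2, 10), not the real implementation.

MAX_BACKOFF_FACTOR = 10  # assumed cap, read off min(i + 2, 10) in the assertions


def next_backoff_factor(current):
    # One more failed fetch: increment the factor, never beyond the cap.
    return min(current + 1, MAX_BACKOFF_FACTOR)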
Example #6
    def test_backoff(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()
        feed = UniqueFeed.objects.get(url=feed.url)
        detail = feed.job_details
        self.assertFalse('error' in detail)
        self.assertEqual(detail['backoff_factor'], 1)
        feed.schedule()
        data = job_details(feed.url, connection=get_redis_connection())

        get.return_value = responses(502)
        for i in range(12):
            update_feed(feed.url, backoff_factor=data['backoff_factor'])
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            data = job_details(feed.url, connection=get_redis_connection())
            self.assertEqual(data['error'], 502)
            self.assertEqual(data['backoff_factor'], min(i + 2, 10))

        get.side_effect = RequestException
        feed = UniqueFeed.objects.get()
        patch_job(feed.url, error=None, backoff_factor=1)
        data = job_details(feed.url, connection=get_redis_connection())

        for i in range(12):
            update_feed(feed.url, backoff_factor=data['backoff_factor'])
            feed = UniqueFeed.objects.get(url=feed.url)
            self.assertFalse(feed.muted)
            data = job_details(feed.url, connection=get_redis_connection())
            self.assertEqual(data['error'], 'timeout')
            self.assertEqual(data['backoff_factor'], min(i + 2, 10))
Example #7
    def test_feed_model(self, get, head):
        """Behaviour of the ``Feed`` model"""
        head.return_value = responses(200)
        get.return_value = responses(200, 'rss20.xml')
        feed = FeedFactory.create(name='RSS test', url='http://rss20.xml',
                                  user__ttl=99999)
        feed.save()

        feed_from_db = Feed.objects.get(pk=feed.id)

        # __unicode__
        self.assertEqual('%s' % feed_from_db, 'RSS test')

        # get_absolute_url()
        self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

        # update()
        update_feed(feed.url)

        data = job_details(feed.url, connection=get_redis_connection())

        self.assertEqual(data['title'], 'Sample Feed')
        self.assertEqual(data['link'], 'http://example.org/')

        feed = Feed.objects.get(pk=feed.id)
        [entry] = es.manager.user(feed.user).fetch()['hits']
        self.assertEqual(entry.title, 'First item title')

        self.assertEqual(feed.favicon_img(), '')
        feed.favicon = 'fav.png'
        self.assertEqual(feed.favicon_img(),
                         '<img src="/media/fav.png" width="16" height="16" />')
Example #8
    def forwards(self, orm):
        # Deleting field 'UniqueFeed.hub'
        db.delete_column(u'feeds_uniquefeed', 'hub')

        # Deleting field 'UniqueFeed.last_loop'
        db.delete_column(u'feeds_uniquefeed', 'last_loop')

        # Deleting field 'UniqueFeed.backoff_factor'
        db.delete_column(u'feeds_uniquefeed', 'backoff_factor')

        # Deleting field 'UniqueFeed.link'
        db.delete_column(u'feeds_uniquefeed', 'link')

        # Deleting field 'UniqueFeed.etag'
        db.delete_column(u'feeds_uniquefeed', 'etag')

        # Deleting field 'UniqueFeed.subscribers'
        db.delete_column(u'feeds_uniquefeed', 'subscribers')

        # Deleting field 'UniqueFeed.title'
        db.delete_column(u'feeds_uniquefeed', 'title')

        # Deleting field 'UniqueFeed.modified'
        db.delete_column(u'feeds_uniquefeed', 'modified')

        # Deleting field 'UniqueFeed.last_update'
        db.delete_column(u'feeds_uniquefeed', 'last_update')

        redis = get_redis_connection()
        jobs = redis.zrange(REDIS_KEY, 0, -1)
        for job in jobs:
            redis.hdel(job_key(job.decode('utf-8')), 'request_timeout')
Example #9
def patch_job(name, **kwargs):
    redis = get_redis_connection()
    for key, value in list(kwargs.items()):
        if value is None:
            redis.hdel(job_key(name), key)
            kwargs.pop(key)
    redis.hmset(job_key(name), kwargs)
Example #10
def patch_job(name, **kwargs):
    redis = get_redis_connection()
    for key, value in list(kwargs.items()):
        if value is None:
            redis.hdel(job_key(name), key)
            kwargs.pop(key)
    redis.hmset(job_key(name), kwargs)
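
Both copies of patch_job above treat None as a deletion marker: such keys are removed from the job hash and only the remaining values are written back with hmset. A typical call from the tests on this page, which clears a stored error and resets the backoff factor (this assumes a reachable Redis and the job_key helper used above):

patch_job('http://example.com/feed', error=None, backoff_factor=1)

Note that if every keyword ends up being None there is nothing left to write, and redis-py's hmset rejects an empty mapping, so callers presumably always pass at least one real value.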
Example #11
    def test_feed_model(self, get):
        """Behaviour of the ``Feed`` model"""
        get.return_value = responses(200, 'rss20.xml')
        feed = FeedFactory.create(name='RSS test', url='rss20.xml')
        feed.save()

        feed_from_db = Feed.objects.get(pk=feed.id)

        # __unicode__
        self.assertEqual('%s' % feed_from_db, 'RSS test')

        # get_absolute_url()
        self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

        # update()
        update_feed(feed.url)

        data = job_details(feed.url, connection=get_redis_connection())

        self.assertEqual(data['title'], 'Sample Feed')
        self.assertEqual(data['link'], 'http://example.org/')

        feed = Feed.objects.get(pk=feed.id)
        self.assertEqual(feed.entries.count(), 1)
        self.assertEqual(feed.entries.all()[0].title, 'First item title')

        self.assertEqual(feed.favicon_img(), '')
        feed.favicon = 'fav.png'
        self.assertEqual(feed.favicon_img(),
                         '<img src="/media/fav.png" width="16" height="16" />')
Example #12
    def test_incomplete_read(self, get):
        get.side_effect = IncompleteRead("0 bytes read")
        FeedFactory.create()
        f = UniqueFeed.objects.get()
        self.assertFalse(f.muted)
        data = job_details(f.url, connection=get_redis_connection())
        self.assertEqual(data['error'], f.CONNECTION_ERROR)
Example #13
    def forwards(self, orm):
        # Deleting field 'UniqueFeed.hub'
        db.delete_column(u'feeds_uniquefeed', 'hub')

        # Deleting field 'UniqueFeed.last_loop'
        db.delete_column(u'feeds_uniquefeed', 'last_loop')

        # Deleting field 'UniqueFeed.backoff_factor'
        db.delete_column(u'feeds_uniquefeed', 'backoff_factor')

        # Deleting field 'UniqueFeed.link'
        db.delete_column(u'feeds_uniquefeed', 'link')

        # Deleting field 'UniqueFeed.etag'
        db.delete_column(u'feeds_uniquefeed', 'etag')

        # Deleting field 'UniqueFeed.subscribers'
        db.delete_column(u'feeds_uniquefeed', 'subscribers')

        # Deleting field 'UniqueFeed.title'
        db.delete_column(u'feeds_uniquefeed', 'title')

        # Deleting field 'UniqueFeed.modified'
        db.delete_column(u'feeds_uniquefeed', 'modified')

        # Deleting field 'UniqueFeed.last_update'
        db.delete_column(u'feeds_uniquefeed', 'last_update')

        redis = get_redis_connection()
        jobs = redis.zrange(REDIS_KEY, 0, -1)
        for job in jobs:
            redis.hdel(job_key(job.decode('utf-8')), 'request_timeout')
Example #14
    def test_incomplete_read(self, get):
        get.side_effect = IncompleteRead("0 bytes read")
        FeedFactory.create()
        f = UniqueFeed.objects.get()
        self.assertFalse(f.muted)
        data = job_details(f.url, connection=get_redis_connection())
        self.assertEqual(data['error'], f.CONNECTION_ERROR)
Example #15
    def test_feed_model(self, get):
        """Behaviour of the ``Feed`` model"""
        get.return_value = responses(200, "rss20.xml")
        feed = FeedFactory.create(name="RSS test", url="http://rss20.xml", user__ttl=99999)
        feed.save()

        feed_from_db = Feed.objects.get(pk=feed.id)

        # __unicode__
        self.assertEqual("%s" % feed_from_db, "RSS test")

        # get_absolute_url()
        self.assertEqual("/feed/%s/" % feed.id, feed.get_absolute_url())

        # update()
        update_feed(feed.url)

        data = job_details(feed.url, connection=get_redis_connection())

        self.assertEqual(data["title"], "Sample Feed")
        self.assertEqual(data["link"], "http://example.org/")

        feed = Feed.objects.get(pk=feed.id)
        [entry] = es.manager.user(feed.user).fetch()["hits"]
        self.assertEqual(entry.title, "First item title")

        self.assertEqual(feed.favicon_img(), "")
        feed.favicon = "fav.png"
        self.assertEqual(feed.favicon_img(), '<img src="/media/fav.png" width="16" height="16" />')
Example #16
    def test_socket_timeout(self, get):
        m = get.return_value
        type(m).content = PropertyMock(side_effect=socket.timeout)
        FeedFactory.create()
        f = UniqueFeed.objects.get()
        self.assertFalse(f.muted)
        data = job_details(f.url, connection=get_redis_connection())
        self.assertEqual(data['error'], f.TIMEOUT)
Example #17
    def test_decode_error(self, get):
        get.side_effect = DecodeError("Received response with content-encoding"
                                      ": gzip, but failed to decode it.")
        FeedFactory.create()
        unique = UniqueFeed.objects.get()
        data = job_details(unique.url, connection=get_redis_connection())
        self.assertEqual(data['backoff_factor'], 2)
        self.assertEqual(data['error'], UniqueFeed.DECODE_ERROR)
Example #18
    def test_socket_timeout(self, get):
        m = get.return_value
        type(m).content = PropertyMock(side_effect=socket.timeout)
        FeedFactory.create()
        f = UniqueFeed.objects.get()
        self.assertFalse(f.muted)
        data = job_details(f.url, connection=get_redis_connection())
        self.assertEqual(data['error'], f.TIMEOUT)
Example #19
    def test_decode_error(self, get):
        get.side_effect = DecodeError("Received response with content-encoding"
                                      ": gzip, but failed to decode it.")
        FeedFactory.create()
        unique = UniqueFeed.objects.get()
        data = job_details(unique.url, connection=get_redis_connection())
        self.assertEqual(data['backoff_factor'], 2)
        self.assertEqual(data['error'], UniqueFeed.DECODE_ERROR)
Example #20
    def test_task_timeout_handling(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()
        get.side_effect = JobTimeoutException
        self.assertEqual(
            UniqueFeed.objects.get().job_details['backoff_factor'], 1)
        update_feed(feed.url)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['backoff_factor'], 2)
Example #21
    def test_clean_rq(self):
        r = get_redis_connection()
        self.assertEqual(len(r.keys("rq:job:*")), 0)
        r.hmset("rq:job:abc", {"bar": "baz"})
        r.hmset("rq:job:def", {"created_at": times.format(times.now(), "UTC")})
        r.hmset("rq:job:123", {"created_at": times.format(times.now() - timedelta(days=10), "UTC")})
        self.assertEqual(len(r.keys("rq:job:*")), 3)
        call_command("clean_rq")
        self.assertEqual(len(r.keys("rq:job:*")), 2)
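
The clean_rq test implies the command's behaviour without showing it: of the three rq:job:* hashes, only the one whose created_at is ten days old disappears, while the fresh one and the one with no created_at survive. A rough sketch consistent with that behaviour; the cutoff, the timestamp format and the handling of missing fields are all assumptions.

from datetime import datetime, timedelta


def clean_rq(redis, max_age=timedelta(days=7)):
    # Assumed policy: drop job hashes older than max_age; hashes without a
    # created_at field are left untouched, matching the surviving 'rq:job:abc'.
    cutoff = datetime.utcnow() - max_age
    for key in redis.keys('rq:job:*'):
        created_at = redis.hget(key, 'created_at')
        if created_at is None:
            continue
        # Timestamp format assumed to match the utcformat()/times.format() calls above.
        if datetime.strptime(created_at.decode(), '%Y-%m-%dT%H:%M:%SZ') < cutoff:
            redis.delete(key)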
Example #22
    def test_handle_etag(self, get):
        get.return_value = responses(200, 'sw-all.xml',
                                     headers={'etag': 'foo',
                                              'last-modified': 'bar'})
        FeedFactory.create()
        data = job_details(UniqueFeed.objects.get().url,
                           connection=get_redis_connection())
        self.assertEqual(data['etag'], 'foo')
        self.assertEqual(data['modified'], 'bar')
Example #23
    def test_task_timeout_handling(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()
        get.side_effect = JobTimeoutException
        self.assertEqual(
            UniqueFeed.objects.get().job_details['backoff_factor'], 1)
        update_feed(feed.url)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['backoff_factor'], 2)
Example #24
    def test_handle_etag(self, get, head):
        head.return_value = responses(200)
        get.return_value = responses(200, 'sw-all.xml',
                                     headers={'etag': 'foo',
                                              'last-modified': 'bar'})
        FeedFactory.create()
        data = job_details(UniqueFeed.objects.get().url,
                           connection=get_redis_connection())
        self.assertEqual(data['etag'], 'foo')
        self.assertEqual(data['modified'], 'bar')
Example #25
    def test_too_many_requests(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()

        get.return_value = responses(429)
        update_feed(feed.url, backoff_factor=1)
        data = job_details(feed.url, connection=get_redis_connection())
        # retry in 1 min
        self.assertTrue(58 < (epoch_to_utc(data['schedule_at']) -
                              timezone.now()).seconds < 60)
Example #26
    def test_clean_rq(self):
        r = get_redis_connection()
        self.assertEqual(len(r.keys('rq:job:*')), 0)
        r.hmset('rq:job:abc', {'bar': 'baz'})
        r.hmset('rq:job:def', {'created_at': utcformat(utcnow())})
        r.hmset('rq:job:123',
                {'created_at': utcformat(utcnow() - timedelta(days=10))})
        self.assertEqual(len(r.keys('rq:job:*')), 3)
        call_command('clean_rq')
        self.assertEqual(len(r.keys('rq:job:*')), 2)
Example #27
    def test_clean_rq(self):
        r = get_redis_connection()
        self.assertEqual(len(r.keys('rq:job:*')), 0)
        r.hmset('rq:job:abc', {'bar': 'baz'})
        r.hmset('rq:job:def', {'created_at': utcformat(utcnow())})
        r.hmset('rq:job:123', {
            'created_at': utcformat(utcnow() - timedelta(days=10))})
        self.assertEqual(len(r.keys('rq:job:*')), 3)
        call_command('clean_rq')
        self.assertEqual(len(r.keys('rq:job:*')), 2)
Example #28
    def test_too_many_requests(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()

        get.return_value = responses(429)
        update_feed(feed.url, backoff_factor=1)
        data = job_details(feed.url, connection=get_redis_connection())
        # retry in 1 min
        self.assertTrue(
            58 <
            (epoch_to_utc(data['schedule_at']) - timezone.now()).seconds <
            60
        )
Example #29
    def test_restore_backoff(self, get):
        get.return_value = responses(304)
        FeedFactory.create()
        feed = UniqueFeed.objects.get()
        feed.error = 'timeout'
        feed.backoff_factor = 5
        feed.save()
        update_feed(feed.url, error=feed.error,
                    backoff_factor=feed.backoff_factor)

        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['backoff_factor'], 1)
        self.assertTrue('error' not in data)
Example #30
    def test_restore_backoff(self, get):
        get.return_value = responses(304)
        FeedFactory.create()
        feed = UniqueFeed.objects.get()
        feed.error = 'timeout'
        feed.backoff_factor = 5
        feed.save()
        update_feed(feed.url, error=feed.error,
                    backoff_factor=feed.backoff_factor)

        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['backoff_factor'], 1)
        self.assertTrue('error' not in data)
Example #31
    def test_add_feed(self, get):
        get.return_value = responses(304)
        user = UserFactory.create()
        category = CategoryFactory.create(user=user)

        url = reverse("feeds:add_feed")
        response = self.app.get(url, user=user)
        self.assertContains(response, "Add a feed")

        form = response.forms["feed"]
        form["name"] = "Lulz"
        response = form.submit()  # there is no URL
        self.assertFormError(response, "form", "url", ["This field is required."])

        form["name"] = "Bobby"
        form["url"] = "http://example.com/feed.xml"
        form["category"] = category.pk
        response = form.submit()
        self.assertFormError(response, "form", "url", ["Invalid response code from URL: HTTP 304."])
        get.return_value = responses(200, "categories.opml")
        response = form.submit()
        self.assertFormError(response, "form", "url", ["This URL doesn't seem to be a valid feed."])

        get.return_value = responses(200, "bruno.im.png")
        response = form.submit()
        self.assertFormError(response, "form", "url", ["This URL doesn't seem to be a valid feed."])

        cache_key = "lock:feed_check:{0}".format(user.pk)
        redis = get_redis_connection()
        redis.set(cache_key, user.pk)
        response = form.submit()
        self.assertFormError(response, "form", "url", ["This action can only be done one at a time."])
        redis.delete(cache_key)

        get.return_value = responses(200, "brutasse.atom")
        response = form.submit()
        self.assertRedirects(response, "/manage/")
        response.follow()

        response = form.submit()
        self.assertFormError(response, "form", "url", ["It seems you're already subscribed to this feed."])

        # Provide initial params via ?feed=foo&name=bar
        response = self.app.get(url, {"feed": "https://example.com/blog/atom", "name": "Some Example Blog"})
        self.assertContains(response, 'value="https://example.com/blog/atom"')
        self.assertContains(response, 'value="Some Example Blog"')

        get.side_effect = ValueError
        user.feeds.all().delete()
        response = form.submit()
        self.assertFormError(response, "form", "url", ["Error fetching the feed."])
Example #32
    def test_too_many_requests_retry(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()

        get.return_value = responses(429, headers={'Retry-After': '3600'})
        update_feed(feed.url, backoff_factor=1)
        data = job_details(feed.url, connection=get_redis_connection())
        # Retry in 1 hour
        self.assertTrue(3590 < (epoch_to_utc(data['schedule_at']) -
                                timezone.now()).seconds < 3600)

        # Other requests to same domain
        get.reset_mock()
        get.assert_not_called()
        update_feed(feed.url, backoff_factor=1)
        get.assert_not_called()
Example #33
    def test_too_many_requests_retry(self, get):
        get.return_value = responses(304)
        feed = FeedFactory.create()

        get.return_value = responses(429, headers={'Retry-After': '3600'})
        update_feed(feed.url, backoff_factor=1)
        data = job_details(feed.url, connection=get_redis_connection())
        # Retry in 1 hour
        self.assertTrue(
            3590 <
            (epoch_to_utc(data['schedule_at']) - timezone.now()).seconds <
            3600
        )

        # Other requests to same domain
        get.reset_mock()
        get.assert_not_called()
        update_feed(feed.url, backoff_factor=1)
        get.assert_not_called()
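
Read together, the two 429 tests (Examples #25/#28 and #32/#33) imply the retry policy: honour a Retry-After header when the server sends one, otherwise reschedule roughly one minute later, and skip further requests to the throttled feed in the meantime. Below is a sketch of just the delay choice, with the numbers read off the asserted windows (58-60 seconds and 3590-3600 seconds); the actual scheduling code is not shown on this page.

def too_many_requests_delay(response, default_delay=60):
    # Assumed: Retry-After is expressed in seconds; fall back to about a minute.
    retry_after = response.headers.get('Retry-After')
    if retry_after is not None:
        return int(retry_after)
    return default_delay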
Example #34
    def test_handle_etag(self, get):
        get.return_value = responses(200, "sw-all.xml", headers={"etag": "foo", "last-modified": "bar"})
        FeedFactory.create()
        data = job_details(UniqueFeed.objects.get().url, connection=get_redis_connection())
        self.assertEqual(data["etag"], "foo")
        self.assertEqual(data["modified"], "bar")
Example #35
    def tearDown(self):  # noqa
        """Clean up the rache:* redis keys"""
        get_redis_connection().flushdb()
Example #36
    def test_add_feed(self, get, head):
        head.return_value = responses(200, url='http://foo.com/bar')
        get.return_value = responses(304)
        user = UserFactory.create()
        category = CategoryFactory.create(user=user)

        url = reverse('feeds:add_feed')
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Add a feed')

        form = response.forms['feed']
        form['name'] = 'Lulz'
        response = form.submit()  # there is no URL
        self.assertFormError(response, 'form', 'url',
                             ['This field is required.'])

        form['name'] = 'Bobby'
        form['url'] = 'http://example.com/feed.xml'
        form['category'] = category.pk
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "Invalid response code from URL: HTTP 304.",
        ])
        get.return_value = responses(200, 'categories.opml')
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "This URL doesn't seem to be a valid feed.",
        ])

        get.return_value = responses(200, 'bruno.im.png')
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "This URL doesn't seem to be a valid feed.",
        ])

        cache_key = "lock:feed_check:{0}".format(user.pk)
        redis = get_redis_connection()
        redis.set(cache_key, user.pk)
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "This action can only be done one at a time.",
        ])
        redis.delete(cache_key)

        get.return_value = responses(200, 'brutasse.atom')
        response = form.submit()
        self.assertRedirects(response, '/manage/')
        response.follow()

        response = form.submit()
        self.assertFormError(
            response, 'form', 'url',
            ["It seems you're already subscribed to this feed."])

        # Provide initial params via ?feed=foo&name=bar
        response = self.app.get(url, {
            'feed': 'https://example.com/blog/atom',
            'name': 'Some Example Blog'
        })
        self.assertContains(response, 'value="https://example.com/blog/atom"')
        self.assertContains(response, 'value="Some Example Blog"')

        get.side_effect = ValueError
        user.feeds.all().delete()
        response = form.submit()
        self.assertFormError(response, 'form', 'url',
                             ['Error fetching the feed.'])
Example #37
    def test_update_feeds(self):
        u = UniqueFeed.objects.create(
            url='http://example.com/feed0',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=1)).strftime('%s')
        )
        u.schedule()
        UniqueFeed.objects.create(
            url='http://example.com/feed1',
        ).schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        u.delete()
        delete_job(u.url, connection=get_redis_connection())
        with self.assertNumQueries(0):
            urls = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(urls), 0)

        u = UniqueFeed.objects.create(
            url='http://example.com/backoff',
        )
        u.schedule()
        patch_job(
            u.url, backoff_factor=10,
            last_update=(timezone.now() - timedelta(hours=28)).strftime('%s')
        )
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)
        patch_job(u.url, backoff_factor=9)
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        patch_job(u.url, last_update=int(time.time()))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(pending_jobs(
                limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(
            url='http://example.com/lol',
        )

        for u in UniqueFeed.objects.all():
            patch_job(u.url, last_update=(
                timezone.now() - timedelta(hours=54)).strftime('%s'))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        u = UniqueFeed.objects.create(
            url='http://example.com/foo',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=2)).strftime('%s'))
        u.schedule()
        u = UniqueFeed.objects.create(
            url='http://example.com/bar',
        )
        u.schedule()
        patch_job(
            u.url,
            last_update=(timezone.now() - timedelta(hours=2)).strftime('%s'))
        u.schedule()
        jobs = list(pending_jobs(
            limit=5, reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
            connection=get_redis_connection()))
        self.assertEqual(len(jobs), 2)
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')
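
test_update_feeds stores last_update as epoch seconds via strftime('%s'). That format code is a glibc extension rather than part of Python's documented strftime, and it quietly uses the local timezone; a portable way to produce the same kind of value, shown only as an aside (it assumes a configured Django settings module, as the tests do):

import time
from datetime import timedelta

from django.utils import timezone

one_hour_ago = int((timezone.now() - timedelta(hours=1)).timestamp())
now_epoch = int(time.time())  # the form the test itself uses for the most recent update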
Example #38
    def tearDown(self):  # noqa
        """Clean up the rache:* redis keys"""
        get_redis_connection().flushdb()
Example #39
    def test_add_feed(self, get, head):
        head.return_value = responses(200, url='http://foo.com/bar')
        get.return_value = responses(304)
        user = UserFactory.create()
        category = CategoryFactory.create(user=user)

        url = reverse('feeds:add_feed')
        response = self.app.get(url, user=user)
        self.assertContains(response, 'Add a feed')

        form = response.forms['feed']
        form['name'] = 'Lulz'
        response = form.submit()  # there is no URL
        self.assertFormError(response, 'form', 'url',
                             ['This field is required.'])

        form['name'] = 'Bobby'
        form['url'] = 'http://example.com/feed.xml'
        form['category'] = category.pk
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "Invalid response code from URL: HTTP 304.",
        ])
        get.return_value = responses(200, 'categories.opml')
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "This URL doesn't seem to be a valid feed.",
        ])

        get.return_value = responses(200, 'bruno.im.png')
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "This URL doesn't seem to be a valid feed.",
        ])

        cache_key = "lock:feed_check:{0}".format(user.pk)
        redis = get_redis_connection()
        redis.set(cache_key, user.pk)
        response = form.submit()
        self.assertFormError(response, 'form', 'url', [
            "This action can only be done one at a time.",
        ])
        redis.delete(cache_key)

        get.return_value = responses(200, 'brutasse.atom')
        response = form.submit()
        self.assertRedirects(response, '/manage/')
        response.follow()

        response = form.submit()
        self.assertFormError(
            response, 'form', 'url',
            ["It seems you're already subscribed to this feed."])

        # Provide initial params via ?feed=foo&name=bar
        response = self.app.get(url, {'feed': 'https://example.com/blog/atom',
                                      'name': 'Some Example Blog'})
        self.assertContains(response, 'value="https://example.com/blog/atom"')
        self.assertContains(response, 'value="Some Example Blog"')

        get.side_effect = ValueError
        user.feeds.all().delete()
        response = form.submit()
        self.assertFormError(response, 'form', 'url',
                             ['Error fetching the feed.'])
Example #40
    def test_update_feeds(self):
        u = UniqueFeed.objects.create(url='http://example.com/feed0', )
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=1)).strftime('%s'))
        u.schedule()
        UniqueFeed.objects.create(url='http://example.com/feed1', ).schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)

        u.delete()
        delete_job(u.url, connection=get_redis_connection())
        with self.assertNumQueries(0):
            urls = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(urls), 0)

        u = UniqueFeed.objects.create(url='http://example.com/backoff', )
        u.schedule()
        patch_job(u.url,
                  backoff_factor=10,
                  last_update=(timezone.now() -
                               timedelta(hours=28)).strftime('%s'))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)
        patch_job(u.url, backoff_factor=9)
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 1)
            self.assertEqual(jobs[0]['id'], u.url)
            self.assertEqual(
                UniqueFeed.TIMEOUT_BASE * jobs[0]['backoff_factor'], 180)

        patch_job(u.url, last_update=int(time.time()))
        u.schedule()
        with self.assertNumQueries(0):
            jobs = list(
                pending_jobs(limit=5,
                             reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                             connection=get_redis_connection()))
            self.assertEqual(len(jobs), 0)

        UniqueFeed.objects.create(url='http://example.com/lol', )

        for u in UniqueFeed.objects.all():
            patch_job(u.url,
                      last_update=(timezone.now() -
                                   timedelta(hours=54)).strftime('%s'))

        # No subscribers -> deletion
        with self.assertNumQueries(2):
            call_command('delete_unsubscribed')
        self.assertEqual(UniqueFeed.objects.count(), 0)

        u = UniqueFeed.objects.create(url='http://example.com/foo', )
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=2)).strftime('%s'))
        u.schedule()
        u = UniqueFeed.objects.create(url='http://example.com/bar', )
        u.schedule()
        patch_job(u.url,
                  last_update=(timezone.now() -
                               timedelta(hours=2)).strftime('%s'))
        u.schedule()
        jobs = list(
            pending_jobs(limit=5,
                         reschedule_in=UniqueFeed.UPDATE_PERIOD * 60,
                         connection=get_redis_connection()))
        self.assertEqual(len(jobs), 2)
        self.assertEqual(jobs[0]['id'], 'http://example.com/bar')
        self.assertEqual(jobs[1]['id'], 'http://example.com/foo')