def test_backoff(self, get):
    # Repeated fetch failures must grow the retry backoff factor by one
    # per attempt, capped at 10, without ever muting the feed.
    get.return_value = responses(304)
    feed = FeedFactory.create()
    feed = UniqueFeed.objects.get(url=feed.url)
    self.assertEqual(feed.error, None)
    self.assertEqual(feed.backoff_factor, 1)
    feed.schedule()
    data = job_details(feed.url)

    # HTTP-level failures: every 502 bumps the factor (12 attempts
    # exceed the cap, so the last iterations stay at 10).
    get.return_value = responses(502)
    for i in range(12):
        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url)
        self.assertEqual(data['error'], 502)
        self.assertEqual(data['backoff_factor'], min(i + 2, 10))

    # Network-level failures behave the same, recorded as 'timeout'.
    get.side_effect = RequestException
    feed = UniqueFeed.objects.get()
    feed.error = None
    feed.backoff_factor = 1
    feed.save()
    feed.schedule()
    data = job_details(feed.url)
    for i in range(12):
        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url)
        self.assertEqual(data['error'], 'timeout')
        self.assertEqual(data['backoff_factor'], min(i + 2, 10))
def test_errors(self, get):
    # Each failing HTTP status code must be stored on the job and
    # double the backoff factor exactly once per failed fetch.
    get.return_value = responses(304)
    feed = FeedFactory.create()

    for code in [400, 401, 403, 404, 500, 502, 503]:
        get.return_value = responses(code)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        self.assertEqual(feed.error, None)
        self.assertEqual(feed.backoff_factor, 1)
        feed.schedule()
        data = job_details(feed.url)

        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url)
        self.assertEqual(data['error'], code)
        self.assertEqual(data['backoff_factor'], 2)

        # Restore status for next iteration
        feed.backoff_factor = 1
        feed.error = None
        feed.save(update_fields=['backoff_factor', 'error'])
        feed.schedule()
def test_backoff(self, get):
    # Failures grow the backoff factor up to a cap of 10 without ever
    # muting the feed; job state lives in redis, not on the model.
    get.return_value = responses(304)
    feed = FeedFactory.create()
    feed = UniqueFeed.objects.get(url=feed.url)
    detail = feed.job_details
    self.assertFalse('error' in detail)
    self.assertEqual(detail['backoff_factor'], 1)
    feed.schedule()
    data = job_details(feed.url, connection=get_redis_connection())

    # HTTP-level failures: each 502 bumps the factor, capped at 10.
    get.return_value = responses(502)
    for i in range(12):
        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['error'], 502)
        self.assertEqual(data['backoff_factor'], min(i + 2, 10))

    # Network-level failures are recorded as 'timeout' and follow the
    # same backoff progression after the job is reset.
    get.side_effect = RequestException
    feed = UniqueFeed.objects.get()
    patch_job(feed.url, error=None, backoff_factor=1)
    data = job_details(feed.url, connection=get_redis_connection())
    for i in range(12):
        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['error'], 'timeout')
        self.assertEqual(data['backoff_factor'], min(i + 2, 10))
def test_errors(self, get):
    # Each failing status code must be stored on the job and double the
    # backoff factor once; the job is then reset for the next code.
    codes = [400, 401, 403, 404, 500, 502, 503]

    # First fetch (during feed creation) succeeds with a 304; every
    # following fetch fails with the next code in the list.
    def get_side_effect():
        yield responses(304)
        for code in codes:
            yield responses(code)
    get.side_effect = get_side_effect()
    feed = FeedFactory.create()
    # Only the creation fetch has happened so far.
    self.assertEqual(len(get.call_args_list), 1)

    for code in codes:
        get.return_value = responses(code)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        self.assertEqual(feed.job_details.get('error'), None)
        self.assertEqual(feed.job_details['backoff_factor'], 1)
        feed.schedule()
        data = job_details(feed.url, connection=get_redis_connection())

        update_feed(feed.url, backoff_factor=data['backoff_factor'])
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertFalse(feed.muted)
        data = job_details(feed.url, connection=get_redis_connection())
        self.assertEqual(data['error'], code)
        self.assertEqual(data['backoff_factor'], 2)

        # Restore status for next iteration
        schedule_job(feed.url, backoff_factor=1, error=None,
                     schedule_in=0)
        feed = UniqueFeed.objects.get(url=feed.url)
        self.assertEqual(feed.job_details.get('error'), None)
def test_incomplete_read(self, get):
    """A truncated response is recorded as a connection error."""
    get.side_effect = IncompleteRead("0 bytes read")
    FeedFactory.create()
    unique = UniqueFeed.objects.get()
    self.assertFalse(unique.muted)
    details = job_details(unique.url, connection=get_redis_connection())
    self.assertEqual(details['error'], unique.CONNECTION_ERROR)
def test_feed_model(self, get):
    """Behaviour of the ``Feed`` model"""
    get.return_value = responses(200, 'rss20.xml')
    feed = FeedFactory.create(name='RSS test', url='rss20.xml')
    feed.save()

    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual('%s' % feed_from_db, 'RSS test')

    # get_absolute_url()
    self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url)

    # Fetched metadata is mirrored into the redis job details.
    data = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(data['title'], 'Sample Feed')
    self.assertEqual(data['link'], 'http://example.org/')

    feed = Feed.objects.get(pk=feed.id)
    self.assertEqual(feed.entries.count(), 1)
    self.assertEqual(feed.entries.all()[0].title, 'First item title')

    # favicon_img() renders nothing until a favicon is set.
    self.assertEqual(feed.favicon_img(), '')
    feed.favicon = 'fav.png'
    self.assertEqual(feed.favicon_img(),
                     '<img src="/media/fav.png" width="16" height="16" />')
def test_feed_model(self, get, head):
    """Behaviour of the ``Feed`` model"""
    head.return_value = responses(200)
    get.return_value = responses(200, 'rss20.xml')
    feed = FeedFactory.create(name='RSS test', url='http://rss20.xml',
                              user__ttl=99999)
    feed.save()

    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual('%s' % feed_from_db, 'RSS test')

    # get_absolute_url()
    self.assertEqual('/feed/%s/' % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url)

    # Fetched metadata is mirrored into the redis job details.
    data = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(data['title'], 'Sample Feed')
    self.assertEqual(data['link'], 'http://example.org/')

    feed = Feed.objects.get(pk=feed.id)
    # Entries come back from the `es` search backend, keyed by user.
    [entry] = es.manager.user(feed.user).fetch()['hits']
    self.assertEqual(entry.title, 'First item title')

    # favicon_img() renders nothing until a favicon is set.
    self.assertEqual(feed.favicon_img(), '')
    feed.favicon = 'fav.png'
    self.assertEqual(feed.favicon_img(),
                     '<img src="/media/fav.png" width="16" height="16" />')
def handle_sentry(self, *args, **kwargs):
    # Mirror the per-job details stored in redis back onto the
    # corresponding UniqueFeed rows.
    existing_jobs = set(scheduled_jobs())
    for url in existing_jobs:
        details = job_details(url)
        # Defaults for every attribute we denormalize on the model.
        attrs = {
            'title': '',
            'link': '',
            'etag': '',
            'modified': '',
            'error': '',
            'hub': '',
            'backoff_factor': 1,
            'subscribers': 1,
        }
        string_keys = ['title', 'link', 'etag', 'modified', 'error', 'hub']
        for key in attrs:
            if key in details:
                value = details[key]
                # Job details may hold non-string values (e.g. numeric
                # error codes); coerce them for the text attributes.
                if key in string_keys and not isinstance(value, basestring):
                    value = str(value)
                attrs[key] = value
        # Truncate overly long titles — presumably the column's
        # max_length is 2048; verify against the model definition.
        attrs['title'] = attrs['title'][:2048]
        if 'last_update' in details:
            # Stored as a UTC epoch timestamp; rebuild an aware datetime.
            attrs['last_update'] = timezone.make_aware(
                datetime.utcfromtimestamp(details['last_update']),
                pytz.utc)
        UniqueFeed.objects.filter(url=url).update(**attrs)
def test_feed_model(self, get):
    """Behaviour of the ``Feed`` model"""
    get.return_value = responses(200, "rss20.xml")
    feed = FeedFactory.create(name="RSS test", url="http://rss20.xml",
                              user__ttl=99999)
    feed.save()

    feed_from_db = Feed.objects.get(pk=feed.id)

    # __unicode__
    self.assertEqual("%s" % feed_from_db, "RSS test")

    # get_absolute_url()
    self.assertEqual("/feed/%s/" % feed.id, feed.get_absolute_url())

    # update()
    update_feed(feed.url)

    # Fetched metadata is mirrored into the redis job details.
    data = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(data["title"], "Sample Feed")
    self.assertEqual(data["link"], "http://example.org/")

    feed = Feed.objects.get(pk=feed.id)
    # Entries come back from the `es` search backend, keyed by user.
    [entry] = es.manager.user(feed.user).fetch()["hits"]
    self.assertEqual(entry.title, "First item title")

    # favicon_img() renders nothing until a favicon is set.
    self.assertEqual(feed.favicon_img(), "")
    feed.favicon = "fav.png"
    self.assertEqual(feed.favicon_img(),
                     '<img src="/media/fav.png" width="16" height="16" />')
def test_socket_timeout(self, get):
    """A socket timeout while reading the body is stored as a timeout."""
    mock_response = get.return_value
    type(mock_response).content = PropertyMock(side_effect=socket.timeout)
    FeedFactory.create()
    feed = UniqueFeed.objects.get()
    self.assertFalse(feed.muted)
    details = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(details['error'], feed.TIMEOUT)
def test_decode_error(self, get):
    """A gzip decode failure bumps the backoff and records the error."""
    get.side_effect = DecodeError("Received response with content-encoding"
                                  ": gzip, but failed to decode it.")
    FeedFactory.create()
    feed = UniqueFeed.objects.get()
    details = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(details['backoff_factor'], 2)
    self.assertEqual(details['error'], UniqueFeed.DECODE_ERROR)
def test_handle_etag(self, get):
    """Etag and last-modified headers are persisted on the job."""
    get.return_value = responses(
        200, 'sw-all.xml',
        headers={'etag': 'foo', 'last-modified': 'bar'})
    FeedFactory.create()
    details = job_details(UniqueFeed.objects.get().url)
    self.assertEqual(details['etag'], 'foo')
    self.assertEqual(details['modified'], 'bar')
def test_task_timeout_handling(self, get):
    """A fetch killed by the job timeout still increments the backoff."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    get.side_effect = JobTimeoutException
    before = UniqueFeed.objects.get().backoff_factor
    self.assertEqual(before, 1)
    update_feed(feed.url)
    details = job_details(feed.url)
    self.assertEqual(details['backoff_factor'], 2)
def test_task_timeout_handling(self, get):
    """A fetch aborted by the job timeout still bumps the backoff."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    get.side_effect = JobTimeoutException
    before = UniqueFeed.objects.get().job_details['backoff_factor']
    self.assertEqual(before, 1)
    update_feed(feed.url)
    details = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(details['backoff_factor'], 2)
def test_job_details(self):
    """job_details returns the id plus all stored attributes."""
    schedule_job('details', schedule_in=-1, stuff='baz', other=123)
    expected = {
        'id': 'details',
        'stuff': 'baz',
        'schedule_at': int(time.time()) - 1,
        'other': 123,
    }
    self.assertEqual(job_details('details'), expected)
def test_job_details(self):
    """Every attribute passed to schedule_job is returned verbatim."""
    schedule_job('details', schedule_in=-1, stuff='baz', other=123)
    details = job_details('details')
    self.assertEqual(details, {
        'id': 'details',
        'stuff': 'baz',
        'schedule_at': int(time.time()) - 1,
        'other': 123,
    })
def test_handle_etag(self, get, head):
    """Conditional-request headers are mirrored into the job details."""
    head.return_value = responses(200)
    get.return_value = responses(
        200, 'sw-all.xml',
        headers={'etag': 'foo', 'last-modified': 'bar'})
    FeedFactory.create()
    details = job_details(UniqueFeed.objects.get().url,
                          connection=get_redis_connection())
    self.assertEqual(details['etag'], 'foo')
    self.assertEqual(details['modified'], 'bar')
def test_too_many_requests(self, get):
    """A 429 without Retry-After schedules a retry in about a minute."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    get.return_value = responses(429)
    update_feed(feed.url, backoff_factor=1)
    details = job_details(feed.url, connection=get_redis_connection())
    # retry in 1 min
    wait = (epoch_to_utc(details['schedule_at']) - timezone.now()).seconds
    self.assertTrue(58 < wait < 60)
def test_restore_backoff(self, get):
    """A successful fetch resets the error and backoff state."""
    get.return_value = responses(304)
    FeedFactory.create()
    feed = UniqueFeed.objects.get()
    feed.error = 'timeout'
    feed.backoff_factor = 5
    feed.save()
    update_feed(feed.url, error=feed.error,
                backoff_factor=feed.backoff_factor)
    details = job_details(feed.url, connection=get_redis_connection())
    self.assertEqual(details['backoff_factor'], 1)
    self.assertTrue('error' not in details)
def test_too_many_requests(self, get):
    """Plain 429 responses are retried after roughly one minute."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    get.return_value = responses(429)
    update_feed(feed.url, backoff_factor=1)
    job = job_details(feed.url, connection=get_redis_connection())
    # retry in 1 min
    remaining = epoch_to_utc(job['schedule_at']) - timezone.now()
    self.assertTrue(58 < remaining.seconds < 60)
def test_too_many_requests_retry(self, get):
    """Retry-After on a 429 delays the whole domain, not just one feed."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    get.return_value = responses(429, headers={'Retry-After': '3600'})
    update_feed(feed.url, backoff_factor=1)
    details = job_details(feed.url, connection=get_redis_connection())
    # Retry in 1 hour
    wait = (epoch_to_utc(details['schedule_at']) - timezone.now()).seconds
    self.assertTrue(3590 < wait < 3600)
    # Other requests to same domain
    get.reset_mock()
    get.assert_not_called()
    update_feed(feed.url, backoff_factor=1)
    get.assert_not_called()
def test_too_many_requests_retry(self, get):
    """A 429 with Retry-After postpones and suppresses domain fetches."""
    get.return_value = responses(304)
    feed = FeedFactory.create()
    get.return_value = responses(429, headers={'Retry-After': '3600'})
    update_feed(feed.url, backoff_factor=1)
    job = job_details(feed.url, connection=get_redis_connection())
    # Retry in 1 hour
    remaining = epoch_to_utc(job['schedule_at']) - timezone.now()
    self.assertTrue(3590 < remaining.seconds < 3600)
    # Other requests to same domain
    get.reset_mock()
    get.assert_not_called()
    update_feed(feed.url, backoff_factor=1)
    get.assert_not_called()
def test_handle_etag(self, get):
    """Etag / last-modified response headers end up in the job data."""
    get.return_value = responses(
        200, "sw-all.xml",
        headers={"etag": "foo", "last-modified": "bar"})
    FeedFactory.create()
    job = job_details(UniqueFeed.objects.get().url,
                      connection=get_redis_connection())
    self.assertEqual(job["etag"], "foo")
    self.assertEqual(job["modified"], "bar")
def get_job(name):
    """Return the stored details for *name*, raising JobNotFound when no
    such job key exists in redis."""
    connection = get_redis_connection()
    if not connection.exists(job_key(name)):
        raise JobNotFound
    return job_details(name, connection=connection)