def test_incremental_crawl_failure(self, bucket_mock, conn_mock, crawl_mock):
    """An incremental crawl that returns malformed feed data must leave the
    sync map's status and incremental_epoch untouched and write nothing
    back to S3.
    """
    def failure_feed(url):
        # Simulate a bad Facebook response: feed URLs get a payload
        # missing the expected 'data' key; any other URL returns None.
        if '/feed' in url:
            return {'notdata': [{'ooga': 'booga'}]}

    self.facebook_patch = patch(
        'targetshare.integration.facebook.client.urllib2.urlopen',
        crawl_mock(1, 250, failure_feed)
    )
    self.facebook_patch.start()
    the_past = epoch.from_date(timezone.now() - timedelta(days=365))
    # Test runs in under a second typically, so we need to be slightly
    # behind present time, so that we can see fbm.incremental_epoch
    # get updated
    present = epoch.from_date(timezone.now() - timedelta(seconds=30))
    fbm = models.FBSyncMap.items.create(
        fbid_primary=self.fbid, fbid_secondary=self.fbid,
        token=self.token.token, back_filled=False,
        back_fill_epoch=the_past, incremental_epoch=present,
        status=models.FBSyncMap.COMPLETE, bucket='test_bucket_0'
    )
    existing_key = Mock()
    existing_key.get_contents_as_string.return_value = (
        '{"updated": 1, "data": [{"test": "testing"}]}')
    bucket_mock.return_value = existing_key
    conn_mock.return_value = s3_feed.BucketManager()
    tasks.incremental_crawl(fbm.fbid_primary, fbm.fbid_secondary)
    new_fbm = models.FBSyncMap.items.get_item(
        fbid_primary=self.fbid, fbid_secondary=self.fbid)
    # BUGFIX: assert on the re-fetched record, not the stale pre-crawl
    # `fbm` object (whose status was set to COMPLETE at creation above,
    # making the old assertion vacuous).
    self.assertEqual(new_fbm.status, new_fbm.COMPLETE)
    self.assertEqual(int(new_fbm.incremental_epoch), present)
    self.assertFalse(existing_key.set_contents_from_string.called)
def setUp(self):
    """Build the user/token/app fixtures and patch Facebook's HTTP and
    token-debug entry points with canned responses.
    """
    super(TestFeedCrawlerTasks, self).setUp()
    self.fbid = 1111111
    expires = timezone.datetime(2020, 1, 1, 12, 0, 0, tzinfo=timezone.utc)
    self.token = models.dynamo.Token.items.create(
        fbid=self.fbid,
        appid=1,
        token='1',
        expires=expires,
    )
    models.FBApp.objects.create(appid=1, name='social-good', secret='sekret')
    # Replace urlopen with a crawl mock serving mock_feed data
    # (presumably 1 page / 250 entries — see crawl_mock's definition).
    self.facebook_patch = patch(
        'targetshare.integration.facebook.client.urllib2.urlopen',
        crawl_mock(1, 250, mock_feed),
    )
    # debug_token always reports the token as valid until `expires`.
    self.token_patch = patch(
        'targetshare.integration.facebook.client.debug_token',
        return_value={
            'data': {
                'is_valid': True,
                'expires_at': expires,
            },
        },
    )
    self.facebook_patch.start()
    self.token_patch.start()