def test_get_last_archived_date(self, patched_get):
    """get_last_archived_mtime() converts stored epoch seconds back into
    the equivalent naive datetime.

    NOTE(review): another method later in this file is also named
    test_get_last_archived_date; duplicate names shadow each other
    within a single TestCase — confirm which one should survive.
    """
    expected = datetime.datetime(2010, 11, 2, 3, 44, 55)
    epoch_seconds = (expected - datetime.datetime(1970, 1, 1)).total_seconds()
    patched_get.return_value = dict(maxrawfilemtimearchived=epoch_seconds)
    sync_status = scraper.SyncStatus(None, None)
    self.assertEqual(sync_status.get_last_archived_mtime(), expected)
def test_update_last_archived_date(self, patched_update):
    """update_last_archived_date() stores the date as a u'x'-prefixed
    unicode string (u'x2012-02-29').

    NOTE(review): another method later in this file shares this name;
    duplicate test names shadow each other within one TestCase.
    """
    status = scraper.SyncStatus(None, None)
    status.update_last_archived_date(datetime.date(2012, 2, 29))
    # Bare assert_called_once() is a silent no-op on pre-3.6-era mock
    # releases (it merely creates a child mock attribute); an explicit
    # call_count check cannot false-pass and matches the sibling test.
    self.assertEqual(patched_update.call_count, 1)
    self.assertTrue(u'x2012-02-29' in patched_update.call_args[0])
    # The stored value must be unicode, not bytes.
    index = patched_update.call_args[0].index(u'x2012-02-29')
    self.assertEqual(type(patched_update.call_args[0][index]), unicode)
def test_get_last_archived_date_bad_date(self, patched_get, log):
    """An unparseable stored date ('2009-13-10' — month 13) raises
    NonRecoverableScraperException and emits an ERROR log record.

    NOTE(review): a later method in this file has the same name;
    duplicates shadow each other within one TestCase.
    """
    patched_get.return_value = dict(lastsuccessfulcollection='2009-13-10')
    status = scraper.SyncStatus(None, None)
    with self.assertRaises(scraper.NonRecoverableScraperException):
        status.get_last_archived_date()
    levels = [record.levelname for record in log.records]
    self.assertIn('ERROR', levels)
def test_update_data_robustness(self, _log):
    """A single transient ServiceUnavailable from put() is retried and
    the update ultimately succeeds."""
    client = mock.Mock()
    client.get.return_value = None
    # First put() raises, second succeeds.
    client.put.side_effect = [
        cloud_exceptions.ServiceUnavailable('one failure'),
        None
    ]
    status = scraper.SyncStatus(client, None)
    # Must not raise despite the first failure.
    status.update_data('key', 'value')
    # The original test only checked "no exception", which also passes if
    # the failing put() is never retried at all; assert the retry happened.
    self.assertEqual(client.put.call_count, 2)
def test_get_last_archived_date_bad_date(self, patched_get):
    """A non-numeric stored mtime ('monkey') raises
    NonRecoverableScraperException and logs at ERROR level.

    NOTE(review): an earlier method in this file has the same name;
    duplicates shadow each other within one TestCase.
    """
    patched_get.return_value = dict(maxrawfilemtimearchived='monkey')
    with testfixtures.LogCapture() as captured:
        status = scraper.SyncStatus(None, None)
        with self.assertRaises(scraper.NonRecoverableScraperException):
            status.get_last_archived_mtime()
        self.assertIn('ERROR', [r.levelname for r in captured.records])
def test_get_data_fails_eventually(self, _log):
    """When every get() attempt raises ServiceUnavailable, get_data()
    eventually gives up and propagates the exception."""
    datastore = mock.Mock()
    datastore.key.return_value = {}
    # A plain exception (not a list) as side_effect fails on EVERY call.
    datastore.get.side_effect = cloud_exceptions.ServiceUnavailable(
        'permanent failure')
    status = scraper.SyncStatus(datastore, None)
    with self.assertRaises(cloud_exceptions.ServiceUnavailable):
        status.get_data()
def test_get_data_robustness(self, _log):
    """A single transient ServiceUnavailable from get() is retried and
    get_data() ultimately succeeds."""
    client = mock.Mock()
    client.key.return_value = {}
    # First get() raises, second returns an (empty) entity.
    client.get.side_effect = [
        cloud_exceptions.ServiceUnavailable('one failure'),
        {}
    ]
    status = scraper.SyncStatus(client, None)
    # Must not raise despite the first failure.
    status.get_data()
    # The original test only checked "no exception"; also assert that the
    # retry actually occurred rather than the call being skipped entirely.
    self.assertEqual(client.get.call_count, 2)
def test_get_data_caches_key(self):
    """The datastore key is built once and cached, while get() runs on
    every call to get_data()."""
    fake_client = mock.Mock()
    fake_client.key.return_value = {}
    status = scraper.SyncStatus(fake_client, None)
    for expected_gets in (1, 2):
        status.get_data()
        # key() stays at one call; get() grows with each invocation.
        self.assertEqual(fake_client.key.call_count, 1)
        self.assertEqual(fake_client.get.call_count, expected_gets)
def test_log_handler(self, patched_update_data, _log):
    """SyncStatusLogHandler forwards only records at/above the logger's
    level (ERROR here) to SyncStatus.update_data, passing unicode text."""
    status = scraper.SyncStatus(None, None)
    loghandler = scraper.SyncStatusLogHandler(status)
    logger = logging.getLogger('temp_test')
    logger.setLevel(logging.ERROR)
    logger.addHandler(loghandler)
    try:
        logger.info('INFORMATIVE')
        # assert_not_called()/assert_called_once() are silent no-ops on
        # old mock releases; explicit call_count checks cannot false-pass.
        self.assertEqual(patched_update_data.call_count, 0)
        logger.error('BADNESS')
        self.assertEqual(patched_update_data.call_count, 1)
        # Second positional argument to update_data must be unicode.
        self.assertEqual(type(patched_update_data.call_args[0][1]), unicode)
    finally:
        # logging.getLogger returns a process-wide singleton; remove the
        # handler so it does not leak into other tests.
        logger.removeHandler(loghandler)
def test_update_data_no_value(self):
    """When the datastore holds no existing entity (get() -> None),
    update_data() still writes exactly one entity via put().

    NOTE(review): a later method in this file shares this name;
    duplicate test names shadow each other within one TestCase.
    """
    fake_client = mock.Mock()
    fake_client.get.return_value = None
    sync = scraper.SyncStatus(fake_client, None)
    sync.update_data('key', 'value')
    self.assertEqual(fake_client.put.call_count, 1)
def test_update_last_archived_date(self, patched_update):
    """update_last_archived_date() performs exactly one update whose
    positional args include the literal key u'obsolete'.

    NOTE(review): an earlier method in this file shares this name;
    duplicate test names shadow each other within one TestCase.
    """
    sync = scraper.SyncStatus(None, None)
    sync.update_last_archived_date(datetime.date(2012, 2, 29))
    self.assertEqual(patched_update.call_count, 1)
    self.assertTrue(u'obsolete' in patched_update.call_args[0])
def test_update_data_no_value(self):
    """When the datastore holds no existing entity, update_data() writes
    a fresh one via put().

    NOTE(review): an earlier method in this file shares this name;
    duplicate test names shadow each other within one TestCase.
    """
    client = mock.Mock()
    client.get.return_value = None
    status = scraper.SyncStatus(client, None)
    status.update_data('key', 'value')
    # put.assert_called_once() is a silent no-op on old mock releases
    # (it just creates a child mock); check call_count explicitly, as the
    # sibling test of the same name already does.
    self.assertEqual(client.put.call_count, 1)
def test_get_last_archived_date_empty_date(self, patched_get):
    """An empty stored collection date makes get_last_archived_mtime()
    fall back to the caller-supplied default."""
    patched_get.return_value = dict(lastsuccessfulcollection='')
    fallback = datetime.datetime(1970, 1, 1, 23, 59, 59)
    status = scraper.SyncStatus(None, None)
    self.assertEqual(status.get_last_archived_mtime(fallback), fallback)
def test_get_last_archived_date_from_status_no_date(self, patched_get):
    """When the status dict lacks the mtime key entirely, the built-in
    default of 2009-01-01 00:00:00 is returned."""
    patched_get.return_value = dict(irrelevant='monkey')
    result = scraper.SyncStatus(None, None).get_last_archived_mtime()
    self.assertEqual(result, datetime.datetime(2009, 1, 1, 0, 0, 0))
def test_get_last_archived_date_from_status_default(self, patched_get):
    """When no status record exists at all (get_data() -> None), the
    built-in default of 2009-01-01 00:00:00 is returned."""
    patched_get.return_value = None
    result = scraper.SyncStatus(None, None).get_last_archived_mtime()
    self.assertEqual(result, datetime.datetime(2009, 1, 1, 0, 0, 0))
def test_get_last_archived_date(self, patched_get):
    """get_last_archived_date() decodes the stored 'x'-prefixed string
    ('x2010-11-02') into the corresponding date.

    NOTE(review): an earlier method in this file shares this name;
    duplicate test names shadow each other within one TestCase.
    """
    patched_get.return_value = dict(lastsuccessfulcollection='x2010-11-02')
    parsed = scraper.SyncStatus(None, None).get_last_archived_date()
    self.assertEqual(parsed, datetime.date(2010, 11, 2))
def test_update_mtime(self, patched_update_data):
    """update_mtime() delegates straight to update_data() with the fixed
    'maxrawfilemtimearchived' key and the raw value."""
    sync = scraper.SyncStatus(None, None)
    sync.update_mtime(7)
    patched_update_data.assert_called_once_with('maxrawfilemtimearchived',
                                                7)