def test_sync_history_end_date(self):
    """
    Tests the functionality of requesting sync history before a given date
    """
    # Setup
    self.repo_manager.create_repo('test_repo')

    # Template used to fabricate ISO8601 timestamps one second apart
    template = '2013-06-01T12:00:0%sZ'

    # Persist three sync results; each one starts on an even second and
    # completes one second later
    for second in range(0, 6, 2):
        record = RepoSyncResult.expected_result(
            'test_repo', 'foo', 'bar', template % second, template % (second + 1),
            1, 1, 1, '', '', RepoSyncResult.RESULT_SUCCESS)
        RepoSyncResult.get_collection().save(record, safe=True)

    # Sanity check: all three entries were stored for the repo
    self.assertEqual(3, len(self.sync_manager.sync_history('test_repo')))

    # Request only the entries that started at or before the cutoff
    cutoff = '2013-06-01T12:00:03Z'
    filtered = self.sync_manager.sync_history('test_repo', end_date=cutoff)

    # Exactly the first two syncs fall inside the window, and every
    # returned entry started no later than the requested cutoff
    self.assertEqual(2, len(filtered))
    cutoff_time = dateutils.parse_iso8601_datetime(cutoff)
    for record in filtered:
        started_time = dateutils.parse_iso8601_datetime(record['started'])
        self.assertTrue(started_time <= cutoff_time)
def test_sync_history_ascending_sort(self):
    """
    Tests the sort functionality of sync_history when an ascending sort on
    the start date is requested.
    """
    # Setup
    self.repo_manager.create_repo('test_sort')
    date_string = '2013-06-01T12:00:0%sZ'

    # Add some consecutive sync entries (start on even seconds, complete one
    # second later)
    for i in range(0, 10, 2):
        r = RepoSyncResult.expected_result('test_sort', 'foo', 'bar', date_string % str(i),
                                           date_string % str(i + 1), 1, 1, 1, '', '',
                                           RepoSyncResult.RESULT_SUCCESS)
        RepoSyncResult.get_collection().save(r, safe=True)

    # Test sort by ascending start date
    entries = self.sync_manager.sync_history(repo_id='test_sort', sort=constants.SORT_ASCENDING)
    self.assertEqual(5, len(entries))

    # Verify that each entry has an earlier *started* date than the next one.
    # NOTE: the comparison is on the 'started' field, not 'completed' — the
    # sort under test orders by start date.
    for i in range(0, 4):
        first = dateutils.parse_iso8601_datetime(entries[i]['started'])
        second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
        self.assertTrue(first < second)
def add_result(repo_id, offset):
    """
    Persist a successful RepoSyncResult for the given repo whose completion
    time is ``offset`` days after its start time (now, local timezone).
    """
    begun = datetime.datetime.now(dateutils.local_tz())
    finished = begun + datetime.timedelta(days=offset)
    entry = RepoSyncResult.expected_result(
        repo_id, 'foo', 'bar',
        dateutils.format_iso8601_datetime(begun),
        dateutils.format_iso8601_datetime(finished),
        1, 1, 1, '', '', RepoSyncResult.RESULT_SUCCESS)
    RepoSyncResult.get_collection().save(entry, safe=True)
def add_result(repo_id, offset):
    """
    Save a successful sync history entry for ``repo_id``.

    The entry starts now (local timezone) and completes ``offset`` days later.
    """
    start = datetime.datetime.now(dateutils.local_tz())
    end = start + datetime.timedelta(days=offset)
    start_iso = dateutils.format_iso8601_datetime(start)
    end_iso = dateutils.format_iso8601_datetime(end)
    record = RepoSyncResult.expected_result(
        repo_id, 'foo', 'bar', start_iso, end_iso,
        1, 1, 1, '', '', RepoSyncResult.RESULT_SUCCESS)
    RepoSyncResult.get_collection().save(record, safe=True)
def test_sync_history_descending_sort(self):
    """
    Tests the sort functionality of sync_history when a descending sort on
    the start date is requested.
    """
    # Setup
    self.repo_manager.create_repo('test_sort')
    date_string = '2013-06-01T12:00:0%sZ'

    # Add some consecutive sync entries (start on even seconds, complete one
    # second later)
    for i in range(0, 10, 2):
        r = RepoSyncResult.expected_result('test_sort', 'foo', 'bar', date_string % str(i),
                                           date_string % str(i + 1), 1, 1, 1, '', '',
                                           RepoSyncResult.RESULT_SUCCESS)
        RepoSyncResult.get_collection().save(r, safe=True)

    # Test sort by descending start date
    entries = self.sync_manager.sync_history(repo_id='test_sort', sort=constants.SORT_DESCENDING)
    self.assertEqual(5, len(entries))

    # Verify that each entry has a later *started* date than the next one.
    # NOTE: the comparison is on the 'started' field, not 'completed' — the
    # sort under test orders by start date.
    for i in range(0, 4):
        first = dateutils.parse_iso8601_datetime(entries[i]['started'])
        second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
        self.assertTrue(first > second)
result_code = RepoSyncResult.RESULT_SUCCESS else: result_code = RepoSyncResult.RESULT_FAILED else: msg = _('Plugin type [%s] on repo [%s] did not return a valid sync report') msg = msg % (repo_importer['importer_type_id'], repo_id) logger.warn(msg) added_count = updated_count = removed_count = -1 # None? summary = details = msg result_code = RepoSyncResult.RESULT_ERROR # RESULT_UNKNOWN? result = RepoSyncResult.expected_result( repo_id, repo_importer['id'], repo_importer['importer_type_id'], sync_start_timestamp, sync_end_timestamp, added_count, updated_count, removed_count, summary, details, result_code) finally: # Do an update instead of a save in case the importer has changed the scratchpad importer_coll.update({'repo_id': repo_id}, {'$set': {'last_sync': sync_end_timestamp}}, safe=True) # Add a sync history entry for this run sync_result_coll.save(result, safe=True) return result def sync_history(self, repo_id, limit=None, sort=constants.SORT_DESCENDING, start_date=None, end_date=None): """ Returns sync history entries for the given repo, sorted from most recent
else: result_code = RepoSyncResult.RESULT_FAILED else: msg = _( 'Plugin type [%s] on repo [%s] did not return a valid sync report' ) _logger.warn(msg % (repo_importer['importer_type_id'], repo_obj.repo_id)) added_count = updated_count = removed_count = -1 # None? summary = details = msg result_code = RepoSyncResult.RESULT_ERROR # RESULT_UNKNOWN? sync_result = RepoSyncResult.expected_result( repo_obj.repo_id, repo_importer['id'], repo_importer['importer_type_id'], sync_start_timestamp, sync_end_timestamp, added_count, updated_count, removed_count, summary, details, result_code) finally: # Do an update instead of a save in case the importer has changed the scratchpad importer_collection.update({'repo_id': repo_obj.repo_id}, {'$set': { 'last_sync': sync_end_timestamp }}, safe=True) # Add a sync history entry for this run sync_result_collection.save(sync_result, safe=True) fire_manager.fire_repo_sync_finished(sync_result) if sync_result.result == RepoSyncResult.RESULT_FAILED: