Example #1
    def test_publish_history_ascending_sort(self):
        """
        Tests using the sort parameter to sort the results in ascending order by start time
        """

        # Setup
        self.repo_manager.create_repo('test_sort')
        self.distributor_manager.add_distributor('test_sort', 'mock-distributor', {}, True,
                                                 distributor_id='test_dist')
        # Create some consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 10, 2):
            r = RepoPublishResult.expected_result(
                'test_sort', 'test_dist', 'bar', date_string % str(i), date_string % str(i + 1),
                'test-summary', 'test-details', RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Test that returned entries are in ascending order by time
        entries = self.publish_manager.publish_history('test_sort', 'test_dist',
                                                       sort=constants.SORT_ASCENDING)
        self.assertEqual(5, len(entries))
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first < second)
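A short aside on the comparison pattern above: the test parses both ISO8601 strings before comparing them. A minimal sketch of why, assuming the pulp.common.dateutils import path used by these examples:

from pulp.common import dateutils  # assumed import path

utc_noon = dateutils.parse_iso8601_datetime('2013-06-01T12:00:00Z')
offset_time = dateutils.parse_iso8601_datetime('2013-06-01T12:30:00+02:00')  # 10:30 UTC
# Both values are timezone-aware datetimes, so '<' compares the actual instants;
# comparing the raw strings would give the opposite (and wrong) ordering here.
assert offset_time < utc_noon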
Example #2
    def test_get(self, mock_path, mock_ok, mock_utils_get):
        call = ScheduledCall('PT1M', 'pulp.tasks.frequent')
        mock_utils_get.return_value = [call]

        ret = self.controller._get(call.id)
        schedule = mock_ok.call_args[0][0]

        self.assertEqual(ret, mock_ok.return_value)
        self.assertEqual(len(mock_ok.call_args[0]), 1)

        # spot-check the schedule
        self.assertEqual(schedule['_id'], call.id)
        self.assertEqual(schedule['schedule'], 'PT1M')
        self.assertEqual(schedule['task'], 'pulp.tasks.frequent')
        self.assertEqual(schedule['_href'], mock_path.return_value)

        # next_run is calculated on-demand, and there is a small chance that it
        # will be re-calculated in the call.for_display() call as 1 second later
        # than it was calculated above. Thus we will test that equality here
        # with a tolerance of 1 second
        for_display = call.for_display()
        call_next_run = dateutils.parse_iso8601_datetime(call.next_run)
        display_next_run = dateutils.parse_iso8601_datetime(
            for_display['next_run'])
        self.assertTrue(
            display_next_run - call_next_run <= timedelta(seconds=1))

        # now check overall equality with the actual for_display value
        del schedule['_href']
        del schedule['next_run']
        del for_display['next_run']
        self.assertEqual(schedule, for_display)

        # make sure we called the manager layer correctly
        mock_utils_get.assert_called_once_with([call.id])
Example #3
    def test_update_task_status(self):
        """
        Tests the successful operation of update_task_status().
        """
        task_id = self.get_random_uuid()
        queue = 'special_queue'
        tags = ['test-tag1', 'test-tag2']
        state = 'waiting'
        TaskStatusManager.create_task_status(task_id, queue, tags, state)
        now = datetime.now(dateutils.utc_tz())
        start_time = dateutils.format_iso8601_datetime(now)
        delta = {
            'start_time': start_time,
            'state': 'running',
            'disregard': 'ignored',
            'progress_report': {
                'report-id': 'my-progress'
            }
        }

        updated = TaskStatusManager.update_task_status(task_id, delta)

        task_status = TaskStatusManager.find_by_task_id(task_id)
        self.assertEqual(task_status['start_time'], delta['start_time'])
        # Make sure that parse_iso8601_datetime is able to parse the start_time without errors
        dateutils.parse_iso8601_datetime(task_status['start_time'])
        self.assertEqual(task_status['state'], delta['state'])
        self.assertEqual(task_status['progress_report'],
                         delta['progress_report'])
        self.assertEqual(task_status['queue'], queue)
        self.assertEqual(updated['start_time'], delta['start_time'])
        self.assertEqual(updated['state'], delta['state'])
        self.assertEqual(updated['progress_report'], delta['progress_report'])
        self.assertTrue('disregard' not in updated)
        self.assertTrue('disregard' not in task_status)
Example #4
    def test_publish_history_start_date(self):

        # Setup
        self.repo_manager.create_repo('test_date')
        self.distributor_manager.add_distributor('test_date', 'mock-distributor', {}, True,
                                                 distributor_id='test_dist')
        # Create three consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 6, 2):
            r = RepoPublishResult.expected_result(
                'test_date', 'test_dist', 'bar', date_string % str(i), date_string % str(i + 1),
                'test-summary', 'test-details', RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Verify
        self.assertEqual(3, len(self.publish_manager.publish_history('test_date', 'test_dist')))
        start_date = '2013-06-01T12:00:02Z'
        start_entries = self.publish_manager.publish_history('test_date', 'test_dist',
                                                             start_date=start_date)
        # Confirm the dates of the retrieved entries are later than or equal to the requested date
        self.assertEqual(2, len(start_entries))
        for entries in start_entries:
            retrieved = dateutils.parse_iso8601_datetime(entries['started'])
            given_start = dateutils.parse_iso8601_datetime(start_date)
            self.assertTrue(retrieved >= given_start)
Example #5
    def test_on_success_with_canceled_task(self, mock_request):
        """
        Make sure on_success() does not move a canceled Task to 'finished' state.
        """
        retval = 'random_return_value'
        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {
            '1': 'for the money',
            'tags': ['test_tags'],
            'queue': WORKER_2_QUEUE
        }
        mock_request.called_directly = False
        task_status = TaskStatusManager.create_task_status(
            task_id, 'some_queue', state=CALL_CANCELED_STATE)
        task = tasks.Task()

        # This should not update the task status to finished, since this task was canceled.
        task.on_success(retval, task_id, args, kwargs)

        updated_task_status = TaskStatusManager.find_by_task_id(task_id)
        # Make sure the task is still canceled.
        self.assertEqual(updated_task_status['state'], CALL_CANCELED_STATE)
        self.assertEqual(updated_task_status['result'], retval)
        self.assertFalse(updated_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(updated_task_status['finish_time'])
Example #6
    def test_sync_history_end_date(self):
        """
        Tests the functionality of requesting sync history before a given date
        """
        # Setup
        self.repo_manager.create_repo('test_repo')
        # A date string to fake some dates
        date_string = '2013-06-01T12:00:0%sZ'
        # Create 3 entries, with each date entry one second later
        for i in range(0, 6, 2):
            r = RepoSyncResult.expected_result('test_repo', 'foo', 'bar', date_string % str(i),
                                               date_string % str(i + 1), 1, 1, 1, '', '',
                                               RepoSyncResult.RESULT_SUCCESS)
            RepoSyncResult.get_collection().save(r, safe=True)

        # Verify three entries in test_repo
        self.assertEqual(3, len(self.sync_manager.sync_history('test_repo')))
        # Retrieve the first two entries
        end_date = '2013-06-01T12:00:03Z'
        end_entries = self.sync_manager.sync_history('test_repo', end_date=end_date)
        # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
        self.assertEqual(2, len(end_entries))
        for entry in end_entries:
            retrieved = dateutils.parse_iso8601_datetime(entry['started'])
            given_end = dateutils.parse_iso8601_datetime(end_date)
            self.assertTrue(retrieved <= given_end)
Example #7
    def test_on_success_handler_async_result(self, mock_request):
        """
        Make sure that overridden on_success handler updates task status correctly
        """
        retval = AsyncResult('foo-id')

        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {
            '1': 'for the money',
            'tags': ['test_tags'],
            'queue': WORKER_2_QUEUE
        }
        mock_request.called_directly = False

        task_status = TaskStatusManager.create_task_status(
            task_id, 'some_queue')
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)

        task = tasks.Task()
        task.on_success(retval, task_id, args, kwargs)

        new_task_status = TaskStatusManager.find_by_task_id(task_id)
        self.assertEqual(new_task_status['state'], 'finished')
        self.assertEqual(new_task_status['result'], None)
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['spawned_tasks'], ['foo-id'])
Example #8
    def test_task_status_update(self):
        """
        Tests the successful operation of task status update.
        """
        task_id = self.get_random_uuid()
        worker_name = 'special_worker_name'
        tags = ['test-tag1', 'test-tag2']
        state = 'waiting'
        TaskStatus(task_id, worker_name, tags, state).save()
        now = datetime.now(dateutils.utc_tz())
        start_time = dateutils.format_iso8601_datetime(now)
        delta = {'start_time': start_time,
                 'state': 'running',
                 'progress_report': {'report-id': 'my-progress'}}

        TaskStatus.objects(task_id=task_id).update_one(
            set__start_time=delta['start_time'], set__state=delta['state'],
            set__progress_report=delta['progress_report'])

        task_status = TaskStatus.objects(task_id=task_id).first()
        self.assertEqual(task_status['start_time'], delta['start_time'])
        # Make sure that parse_iso8601_datetime is able to parse the start_time without errors
        dateutils.parse_iso8601_datetime(task_status['start_time'])
        self.assertEqual(task_status['state'], delta['state'])
        self.assertEqual(task_status['progress_report'], delta['progress_report'])
        self.assertEqual(task_status['worker_name'], worker_name)
Example #9
    def test_spawned_task_status(self, mock_request):
        async_result = AsyncResult('foo-id')

        retval = tasks.TaskResult(error=PulpException('error-foo'),
                                  result='bar')
        retval.spawned_tasks = [async_result]

        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {
            '1': 'for the money',
            'tags': ['test_tags'],
            'routing_key': WORKER_2
        }
        mock_request.called_directly = False

        task_status = TaskStatusManager.create_task_status(task_id)
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)

        task = tasks.Task()
        task.on_success(retval, task_id, args, kwargs)

        new_task_status = TaskStatusManager.find_by_task_id(task_id)
        self.assertEqual(new_task_status['state'], 'finished')
        self.assertEqual(new_task_status['result'], 'bar')
        self.assertEqual(new_task_status['error']['description'], 'error-foo')
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['spawned_tasks'], ['foo-id'])
Example #10
    def test_with_years_duration(self, mock_time):
        """
        Test calculating the next run when the interval is a Duration object and uses years
        """
        last_runs = ('2015-01-01T10:00Z', '2016-01-01T10:00Z',
                     '2017-01-01T10:00Z', '2018-01-01T10:00Z')
        expected_next_runs = ('2016-01-01T10:00Z', '2017-01-01T10:00Z',
                              '2018-01-01T10:00Z', '2019-01-01T10:00Z')
        times = (
            1451642000.0,  # Just before 2016-01-01T10:00Z UTC
            1483264000.0,  # Just before 2017-01-01T10:00Z UTC
            1514800000.0,  # Just before 2018-01-01T10:00Z UTC
            1546336000.0,  # Just before 2019-01-01T10:00Z UTC
        )

        for last_run, current_time, expected_next_run in zip(
                last_runs, times, expected_next_runs):
            mock_time.return_value = current_time
            call = ScheduledCall('2014-01-01T10:00Z/P1Y',
                                 'pulp.tasks.dosomething',
                                 total_run_count=2,
                                 last_run_at=last_run)
            next_run = call.calculate_next_run()

            self.assertEqual(
                dateutils.parse_iso8601_datetime(expected_next_run),
                dateutils.parse_iso8601_datetime(next_run))
Example #11
    def test_update_task_status(self):
        """
        Tests the successful operation of update_task_status().
        """
        task_id = self.get_random_uuid()
        queue = 'special_queue'
        tags = ['test-tag1', 'test-tag2']
        state = 'waiting'
        TaskStatusManager.create_task_status(task_id, queue, tags, state)
        now = datetime.now(dateutils.utc_tz())
        start_time = dateutils.format_iso8601_datetime(now)
        delta = {'start_time': start_time,
                 'state': 'running',
                 'disregard': 'ignored',
                 'progress_report': {'report-id': 'my-progress'}}

        updated = TaskStatusManager.update_task_status(task_id, delta)

        task_status = TaskStatusManager.find_by_task_id(task_id)
        self.assertEqual(task_status['start_time'], delta['start_time'])
        # Make sure that parse_iso8601_datetime is able to parse the start_time without errors
        dateutils.parse_iso8601_datetime(task_status['start_time'])
        self.assertEqual(task_status['state'], delta['state'])
        self.assertEqual(task_status['progress_report'], delta['progress_report'])
        self.assertEqual(task_status['queue'], queue)
        self.assertEqual(updated['start_time'], delta['start_time'])
        self.assertEqual(updated['state'], delta['state'])
        self.assertEqual(updated['progress_report'], delta['progress_report'])
        self.assertTrue('disregard' not in updated)
        self.assertTrue('disregard' not in task_status)
Example #12
    def test_publish_history(self):
        """
        Tests getting the history of publishes on a repo.
        """

        # Setup
        self.repo_manager.create_repo('foo')
        self.distributor_manager.add_distributor('foo',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='dist-1')
        for i in range(1, 6):
            add_result('foo', 'dist-1', i)

        # Test
        entries = self.publish_manager.publish_history('foo', 'dist-1')

        # Verify. The returned entries should be limited to the constant defined in common.constants.
        self.assertEqual(constants.REPO_HISTORY_LIMIT, len(entries))

        #   Verify the default sort direction is descending order
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i +
                                                              1]['started'])
            self.assertTrue(first >= second)
Example #13
    def test_publish_history_end_date(self):

        # Setup
        self.repo_manager.create_repo("test_date")
        self.distributor_manager.add_distributor("test_date", "mock-distributor", {}, True, distributor_id="test_dist")
        # Create three consecutive publish entries
        date_string = "2013-06-01T12:00:0%sZ"
        for i in range(0, 6, 2):
            r = RepoPublishResult.expected_result(
                "test_date",
                "test_dist",
                "bar",
                date_string % str(i),
                date_string % str(i + 1),
                "test-summary",
                "test-details",
                RepoPublishResult.RESULT_SUCCESS,
            )
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Verify that all entries retrieved have dates prior to the given end date
        end_date = "2013-06-01T12:00:03Z"
        end_entries = self.publish_manager.publish_history("test_date", "test_dist", end_date=end_date)
        # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
        self.assertEqual(2, len(end_entries))
        for entries in end_entries:
            retrieved = dateutils.parse_iso8601_datetime(entries["started"])
            given_end = dateutils.parse_iso8601_datetime(end_date)
            self.assertTrue(retrieved <= given_end)
Example #14
    def test_publish_history(self):
        """
        Tests getting the history of publishes on a repo.
        """

        # Setup
        self.repo_manager.create_repo('foo')
        self.distributor_manager.add_distributor('foo',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='dist-1')
        for i in range(1, 6):
            add_result('foo', 'dist-1', i)

        # Test
        entries = self.publish_manager.publish_history('foo', 'dist-1')

        # Verify
        self.assertEqual(5, len(entries))

        #   Verify descending order
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['completed'])
            second = dateutils.parse_iso8601_datetime(entries[i +
                                                              1]['completed'])
            self.assertTrue(first > second)
Example #15
    def test_publish_history_descending_sort(self):
        """
        Tests using the sort parameter to sort the results in descending order by start time
        """

        # Setup
        self.repo_manager.create_repo("test_sort")
        self.distributor_manager.add_distributor("test_sort", "mock-distributor", {}, True, distributor_id="test_dist")
        # Create some consecutive publish entries
        date_string = "2013-06-01T12:00:0%sZ"
        for i in range(0, 10, 2):
            r = RepoPublishResult.expected_result(
                "test_sort",
                "test_dist",
                "bar",
                date_string % str(i),
                date_string % str(i + 1),
                "test-summary",
                "test-details",
                RepoPublishResult.RESULT_SUCCESS,
            )
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Test that returned entries are in descending order by time
        entries = self.publish_manager.publish_history("test_sort", "test_dist", sort=constants.SORT_DESCENDING)
        self.assertEqual(5, len(entries))
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]["started"])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]["started"])
            self.assertTrue(first > second)
Example #16
    def test_sync_history_ascending_sort(self):
        """
        Tests the sort functionality of sync_history
        """

        # Setup
        self.repo_manager.create_repo('test_sort')
        date_string = '2013-06-01T12:00:0%sZ'
        # Add some consecutive sync entries
        for i in range(0, 10, 2):
            r = RepoSyncResult.expected_result('test_sort', 'foo', 'bar',
                                               date_string % str(i),
                                               date_string % str(i + 1), 1, 1,
                                               1, '', '',
                                               RepoSyncResult.RESULT_SUCCESS)
            RepoSyncResult.get_collection().save(r, safe=True)

        # Test sort by ascending start date
        entries = self.sync_manager.sync_history(repo_id='test_sort',
                                                 sort=constants.SORT_ASCENDING)
        self.assertEqual(5, len(entries))
        # Verify that each entry has an earlier start date than the next one
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i +
                                                              1]['started'])
            self.assertTrue(first < second)
Example #17
    def test_async_result(self, mock_request):
        retval = AsyncResult('foo-id')

        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {
            '1': 'for the money',
            'tags': ['test_tags'],
            'routing_key': WORKER_2
        }
        mock_request.called_directly = False

        task_status = TaskStatus(task_id).save()
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)

        task = tasks.Task()
        task.on_success(retval, task_id, args, kwargs)

        new_task_status = TaskStatus.objects(task_id=task_id).first()
        self.assertEqual(new_task_status['state'], 'finished')
        self.assertEqual(new_task_status['result'], None)
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['spawned_tasks'], ['foo-id'])
Example #18
    def test_updates_task_status_correctly(self, mock_request):
        exc = Exception()
        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags']}

        class EInfo(object):
            """
            on_failure handler expects an instance of celery's ExceptionInfo class
            as one of its arguments. It stores a string representation of the traceback
            in its traceback instance variable. This is a stub to imitate that behavior.
            """
            def __init__(self):
                self.traceback = "string_repr_of_traceback"

        einfo = EInfo()
        mock_request.called_directly = False

        task_status = TaskStatus(task_id).save()
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)
        self.assertEqual(task_status['traceback'], None)

        task = tasks.Task()
        task.on_failure(exc, task_id, args, kwargs, einfo)

        new_task_status = TaskStatus.objects(task_id=task_id).first()
        self.assertEqual(new_task_status['state'], 'error')
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['traceback'], einfo.traceback)
Example #19
    def test_sync_history_end_date(self):
        """
        Tests the functionality of requesting sync history before a given date
        """
        # Setup
        self.repo_manager.create_repo('test_repo')
        # A date string to fake some dates
        date_string = '2013-06-01T12:00:0%sZ'
        # Create 3 entries, with each date entry one second later
        for i in range(0, 6, 2):
            r = RepoSyncResult.expected_result('test_repo', 'foo', 'bar',
                                               date_string % str(i),
                                               date_string % str(i + 1), 1, 1,
                                               1, '', '',
                                               RepoSyncResult.RESULT_SUCCESS)
            RepoSyncResult.get_collection().save(r, safe=True)

        # Verify three entries in test_repo
        self.assertEqual(3, len(self.sync_manager.sync_history('test_repo')))
        # Retrieve the first two entries
        end_date = '2013-06-01T12:00:03Z'
        end_entries = self.sync_manager.sync_history('test_repo',
                                                     end_date=end_date)
        # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
        self.assertEqual(2, len(end_entries))
        for entry in end_entries:
            retrieved = dateutils.parse_iso8601_datetime(entry['started'])
            given_end = dateutils.parse_iso8601_datetime(end_date)
            self.assertTrue(retrieved <= given_end)
Example #20
    def test_spawned_task_status(self, mock_request):
        async_result = AsyncResult('foo-id')

        retval = tasks.TaskResult(error=PulpException('error-foo'),
                                  result='bar')
        retval.spawned_tasks = [async_result]

        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags'], 'routing_key': WORKER_2}
        mock_request.called_directly = False

        task_status = TaskStatus(task_id).save()
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)

        task = tasks.Task()
        task.on_success(retval, task_id, args, kwargs)

        new_task_status = TaskStatus.objects(task_id=task_id).first()
        self.assertEqual(new_task_status['state'], 'finished')
        self.assertEqual(new_task_status['result'], 'bar')
        self.assertEqual(new_task_status['error']['description'], 'error-foo')
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['spawned_tasks'], ['foo-id'])
Example #21
    def test_publish_history_descending_sort(self):
        """
        Tests using the sort parameter to sort the results in descending order by start time
        """

        # Setup
        self.repo_manager.create_repo('test_sort')
        self.distributor_manager.add_distributor('test_sort',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='test_dist')
        # Create some consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 10, 2):
            r = RepoPublishResult.expected_result(
                'test_sort', 'test_dist', 'bar', date_string % str(i),
                date_string % str(i + 1), 'test-summary', 'test-details',
                RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Test that returned entries are in descending order by time
        entries = self.publish_manager.publish_history(
            'test_sort', 'test_dist', sort=constants.SORT_DESCENDING)
        self.assertEqual(5, len(entries))
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i +
                                                              1]['started'])
            self.assertTrue(first > second)
Example #22
    def test_publish_history_end_date(self):

        # Setup
        self.repo_manager.create_repo('test_date')
        self.distributor_manager.add_distributor('test_date',
                                                 'mock-distributor', {},
                                                 True,
                                                 distributor_id='test_dist')
        # Create three consecutive publish entries
        date_string = '2013-06-01T12:00:0%sZ'
        for i in range(0, 6, 2):
            r = RepoPublishResult.expected_result(
                'test_date', 'test_dist', 'bar', date_string % str(i),
                date_string % str(i + 1), 'test-summary', 'test-details',
                RepoPublishResult.RESULT_SUCCESS)
            RepoPublishResult.get_collection().insert(r, safe=True)

        # Verify that all entries retrieved have dates prior to the given end date
        end_date = '2013-06-01T12:00:03Z'
        end_entries = self.publish_manager.publish_history('test_date',
                                                           'test_dist',
                                                           end_date=end_date)
        # Confirm the dates of the retrieved entries are earlier than or equal to the requested date
        self.assertEqual(2, len(end_entries))
        for entries in end_entries:
            retrieved = dateutils.parse_iso8601_datetime(entries['started'])
            given_end = dateutils.parse_iso8601_datetime(end_date)
            self.assertTrue(retrieved <= given_end)
Example #23
    def test_on_success_handler_async_result(self, mock_request):
        """
        Make sure that overridden on_success handler updates task status correctly
        """
        retval = AsyncResult('foo-id')

        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags'], 'queue': RESERVED_WORKER_2}
        mock_request.called_directly = False

        task_status = TaskStatusManager.create_task_status(task_id, 'some_queue')
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)

        task = tasks.Task()
        task.on_success(retval, task_id, args, kwargs)

        new_task_status = TaskStatusManager.find_by_task_id(task_id)
        self.assertEqual(new_task_status['state'], 'finished')
        self.assertEqual(new_task_status['result'], None)
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['spawned_tasks'], ['foo-id'])
Example #24
    def test_with_months_duration(self, mock_time):
        """
        Test calculating the next run when the interval is a Duration object and uses months
        """
        last_runs = ('2015-01-01T10:00Z', '2015-02-01T10:00Z',
                     '2015-03-01T10:00Z', '2015-04-01T10:00Z')
        expected_next_runs = ('2015-02-01T10:00Z', '2015-03-01T10:00Z',
                              '2015-04-01T10:00Z', '2015-05-01T10:00Z')
        times = (
            1422784799.0,  # Just before 2015-02-01T10:00Z UTC
            1425203999.0,  # Just before 2015-03-01T10:00Z UTC
            1427882399.0,  # Just before 2015-04-01T10:00Z UTC
            1430474399.0,  # Just before 2015-05-01T10:00Z UTC
        )

        for last_run, current_time, expected_next_run in zip(
                last_runs, times, expected_next_runs):
            mock_time.return_value = current_time
            call = ScheduledCall('2014-12-01T10:00Z/P1M',
                                 'pulp.tasks.dosomething',
                                 total_run_count=2,
                                 last_run_at=last_run)
            next_run = call.calculate_next_run()

            self.assertEqual(
                dateutils.parse_iso8601_datetime(expected_next_run),
                dateutils.parse_iso8601_datetime(next_run))
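The hard-coded epoch values above ("Just before" a given UTC instant) can be sanity-checked with calendar.timegm; a minimal sketch, assuming the pulp.common.dateutils import path used throughout these examples:

import calendar

from pulp.common import dateutils  # assumed import path

# 2015-02-01T10:00Z expressed as seconds since the epoch
dt = dateutils.parse_iso8601_datetime('2015-02-01T10:00Z')
epoch_s = calendar.timegm(dt.utctimetuple())
assert epoch_s == 1422784800  # so 1422784799.0 really is just before that instant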
Example #25
    def test_on_failure_handler(self, mock_request):
        """
        Make sure that overridden on_failure handler updates task status correctly
        """
        exc = Exception()
        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags']}

        class EInfo(object):
            """
            on_failure handler expects an instance of celery's ExceptionInfo class
            as one of its arguments. It stores a string representation of the traceback
            in its traceback instance variable. This is a stub to imitate that behavior.
            """
            def __init__(self):
                self.traceback = "string_repr_of_traceback"
        einfo = EInfo()
        mock_request.called_directly = False

        task_status = TaskStatusManager.create_task_status(task_id, 'some_queue')
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)
        self.assertEqual(task_status['traceback'], None)

        task = tasks.Task()
        task.on_failure(exc, task_id, args, kwargs, einfo)

        new_task_status = TaskStatusManager.find_by_task_id(task_id)
        self.assertEqual(new_task_status['state'], 'error')
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
        self.assertEqual(new_task_status['traceback'], einfo.traceback)
Example #26
    def validate(self, value):
        super(ISO8601StringField, self).validate(value)

        try:
            dateutils.parse_iso8601_datetime(value)
        except ISO8601Error, e:
            self.error(str(e))
Example #27
    def test_get(self, mock_path, mock_ok, mock_utils_get):
        call = ScheduledCall('PT1M', 'pulp.tasks.frequent')
        mock_utils_get.return_value = [call]

        ret = self.controller._get(call.id)
        schedule = mock_ok.call_args[0][0]

        self.assertEqual(ret, mock_ok.return_value)
        self.assertEqual(len(mock_ok.call_args[0]), 1)

        # spot-check the schedule
        self.assertEqual(schedule['_id'], call.id)
        self.assertEqual(schedule['schedule'], 'PT1M')
        self.assertEqual(schedule['task'], 'pulp.tasks.frequent')
        self.assertEqual(schedule['_href'], mock_path.return_value)

        # next_run is calculated on-demand, and there is a small chance that it
        # will be re-calculated in the call.for_display() call as 1 second later
        # than it was calculated above. Thus we will test that equality here
        # with a tolerance of 1 second
        for_display = call.for_display()
        call_next_run = dateutils.parse_iso8601_datetime(call.next_run)
        display_next_run = dateutils.parse_iso8601_datetime(for_display['next_run'])
        self.assertTrue(display_next_run - call_next_run <= timedelta(seconds=1))

        # now check overall equality with the actual for_display value
        del schedule['_href']
        del schedule['next_run']
        del for_display['next_run']
        self.assertEqual(schedule, for_display)

        # make sure we called the manager layer correctly
        mock_utils_get.assert_called_once_with([call.id])
Example #28
    def test_with_past_runs(self, mock_time):
        # setup an hourly call that first ran not quite 2 hours ago, ran again
        # less than one hour ago, and should be scheduled to run at the end of
        # this hour
        mock_time.return_value = 1389389758.547976
        call = ScheduledCall('2014-01-10T20:00Z/PT1H', 'pulp.tasks.dosomething',
                             total_run_count=2, last_run_at='2014-01-10T21:00Z')

        next_run = call.calculate_next_run()

        self.assertEqual(dateutils.parse_iso8601_datetime('2014-01-10T22:00Z'),
                         dateutils.parse_iso8601_datetime(next_run))
Example #29
def iso8601_datetime_validator(x):
    """
    Validates that a user-entered value is a correct iso8601 date

    :param x: input value to be validated
    :type x: str

    :raise ValueError: if the input is not a valid iso8601 string
    """
    try:
        dateutils.parse_iso8601_datetime(x)
    except Exception:
        raise ValueError(_('value must be a valid iso8601 string (yyyy-mm-ddThh:mm:ssZ)'))
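A minimal usage sketch for the validator above; the invalid input shown is hypothetical:

iso8601_datetime_validator('2013-06-01T12:00:00Z')  # valid: returns None, raises nothing

try:
    iso8601_datetime_validator('next tuesday')  # not ISO8601, so parsing fails
except ValueError as e:
    print(e)  # value must be a valid iso8601 string (yyyy-mm-ddThh:mm:ssZ)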
Example #30
def iso8601_datetime_validator(x):
    """
    Validates that a user-entered value is a correct iso8601 date

    :param x: input value to be validated
    :type x: str

    :raise ValueError: if the input is not a valid iso8601 string
    """
    try:
        dateutils.parse_iso8601_datetime(x)
    except Exception:
        raise ValueError(_('value must be a valid iso8601 string (yyyy-mm-ddThh:mm:ssZ)'))
Example #31
    def _calculate_times(self):
        """
        Calculates and returns several time-related values that tend to be needed
        at the same time.

        :return:    tuple of numbers described below...
                    now_s: current time as seconds since the epoch
                    first_run_s: time of the first run as seconds since the epoch,
                        calculated based on self.first_run
                    since_first_s: how many seconds have elapsed since the first
                        run
                    run_every_s: how many seconds should elapse between runs of
                        this schedule
                    last_scheduled_run_s: the most recent time at which this
                        schedule should have run based on its schedule, as
                        seconds since the epoch
                    expected_runs: number of runs that should have happened based
                        on the first_run time and the interval
        :rtype:     tuple

        """
        now_s = time.time()
        first_run_dt = dateutils.to_utc_datetime(dateutils.parse_iso8601_datetime(self.first_run))
        first_run_s = calendar.timegm(first_run_dt.utctimetuple())
        since_first_s = now_s - first_run_s
        run_every_s = timedelta_seconds(self.as_schedule_entry().schedule.run_every)
        # don't want this to be negative
        expected_runs = max(int(since_first_s / run_every_s), 0)
        last_scheduled_run_s = first_run_s + expected_runs * run_every_s

        return now_s, first_run_s, since_first_s, run_every_s, last_scheduled_run_s, expected_runs
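The arithmetic in _calculate_times can be made concrete with the schedule from the test_with_past_runs example above (an hourly schedule whose first run was 2014-01-10T20:00Z); the numbers below are worked by hand for illustration:

# first_run 2014-01-10T20:00Z -> 1389384000 seconds since the epoch
now_s = 1389389758.0            # roughly 21:35Z on the same day
first_run_s = 1389384000
run_every_s = 3600              # PT1H

since_first_s = now_s - first_run_s                       # 5758.0 seconds since the first run
expected_runs = max(int(since_first_s / run_every_s), 0)  # 1 full interval has elapsed
last_scheduled_run_s = first_run_s + expected_runs * run_every_s  # 1389387600, i.e. 21:00Z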
Example #32
    def test_datetime_with_tz(self):
        n = datetime.datetime.now(dateutils.local_tz())
        s = dateutils.format_iso8601_datetime(n)
        b = dateutils.parse_iso8601_datetime(s)
        for f in self.dt_fields:
            self.assertTrue(
                getattr(n, f) == getattr(b, f), 'Field mismatch: %s' % f)
Example #33
    def from_progress_report(cls, report):
        """
        Parses the output from the build_progress_report method into an instance
        of this class. The intention is to use this client-side to reconstruct
        the instance as it is retrieved from the server.

        The build_final_report call on instances returned from this call will
        not function as it requires the server-side conduit to be provided.
        Additionally, any exceptions and tracebacks will be a text representation
        instead of formal objects.

        :param report: progress report retrieved from the server's task
        :type  report: dict
        :return:       instance populated with the state in the report
        :rtype:        ISOProgressReport
        """
        # Restore the state transition times to datetime objects
        for key, value in report['state_times'].items():
            report['state_times'][key] = parse_iso8601_datetime(value)

        # python 2.4 does not support unicode keys so convert to string
        new_report = {}
        for key in report:
            new_report[str(key)] = report[key]

        r = cls(None, **new_report)
        return r
Example #34
    def from_progress_report(cls, report):
        """
        Parses the output from the build_progress_report method into an instance
        of this class. The intention is to use this client-side to reconstruct
        the instance as it is retrieved from the server.

        The build_final_report call on instances returned from this call will
        not function as it requires the server-side conduit to be provided.
        Additionally, any exceptions and tracebacks will be a text representation
        instead of formal objects.

        :param report: progress report retrieved from the server's task
        :type  report: dict
        :return:       instance populated with the state in the report
        :rtype:        ISOProgressReport
        """
        # Restore the state transition times to datetime objects
        for key, value in report['state_times'].items():
            report['state_times'][key] = parse_iso8601_datetime(value)

        # python 2.4 does not support unicode keys so convert to string
        new_report = {}
        for key in report:
            new_report[str(key)] = report[key]

        r = cls(None, **new_report)
        return r
Example #35
def assert_last_sync_time(time_in_iso):
    now = dateutils.now_utc_datetime_with_tzinfo()
    finished = dateutils.parse_iso8601_datetime(time_in_iso)

    # Compare them within a threshold since they won't be exact
    difference = now - finished
    return difference.seconds < 2
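A short usage sketch for the helper above, assuming the pulp.common.dateutils import path:

from pulp.common import dateutils  # assumed import path

just_now = dateutils.format_iso8601_datetime(dateutils.now_utc_datetime_with_tzinfo())
assert assert_last_sync_time(just_now)  # a freshly generated timestamp is inside the 2-second window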
Example #36
def assert_last_sync_time(time_in_iso):
    now = datetime.datetime.now(dateutils.local_tz())
    finished = dateutils.parse_iso8601_datetime(time_in_iso)

    # Compare them within a threshold since they won't be exact
    difference = now - finished
    return difference.seconds < 2
Example #37
    def last_publish(self, repo_id, distributor_id):
        """
        Returns the timestamp of the last publish call, regardless of its
        success or failure. If the repo has never been published, returns None.

        @param repo_id: identifies the repo
        @type  repo_id: str

        @param distributor_id: identifies the repo's distributor
        @type  distributor_id: str

        @return: timestamp of the last publish
        @rtype:  datetime or None

        @raise MissingResource: if there is no distributor identified by the
                given repo ID and distributor ID
        """

        # Validation
        coll = RepoDistributor.get_collection()
        repo_distributor = coll.find_one({'repo_id' : repo_id, 'id' : distributor_id})

        if repo_distributor is None:
            raise MissingResource(repo_id)

        # Convert to datetime instance
        date_str = repo_distributor['last_publish']

        if date_str is None:
            return date_str
        else:
            instance = dateutils.parse_iso8601_datetime(date_str)
            return instance
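A minimal usage sketch for last_publish; the manager instance name and the repo/distributor ids are assumptions for illustration, mirroring the publish-history tests above:

last = publish_manager.last_publish('test_date', 'test_dist')  # publish_manager: assumed manager instance
if last is None:
    print('repo has never been published')
else:
    # last is a timezone-aware datetime parsed from the stored ISO8601 string
    print('last published at %s' % dateutils.format_iso8601_datetime(last))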
Example #38
    def test_now(self):
        call = ScheduledCall('PT1H', 'pulp.tasks.dosomething')

        now = datetime.utcnow().replace(tzinfo=dateutils.utc_tz())
        next_run = dateutils.parse_iso8601_datetime(call.calculate_next_run())

        self.assertTrue(next_run - now < timedelta(seconds=1))
Example #39
    def test_no_first_run(self):
        call = ScheduledCall('PT1M', 'pulp.tasks.dosomething')

        first_run = dateutils.parse_iso8601_datetime(call.first_run)

        # generously make sure the calculated first_run is within 1 second of now
        now = datetime.utcnow().replace(tzinfo=dateutils.utc_tz())
        self.assertTrue(abs(now - first_run) < timedelta(seconds=1))
Example #40
    def test_sync_history_with_limit(self):
        """
        Tests retrieving only a subset of all history entries
        """

        # Setup
        self.repo_manager.create_repo('zombie')
        for i in range(1, 10):
            add_result('zombie', i)

        # Test with a valid limit
        entries = self.sync_manager.sync_history('zombie', limit=3)
        self.assertEqual(3, len(entries))
        # Verify descending order.
        for i in range(len(entries) - 1):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first >= second)
Example #41
    def test_sync_history_with_limit(self):
        """
        Tests retrieving only a subset of all history entries
        """

        # Setup
        self.repo_manager.create_repo('zombie')
        for i in range(1, 10):
            add_result('zombie', i)

        # Test with a valid limit
        entries = self.sync_manager.sync_history('zombie', limit=3)
        self.assertEqual(3, len(entries))
        # Verify descending order.
        for i in range(len(entries) - 1):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first >= second)
Example #42
    def test_publish_history_with_limit(self):
        """
        Tests using the limit to retrieve only a subset of the history.
        """

        # Setup
        self.repo_manager.create_repo("dragon")
        self.distributor_manager.add_distributor("dragon", "mock-distributor", {}, True, distributor_id="fire")
        for i in range(0, 10):
            add_result("dragon", "fire", i)

        # Test a valid limit
        entries = self.publish_manager.publish_history("dragon", "fire", limit=3)
        self.assertEqual(3, len(entries))
        for i in range(len(entries) - 1):
            first = dateutils.parse_iso8601_datetime(entries[i]["started"])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]["started"])
            self.assertTrue(first >= second)
Example #43
    def _get_and_validate_params(get_params):
        """
        Retrieve and validate parameters from the passed-in GET parameters.

        :param get_params: the http request's GET parameters.
        :type  get_params: dict

        :return: start_date, end_date, sort, limit
        :rtype:  tuple

        :raises exceptions.InvalidValue: if one or more params are invalid
        """
        sort = get_params.get(constants.REPO_HISTORY_FILTER_SORT)
        start_date = get_params.get(constants.REPO_HISTORY_FILTER_START_DATE)
        end_date = get_params.get(constants.REPO_HISTORY_FILTER_END_DATE)
        limit = get_params.get(constants.REPO_HISTORY_FILTER_LIMIT)

        invalid_values = []
        if limit is not None:
            try:
                limit = int(limit)
                if limit < 1:
                    invalid_values.append('limit')
            except ValueError:
                invalid_values.append('limit')

        if sort and sort not in constants.SORT_DIRECTION:
            invalid_values.append('sort')

        if start_date is not None:
            try:
                dateutils.parse_iso8601_datetime(start_date)
            except (ValueError, isodate.ISO8601Error):
                invalid_values.append('start_date')
        if end_date is not None:
            try:
                dateutils.parse_iso8601_datetime(end_date)
            except (ValueError, isodate.ISO8601Error):
                invalid_values.append('end_date')

        if invalid_values:
            raise exceptions.InvalidValue(invalid_values)

        return start_date, end_date, sort, limit
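A minimal sketch of how the validation above behaves; calling the helper as a plain function and the parameter values are simplifications for illustration:

# Hypothetical GET parameters, keyed by the same constants the code above reads.
get_params = {
    constants.REPO_HISTORY_FILTER_LIMIT: '5',
    constants.REPO_HISTORY_FILTER_SORT: constants.SORT_DESCENDING,  # assumed to be in SORT_DIRECTION
    constants.REPO_HISTORY_FILTER_START_DATE: '2013-06-01T12:00:00Z',
}
start_date, end_date, sort, limit = _get_and_validate_params(get_params)
# limit is now the int 5, end_date is None, and the start_date string parsed cleanly.

# An invalid value (e.g. a limit below 1 or a malformed date) raises
# exceptions.InvalidValue listing every offending field name.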
Example #44
    def _get_and_validate_params(get_params):
        """
        Retrieve and validate parameters from the passed-in GET parameters.

        :param get_params: the http request's GET parameters.
        :type  get_params: dict

        :return: start_date, end_date, sort, limit
        :rtype:  tuple

        :raises exceptions.InvalidValue: if one or more params are invalid
        """
        sort = get_params.get(constants.REPO_HISTORY_FILTER_SORT)
        start_date = get_params.get(constants.REPO_HISTORY_FILTER_START_DATE)
        end_date = get_params.get(constants.REPO_HISTORY_FILTER_END_DATE)
        limit = get_params.get(constants.REPO_HISTORY_FILTER_LIMIT)

        invalid_values = []
        if limit is not None:
            try:
                limit = int(limit)
                if limit < 1:
                    invalid_values.append('limit')
            except ValueError:
                invalid_values.append('limit')

        if sort and sort not in constants.SORT_DIRECTION:
            invalid_values.append('sort')

        if start_date is not None:
            try:
                dateutils.parse_iso8601_datetime(start_date)
            except (ValueError, isodate.ISO8601Error):
                invalid_values.append('start_date')
        if end_date is not None:
            try:
                dateutils.parse_iso8601_datetime(end_date)
            except (ValueError, isodate.ISO8601Error):
                invalid_values.append('end_date')

        if invalid_values:
            raise exceptions.InvalidValue(invalid_values)

        return start_date, end_date, sort, limit
Example #45
    def test_with_canceled_task(self, mock_request):
        retval = 'random_return_value'
        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags'], 'routing_key': WORKER_2}
        mock_request.called_directly = False
        TaskStatus(task_id, state=CALL_CANCELED_STATE).save()
        task = tasks.Task()

        # This should not update the task status to finished, since this task was canceled.
        task.on_success(retval, task_id, args, kwargs)

        updated_task_status = TaskStatus.objects(task_id=task_id).first()
        # Make sure the task is still canceled.
        self.assertEqual(updated_task_status['state'], CALL_CANCELED_STATE)
        self.assertEqual(updated_task_status['result'], retval)
        self.assertFalse(updated_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(updated_task_status['finish_time'])
Example #46
    def test_with_canceled_task(self, mock_request):
        retval = 'random_return_value'
        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags'], 'routing_key': WORKER_2}
        mock_request.called_directly = False
        TaskStatus(task_id, state=CALL_CANCELED_STATE).save()
        task = tasks.Task()

        # This should not update the task status to finished, since this task was canceled.
        task.on_success(retval, task_id, args, kwargs)

        updated_task_status = TaskStatus.objects(task_id=task_id).first()
        # Make sure the task is still canceled.
        self.assertEqual(updated_task_status['state'], CALL_CANCELED_STATE)
        self.assertEqual(updated_task_status['result'], retval)
        self.assertFalse(updated_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(updated_task_status['finish_time'])
Example #47
    def test_sync_history(self):
        """
        Tests retrieving sync history for a repo.
        """

        # Setup
        self.repo_manager.create_repo('creeper')
        for i in range(1, 10):
            add_result('creeper', i)

        # Test
        entries = self.sync_manager.sync_history('creeper')

        # Verify there are 9 entries and they are in descending order
        self.assertEqual(9, len(entries))
        for i in range(len(entries) - 1):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first >= second)
Example #48
    def test_sync_history(self):
        """
        Tests retrieving sync history for a repo.
        """

        # Setup
        self.repo_manager.create_repo('creeper')
        for i in range(1, 10):
            add_result('creeper', i)

        # Test
        entries = self.sync_manager.sync_history('creeper')

        # Verify there are 9 entries and they are in descending order
        self.assertEqual(9, len(entries))
        for i in range(len(entries) - 1):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first >= second)
Example #49
    def test_future(self, mock_time):
        mock_time.return_value = 1389307330.966561
        call = ScheduledCall('2014-01-19T17:15Z/PT1H', 'pulp.tasks.dosomething')

        next_run = call.calculate_next_run()

        # make sure the next run is equal to the specified first run.
        # don't want to compare a generated ISO8601 string directly, because there
        # could be subtle variations that are valid but break string equality.
        self.assertEqual(dateutils.parse_iso8601_interval(call.iso_schedule)[1],
                         dateutils.parse_iso8601_datetime(next_run))
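The comment above about "subtle variations" can be made concrete; a small sketch, import path assumed:

from pulp.common import dateutils  # assumed import path

a = dateutils.parse_iso8601_datetime('2014-01-19T17:15Z')
b = dateutils.parse_iso8601_datetime('2014-01-19T17:15:00+00:00')
# The strings differ but denote the same instant, so the parsed datetimes compare equal.
assert a == b
assert '2014-01-19T17:15Z' != '2014-01-19T17:15:00+00:00'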
Example #50
    def test_updates_task_status_correctly(self, mock_request):
        retval = 'random_return_value'
        task_id = str(uuid.uuid4())
        args = [1, 'b', 'iii']
        kwargs = {'1': 'for the money', 'tags': ['test_tags'], 'routing_key': WORKER_2}
        mock_request.called_directly = False

        task_status = TaskStatus(task_id).save()
        self.assertEqual(task_status['state'], 'waiting')
        self.assertEqual(task_status['finish_time'], None)

        task = tasks.Task()
        task.on_success(retval, task_id, args, kwargs)

        new_task_status = TaskStatus.objects(task_id=task_id).first()
        self.assertEqual(new_task_status['state'], 'finished')
        self.assertEqual(new_task_status['result'], retval)
        self.assertFalse(new_task_status['finish_time'] is None)
        # Make sure that parse_iso8601_datetime is able to parse the finish_time without errors
        dateutils.parse_iso8601_datetime(new_task_status['finish_time'])
Example #51
    def test_sync_history_descending_sort(self):

        # Setup
        self.repo_manager.create_repo('test_sort')
        date_string = '2013-06-01T12:00:0%sZ'
        # Add some consecutive sync entries
        for i in range(0, 10, 2):
            r = RepoSyncResult.expected_result('test_sort', 'foo', 'bar', date_string % str(i),
                                               date_string % str(i + 1), 1, 1, 1, '', '',
                                               RepoSyncResult.RESULT_SUCCESS)
            RepoSyncResult.get_collection().save(r, safe=True)

        # Test sort by descending start date
        entries = self.sync_manager.sync_history(repo_id='test_sort', sort=constants.SORT_DESCENDING)
        self.assertEqual(5, len(entries))
        # Verify that each entry has a later start date than the next one
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['started'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['started'])
            self.assertTrue(first > second)
Example #52
    def test_publish_history(self):
        """
        Tests getting the history of publishes on a repo.
        """

        # Setup
        self.repo_manager.create_repo("foo")
        self.distributor_manager.add_distributor("foo", "mock-distributor", {}, True, distributor_id="dist-1")
        for i in range(1, 9):
            add_result("foo", "dist-1", i)

        # Test
        entries = self.publish_manager.publish_history("foo", "dist-1")

        # Verify 8 entries were returned and that the sort direction is descending
        self.assertEqual(8, len(entries))
        for i in range(len(entries) - 1):
            first = dateutils.parse_iso8601_datetime(entries[i]["started"])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]["started"])
            self.assertTrue(first >= second)
Example #53
    def test_sync_history(self):
        """
        Tests retrieving sync history for a repo.
        """

        # Setup
        self.repo_manager.create_repo('creeper')
        for i in range(1, 6):
            add_result('creeper', i)

        # Test
        entries = self.sync_manager.sync_history('creeper')

        # Verify
        self.assertEqual(5, len(entries))

        #    Verify descending order
        for i in range(0, 4):
            first = dateutils.parse_iso8601_datetime(entries[i]['completed'])
            second = dateutils.parse_iso8601_datetime(entries[i + 1]['completed'])
            self.assertTrue(first > second)