Exemple #1
0
        def test_handle_cleanup_task_timeout(self, mock_now):
            """Tests calling handle_cleanup_task_timeout"""

            right_now = now()

            # One CLEANUP error plus one counter-suffixed CLEANUP_TIMEOUT
            # warning are expected once the timeout has been handled
            expected_error = {
                'name': 'CLEANUP',
                'title': NodeConditions.CLEANUP_ERR.title,
                'description': NodeConditions.CLEANUP_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }
            expected_warning = {
                'name': 'CLEANUP_TIMEOUT' + ' %d' % WARNING_NAME_COUNTER,
                'title': NodeConditions.CLEANUP_TIMEOUT.title,
                'description': NodeConditions.CLEANUP_TIMEOUT.description % self.job_ids,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }
            expected_results = {'errors': [expected_error],
                                'warnings': [expected_warning]}

            self.conditions.handle_cleanup_task_timeout(self.job_exes)
            status_dict = {}
            self.conditions.generate_status_json(status_dict)

            self.assertDictEqual(status_dict, expected_results)
Exemple #2
0
    def add_task_results(self, tasks):
        """Adds the given job execution tasks to the results

        :param tasks: The job execution tasks to add
        :type tasks: list
        """

        results = self._task_results['tasks']
        for task in tasks:
            entry = {
                'task_id': task.id,
                'type': task.task_type,
                'was_launched': task.has_been_launched
            }
            # Launch/start/end details are only recorded once the task has
            # actually reached each of those stages
            if task.has_been_launched:
                entry['launched'] = datetime_to_string(task.launched)
                entry['was_started'] = task.has_started
                if task.has_started:
                    entry['started'] = datetime_to_string(task.started)
                    entry['was_timed_out'] = task.has_timed_out
                    entry['ended'] = datetime_to_string(task.ended)
                    entry['status'] = task.final_status
                    if task.exit_code is not None:
                        entry['exit_code'] = task.exit_code
            results.append(entry)
Exemple #3
0
    def test_generate_status_json(self, mock_now):
        """Tests calling generate_status_json() successfully"""

        right_now = now()
        mock_now.return_value = right_now
        num_job_exes = JOB_EXES_WARNING_THRESHOLD + 1

        node = Node(self.node_agent, self.node, self.scheduler)
        node._conditions.handle_pull_task_failed()
        node._conditions.update_cleanup_count(num_job_exes)
        node._update_state()
        nodes_list = []
        node.generate_status_json(nodes_list)

        # The pull failure degrades the node; the cleanup backlog only warns
        state_dict = {'name': 'DEGRADED', 'title': Node.DEGRADED.title,
                      'description': Node.DEGRADED.description}
        error_dict = {'name': 'IMAGE_PULL', 'title': NodeConditions.IMAGE_PULL_ERR.title,
                      'description': NodeConditions.IMAGE_PULL_ERR.description,
                      'started': datetime_to_string(right_now),
                      'last_updated': datetime_to_string(right_now)}
        warning_dict = {'name': 'CLEANUP', 'title': NodeConditions.CLEANUP_WARNING.title,
                        'description': NodeConditions.CLEANUP_WARNING.description % num_job_exes,
                        'started': datetime_to_string(right_now),
                        'last_updated': datetime_to_string(right_now)}
        expected_results = [{'id': node.id, 'hostname': node.hostname,
                             'agent_id': self.node_agent, 'is_active': True,
                             'state': state_dict, 'errors': [error_dict],
                             'warnings': [warning_dict]}]
        self.assertListEqual(nodes_list, expected_results)
Exemple #4
0
        def test_handle_health_task_timeout(self, mock_now):
            """Tests calling handle_health_task_timeout"""

            right_now = now()
            # Activate a mix of errors so the timeout handler has prior
            # state to replace
            for error in [NodeConditions.BAD_DAEMON_ERR,
                          NodeConditions.BAD_LOGSTASH_ERR,
                          NodeConditions.HEALTH_FAIL_ERR,
                          NodeConditions.HEALTH_TIMEOUT_ERR,
                          NodeConditions.LOW_DOCKER_SPACE_ERR]:
                self.conditions._error_active(error)
            self.conditions.is_health_check_normal = True

            self.conditions.handle_health_task_timeout()

            timeout_error = {
                'name': 'HEALTH_TIMEOUT',
                'title': NodeConditions.HEALTH_TIMEOUT_ERR.title,
                'description': NodeConditions.HEALTH_TIMEOUT_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }
            expected_results = {'errors': [timeout_error], 'warnings': []}
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)
Exemple #5
0
        def test_update_cleanup_count(self, mock_now):
            """Tests calling update_cleanup_count"""

            right_now = now()
            self.conditions._warning_active(
                NodeConditions.SLOW_CLEANUP,
                NodeConditions.SLOW_CLEANUP.description % 1)
            # A count of zero should clear the active slow-cleanup warning
            self.conditions.update_cleanup_count(0)
            expected_results = {'errors': [], 'warnings': []}
            node_dict = {}
            self.conditions.generate_status_json(node_dict)
            self.assertDictEqual(node_dict, expected_results)

            # Crossing the threshold should re-activate the warning
            num_job_exes = JOB_EXES_WARNING_THRESHOLD + 1
            self.conditions.update_cleanup_count(num_job_exes)
            expected_results = {
                'errors': [],
                'warnings': [{
                    'name': 'SLOW_CLEANUP',
                    'title': NodeConditions.SLOW_CLEANUP.title,
                    # Bug fix: '%' binds tighter than '+', so the original
                    # 'description % JOB_EXES_WARNING_THRESHOLD + 1' formatted
                    # the wrong count and then added 1 to the resulting string
                    'description': NodeConditions.SLOW_CLEANUP.description % num_job_exes,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }]
            }
            node_dict = {}
            self.conditions.generate_status_json(node_dict)
            self.assertDictEqual(node_dict, expected_results)
Exemple #6
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        json_dict = {}
        # Plain optional fields are copied through whenever they are set
        for field in ('current_job_id', 'error_categories', 'error_ids',
                      'job_ids', 'job_type_ids', 'status', 'job_type_names',
                      'batch_ids', 'recipe_ids', 'is_superseded'):
            value = getattr(self, field)
            if value is not None:
                json_dict[field] = value
        # Datetime fields are serialized to ISO strings
        for field in ('started', 'ended'):
            value = getattr(self, field)
            if value is not None:
                json_dict[field] = datetime_to_string(value)

        return json_dict
Exemple #7
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        job_exe_end_list = []
        for model in self._job_exe_ends:
            entry = {
                'id': model.job_exe_id,
                'job_id': model.job_id,
                'job_type_id': model.job_type_id,
                'exe_num': model.exe_num,
                'task_results': model.task_results,
                'status': model.status,
                'queued': datetime_to_string(model.queued),
                'ended': datetime_to_string(model.ended)
            }
            # Optional fields are only serialized when they have a value
            if model.error_id:
                entry['error_id'] = model.error_id
            if model.node_id:
                entry['node_id'] = model.node_id
            if model.started:
                entry['started'] = datetime_to_string(model.started)
            job_exe_end_list.append(entry)

        return {'job_exe_end_models': job_exe_end_list}
Exemple #8
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        json_dict = {}
        # Plain optional fields are copied through whenever they are set
        for field in ('current_job_id', 'error_categories', 'error_ids',
                      'job_ids', 'job_type_ids', 'priority', 'status'):
            value = getattr(self, field)
            if value is not None:
                json_dict[field] = value
        # Datetime fields are serialized to ISO strings
        for field in ('started', 'ended'):
            value = getattr(self, field)
            if value is not None:
                json_dict[field] = datetime_to_string(value)

        return json_dict
Exemple #9
0
    def generate_status_json(self, node_dict):
        """Generates the portion of the status JSON that describes these node conditions

        :param node_dict: The dict for this node within the status JSON
        :type node_dict: dict
        """

        # Serialize each active error; name/title/description come from the
        # underlying error definition
        node_dict['errors'] = [{
            'name': active.error.name,
            'title': active.error.title,
            'description': active.error.description,
            'started': datetime_to_string(active.started),
            'last_updated': datetime_to_string(active.last_updated)
        } for active in self._active_errors.values()]
        # Serialize each active warning; note the description lives on the
        # active warning itself (it can carry formatted details)
        node_dict['warnings'] = [{
            'name': active.warning.name,
            'title': active.warning.title,
            'description': active.description,
            'started': datetime_to_string(active.started),
            'last_updated': datetime_to_string(active.last_updated)
        } for active in self._active_warnings.values()]
Exemple #10
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        # Serialize the status-change time plus the affected job IDs
        json_dict = {'status_change': datetime_to_string(self.status_change)}
        json_dict['job_ids'] = self._blocked_job_ids
        return json_dict
Exemple #11
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        # Serialize the message time plus the affected job IDs
        json_dict = {'when': datetime_to_string(self.when)}
        json_dict['job_ids'] = self._job_ids
        return json_dict
Exemple #12
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        # Each completed job is serialized as its ID and execution number
        jobs_list = [{'id': completed.job_id, 'exe_num': completed.exe_num}
                     for completed in self._completed_jobs]

        return {'ended': datetime_to_string(self.ended), 'jobs': jobs_list}
Exemple #13
0
    def generate_status_json(self, status_dict):
        """Generates the portion of the status JSON that describes the systems Scale depends on

        :param status_dict: The status JSON dict
        :type status_dict: dict
        """

        # Refresh first so the reported statuses are current
        self._refresh_statuses()
        status_dict.update(last_updated=datetime_to_string(self._last_updated),
                           dependencies=self._all_statuses)
        return status_dict
Exemple #14
0
    def _generate_status_json(self, when):
        """Generates the scheduler status JSON

        Each manager fills in its own portion of the status dict. The call
        order matters: the node manager must run first because the job
        execution manager appends to the per-node entries it creates under
        the 'nodes' key.

        :param when: The current time
        :type when: :class:`datetime.datetime`
        """

        status_dict = {'timestamp': datetime_to_string(when)}
        node_mgr.generate_status_json(status_dict)
        # Requires status_dict['nodes'], populated by node_mgr above
        job_exe_mgr.generate_status_json(status_dict['nodes'], when)
        job_type_mgr.generate_status_json(status_dict)
        # Persist the assembled status on the scheduler model(s)
        Scheduler.objects.all().update(status=status_dict)
Exemple #15
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        # Serialize the failed jobs grouped by their error ID
        error_list = [{'id': error_id,
                       'jobs': [{'id': failed.job_id, 'exe_num': failed.exe_num}
                                for failed in job_list]}
                      for error_id, job_list in self._failed_jobs.items()]

        return {'ended': datetime_to_string(self.ended), 'errors': error_list}
Exemple #16
0
    def test_status_successful(self):
        """Test getting scheduler status successfully"""

        when = now()
        status_thread = SchedulerStatusThread()
        status_thread._generate_status_json(when)

        # Both API versions should report the generated timestamp
        for url in ('/v5/status/', '/v6/status/'):
            response = self.client.generic('GET', url)
            self.assertEqual(response.status_code, status.HTTP_200_OK,
                             response.content)
            result = json.loads(response.content)
            self.assertEqual(result['timestamp'], datetime_to_string(when))
Exemple #17
0
        def test_handle_pull_task_failed(self, mock_now):
            """Tests calling handle_pull_task_failed"""

            right_now = now()
            self.conditions.is_pull_bad = False

            self.conditions.handle_pull_task_failed()

            # A failed pull should activate the IMAGE_PULL error and nothing else
            pull_error = {
                'name': 'IMAGE_PULL',
                'title': NodeConditions.IMAGE_PULL_ERR.title,
                'description': NodeConditions.IMAGE_PULL_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }
            expected_results = {'errors': [pull_error], 'warnings': []}
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertDictEqual(node_dict, expected_results)
            self.assertTrue(self.conditions.is_pull_bad)
Exemple #18
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        # Serialize the running jobs grouped per node; each job is a
        # (job_id, exe_num) tuple
        node_list = [{'id': node_id,
                      'jobs': [{'id': job[0], 'exe_num': job[1]}
                               for job in job_list]}
                     for node_id, job_list in self._running_jobs.items()]

        return {
            'started': datetime_to_string(self._started),
            'nodes': node_list
        }
Exemple #19
0
    def test_status_successful(self, mock_get_queue_size):
        """Test getting scheduler status successfully"""

        mock_get_queue_size.return_value = 0

        when = now()
        status_thread = SchedulerStatusThread()
        status_thread._generate_status_json(when)

        response = self.client.generic('GET', '/%s/status/' % self.api)
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)
        result = json.loads(response.content)
        self.assertEqual(result['timestamp'], datetime_to_string(when))
        # Vault secrets are not configured in the test environment
        expected_vault = {u'status': u'Secrets Not Configured', u'message': u'', u'sealed': False}
        self.assertDictEqual(result['vault'], expected_vault)
Exemple #20
0
    def _generate_status_json(self, when):
        """Generates the scheduler status JSON

        Each manager contributes its own section to the status dict. The call
        order matters: the node manager creates the per-node entries under the
        'nodes' key that the job execution and task managers append to later.

        :param when: The current time
        :type when: :class:`datetime.datetime`
        """

        status_dict = {'timestamp': datetime_to_string(when)}
        scheduler_mgr.generate_status_json(status_dict)
        system_task_mgr.generate_status_json(status_dict)
        node_mgr.generate_status_json(status_dict)
        resource_mgr.generate_status_json(status_dict)
        # These two require status_dict['nodes'], populated by node_mgr above
        job_exe_mgr.generate_status_json(status_dict['nodes'], when)
        task_mgr.generate_status_json(status_dict['nodes'])
        job_type_mgr.generate_status_json(status_dict)
        secrets_mgr.generate_status_json(status_dict)
        dependency_mgr.generate_status_json(status_dict)
        # Persist the assembled status on the scheduler model(s)
        Scheduler.objects.all().update(status=status_dict)
Exemple #21
0
    def to_json(self):
        """See :meth:`messaging.messages.message.CommandMessage.to_json`
        """

        json_dict = {'recipe_ids': self._recipe_ids,
                     'when': datetime_to_string(self.when)}
        # Boolean flags are copied through directly
        json_dict['supersede_all'] = self.supersede_all
        json_dict['unpublish_all'] = self.unpublish_all
        json_dict['supersede_recursive_all'] = self.supersede_recursive_all
        json_dict['unpublish_recursive_all'] = self.unpublish_recursive_all
        # ID collections are copied into fresh lists for serialization
        for field in ('supersede_jobs', 'supersede_subrecipes', 'unpublish_jobs',
                      'supersede_recursive', 'unpublish_recursive'):
            json_dict[field] = list(getattr(self, field))

        return json_dict
Exemple #22
0
    def generate_status_json(self, status_dict):
        """Generates the portion of the status JSON that describes system-level information

        :param status_dict: The status JSON dict
        :type status_dict: dict
        """

        # Snapshot the shared state under the lock before serializing
        with self._lock:
            db_update_done = self._is_db_update_completed
            db_update_when = self._when_db_update_completed
            services_list = [svc.generate_status_json() for svc in self._services]

        db_update_dict = {'is_completed': db_update_done}
        if db_update_when:
            db_update_dict['completed'] = datetime_to_string(db_update_when)

        status_dict['system'] = {
            'database_update': db_update_dict,
            'services': services_list
        }
Exemple #23
0
    def test_calculate_stats(self):
        """Tests calculating individual statistics for a metrics entry."""
        job_type = job_test_utils.create_job_type()
        # Job 1: COMPLETED, with pre/main/post task timings in its task results
        job1 = job_test_utils.create_job(job_type=job_type, status='COMPLETED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
        task_results_dict = {'version': '1.0',
                             'tasks': [{'task_id': '1', 'type': 'pre', 'was_launched': True,
                                        'started': datetime_to_string(datetime.datetime(2015, 1, 1, 0, 30, 4, tzinfo=utc)),
                                        'ended': datetime_to_string(datetime.datetime(2015, 1, 1, 1, 6, tzinfo=utc))},
                                       {'task_id': '2', 'type': 'main', 'was_launched': True,
                                        'started': datetime_to_string(datetime.datetime(2015, 1, 1, 1, 40, 8, tzinfo=utc)),
                                        'ended': datetime_to_string(datetime.datetime(2015, 1, 1, 2, 30, 10, tzinfo=utc))},
                                       {'task_id': '3', 'type': 'post', 'was_launched': True,
                                        'started': datetime_to_string(datetime.datetime(2015, 1, 1, 3, 30, 12, tzinfo=utc)),
                                        'ended': datetime_to_string(datetime.datetime(2015, 1, 1, 4, 40, 14, tzinfo=utc))}]}
        job_test_utils.create_job_exe(
            job=job1, status=job1.status,
            queued=datetime.datetime(2015, 1, 1, tzinfo=utc),
            started=datetime.datetime(2015, 1, 1, 0, 10, 2, tzinfo=utc),
            # pre_started=datetime.datetime(2015, 1, 1, 0, 30, 4, tzinfo=utc),
            # pre_completed=datetime.datetime(2015, 1, 1, 1, 6, tzinfo=utc),
            # job_started=datetime.datetime(2015, 1, 1, 1, 40, 8, tzinfo=utc),
            # job_completed=datetime.datetime(2015, 1, 1, 2, 30, 10, tzinfo=utc),
            # post_started=datetime.datetime(2015, 1, 1, 3, 30, 12, tzinfo=utc),
            # post_completed=datetime.datetime(2015, 1, 1, 4, 40, 14, tzinfo=utc),
            ended=datetime.datetime(2015, 1, 1, 6, 0, 16, tzinfo=utc),
            task_results=TaskResults(task_results_dict)
        )
        # Job 2: COMPLETED, with longer timings than job 1 to exercise
        # the min/max/avg aggregation
        job2 = job_test_utils.create_job(job_type=job_type, status='COMPLETED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
        task_results_dict = {'version': '1.0',
                             'tasks': [{'task_id': '1', 'type': 'pre', 'was_launched': True,
                                        'started': datetime_to_string(datetime.datetime(2015, 1, 1, 4, 30, 4, tzinfo=utc)),
                                        'ended': datetime_to_string(datetime.datetime(2015, 1, 1, 6, 0, 8, tzinfo=utc))},
                                       {'task_id': '2', 'type': 'main', 'was_launched': True,
                                        'started': datetime_to_string(datetime.datetime(2015, 1, 1, 8, 40, 14, tzinfo=utc)),
                                        'ended': datetime_to_string(datetime.datetime(2015, 1, 1, 10, 30, 22, tzinfo=utc))},
                                       {'task_id': '3', 'type': 'post', 'was_launched': True,
                                        'started': datetime_to_string(datetime.datetime(2015, 1, 1, 12, 30, 32, tzinfo=utc)),
                                        'ended': datetime_to_string(datetime.datetime(2015, 1, 1, 14, 40, 44, tzinfo=utc))}]}
        job_test_utils.create_job_exe(
            job=job2, status=job2.status,
            queued=datetime.datetime(2015, 1, 1, tzinfo=utc),
            started=datetime.datetime(2015, 1, 1, 2, 10, 2, tzinfo=utc),
            # pre_started=datetime.datetime(2015, 1, 1, 4, 30, 4, tzinfo=utc),
            # pre_completed=datetime.datetime(2015, 1, 1, 6, 0, 8, tzinfo=utc),
            # job_started=datetime.datetime(2015, 1, 1, 8, 40, 14, tzinfo=utc),
            # job_completed=datetime.datetime(2015, 1, 1, 10, 30, 22, tzinfo=utc),
            # post_started=datetime.datetime(2015, 1, 1, 12, 30, 32, tzinfo=utc),
            # post_completed=datetime.datetime(2015, 1, 1, 14, 40, 44, tzinfo=utc),
            ended=datetime.datetime(2015, 1, 1, 16, 0, 58, tzinfo=utc),
            task_results=TaskResults(task_results_dict)
        )

        # One FAILED job per error category, so each error counter ends at 1
        sys_error = error_test_utils.create_error(category='SYSTEM')
        job3a = job_test_utils.create_job(job_type=job_type, status='FAILED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                          error=sys_error)
        job_test_utils.create_job_exe(job=job3a, status=job3a.status, ended=job3a.ended, error=sys_error)

        data_error = error_test_utils.create_error(category='DATA')
        job3b = job_test_utils.create_job(job_type=job_type, status='FAILED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                          error=data_error)
        job_test_utils.create_job_exe(job=job3b, status=job3b.status, ended=job3b.ended, error=data_error)

        algo_error = error_test_utils.create_error(category='ALGORITHM')
        job3c = job_test_utils.create_job(job_type=job_type, status='FAILED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc),
                                          error=algo_error)
        job_test_utils.create_job_exe(job=job3c, status=job3c.status, ended=job3c.ended, error=algo_error)

        # One CANCELED job completes the set of statuses
        job4 = job_test_utils.create_job(job_type=job_type, status='CANCELED', ended=datetime.datetime(2015, 1, 1, tzinfo=utc))
        job_test_utils.create_job_exe(job=job4, status=job4.status, ended=job4.ended)

        # Calculate the metrics for the day all of the jobs ended on
        MetricsJobType.objects.calculate(datetime.date(2015, 1, 1))

        entries = MetricsJobType.objects.filter(occurred=datetime.date(2015, 1, 1))
        self.assertEqual(len(entries), 1)

        # Status counts: 2 completed + 3 failed + 1 canceled = 6 total
        entry = entries.first()
        self.assertEqual(entry.occurred, datetime.date(2015, 1, 1))
        self.assertEqual(entry.completed_count, 2)
        self.assertEqual(entry.failed_count, 3)
        self.assertEqual(entry.canceled_count, 1)
        self.assertEqual(entry.total_count, 6)

        self.assertEqual(entry.error_system_count, 1)
        self.assertEqual(entry.error_data_count, 1)
        self.assertEqual(entry.error_algorithm_count, 1)

        # Timing sums/mins/maxes/averages (in seconds) derived from the two
        # completed executions' queued/started/task timestamps above
        self.assertEqual(entry.queue_time_sum, 8404)
        self.assertEqual(entry.queue_time_min, 602)
        self.assertEqual(entry.queue_time_max, 7802)
        self.assertEqual(entry.queue_time_avg, 4202)

        self.assertEqual(entry.pre_time_sum, 7560)
        self.assertEqual(entry.pre_time_min, 2156)
        self.assertEqual(entry.pre_time_max, 5404)
        self.assertEqual(entry.pre_time_avg, 3780)

        self.assertEqual(entry.job_time_sum, 9610)
        self.assertEqual(entry.job_time_min, 3002)
        self.assertEqual(entry.job_time_max, 6608)
        self.assertEqual(entry.job_time_avg, 4805)

        self.assertEqual(entry.post_time_sum, 12014)
        self.assertEqual(entry.post_time_min, 4202)
        self.assertEqual(entry.post_time_max, 7812)
        self.assertEqual(entry.post_time_avg, 6007)

        self.assertEqual(entry.run_time_sum, 70870)
        self.assertEqual(entry.run_time_min, 21014)
        self.assertEqual(entry.run_time_max, 49856)
        self.assertEqual(entry.run_time_avg, 35435)

        self.assertEqual(entry.stage_time_sum, 41686)
        self.assertEqual(entry.stage_time_min, 11654)
        self.assertEqual(entry.stage_time_max, 30032)
        self.assertEqual(entry.stage_time_avg, 20843)
Exemple #24
0
    def test_generate_status_json(self, mock_now):
        """Tests calling generate_status_json() successfully"""

        right_now = now()
        mock_now.return_value = right_now

        # Activate every defined error condition
        self.conditions._error_active(NodeConditions.BAD_DAEMON_ERR)
        self.conditions._error_active(NodeConditions.BAD_LOGSTASH_ERR)
        self.conditions._error_active(NodeConditions.CLEANUP_ERR)
        self.conditions._error_active(NodeConditions.HEALTH_FAIL_ERR)
        self.conditions._error_active(NodeConditions.HEALTH_TIMEOUT_ERR)
        self.conditions._error_active(NodeConditions.IMAGE_PULL_ERR)
        self.conditions._error_active(NodeConditions.LOW_DOCKER_SPACE_ERR)

        # Activate every defined warning condition
        self.conditions._warning_active(
            NodeConditions.CLEANUP_FAILURE,
            NodeConditions.CLEANUP_FAILURE.description % [1, 2, 3])
        self.conditions._warning_active(
            NodeConditions.CLEANUP_TIMEOUT,
            NodeConditions.CLEANUP_TIMEOUT.description % [1, 2, 3])
        self.conditions._warning_active(
            NodeConditions.SLOW_CLEANUP,
            NodeConditions.SLOW_CLEANUP.description % 1)
        self.maxDiff = None

        node_dict = {}
        self.conditions.generate_status_json(node_dict)

        expected_results = {
            'errors': [{
                'name': 'BAD_DAEMON',
                'title': NodeConditions.BAD_DAEMON_ERR.title,
                'description': NodeConditions.BAD_DAEMON_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'BAD_LOGSTASH',
                'title': NodeConditions.BAD_LOGSTASH_ERR.title,
                'description': NodeConditions.BAD_LOGSTASH_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'CLEANUP',
                'title': NodeConditions.CLEANUP_ERR.title,
                'description': NodeConditions.CLEANUP_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'HEALTH_FAIL',
                'title': NodeConditions.HEALTH_FAIL_ERR.title,
                'description': NodeConditions.HEALTH_FAIL_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'HEALTH_TIMEOUT',
                'title': NodeConditions.HEALTH_TIMEOUT_ERR.title,
                'description': NodeConditions.HEALTH_TIMEOUT_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'IMAGE_PULL',
                'title': NodeConditions.IMAGE_PULL_ERR.title,
                'description': NodeConditions.IMAGE_PULL_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'LOW_DOCKER_SPACE',
                'title': NodeConditions.LOW_DOCKER_SPACE_ERR.title,
                'description': NodeConditions.LOW_DOCKER_SPACE_ERR.description,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }],
            'warnings': [{
                'name': 'CLEANUP_FAILURE',
                'title': NodeConditions.CLEANUP_FAILURE.title,
                'description': NodeConditions.CLEANUP_FAILURE.description % [1, 2, 3],
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'CLEANUP_TIMEOUT',
                'title': NodeConditions.CLEANUP_TIMEOUT.title,
                'description': NodeConditions.CLEANUP_TIMEOUT.description % [1, 2, 3],
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }, {
                'name': 'SLOW_CLEANUP',
                'title': NodeConditions.SLOW_CLEANUP.title,
                'description': NodeConditions.SLOW_CLEANUP.description % 1,
                'started': datetime_to_string(right_now),
                'last_updated': datetime_to_string(right_now)
            }]
        }

        # Bug fix: the original called both assertCountEqual (Python 3 only)
        # and assertItemsEqual (Python 2 only) for the same comparison; one of
        # the two raises AttributeError on any interpreter. Keep
        # assertItemsEqual, which matches the rest of this test suite.
        self.assertItemsEqual(node_dict['errors'], expected_results['errors'])
        self.assertItemsEqual(node_dict['warnings'],
                              expected_results['warnings'])
Exemple #25
0
    def test_execute(self):
        """Tests calling UpdateBatchMetrics.execute() successfully

        Builds a recipe type with eight jobs (a-h), creates three recipes in
        one batch with jobs in a mix of statuses and durations, then verifies
        that executing the UpdateBatchMetrics message rolls the counts and
        durations up into the Batch and per-job-name BatchMetrics models.
        """

        # Recipe definition: jobs a, b, f and h are independent; c and d
        # depend on b, e depends on d, and g depends on f
        job_type = job_test_utils.create_job_type()
        definition = {
            'version':
            '1.0',
            'input_data': [],
            'jobs': [{
                'name': 'a',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
            }, {
                'name': 'b',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
            }, {
                'name': 'c',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
                'dependencies': [{
                    'name': 'b',
                }],
            }, {
                'name': 'd',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
                'dependencies': [{
                    'name': 'b',
                }],
            }, {
                'name': 'e',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
                'dependencies': [{
                    'name': 'd',
                }],
            }, {
                'name': 'f',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
            }, {
                'name': 'g',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
                'dependencies': [{
                    'name': 'f',
                }],
            }, {
                'name': 'h',
                'job_type': {
                    'name': job_type.name,
                    'version': job_type.version,
                },
            }]
        }
        recipe_type = recipe_test_utils.create_recipe_type(
            definition=definition)
        batch = batch_test_utils.create_batch(recipe_type=recipe_type)

        # Reference timestamps giving known durations of 1, 2, 3 and 7 minutes
        started = now()
        ended_1 = started + datetime.timedelta(minutes=1)
        ended_2 = started + datetime.timedelta(minutes=2)
        ended_3 = started + datetime.timedelta(minutes=3)
        ended_4 = started + datetime.timedelta(minutes=7)
        # Recipe 1: 12 jobs covering every status. job_1 ('a') supplies a
        # 1-minute job duration; job_2 ('b') supplies a 2-minute seed (main
        # task) duration via its job execution's task results
        recipe_1 = recipe_test_utils.create_recipe(batch=batch,
                                                   recipe_type=recipe_type)
        job_1 = job_test_utils.create_job(status='COMPLETED',
                                          started=started,
                                          ended=ended_1)
        job_2 = job_test_utils.create_job(status='COMPLETED')
        task_results = {
            'version':
            '1.0',
            'tasks': [{
                'task_id': '1234',
                'type': 'main',
                'started': datetime_to_string(started),
                'ended': datetime_to_string(ended_2)
            }]
        }
        task_results = TaskResults(task_results=task_results,
                                   do_validate=False)
        job_test_utils.create_job_exe(job=job_2,
                                      status='COMPLETED',
                                      task_results=task_results)
        job_3 = job_test_utils.create_job(status='QUEUED')
        job_4 = job_test_utils.create_job(status='QUEUED')
        job_5 = job_test_utils.create_job(status='RUNNING')
        job_6 = job_test_utils.create_job(status='RUNNING')
        job_7 = job_test_utils.create_job(status='RUNNING')
        job_8 = job_test_utils.create_job(status='PENDING')
        job_9 = job_test_utils.create_job(status='PENDING')
        job_10 = job_test_utils.create_job(status='CANCELED')
        job_11 = job_test_utils.create_job(status='BLOCKED')
        job_12 = job_test_utils.create_job(status='FAILED')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_1,
                                            job_name='a')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_2,
                                            job_name='b')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_3,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_4,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_5,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_6,
                                            job_name='d')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_7,
                                            job_name='d')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_8,
                                            job_name='e')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_9,
                                            job_name='e')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_10,
                                            job_name='f')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_11,
                                            job_name='g')
        recipe_test_utils.create_recipe_job(recipe=recipe_1,
                                            job=job_12,
                                            job_name='h')

        # Recipe 2 (marked completed): 9 jobs, statuses only, no durations
        recipe_2 = recipe_test_utils.create_recipe(batch=batch,
                                                   recipe_type=recipe_type)
        recipe_2.is_completed = True
        recipe_2.save()
        job_13 = job_test_utils.create_job(status='FAILED')
        job_14 = job_test_utils.create_job(status='COMPLETED')
        job_15 = job_test_utils.create_job(status='RUNNING')
        job_16 = job_test_utils.create_job(status='RUNNING')
        job_17 = job_test_utils.create_job(status='QUEUED')
        job_18 = job_test_utils.create_job(status='QUEUED')
        job_19 = job_test_utils.create_job(status='QUEUED')
        job_20 = job_test_utils.create_job(status='QUEUED')
        job_21 = job_test_utils.create_job(status='PENDING')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_13,
                                            job_name='a')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_14,
                                            job_name='b')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_15,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_16,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_17,
                                            job_name='d')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_18,
                                            job_name='d')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_19,
                                            job_name='d')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_20,
                                            job_name='d')
        recipe_test_utils.create_recipe_job(recipe=recipe_2,
                                            job=job_21,
                                            job_name='e')

        # Recipe 3 (marked completed): 6 COMPLETED jobs. job_23 ('b') records
        # a 3-minute seed duration; jobs 24-26 ('c') give 2/3/7-minute job
        # durations
        recipe_3 = recipe_test_utils.create_recipe(batch=batch,
                                                   recipe_type=recipe_type)
        recipe_3.is_completed = True
        recipe_3.save()
        job_22 = job_test_utils.create_job(status='COMPLETED')
        job_23 = job_test_utils.create_job(status='COMPLETED')
        task_results = {
            'version':
            '1.0',
            'tasks': [{
                'task_id': '1234',
                'type': 'main',
                'started': datetime_to_string(started),
                'ended': datetime_to_string(ended_3)
            }]
        }
        task_results = TaskResults(task_results=task_results,
                                   do_validate=False)
        job_test_utils.create_job_exe(job=job_23,
                                      status='COMPLETED',
                                      task_results=task_results)
        job_24 = job_test_utils.create_job(status='COMPLETED',
                                           started=started,
                                           ended=ended_2)
        job_25 = job_test_utils.create_job(status='COMPLETED',
                                           started=started,
                                           ended=ended_3)
        job_26 = job_test_utils.create_job(status='COMPLETED',
                                           started=started,
                                           ended=ended_4)
        job_27 = job_test_utils.create_job(status='COMPLETED')
        recipe_test_utils.create_recipe_job(recipe=recipe_3,
                                            job=job_22,
                                            job_name='a')
        recipe_test_utils.create_recipe_job(recipe=recipe_3,
                                            job=job_23,
                                            job_name='b')
        recipe_test_utils.create_recipe_job(recipe=recipe_3,
                                            job=job_24,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_3,
                                            job=job_25,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_3,
                                            job=job_26,
                                            job_name='c')
        recipe_test_utils.create_recipe_job(recipe=recipe_3,
                                            job=job_27,
                                            job_name='c')

        # Generate recipe metrics
        Recipe.objects.update_recipe_metrics(
            [recipe_1.id, recipe_2.id, recipe_3.id])

        # Add batch to message
        message = UpdateBatchMetrics()
        if message.can_fit_more():
            message.add_batch(batch.id)

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        # Batch-level rollup: 27 jobs total (12 + 9 + 6) and 2 of the 3
        # recipes completed
        batch = Batch.objects.get(id=batch.id)
        self.assertEqual(batch.jobs_total, 27)
        self.assertEqual(batch.jobs_pending, 3)
        self.assertEqual(batch.jobs_blocked, 1)
        self.assertEqual(batch.jobs_queued, 6)
        self.assertEqual(batch.jobs_running, 5)
        self.assertEqual(batch.jobs_failed, 2)
        self.assertEqual(batch.jobs_completed, 9)
        self.assertEqual(batch.jobs_canceled, 1)
        self.assertEqual(batch.recipes_total, 3)
        self.assertEqual(batch.recipes_completed, 2)

        # Per-job-name metrics, one row per name, ordered alphabetically
        batch_metrics = BatchMetrics.objects.filter(
            batch_id=batch.id).order_by('job_name')
        self.assertEqual(len(batch_metrics), 8)

        # Job A
        self.assertEqual(batch_metrics[0].job_name, 'a')
        self.assertEqual(batch_metrics[0].jobs_total, 3)
        self.assertEqual(batch_metrics[0].jobs_pending, 0)
        self.assertEqual(batch_metrics[0].jobs_blocked, 0)
        self.assertEqual(batch_metrics[0].jobs_queued, 0)
        self.assertEqual(batch_metrics[0].jobs_running, 0)
        self.assertEqual(batch_metrics[0].jobs_failed, 1)
        self.assertEqual(batch_metrics[0].jobs_completed, 2)
        self.assertEqual(batch_metrics[0].jobs_canceled, 0)
        self.assertEqual(batch_metrics[0].min_job_duration,
                         datetime.timedelta(minutes=1))
        self.assertEqual(batch_metrics[0].avg_job_duration,
                         datetime.timedelta(minutes=1))
        self.assertEqual(batch_metrics[0].max_job_duration,
                         datetime.timedelta(minutes=1))
        self.assertIsNone(batch_metrics[0].min_seed_duration)
        self.assertIsNone(batch_metrics[0].avg_seed_duration)
        self.assertIsNone(batch_metrics[0].max_seed_duration)

        # Job B: seed durations come from job_2 (2 min) and job_23 (3 min)
        self.assertEqual(batch_metrics[1].job_name, 'b')
        self.assertEqual(batch_metrics[1].jobs_total, 3)
        self.assertEqual(batch_metrics[1].jobs_pending, 0)
        self.assertEqual(batch_metrics[1].jobs_blocked, 0)
        self.assertEqual(batch_metrics[1].jobs_queued, 0)
        self.assertEqual(batch_metrics[1].jobs_running, 0)
        self.assertEqual(batch_metrics[1].jobs_failed, 0)
        self.assertEqual(batch_metrics[1].jobs_completed, 3)
        self.assertEqual(batch_metrics[1].jobs_canceled, 0)
        self.assertIsNone(batch_metrics[1].min_job_duration)
        self.assertIsNone(batch_metrics[1].avg_job_duration)
        self.assertIsNone(batch_metrics[1].max_job_duration)
        self.assertEqual(batch_metrics[1].min_seed_duration,
                         datetime.timedelta(minutes=2))
        self.assertEqual(batch_metrics[1].avg_seed_duration,
                         datetime.timedelta(minutes=2, seconds=30))
        self.assertEqual(batch_metrics[1].max_seed_duration,
                         datetime.timedelta(minutes=3))

        # Job C: job durations come from jobs 24-26 (2, 3 and 7 minutes)
        self.assertEqual(batch_metrics[2].job_name, 'c')
        self.assertEqual(batch_metrics[2].jobs_total, 9)
        self.assertEqual(batch_metrics[2].jobs_pending, 0)
        self.assertEqual(batch_metrics[2].jobs_blocked, 0)
        self.assertEqual(batch_metrics[2].jobs_queued, 2)
        self.assertEqual(batch_metrics[2].jobs_running, 3)
        self.assertEqual(batch_metrics[2].jobs_failed, 0)
        self.assertEqual(batch_metrics[2].jobs_completed, 4)
        self.assertEqual(batch_metrics[2].jobs_canceled, 0)
        self.assertEqual(batch_metrics[2].min_job_duration,
                         datetime.timedelta(minutes=2))
        self.assertEqual(batch_metrics[2].avg_job_duration,
                         datetime.timedelta(minutes=4))
        self.assertEqual(batch_metrics[2].max_job_duration,
                         datetime.timedelta(minutes=7))
        self.assertIsNone(batch_metrics[2].min_seed_duration)
        self.assertIsNone(batch_metrics[2].avg_seed_duration)
        self.assertIsNone(batch_metrics[2].max_seed_duration)

        # Job D
        self.assertEqual(batch_metrics[3].job_name, 'd')
        self.assertEqual(batch_metrics[3].jobs_total, 6)
        self.assertEqual(batch_metrics[3].jobs_pending, 0)
        self.assertEqual(batch_metrics[3].jobs_blocked, 0)
        self.assertEqual(batch_metrics[3].jobs_queued, 4)
        self.assertEqual(batch_metrics[3].jobs_running, 2)
        self.assertEqual(batch_metrics[3].jobs_failed, 0)
        self.assertEqual(batch_metrics[3].jobs_completed, 0)
        self.assertEqual(batch_metrics[3].jobs_canceled, 0)
        self.assertIsNone(batch_metrics[3].min_job_duration)
        self.assertIsNone(batch_metrics[3].avg_job_duration)
        self.assertIsNone(batch_metrics[3].max_job_duration)
        self.assertIsNone(batch_metrics[3].min_seed_duration)
        self.assertIsNone(batch_metrics[3].avg_seed_duration)
        self.assertIsNone(batch_metrics[3].max_seed_duration)

        # Job E
        self.assertEqual(batch_metrics[4].job_name, 'e')
        self.assertEqual(batch_metrics[4].jobs_total, 3)
        self.assertEqual(batch_metrics[4].jobs_pending, 3)
        self.assertEqual(batch_metrics[4].jobs_blocked, 0)
        self.assertEqual(batch_metrics[4].jobs_queued, 0)
        self.assertEqual(batch_metrics[4].jobs_running, 0)
        self.assertEqual(batch_metrics[4].jobs_failed, 0)
        self.assertEqual(batch_metrics[4].jobs_completed, 0)
        self.assertEqual(batch_metrics[4].jobs_canceled, 0)
        self.assertIsNone(batch_metrics[4].min_job_duration)
        self.assertIsNone(batch_metrics[4].avg_job_duration)
        self.assertIsNone(batch_metrics[4].max_job_duration)
        self.assertIsNone(batch_metrics[4].min_seed_duration)
        self.assertIsNone(batch_metrics[4].avg_seed_duration)
        self.assertIsNone(batch_metrics[4].max_seed_duration)

        # Job F
        self.assertEqual(batch_metrics[5].job_name, 'f')
        self.assertEqual(batch_metrics[5].jobs_total, 1)
        self.assertEqual(batch_metrics[5].jobs_pending, 0)
        self.assertEqual(batch_metrics[5].jobs_blocked, 0)
        self.assertEqual(batch_metrics[5].jobs_queued, 0)
        self.assertEqual(batch_metrics[5].jobs_running, 0)
        self.assertEqual(batch_metrics[5].jobs_failed, 0)
        self.assertEqual(batch_metrics[5].jobs_completed, 0)
        self.assertEqual(batch_metrics[5].jobs_canceled, 1)
        self.assertIsNone(batch_metrics[5].min_job_duration)
        self.assertIsNone(batch_metrics[5].avg_job_duration)
        self.assertIsNone(batch_metrics[5].max_job_duration)
        self.assertIsNone(batch_metrics[5].min_seed_duration)
        self.assertIsNone(batch_metrics[5].avg_seed_duration)
        self.assertIsNone(batch_metrics[5].max_seed_duration)

        # Job G
        self.assertEqual(batch_metrics[6].job_name, 'g')
        self.assertEqual(batch_metrics[6].jobs_total, 1)
        self.assertEqual(batch_metrics[6].jobs_pending, 0)
        self.assertEqual(batch_metrics[6].jobs_blocked, 1)
        self.assertEqual(batch_metrics[6].jobs_queued, 0)
        self.assertEqual(batch_metrics[6].jobs_running, 0)
        self.assertEqual(batch_metrics[6].jobs_failed, 0)
        self.assertEqual(batch_metrics[6].jobs_completed, 0)
        self.assertEqual(batch_metrics[6].jobs_canceled, 0)
        self.assertIsNone(batch_metrics[6].min_job_duration)
        self.assertIsNone(batch_metrics[6].avg_job_duration)
        self.assertIsNone(batch_metrics[6].max_job_duration)
        self.assertIsNone(batch_metrics[6].min_seed_duration)
        self.assertIsNone(batch_metrics[6].avg_seed_duration)
        self.assertIsNone(batch_metrics[6].max_seed_duration)

        # Job H
        self.assertEqual(batch_metrics[7].job_name, 'h')
        self.assertEqual(batch_metrics[7].jobs_total, 1)
        self.assertEqual(batch_metrics[7].jobs_pending, 0)
        self.assertEqual(batch_metrics[7].jobs_blocked, 0)
        self.assertEqual(batch_metrics[7].jobs_queued, 0)
        self.assertEqual(batch_metrics[7].jobs_running, 0)
        self.assertEqual(batch_metrics[7].jobs_failed, 1)
        self.assertEqual(batch_metrics[7].jobs_completed, 0)
        self.assertEqual(batch_metrics[7].jobs_canceled, 0)
        self.assertIsNone(batch_metrics[7].min_job_duration)
        self.assertIsNone(batch_metrics[7].avg_job_duration)
        self.assertIsNone(batch_metrics[7].max_job_duration)
        self.assertIsNone(batch_metrics[7].min_seed_duration)
        self.assertIsNone(batch_metrics[7].avg_seed_duration)
        self.assertIsNone(batch_metrics[7].max_seed_duration)

        # Test executing message again after a round trip through JSON --
        # re-execution should still succeed
        message_json_dict = message.to_json()
        message = UpdateBatchMetrics.from_json(message_json_dict)
        result = message.execute()
        self.assertTrue(result)
Exemple #26
0
        def test_handle_health_task_failed(self, mock_now):
            """Tests calling handle_health_task_failed

            Activates several error conditions up front, then verifies that
            each health-task failure exit code leaves exactly the matching
            error active and clears the health-check-normal flag.

            :param mock_now: presumably the patch object injected by a @patch
                decorator on now() -- not used directly here; TODO confirm
            """

            right_now = now()

            # Start with several errors active and a passing health check so
            # each failure below must narrow them down to its single error
            self.conditions._error_active(NodeConditions.BAD_DAEMON_ERR)
            self.conditions._error_active(NodeConditions.BAD_LOGSTASH_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_FAIL_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_TIMEOUT_ERR)
            self.conditions._error_active(NodeConditions.LOW_DOCKER_SPACE_ERR)
            self.conditions.is_health_check_normal = True

            # Docker daemon failure -> only BAD_DAEMON should remain
            expected_results = {
                'errors': [{
                    'name': 'BAD_DAEMON',
                    'title': NodeConditions.BAD_DAEMON_ERR.title,
                    'description': NodeConditions.BAD_DAEMON_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            bad_daemon = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=HealthTask.BAD_DAEMON_CODE)
            self.conditions.handle_health_task_failed(bad_daemon)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            # assertCountEqual is the Python 3 name for Python 2's
            # assertItemsEqual, which was removed from unittest
            self.assertCountEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

            # Logstash failure -> only BAD_LOGSTASH should remain
            expected_results = {
                'errors': [{
                    'name': 'BAD_LOGSTASH',
                    'title': NodeConditions.BAD_LOGSTASH_ERR.title,
                    'description': NodeConditions.BAD_LOGSTASH_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            bad_log = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=HealthTask.BAD_LOGSTASH_CODE)
            self.conditions.handle_health_task_failed(bad_log)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertCountEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

            # Low Docker disk space -> only LOW_DOCKER_SPACE should remain
            expected_results = {
                'errors': [{
                    'name': 'LOW_DOCKER_SPACE',
                    'title': NodeConditions.LOW_DOCKER_SPACE_ERR.title,
                    'description':
                    NodeConditions.LOW_DOCKER_SPACE_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            low_docker = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=HealthTask.LOW_DOCKER_SPACE_CODE)
            self.conditions.handle_health_task_failed(low_docker)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertCountEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

            # Unrecognized exit code -> generic HEALTH_FAIL error
            expected_results = {
                'errors': [{
                    'name': 'HEALTH_FAIL',
                    'title': NodeConditions.HEALTH_FAIL_ERR.title,
                    'description': NodeConditions.HEALTH_FAIL_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            unknown = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=0)
            self.conditions.handle_health_task_failed(unknown)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertCountEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)
Exemple #27
0
    def test_successful(self, mock_msg_mgr):
        """Tests successfully calling the v6 batch comparison view"""

        job_type_1 = job_test_utils.create_seed_job_type()
        job_type_2 = job_test_utils.create_seed_job_type()
        job_type_3 = job_test_utils.create_seed_job_type()

        rt_definition_1 = {
            'version': '6',
            'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                      'json': []},
            'nodes': {
                'job_a': {
                    'dependencies': [],
                    'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_1.name,
                        'job_type_version': job_type_1.version,
                        'job_type_revision': 1,
                    }
                },
                'job_b': {
                    'dependencies': [{'name': 'job_a'}],
                    'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_a',
                    'output': 'OUTPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_2.name,
                        'job_type_version': job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }

        rt_definition_2 = {
            'version': '6',
            'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                      'json': []},
            'nodes': {
                'job_c': {
                    'dependencies': [],
                    'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_3.name,
                        'job_type_version': job_type_3.version,
                        'job_type_revision': 1,
                    }
                },
                'job_b': {
                    'dependencies': [{'name': 'job_c'}],
                    'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_c',
                    'output': 'OUTPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_2.name,
                        'job_type_version': job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(definition=rt_definition_1)

        # Create a chain of two batches
        batch_1 = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=2)
        # Right now test utils will automatically have batch_1 supersede another batch, so we reset this so batch_1 is
        # its own chain
        batch_1.root_batch_id = batch_1.id
        batch_1.superseded_batch = None
        batch_1.save()
        # Change recipe type to new revision
        recipe_test_utils.edit_recipe_type_v6(recipe_type=recipe_type, definition=rt_definition_2, auto_update=True)
        recipe_type = RecipeType.objects.get(id=recipe_type.id)
        definition_2 = BatchDefinition()
        definition_2.root_batch_id = batch_1.root_batch_id
        batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition_2)

        # Set metrics to test values
        Batch.objects.filter(id=batch_1.id).update(jobs_total=24, jobs_pending=0, jobs_blocked=10, jobs_queued=0,
                                                   jobs_running=0, jobs_failed=2, jobs_completed=12, jobs_canceled=0,
                                                   recipes_estimated=2, recipes_total=2, recipes_completed=1)
        Batch.objects.filter(id=batch_2.id).update(jobs_total=26, jobs_pending=2, jobs_blocked=6, jobs_queued=3,
                                                   jobs_running=5, jobs_failed=6, jobs_completed=3, jobs_canceled=1,
                                                   recipes_estimated=2, recipes_total=2, recipes_completed=0)
        min_seed_duration_1a = timedelta(seconds=43)
        avg_seed_duration_1a = timedelta(seconds=68)
        max_seed_duration_1a = timedelta(seconds=77)
        min_job_duration_1a = timedelta(seconds=45)
        avg_job_duration_1a = timedelta(seconds=70)
        max_job_duration_1a = timedelta(seconds=79)
        qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_a')
        qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=0, jobs_failed=0,
                   jobs_completed=12, jobs_canceled=0, min_seed_duration=min_seed_duration_1a,
                   avg_seed_duration=avg_seed_duration_1a, max_seed_duration=max_seed_duration_1a,
                   min_job_duration=min_job_duration_1a, avg_job_duration=avg_job_duration_1a,
                   max_job_duration=max_job_duration_1a)
        min_seed_duration_1b = timedelta(seconds=15)
        avg_seed_duration_1b = timedelta(seconds=18)
        max_seed_duration_1b = timedelta(seconds=23)
        min_job_duration_1b = timedelta(seconds=18)
        avg_job_duration_1b = timedelta(seconds=21)
        max_job_duration_1b = timedelta(seconds=26)
        qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_b')
        qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=10, jobs_queued=0, jobs_running=0, jobs_failed=2,
                   jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_1b,
                   avg_seed_duration=avg_seed_duration_1b, max_seed_duration=max_seed_duration_1b,
                   min_job_duration=min_job_duration_1b, avg_job_duration=avg_job_duration_1b,
                   max_job_duration=max_job_duration_1b)
        min_seed_duration_2b = timedelta(seconds=9)
        avg_seed_duration_2b = timedelta(seconds=12)
        max_seed_duration_2b = timedelta(seconds=17)
        min_job_duration_2b = timedelta(seconds=12)
        avg_job_duration_2b = timedelta(seconds=15)
        max_job_duration_2b = timedelta(seconds=20)
        qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_b')
        qry.update(jobs_total=13, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=3, jobs_failed=6,
                   jobs_completed=3, jobs_canceled=1, min_seed_duration=min_seed_duration_2b,
                   avg_seed_duration=avg_seed_duration_2b, max_seed_duration=max_seed_duration_2b,
                   min_job_duration=min_job_duration_2b, avg_job_duration=avg_job_duration_2b,
                   max_job_duration=max_job_duration_2b)
        min_seed_duration_2c = timedelta(seconds=101)
        avg_seed_duration_2c = timedelta(seconds=136)
        max_seed_duration_2c = timedelta(seconds=158)
        min_job_duration_2c = timedelta(seconds=111)
        avg_job_duration_2c = timedelta(seconds=146)
        max_job_duration_2c = timedelta(seconds=168)
        qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_c')
        qry.update(jobs_total=13, jobs_pending=2, jobs_blocked=6, jobs_queued=3, jobs_running=2, jobs_failed=0,
                   jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_2c,
                   avg_seed_duration=avg_seed_duration_2c, max_seed_duration=max_seed_duration_2c,
                   min_job_duration=min_job_duration_2c, avg_job_duration=avg_job_duration_2c,
                   max_job_duration=max_job_duration_2c)
        expected_job_metrics = {'job_a': {'jobs_total': [12, None], 'jobs_pending': [0, None],
                                          'jobs_blocked': [0, None], 'jobs_queued': [0, None],
                                          'jobs_running': [0, None], 'jobs_failed': [0, None],
                                          'jobs_completed': [12, None], 'jobs_canceled': [0, None],
                                          'min_seed_duration': [duration_to_string(min_seed_duration_1a), None],
                                          'avg_seed_duration': [duration_to_string(avg_seed_duration_1a), None],
                                          'max_seed_duration': [duration_to_string(max_seed_duration_1a), None],
                                          'min_job_duration': [duration_to_string(min_job_duration_1a), None],
                                          'avg_job_duration': [duration_to_string(avg_job_duration_1a), None],
                                          'max_job_duration': [duration_to_string(max_job_duration_1a), None]},
                                'job_b': {'jobs_total': [12, 13], 'jobs_pending': [0, 0],
                                          'jobs_blocked': [10, 0], 'jobs_queued': [0, 0],
                                          'jobs_running': [0, 3], 'jobs_failed': [2, 6],
                                          'jobs_completed': [0, 3], 'jobs_canceled': [0, 1],
                                          'min_seed_duration': [duration_to_string(min_seed_duration_1b),
                                                                duration_to_string(min_seed_duration_2b)],
                                          'avg_seed_duration': [duration_to_string(avg_seed_duration_1b),
                                                                duration_to_string(avg_seed_duration_2b)],
                                          'max_seed_duration': [duration_to_string(max_seed_duration_1b),
                                                                duration_to_string(max_seed_duration_2b)],
                                          'min_job_duration': [duration_to_string(min_job_duration_1b),
                                                               duration_to_string(min_job_duration_2b)],
                                          'avg_job_duration': [duration_to_string(avg_job_duration_1b),
                                                               duration_to_string(avg_job_duration_2b)],
                                          'max_job_duration': [duration_to_string(max_job_duration_1b),
                                                               duration_to_string(max_job_duration_2b)]},
                                'job_c': {'jobs_total': [None, 13], 'jobs_pending': [None, 2],
                                          'jobs_blocked': [None, 6], 'jobs_queued': [None, 3],
                                          'jobs_running': [None, 2], 'jobs_failed': [None, 0],
                                          'jobs_completed': [None, 0], 'jobs_canceled': [None, 0],
                                          'min_seed_duration': [None, duration_to_string(min_seed_duration_2c)],
                                          'avg_seed_duration': [None, duration_to_string(avg_seed_duration_2c)],
                                          'max_seed_duration': [None, duration_to_string(max_seed_duration_2c)],
                                          'min_job_duration': [None, duration_to_string(min_job_duration_2c)],
                                          'avg_job_duration': [None, duration_to_string(avg_job_duration_2c)],
                                          'max_job_duration': [None, duration_to_string(max_job_duration_2c)]}
                               }
        expected_result = {'batches': [{'id': batch_1.id, 'title': batch_1.title, 'description': batch_1.description,
                                        'created': datetime_to_string(batch_1.created)},
                                       {'id': batch_2.id, 'title': batch_2.title, 'description': batch_2.description,
                                        'created': datetime_to_string(batch_2.created)}],
                           'metrics': {'jobs_total': [24, 26], 'jobs_pending': [0, 2], 'jobs_blocked': [10, 6],
                                       'jobs_queued': [0, 3], 'jobs_running': [0, 5], 'jobs_failed': [2, 6],
                                       'jobs_completed': [12, 3], 'jobs_canceled': [0, 1], 'recipes_estimated': [2, 2],
                                       'recipes_total': [2, 2], 'recipes_completed': [1, 0],
                                       'job_metrics': expected_job_metrics}
                          }
        url = '/v6/batches/comparison/%d/' % batch_2.root_batch_id
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

        result = json.loads(response.content)
        self.assertDictEqual(result, expected_result)
Exemple #28
0
    def _perform_update_iteration(self):
        """Performs a single iteration of the database update

        One iteration picks up to 500 job executions that still have the
        deprecated ``status`` column populated, derives each execution's
        ``exe_num`` (its 1-based position within its job), creates the
        corresponding JobExecutionEnd rows for finished executions and
        JobExecutionOutput rows for completed ones, and finally nulls out the
        deprecated columns on the JobExecution rows so they are not picked up
        again by the next iteration.
        """

        # Retrieve 500 job executions that need to be updated and get job IDs
        # (status is only non-null on rows that have not been migrated yet)
        job_ids = set()
        for job_exe in JobExecution.objects.filter(status__isnull=False).only('id', 'job_id')[:500]:
            job_ids.add(job_exe.job_id)

        # Retrieve all job executions for those jobs in sorted order.
        # Every execution of each affected job is processed (not just the 500
        # sampled above) so that exe_num is computed over the complete set.
        job_exe_count = 0
        current_job_id = None
        current_exe_num = 1
        exe_num_dict = {}  # {exe_num: [job_exe.id]}
        job_exe_end_models = []
        job_exe_output_models = []
        job_exe_qry = JobExecution.objects.select_related('job').filter(job_id__in=job_ids)
        for job_exe in job_exe_qry.defer('resources', 'configuration', 'stdout', 'stderr').order_by('job_id', 'id'):
            job_exe_count += 1
            # Rows arrive ordered by (job_id, id), so the execution number is
            # simply the running position within the current job
            if job_exe.job_id == current_job_id:
                current_exe_num += 1
            else:
                current_job_id = job_exe.job_id
                current_exe_num = 1

            # This job_exe model needs to be updated with its exe_num.
            # Grouping IDs by exe_num lets the transaction below issue one bulk
            # UPDATE per distinct exe_num value instead of one per row.
            if current_exe_num in exe_num_dict:
                exe_num_dict[current_exe_num].append(job_exe.id)
            else:
                exe_num_dict[current_exe_num] = [job_exe.id]

            if job_exe.status in ['COMPLETED', 'FAILED', 'CANCELED']:
                # Create corresponding job_exe_end model
                job_exe_end = JobExecutionEnd()
                job_exe_end.job_exe_id = job_exe.id
                job_exe_end.job_id = job_exe.job_id
                job_exe_end.job_type_id = job_exe.job.job_type_id
                job_exe_end.exe_num = current_exe_num

                # Create task results from job_exe task fields; each of the
                # pre/main/post tasks is included only if it actually started,
                # and ended/exit_code only if those fields are populated
                task_list = []
                if job_exe.pre_started:
                    pre_task_dict = {'task_id': '%s_%s' % (job_exe.get_cluster_id(), 'pre'), 'type': 'pre',
                                     'was_launched': True, 'was_started': True,
                                     'started': datetime_to_string(job_exe.pre_started)}
                    if job_exe.pre_completed:
                        pre_task_dict['ended'] = datetime_to_string(job_exe.pre_completed)
                    if job_exe.pre_exit_code is not None:
                        pre_task_dict['exit_code'] = job_exe.pre_exit_code
                    task_list.append(pre_task_dict)
                if job_exe.job_started:
                    job_task_dict = {'task_id': '%s_%s' % (job_exe.get_cluster_id(), 'job'), 'type': 'main',
                                     'was_launched': True, 'was_started': True,
                                     'started': datetime_to_string(job_exe.job_started)}
                    if job_exe.job_completed:
                        job_task_dict['ended'] = datetime_to_string(job_exe.job_completed)
                    if job_exe.job_exit_code is not None:
                        job_task_dict['exit_code'] = job_exe.job_exit_code
                    task_list.append(job_task_dict)
                if job_exe.post_started:
                    post_task_dict = {'task_id': '%s_%s' % (job_exe.get_cluster_id(), 'post'), 'type': 'post',
                                      'was_launched': True, 'was_started': True,
                                      'started': datetime_to_string(job_exe.post_started)}
                    if job_exe.post_completed:
                        post_task_dict['ended'] = datetime_to_string(job_exe.post_completed)
                    if job_exe.post_exit_code is not None:
                        post_task_dict['exit_code'] = job_exe.post_exit_code
                    task_list.append(post_task_dict)
                task_results = TaskResults({'tasks': task_list})

                # Seed start/end times come from the 'main' task's results
                job_exe_end.task_results = task_results.get_dict()
                job_exe_end.status = job_exe.status
                job_exe_end.error_id = job_exe.error_id
                job_exe_end.node_id = job_exe.node_id
                job_exe_end.queued = job_exe.queued
                job_exe_end.started = job_exe.started
                job_exe_end.seed_started = task_results.get_task_started('main')
                job_exe_end.seed_ended = task_results.get_task_ended('main')
                job_exe_end.ended = job_exe.ended
                job_exe_end_models.append(job_exe_end)

            if job_exe.status == 'COMPLETED':
                # Create corresponding job_exe_output model
                job_exe_output = JobExecutionOutput()
                job_exe_output.job_exe_id = job_exe.id
                job_exe_output.job_id = job_exe.job_id
                job_exe_output.job_type_id = job_exe.job.job_type_id
                job_exe_output.exe_num = current_exe_num
                job_exe_output.output = job_exe.results
                job_exe_output_models.append(job_exe_output)

        # Update/create models in an atomic transaction so a partially-migrated
        # batch cannot be observed (status=None marks a row as migrated)
        with transaction.atomic():
            for exe_num, job_exe_ids in exe_num_dict.items():
                JobExecution.objects.filter(id__in=job_exe_ids).update(exe_num=exe_num, status=None, error_id=None,
                                                                       command_arguments=None, environment=None,
                                                                       cpus_scheduled=None, mem_scheduled=None,
                                                                       disk_out_scheduled=None,
                                                                       disk_total_scheduled=None, pre_started=None,
                                                                       pre_completed=None, pre_exit_code=None,
                                                                       job_started=None, job_completed=None,
                                                                       job_exit_code=None, job_metrics=None,
                                                                       post_started=None, post_completed=None,
                                                                       post_exit_code=None, stdout=None, stderr=None,
                                                                       results_manifest=None, results=None, ended=None,
                                                                       last_modified=None)
            JobExecutionEnd.objects.bulk_create(job_exe_end_models)
            JobExecutionOutput.objects.bulk_create(job_exe_output_models)

        logger.info('Updated %d job executions', job_exe_count)
        self._updated_job_exe += job_exe_count
        # Python 2 print statement: progress feedback for the operator running
        # the update command
        percent = (float(self._updated_job_exe) / float(self._total_job_exe)) * 100.00
        print 'Completed %s of %s job executions (%.1f%%)' % (self._updated_job_exe, self._total_job_exe, percent)
Exemple #29
0
    def generate_status_json(self, status_dict):
        """Generates the portion of the status JSON that describes the scheduler settings and metrics

        Counters accumulated since the last call are snapshotted and reset
        under the lock, then converted to per-second rates over the elapsed
        interval.

        :param status_dict: The status JSON dict
        :type status_dict: dict
        """

        with self._lock:
            # Snapshot and reset all counters atomically so increments from
            # other threads are neither lost nor double-counted
            when = now()
            state = self._state
            last_json = self._last_json
            job_fin_count = self._job_fin_count
            job_launch_count = self._job_launch_count
            new_offer_count = self._new_offer_count
            offer_launch_count = self._offer_launch_count
            task_fin_count = self._task_fin_count
            task_launch_count = self._task_launch_count
            task_update_count = self._task_update_count
            self._last_json = when
            self._job_fin_count = 0
            self._job_launch_count = 0
            self._new_offer_count = 0
            self._offer_launch_count = 0
            self._task_fin_count = 0
            self._task_launch_count = 0
            self._task_update_count = 0

        duration = (when - last_json).total_seconds()

        def per_sec(count):
            # Guard against a zero-length interval (two generations within the
            # same clock tick, or a mocked/frozen clock), which previously
            # raised ZeroDivisionError; report a 0.0 rate in that case
            if duration <= 0.0:
                return 0.0
            return self._round_count_per_sec(count / duration)

        job_fin_per_sec = per_sec(job_fin_count)
        job_launch_per_sec = per_sec(job_launch_count)
        new_offer_per_sec = per_sec(new_offer_count)
        offer_launch_per_sec = per_sec(offer_launch_count)
        task_fin_per_sec = per_sec(task_fin_count)
        task_launch_per_sec = per_sec(task_launch_count)
        task_update_per_sec = per_sec(task_update_count)

        mesos_dict = {'framework_id': self.framework_id}
        metrics_dict = {
            'new_offers_per_sec': new_offer_per_sec,
            'task_updates_per_sec': task_update_per_sec,
            'tasks_finished_per_sec': task_fin_per_sec,
            'jobs_finished_per_sec': job_fin_per_sec,
            'jobs_launched_per_sec': job_launch_per_sec,
            'tasks_launched_per_sec': task_launch_per_sec,
            'offers_launched_per_sec': offer_launch_per_sec
        }

        # Drop stale warnings before serializing the remaining active ones
        self._warning_inactive_old()
        warning_list = []
        for active_warning in self._active_warnings.values():
            warning = {
                'name': active_warning.warning.name,
                'title': active_warning.warning.title,
                'description': active_warning.description,
                'started': datetime_to_string(active_warning.started),
                'last_updated': datetime_to_string(active_warning.last_updated)
            }
            warning_list.append(warning)

        state_dict = {
            'name': state.state,
            'title': state.title,
            'description': state.description
        }
        status_dict['scheduler'] = {
            'hostname': self.hostname,
            'mesos': mesos_dict,
            'metrics': metrics_dict,
            'state': state_dict,
            'warnings': warning_list
        }
Exemple #30
0
    def test_handle_cleanup_task_completed(self, mock_now):
        """Tests calling handle_cleanup_task_completed"""

        current = now()
        past = current - CLEANUP_WARN_THRESHOLD
        mock_now.return_value = current

        # Activate one error and two warnings, then age the second warning
        self.conditions._error_active(NodeConditions.CLEANUP_ERR)
        self.conditions._warning_active(
            NodeConditions.CLEANUP_FAILURE,
            NodeConditions.CLEANUP_FAILURE.description % [1, 2, 3])
        self.conditions._warning_active(
            NodeWarning(name='old-warning', title='old', description=None))
        self.conditions._active_warnings['old-warning'].last_updated = past

        node_dict = {}
        self.conditions.generate_status_json(node_dict)
        self.maxDiff = None

        cleanup_error = {'name': 'CLEANUP',
                         'title': NodeConditions.CLEANUP_ERR.title,
                         'description': NodeConditions.CLEANUP_ERR.description,
                         'started': datetime_to_string(current),
                         'last_updated': datetime_to_string(current)}
        failure_warning = {'name': 'CLEANUP_FAILURE',
                           'title': NodeConditions.CLEANUP_FAILURE.title,
                           'description': NodeConditions.CLEANUP_FAILURE.description % [1, 2, 3],
                           'started': datetime_to_string(current),
                           'last_updated': datetime_to_string(current)}
        stale_warning = {'name': 'old-warning',
                         'title': 'old',
                         'description': None,
                         'started': datetime_to_string(current),
                         'last_updated': datetime_to_string(past)}

        self.assertItemsEqual(node_dict['errors'], [cleanup_error])
        self.assertItemsEqual(node_dict['warnings'],
                              [failure_warning, stale_warning])

        # Completing the cleanup task should clear the CLEANUP error while the
        # failure warning remains active (the stale warning has been aged out)
        self.conditions.handle_cleanup_task_completed()
        node_dict = {}
        self.conditions.generate_status_json(node_dict)

        self.assertDictEqual(node_dict,
                             {'errors': [], 'warnings': [failure_warning]})

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_cleanup_task_failed(self, mock_now):
            """Tests calling handle_cleanup_task_failed"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below (the sibling
            # test_handle_cleanup_task_completed does the same)
            mock_now.return_value = right_now

            expected_results = {
                'errors': [{
                    'name': 'CLEANUP',
                    'title': NodeConditions.CLEANUP_ERR.title,
                    'description': NodeConditions.CLEANUP_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': [{
                    'name':
                    'CLEANUP_FAILURE' + ' %d' % WARNING_NAME_COUNTER,
                    'title':
                    NodeConditions.CLEANUP_FAILURE.title,
                    'description':
                    NodeConditions.CLEANUP_FAILURE.description % self.job_ids,
                    'started':
                    datetime_to_string(right_now),
                    'last_updated':
                    datetime_to_string(right_now)
                }]
            }

            self.conditions.handle_cleanup_task_failed(self.job_exes)
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertDictEqual(node_dict, expected_results)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_cleanup_task_timeout(self, mock_now):
            """Tests calling handle_cleanup_task_timeout"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below
            mock_now.return_value = right_now

            expected_results = {
                'errors': [{
                    'name': 'CLEANUP',
                    'title': NodeConditions.CLEANUP_ERR.title,
                    'description': NodeConditions.CLEANUP_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': [{
                    'name':
                    'CLEANUP_TIMEOUT' + ' %d' % WARNING_NAME_COUNTER,
                    'title':
                    NodeConditions.CLEANUP_TIMEOUT.title,
                    'description':
                    NodeConditions.CLEANUP_TIMEOUT.description % self.job_ids,
                    'started':
                    datetime_to_string(right_now),
                    'last_updated':
                    datetime_to_string(right_now)
                }]
            }

            self.conditions.handle_cleanup_task_timeout(self.job_exes)
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertDictEqual(node_dict, expected_results)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_health_task_completed(self, mock_now):
            """Tests calling handle_health_task_completed"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below
            mock_now.return_value = right_now

            # Activate every error; a completed health task should clear the
            # health-related ones, leaving CLEANUP and IMAGE_PULL
            self.conditions._error_active(NodeConditions.BAD_DAEMON_ERR)
            self.conditions._error_active(NodeConditions.BAD_LOGSTASH_ERR)
            self.conditions._error_active(NodeConditions.CLEANUP_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_FAIL_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_TIMEOUT_ERR)
            self.conditions._error_active(NodeConditions.IMAGE_PULL_ERR)
            self.conditions._error_active(NodeConditions.LOW_DOCKER_SPACE_ERR)

            expected_results = {
                'errors': [{
                    'name': 'CLEANUP',
                    'title': NodeConditions.CLEANUP_ERR.title,
                    'description': NodeConditions.CLEANUP_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }, {
                    'name': 'IMAGE_PULL',
                    'title': NodeConditions.IMAGE_PULL_ERR.title,
                    'description': NodeConditions.IMAGE_PULL_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            self.conditions.handle_health_task_completed()
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertTrue(self.conditions.is_health_check_normal)
            self.assertTrue(self.conditions.is_pull_bad)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_health_task_failed(self, mock_now):
            """Tests calling handle_health_task_failed"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below
            mock_now.return_value = right_now

            self.conditions._error_active(NodeConditions.BAD_DAEMON_ERR)
            self.conditions._error_active(NodeConditions.BAD_LOGSTASH_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_FAIL_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_TIMEOUT_ERR)
            self.conditions._error_active(NodeConditions.LOW_DOCKER_SPACE_ERR)
            self.conditions.is_health_check_normal = True

            # A BAD_DAEMON exit code should leave only the BAD_DAEMON error
            expected_results = {
                'errors': [{
                    'name': 'BAD_DAEMON',
                    'title': NodeConditions.BAD_DAEMON_ERR.title,
                    'description': NodeConditions.BAD_DAEMON_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            bad_daemon = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=HealthTask.BAD_DAEMON_CODE)
            self.conditions.handle_health_task_failed(bad_daemon)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

            # A BAD_LOGSTASH exit code should leave only the BAD_LOGSTASH error
            expected_results = {
                'errors': [{
                    'name': 'BAD_LOGSTASH',
                    'title': NodeConditions.BAD_LOGSTASH_ERR.title,
                    'description': NodeConditions.BAD_LOGSTASH_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            bad_log = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=HealthTask.BAD_LOGSTASH_CODE)
            self.conditions.handle_health_task_failed(bad_log)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

            # A LOW_DOCKER_SPACE exit code should leave only that error
            expected_results = {
                'errors': [{
                    'name': 'LOW_DOCKER_SPACE',
                    'title': NodeConditions.LOW_DOCKER_SPACE_ERR.title,
                    'description':
                    NodeConditions.LOW_DOCKER_SPACE_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            low_docker = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=HealthTask.LOW_DOCKER_SPACE_CODE)
            self.conditions.handle_health_task_failed(low_docker)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

            # An unrecognized exit code should map to the generic HEALTH_FAIL
            expected_results = {
                'errors': [{
                    'name': 'HEALTH_FAIL',
                    'title': NodeConditions.HEALTH_FAIL_ERR.title,
                    'description': NodeConditions.HEALTH_FAIL_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }

            unknown = job_test_utils.create_task_status_update(
                task_id='id',
                agent_id='agent',
                status='status',
                when=right_now,
                exit_code=0)
            self.conditions.handle_health_task_failed(unknown)

            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_health_task_timeout(self, mock_now):
            """Tests calling handle_health_task_timeout"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below
            mock_now.return_value = right_now

            self.conditions._error_active(NodeConditions.BAD_DAEMON_ERR)
            self.conditions._error_active(NodeConditions.BAD_LOGSTASH_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_FAIL_ERR)
            self.conditions._error_active(NodeConditions.HEALTH_TIMEOUT_ERR)
            self.conditions._error_active(NodeConditions.LOW_DOCKER_SPACE_ERR)
            self.conditions.is_health_check_normal = True

            self.conditions.handle_health_task_timeout()

            # Only the HEALTH_TIMEOUT error should remain active
            expected_results = {
                'errors': [{
                    'name': 'HEALTH_TIMEOUT',
                    'title': NodeConditions.HEALTH_TIMEOUT_ERR.title,
                    'description':
                    NodeConditions.HEALTH_TIMEOUT_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertItemsEqual(node_dict['errors'],
                                  expected_results['errors'])
            self.assertFalse(self.conditions.is_health_check_normal)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_pull_task_completed(self, mock_now):
            """Tests calling handle_pull_task_completed"""

            right_now = now()
            # Pin the patched now() for consistency with the other tests so the
            # condition records real timestamps rather than MagicMock values
            mock_now.return_value = right_now

            self.conditions._error_active(NodeConditions.IMAGE_PULL_ERR)
            self.conditions.is_pull_bad = True

            # A completed pull task should clear the IMAGE_PULL error
            self.conditions.handle_pull_task_completed()

            expected_results = {'errors': [], 'warnings': []}
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertDictEqual(node_dict, expected_results)
            self.assertFalse(self.conditions.is_pull_bad)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_pull_task_failed(self, mock_now):
            """Tests calling handle_pull_task_failed"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below
            mock_now.return_value = right_now

            self.conditions.is_pull_bad = False

            self.conditions.handle_pull_task_failed()

            expected_results = {
                'errors': [{
                    'name': 'IMAGE_PULL',
                    'title': NodeConditions.IMAGE_PULL_ERR.title,
                    'description': NodeConditions.IMAGE_PULL_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertDictEqual(node_dict, expected_results)
            self.assertTrue(self.conditions.is_pull_bad)

        # NOTE(review): this method is defined one indent level deeper than its
        # sibling tests, so the test runner never collects it -- confirm the
        # intended indentation
        @patch('scheduler.node.conditions.now')
        def test_handle_pull_task_timeout(self, mock_now):
            """Tests calling handle_pull_task_timeout"""

            right_now = now()
            # Pin the patched now() so condition timestamps match the
            # datetime_to_string(right_now) values expected below
            mock_now.return_value = right_now

            self.conditions._error_active(NodeConditions.IMAGE_PULL_ERR)
            self.conditions.is_pull_bad = True

            # Fixed copy-paste bug: this test previously called
            # handle_pull_task_completed(), which would have cleared the
            # IMAGE_PULL error and contradicted the expectations below
            self.conditions.handle_pull_task_timeout()

            expected_results = {
                'errors': [{
                    'name': 'IMAGE_PULL',
                    'title': NodeConditions.IMAGE_PULL_ERR.title,
                    'description': NodeConditions.IMAGE_PULL_ERR.description,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }],
                'warnings': []
            }
            node_dict = {}
            self.conditions.generate_status_json(node_dict)

            self.assertDictEqual(node_dict, expected_results)
            self.assertTrue(self.conditions.is_pull_bad)

        @patch('scheduler.node.conditions.now')
        def test_last_cleanup_task_error(self, mock_now):
            """Tests calling last_cleanup_task_error()"""

            right_now = now()
            # _error_active() stamps the error using the patched now(); set
            # its return value so the recorded time equals right_now —
            # otherwise assertEqual compares a datetime against a MagicMock
            mock_now.return_value = right_now

            # No cleanup error active yet -> no timestamp
            when = self.conditions.last_cleanup_task_error()
            self.assertIsNone(when)

            # Activating the error records the (mocked) current time
            self.conditions._error_active(NodeConditions.CLEANUP_ERR)
            when = self.conditions.last_cleanup_task_error()
            self.assertEqual(when, right_now)

        @patch('scheduler.node.conditions.now')
        def test_last_image_pull_task_error(self, mock_now):
            """Tests calling last_image_pull_task_error()"""

            right_now = now()
            # _error_active() stamps the error using the patched now(); set
            # its return value so the recorded time equals right_now —
            # otherwise assertEqual compares a datetime against a MagicMock
            mock_now.return_value = right_now

            # No image pull error active yet -> no timestamp
            when = self.conditions.last_image_pull_task_error()
            self.assertIsNone(when)

            # Activating the error records the (mocked) current time
            self.conditions._error_active(NodeConditions.IMAGE_PULL_ERR)
            when = self.conditions.last_image_pull_task_error()
            self.assertEqual(when, right_now)

        @patch('scheduler.node.conditions.now')
        def test_update_cleanup_count(self, mock_now):
            """Tests calling update_cleanup_count()"""

            right_now = now()
            # The conditions module timestamps warnings via the patched now();
            # configure it so the datetime_to_string comparisons below match
            mock_now.return_value = right_now

            # A count of zero clears any active SLOW_CLEANUP warning
            self.conditions._warning_active(
                NodeConditions.SLOW_CLEANUP,
                NodeConditions.SLOW_CLEANUP.description % 1)
            self.conditions.update_cleanup_count(0)
            expected_results = {'errors': [], 'warnings': []}
            node_dict = {}
            self.conditions.generate_status_json(node_dict)
            self.assertDictEqual(node_dict, expected_results)

            # Exceeding the threshold activates the SLOW_CLEANUP warning
            count = JOB_EXES_WARNING_THRESHOLD + 1
            self.conditions.update_cleanup_count(count)
            expected_results = {
                'errors': [],
                'warnings': [{
                    'name': 'SLOW_CLEANUP',
                    'title': NodeConditions.SLOW_CLEANUP.title,
                    # The description must be formatted with the full count.
                    # The original expression lacked parentheses: '%' binds
                    # tighter than '+', so it evaluated to (str % N) + 1,
                    # which raises TypeError.
                    'description': NodeConditions.SLOW_CLEANUP.description % count,
                    'started': datetime_to_string(right_now),
                    'last_updated': datetime_to_string(right_now)
                }]
            }
            node_dict = {}
            self.conditions.generate_status_json(node_dict)
            self.assertDictEqual(node_dict, expected_results)