Example No. 1
    def test_duration_to_string(self):
        """Tests converting timedelta duration to ISO duration string"""
        duration_1 = datetime.timedelta(seconds=0)
        self.assertEqual(parse_util.duration_to_string(duration_1), 'PT0S')
        duration_2 = datetime.timedelta(days=4, seconds=58426)
        self.assertEqual(parse_util.duration_to_string(duration_2), 'P4DT16H13M46S')
        duration_3 = datetime.timedelta(seconds=542.0894)
        self.assertEqual(parse_util.duration_to_string(duration_3), 'PT9M2S')
        duration_4 = datetime.timedelta(seconds=542.5894)
        self.assertEqual(parse_util.duration_to_string(duration_4), 'PT9M3S')
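
The assertions above pin down the expected behaviour: the duration is rounded to the nearest whole second and rendered as an ISO 8601 duration string, with zero-valued components omitted except for the all-zero case, which yields 'PT0S'. A minimal sketch consistent with those assertions (purely illustrative; not the project's actual parse_util implementation):

import datetime


def duration_to_string(duration):
    """Converts a datetime.timedelta into an ISO 8601 duration string, e.g. 'P4DT16H13M46S'

    Illustrative sketch only; rounds to the nearest whole second, as the test above expects.
    """
    total_seconds = int(round(duration.total_seconds()))
    days, remainder = divmod(total_seconds, 86400)
    hours, remainder = divmod(remainder, 3600)
    minutes, seconds = divmod(remainder, 60)

    result = 'P'
    if days:
        result += '%dD' % days
    if hours or minutes or seconds or not days:
        result += 'T'
        if hours:
            result += '%dH' % hours
        if minutes:
            result += '%dM' % minutes
        if seconds or not (days or hours or minutes):
            result += '%dS' % seconds
    return result


assert duration_to_string(datetime.timedelta(seconds=0)) == 'PT0S'
assert duration_to_string(datetime.timedelta(days=4, seconds=58426)) == 'P4DT16H13M46S'
assert duration_to_string(datetime.timedelta(seconds=542.5894)) == 'PT9M3S'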
Example No. 2
    def to_dict(self):
        """Returns a dict representing these metrics

        :returns: The dict representing these metrics
        :rtype: dict
        """

        metrics_dict = {'jobs_total': self.jobs_total, 'jobs_pending': self.jobs_pending,
                        'jobs_blocked': self.jobs_blocked, 'jobs_queued': self.jobs_queued,
                        'jobs_running': self.jobs_running, 'jobs_failed': self.jobs_failed,
                        'jobs_completed': self.jobs_completed, 'jobs_canceled': self.jobs_canceled,
                        'min_seed_duration': None, 'avg_seed_duration': None, 'max_seed_duration': None,
                        'min_job_duration': None, 'avg_job_duration': None, 'max_job_duration': None}
        if self.min_seed_duration:
            metrics_dict['min_seed_duration'] = parse_utils.duration_to_string(self.min_seed_duration)
        if self.avg_seed_duration:
            metrics_dict['avg_seed_duration'] = parse_utils.duration_to_string(self.avg_seed_duration)
        if self.max_seed_duration:
            metrics_dict['max_seed_duration'] = parse_utils.duration_to_string(self.max_seed_duration)
        if self.min_job_duration:
            metrics_dict['min_job_duration'] = parse_utils.duration_to_string(self.min_job_duration)
        if self.avg_job_duration:
            metrics_dict['avg_job_duration'] = parse_utils.duration_to_string(self.avg_job_duration)
        if self.max_job_duration:
            metrics_dict['max_job_duration'] = parse_utils.duration_to_string(self.max_job_duration)

        return metrics_dict
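
With the job_a metrics that Example No. 3 below assigns to batch_1 (43/68/77-second seed durations and 45/70/79-second job durations), to_dict() would return a plain dict along these lines; the values are illustrative, and any duration that is not set stays None:

{'jobs_total': 12, 'jobs_pending': 0, 'jobs_blocked': 0, 'jobs_queued': 0,
 'jobs_running': 0, 'jobs_failed': 0, 'jobs_completed': 12, 'jobs_canceled': 0,
 'min_seed_duration': 'PT43S', 'avg_seed_duration': 'PT1M8S', 'max_seed_duration': 'PT1M17S',
 'min_job_duration': 'PT45S', 'avg_job_duration': 'PT1M10S', 'max_job_duration': 'PT1M19S'}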
Example No. 3
    def test_successful(self, mock_msg_mgr):
        """Tests successfully calling the v6 batch comparison view"""

        job_type_1 = job_test_utils.create_seed_job_type()
        job_type_2 = job_test_utils.create_seed_job_type()
        job_type_3 = job_test_utils.create_seed_job_type()

        rt_definition_1 = {
            'version': '6',
            'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                      'json': []},
            'nodes': {
                'job_a': {
                    'dependencies': [],
                    'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_1.name,
                        'job_type_version': job_type_1.version,
                        'job_type_revision': 1,
                    }
                },
                'job_b': {
                    'dependencies': [{'name': 'job_a'}],
                    'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_a', 'output': 'OUTPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_2.name,
                        'job_type_version': job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }

        rt_definition_2 = {
            'version': '6',
            'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                      'json': []},
            'nodes': {
                'job_c': {
                    'dependencies': [],
                    'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_3.name,
                        'job_type_version': job_type_3.version,
                        'job_type_revision': 1,
                    }
                },
                'job_b': {
                    'dependencies': [{'name': 'job_c'}],
                    'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_c', 'output': 'OUTPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_2.name,
                        'job_type_version': job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(definition=rt_definition_1)

        # Create a chain of two batches
        batch_1 = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=2)
        # The test utils automatically make batch_1 supersede another batch, so reset these fields so that batch_1
        # starts its own chain
        batch_1.root_batch_id = batch_1.id
        batch_1.superseded_batch = None
        batch_1.save()
        # Change recipe type to new revision
        recipe_test_utils.edit_recipe_type_v6(recipe_type=recipe_type, definition=rt_definition_2, auto_update=True)
        recipe_type = RecipeType.objects.get(id=recipe_type.id)
        definition_2 = BatchDefinition()
        definition_2.root_batch_id = batch_1.root_batch_id
        batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition_2)

        # Set metrics to test values
        Batch.objects.filter(id=batch_1.id).update(jobs_total=24, jobs_pending=0, jobs_blocked=10, jobs_queued=0,
                                                   jobs_running=0, jobs_failed=2, jobs_completed=12, jobs_canceled=0,
                                                   recipes_estimated=2, recipes_total=2, recipes_completed=1)
        Batch.objects.filter(id=batch_2.id).update(jobs_total=26, jobs_pending=2, jobs_blocked=6, jobs_queued=3,
                                                   jobs_running=5, jobs_failed=6, jobs_completed=3, jobs_canceled=1,
                                                   recipes_estimated=2, recipes_total=2, recipes_completed=0)
        min_seed_duration_1a = timedelta(seconds=43)
        avg_seed_duration_1a = timedelta(seconds=68)
        max_seed_duration_1a = timedelta(seconds=77)
        min_job_duration_1a = timedelta(seconds=45)
        avg_job_duration_1a = timedelta(seconds=70)
        max_job_duration_1a = timedelta(seconds=79)
        qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_a')
        qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=0, jobs_failed=0,
                   jobs_completed=12, jobs_canceled=0, min_seed_duration=min_seed_duration_1a,
                   avg_seed_duration=avg_seed_duration_1a, max_seed_duration=max_seed_duration_1a,
                   min_job_duration=min_job_duration_1a, avg_job_duration=avg_job_duration_1a,
                   max_job_duration=max_job_duration_1a)
        min_seed_duration_1b = timedelta(seconds=15)
        avg_seed_duration_1b = timedelta(seconds=18)
        max_seed_duration_1b = timedelta(seconds=23)
        min_job_duration_1b = timedelta(seconds=18)
        avg_job_duration_1b = timedelta(seconds=21)
        max_job_duration_1b = timedelta(seconds=26)
        qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_b')
        qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=10, jobs_queued=0, jobs_running=0, jobs_failed=2,
                   jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_1b,
                   avg_seed_duration=avg_seed_duration_1b, max_seed_duration=max_seed_duration_1b,
                   min_job_duration=min_job_duration_1b, avg_job_duration=avg_job_duration_1b,
                   max_job_duration=max_job_duration_1b)
        min_seed_duration_2b = timedelta(seconds=9)
        avg_seed_duration_2b = timedelta(seconds=12)
        max_seed_duration_2b = timedelta(seconds=17)
        min_job_duration_2b = timedelta(seconds=12)
        avg_job_duration_2b = timedelta(seconds=15)
        max_job_duration_2b = timedelta(seconds=20)
        qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_b')
        qry.update(jobs_total=13, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=3, jobs_failed=6,
                   jobs_completed=3, jobs_canceled=1, min_seed_duration=min_seed_duration_2b,
                   avg_seed_duration=avg_seed_duration_2b, max_seed_duration=max_seed_duration_2b,
                   min_job_duration=min_job_duration_2b, avg_job_duration=avg_job_duration_2b,
                   max_job_duration=max_job_duration_2b)
        min_seed_duration_2c = timedelta(seconds=101)
        avg_seed_duration_2c = timedelta(seconds=136)
        max_seed_duration_2c = timedelta(seconds=158)
        min_job_duration_2c = timedelta(seconds=111)
        avg_job_duration_2c = timedelta(seconds=146)
        max_job_duration_2c = timedelta(seconds=168)
        qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_c')
        qry.update(jobs_total=13, jobs_pending=2, jobs_blocked=6, jobs_queued=3, jobs_running=2, jobs_failed=0,
                   jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_2c,
                   avg_seed_duration=avg_seed_duration_2c, max_seed_duration=max_seed_duration_2c,
                   min_job_duration=min_job_duration_2c, avg_job_duration=avg_job_duration_2c,
                   max_job_duration=max_job_duration_2c)
        expected_job_metrics = {'job_a': {'jobs_total': [12, None], 'jobs_pending': [0, None],
                                          'jobs_blocked': [0, None], 'jobs_queued': [0, None],
                                          'jobs_running': [0, None], 'jobs_failed': [0, None],
                                          'jobs_completed': [12, None], 'jobs_canceled': [0, None],
                                          'min_seed_duration': [duration_to_string(min_seed_duration_1a), None],
                                          'avg_seed_duration': [duration_to_string(avg_seed_duration_1a), None],
                                          'max_seed_duration': [duration_to_string(max_seed_duration_1a), None],
                                          'min_job_duration': [duration_to_string(min_job_duration_1a), None],
                                          'avg_job_duration': [duration_to_string(avg_job_duration_1a), None],
                                          'max_job_duration': [duration_to_string(max_job_duration_1a), None]},
                                'job_b': {'jobs_total': [12, 13], 'jobs_pending': [0, 0],
                                          'jobs_blocked': [10, 0], 'jobs_queued': [0, 0],
                                          'jobs_running': [0, 3], 'jobs_failed': [2, 6],
                                          'jobs_completed': [0, 3], 'jobs_canceled': [0, 1],
                                          'min_seed_duration': [duration_to_string(min_seed_duration_1b),
                                                                duration_to_string(min_seed_duration_2b)],
                                          'avg_seed_duration': [duration_to_string(avg_seed_duration_1b),
                                                                duration_to_string(avg_seed_duration_2b)],
                                          'max_seed_duration': [duration_to_string(max_seed_duration_1b),
                                                                duration_to_string(max_seed_duration_2b)],
                                          'min_job_duration': [duration_to_string(min_job_duration_1b),
                                                               duration_to_string(min_job_duration_2b)],
                                          'avg_job_duration': [duration_to_string(avg_job_duration_1b),
                                                               duration_to_string(avg_job_duration_2b)],
                                          'max_job_duration': [duration_to_string(max_job_duration_1b),
                                                               duration_to_string(max_job_duration_2b)]},
                                'job_c': {'jobs_total': [None, 13], 'jobs_pending': [None, 2],
                                          'jobs_blocked': [None, 6], 'jobs_queued': [None, 3],
                                          'jobs_running': [None, 2], 'jobs_failed': [None, 0],
                                          'jobs_completed': [None, 0], 'jobs_canceled': [None, 0],
                                          'min_seed_duration': [None, duration_to_string(min_seed_duration_2c)],
                                          'avg_seed_duration': [None, duration_to_string(avg_seed_duration_2c)],
                                          'max_seed_duration': [None, duration_to_string(max_seed_duration_2c)],
                                          'min_job_duration': [None, duration_to_string(min_job_duration_2c)],
                                          'avg_job_duration': [None, duration_to_string(avg_job_duration_2c)],
                                          'max_job_duration': [None, duration_to_string(max_job_duration_2c)]}
                               }
        expected_result = {'batches': [{'id': batch_1.id, 'title': batch_1.title, 'description': batch_1.description,
                                        'created': datetime_to_string(batch_1.created)},
                                       {'id': batch_2.id, 'title': batch_2.title, 'description': batch_2.description,
                                        'created': datetime_to_string(batch_2.created)}],
                           'metrics': {'jobs_total': [24, 26], 'jobs_pending': [0, 2], 'jobs_blocked': [10, 6],
                                       'jobs_queued': [0, 3], 'jobs_running': [0, 5], 'jobs_failed': [2, 6],
                                       'jobs_completed': [12, 3], 'jobs_canceled': [0, 1], 'recipes_estimated': [2, 2],
                                       'recipes_total': [2, 2], 'recipes_completed': [1, 0],
                                       'job_metrics': expected_job_metrics}
                          }
        url = '/v6/batches/comparison/%d/' % batch_2.root_batch_id
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

        result = json.loads(response.content)
        self.assertDictEqual(result, expected_result)
Example No. 4
    def get_batch_comparison_v6(self, root_batch_id):
        """Returns the batch metrics for the v6 batch comparison REST API

        :param root_batch_id: The root batch ID of the batches to compare
        :type root_batch_id: int
        :returns: A dict containing the list of batches in the chain and their comparison metrics
        :rtype: dict
        """

        from batch.serializers import BatchBaseSerializerV6

        batches = Batch.objects.filter(
            root_batch_id=root_batch_id).prefetch_related('metrics')
        batches = batches.defer('definition', 'configuration').order_by('id')

        batch_list = []
        job_metrics_dict = {}
        for batch in batches:
            batch_list.append(BatchBaseSerializerV6(batch).data)
            batch.batch_metrics_dict = {}
            for batch_metrics in batch.metrics.all():
                batch.batch_metrics_dict[
                    batch_metrics.job_name] = batch_metrics
                if batch_metrics.job_name not in job_metrics_dict:
                    job_metrics = {
                        'jobs_total': [],
                        'jobs_pending': [],
                        'jobs_blocked': [],
                        'jobs_queued': [],
                        'jobs_running': [],
                        'jobs_failed': [],
                        'jobs_completed': [],
                        'jobs_canceled': [],
                        'min_seed_duration': [],
                        'avg_seed_duration': [],
                        'max_seed_duration': [],
                        'min_job_duration': [],
                        'avg_job_duration': [],
                        'max_job_duration': []
                    }
                    job_metrics_dict[batch_metrics.job_name] = job_metrics
        metrics_dict = {
            'jobs_total': [],
            'jobs_pending': [],
            'jobs_blocked': [],
            'jobs_queued': [],
            'jobs_running': [],
            'jobs_failed': [],
            'jobs_completed': [],
            'jobs_canceled': [],
            'recipes_estimated': [],
            'recipes_total': [],
            'recipes_completed': [],
            'job_metrics': job_metrics_dict
        }

        for batch in batches:
            metrics_dict['jobs_total'].append(batch.jobs_total)
            metrics_dict['jobs_pending'].append(batch.jobs_pending)
            metrics_dict['jobs_blocked'].append(batch.jobs_blocked)
            metrics_dict['jobs_queued'].append(batch.jobs_queued)
            metrics_dict['jobs_running'].append(batch.jobs_running)
            metrics_dict['jobs_failed'].append(batch.jobs_failed)
            metrics_dict['jobs_completed'].append(batch.jobs_completed)
            metrics_dict['jobs_canceled'].append(batch.jobs_canceled)
            metrics_dict['recipes_estimated'].append(batch.recipes_estimated)
            metrics_dict['recipes_total'].append(batch.recipes_total)
            metrics_dict['recipes_completed'].append(batch.recipes_completed)
            for job_name, job_metrics in job_metrics_dict.items():
                if job_name in batch.batch_metrics_dict:
                    batch_metrics = batch.batch_metrics_dict[job_name]
                    job_metrics['jobs_total'].append(batch_metrics.jobs_total)
                    job_metrics['jobs_pending'].append(
                        batch_metrics.jobs_pending)
                    job_metrics['jobs_blocked'].append(
                        batch_metrics.jobs_blocked)
                    job_metrics['jobs_queued'].append(
                        batch_metrics.jobs_queued)
                    job_metrics['jobs_running'].append(
                        batch_metrics.jobs_running)
                    job_metrics['jobs_failed'].append(
                        batch_metrics.jobs_failed)
                    job_metrics['jobs_completed'].append(
                        batch_metrics.jobs_completed)
                    job_metrics['jobs_canceled'].append(
                        batch_metrics.jobs_canceled)
                    if batch_metrics.min_seed_duration is not None:
                        min_seed_duration = parse_utils.duration_to_string(
                            batch_metrics.min_seed_duration)
                    else:
                        min_seed_duration = None
                    job_metrics['min_seed_duration'].append(min_seed_duration)
                    if batch_metrics.avg_seed_duration is not None:
                        avg_seed_duration = parse_utils.duration_to_string(
                            batch_metrics.avg_seed_duration)
                    else:
                        avg_seed_duration = None
                    job_metrics['avg_seed_duration'].append(avg_seed_duration)
                    if batch_metrics.max_seed_duration is not None:
                        max_seed_duration = parse_utils.duration_to_string(
                            batch_metrics.max_seed_duration)
                    else:
                        max_seed_duration = None
                    job_metrics['max_seed_duration'].append(max_seed_duration)
                    if batch_metrics.min_job_duration is not None:
                        min_job_duration = parse_utils.duration_to_string(
                            batch_metrics.min_job_duration)
                    else:
                        min_job_duration = None
                    job_metrics['min_job_duration'].append(min_job_duration)
                    if batch_metrics.avg_job_duration is not None:
                        avg_job_duration = parse_utils.duration_to_string(
                            batch_metrics.avg_job_duration)
                    else:
                        avg_job_duration = None
                    job_metrics['avg_job_duration'].append(avg_job_duration)
                    if batch_metrics.max_job_duration is not None:
                        max_job_duration = parse_utils.duration_to_string(
                            batch_metrics.max_job_duration)
                    else:
                        max_job_duration = None
                    job_metrics['max_job_duration'].append(max_job_duration)
                else:
                    # This batch does not have this job, so fill in its metrics with None
                    for metric_name in job_metrics:
                        job_metrics[metric_name].append(None)

        return {'batches': batch_list, 'metrics': metrics_dict}
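
The six duration fields above are converted with identical None-guarding logic. A small hypothetical helper (not part of the original module, and assuming the same parse_utils import the method already relies on) would let each if/else pair collapse to one line:

def _duration_or_none(duration):
    """Returns the ISO 8601 string for a timedelta, or None if the duration is not set

    Hypothetical helper; get_batch_comparison_v6() above inlines this logic for each field.
    """
    return parse_utils.duration_to_string(duration) if duration is not None else None

# Each branch above could then become, for example:
# job_metrics['min_seed_duration'].append(_duration_or_none(batch_metrics.min_seed_duration))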