class ProductFileSerializer(ProductFileBaseSerializer):
    """Converts product file model fields to REST output"""
    # Imported inside the class body, presumably to avoid a circular import
    # between the app serializer modules — TODO confirm
    from job.job_type_serializers import JobTypeBaseSerializerV6

    # Nested serializers describing the job/batch/recipe that produced this file.
    # BatchBaseSerializerV6 and RecipeTypeBaseSerializerV6 are assumed to be in
    # scope at module level (their imports are outside this view of the file).
    job_type = JobTypeBaseSerializerV6()
    batch = BatchBaseSerializerV6()
    recipe_type = RecipeTypeBaseSerializerV6()
class JobSerializerV6(JobBaseSerializerV6):
    """Converts job model fields to REST output."""
    # Imported inside the class body, presumably to avoid circular imports
    # between the app serializer modules — TODO confirm
    from batch.serializers import BatchBaseSerializerV6
    from error.serializers import ErrorBaseSerializerV6
    from recipe.serializers import RecipeBaseSerializerV6
    from trigger.serializers import TriggerEventSerializerV6

    # Nested serializers for related models
    job_type_rev = JobTypeRevisionBaseSerializer()
    event = TriggerEventSerializerV6()
    recipe = RecipeBaseSerializerV6()
    batch = BatchBaseSerializerV6()

    # Supersede bookkeeping (a superseded job was replaced by a newer one)
    is_superseded = serializers.BooleanField()
    superseded_job = ModelIdSerializer()

    # Current execution state
    status = serializers.ChoiceField(choices=Job.JOB_STATUSES)
    node = NodeBaseSerializer()
    error = ErrorBaseSerializerV6()
    num_exes = serializers.IntegerField()

    # Input and source-data metadata
    input_file_size = serializers.FloatField()
    source_started = serializers.DateTimeField()
    source_ended = serializers.DateTimeField()

    # Lifecycle timestamps
    created = serializers.DateTimeField()
    queued = serializers.DateTimeField()
    started = serializers.DateTimeField()
    ended = serializers.DateTimeField()
    last_status_change = serializers.DateTimeField()
    superseded = serializers.DateTimeField()
    last_modified = serializers.DateTimeField()
class ScaleFileSerializerV6(ScaleFileBaseSerializerV6):
    """Converts Scale file model fields to REST output"""
    # Imported inside the class body, presumably to avoid circular imports
    # between the app serializer modules — TODO confirm
    from batch.serializers import BatchBaseSerializerV6
    from job.job_type_serializers import JobTypeBaseSerializerV6
    from recipe.serializers import RecipeTypeBaseSerializerV6

    # Storage location and basic file attributes
    workspace = WorkspaceBaseSerializer()
    data_type_tags = serializers.ListField(child=serializers.CharField())
    media_type = serializers.CharField()
    file_type = serializers.CharField()
    file_size = serializers.IntegerField()  # TODO: BigIntegerField?
    file_path = serializers.CharField()
    is_deleted = serializers.BooleanField()
    url = serializers.URLField()

    # Lifecycle and data/source time ranges
    created = serializers.DateTimeField()
    deleted = serializers.DateTimeField()
    data_started = serializers.DateTimeField()
    data_ended = serializers.DateTimeField()
    source_started = serializers.DateTimeField()
    source_ended = serializers.DateTimeField()

    # Source provenance metadata
    source_sensor_class = serializers.CharField()
    source_sensor = serializers.CharField()
    source_collection = serializers.CharField()
    source_task = serializers.CharField()
    last_modified = serializers.DateTimeField()

    # Geospatial fields, rendered as WKT strings
    # TODO: update to use GeoJson instead of WKT
    geometry = WktField()
    center_point = WktField()
    countries = serializers.StringRelatedField(many=True, read_only=True)

    # Provenance: the job/recipe/batch that produced this file
    job_type = JobTypeBaseSerializerV6()
    job = ModelIdSerializer()
    job_exe = ModelIdSerializer()
    job_output = serializers.CharField()
    recipe_type = RecipeTypeBaseSerializerV6()
    recipe = ModelIdSerializer()
    recipe_node = serializers.CharField()
    batch = BatchBaseSerializerV6()

    # Supersede bookkeeping
    is_superseded = serializers.BooleanField()
    superseded = serializers.DateTimeField()
class RecipeSerializerV6(RecipeBaseSerializerV6):
    """Converts recipe model fields to REST output."""
    # Imported inside the class body, presumably to avoid circular imports
    # between the app serializer modules — TODO confirm
    from batch.serializers import BatchBaseSerializerV6
    from ingest.ingest_event_serializers import IngestEventBaseSerializerV6
    from trigger.serializers import TriggerEventBaseSerializerV6

    # Nested serializers for related models
    recipe_type_rev = RecipeTypeRevisionBaseSerializerV6()
    event = TriggerEventBaseSerializerV6()
    ingest_event = IngestEventBaseSerializerV6()
    batch = BatchBaseSerializerV6()
    recipe = RecipeBaseSerializerV6()

    # Supersede bookkeeping; assigning None removes the inherited
    # superseded_by_recipe field from this serializer's output (DRF convention)
    is_superseded = serializers.BooleanField()
    superseded_recipe = ModelIdSerializer()
    superseded_by_recipe = None

    # Input and source-data metadata
    input_file_size = serializers.FloatField()
    source_started = serializers.DateTimeField()
    source_ended = serializers.DateTimeField()
    source_sensor_class = serializers.CharField()
    source_sensor = serializers.CharField()
    source_collection = serializers.CharField()
    source_task = serializers.CharField()

    # Aggregate counts of this recipe's jobs by status
    jobs_total = serializers.IntegerField()
    jobs_pending = serializers.IntegerField()
    jobs_blocked = serializers.IntegerField()
    jobs_queued = serializers.IntegerField()
    jobs_running = serializers.IntegerField()
    jobs_failed = serializers.IntegerField()
    jobs_completed = serializers.IntegerField()
    jobs_canceled = serializers.IntegerField()

    # Sub-recipe progress
    sub_recipes_total = serializers.IntegerField()
    sub_recipes_completed = serializers.IntegerField()
    is_completed = serializers.BooleanField()

    # Lifecycle timestamps
    created = serializers.DateTimeField()
    completed = serializers.DateTimeField()
    superseded = serializers.DateTimeField()
    last_modified = serializers.DateTimeField()
def get_batch_comparison_v6(self, root_batch_id):
    """Returns the batch metrics for the v6 batch comparison REST API

    Each metric is returned as a list with one entry per batch in the chain,
    aligned positionally with the 'batches' list. Batches that never ran a
    given job get None entries for that job's metrics.

    :param root_batch_id: The root batch ID of the batches to compare
    :type root_batch_id: int
    :returns: Dict with the serialized batch list and the aligned metrics
    :rtype: dict
    """
    from batch.serializers import BatchBaseSerializerV6

    # Plain integer counts present on both the batch model and the per-job
    # batch metrics model
    count_metrics = ('jobs_total', 'jobs_pending', 'jobs_blocked', 'jobs_queued',
                     'jobs_running', 'jobs_failed', 'jobs_completed', 'jobs_canceled')
    # Recipe counts present only on the batch model
    recipe_metrics = ('recipes_estimated', 'recipes_total', 'recipes_completed')
    # Duration fields on the per-job metrics model that must be converted to
    # ISO duration strings for REST output
    duration_metrics = ('min_seed_duration', 'avg_seed_duration', 'max_seed_duration',
                        'min_job_duration', 'avg_job_duration', 'max_job_duration')

    def duration_str(duration):
        """Converts a duration to its string form, passing None through"""
        return parse_utils.duration_to_string(duration) if duration is not None else None

    batches = Batch.objects.filter(root_batch_id=root_batch_id).prefetch_related('metrics')
    batches = batches.defer('definition', 'configuration').order_by('id')

    batch_list = []
    job_metrics_dict = {}
    # First pass: serialize each batch, index its per-job metrics by job name,
    # and discover the full set of job names across all batches (a later batch
    # may have jobs an earlier one lacks, so this must complete before filling)
    for batch in batches:
        batch_list.append(BatchBaseSerializerV6(batch).data)
        batch.batch_metrics_dict = {}
        for batch_metrics in batch.metrics.all():
            batch.batch_metrics_dict[batch_metrics.job_name] = batch_metrics
            if batch_metrics.job_name not in job_metrics_dict:
                job_metrics_dict[batch_metrics.job_name] = \
                    {name: [] for name in count_metrics + duration_metrics}

    metrics_dict = {name: [] for name in count_metrics + recipe_metrics}
    metrics_dict['job_metrics'] = job_metrics_dict

    # Second pass: append each batch's values so every metric list stays
    # positionally aligned with batch_list
    for batch in batches:
        for name in count_metrics + recipe_metrics:
            metrics_dict[name].append(getattr(batch, name))
        for job_name, job_metrics in job_metrics_dict.items():
            if job_name in batch.batch_metrics_dict:
                batch_metrics = batch.batch_metrics_dict[job_name]
                for name in count_metrics:
                    job_metrics[name].append(getattr(batch_metrics, name))
                for name in duration_metrics:
                    job_metrics[name].append(duration_str(getattr(batch_metrics, name)))
            else:
                # Batch does not have this job, fill in metrics with None
                for metric_name in job_metrics:
                    job_metrics[metric_name].append(None)

    return {'batches': batch_list, 'metrics': metrics_dict}