Example #1
    def _populate_default_values(self, do_validate=False):
        """Populates any missing JSON fields that have default values
        """

        if 'parameters' not in self._definition:
            self._definition['parameters'] = InterfaceV6().get_dict()
        elif type(self._definition['parameters']) is not dict:
            raise InvalidDataSetDefinition('INVALID_DATASET_DEFINITION', '"parameters" is not a dictionary')
        else:
            self._definition['parameters'] = InterfaceV6(interface=self._definition['parameters'], do_validate=do_validate).get_dict()
        rest_utils.strip_schema_version(self._definition['parameters'])

        if 'global_parameters' not in self._definition:
            self._definition['global_parameters'] = InterfaceV6().get_dict()
        elif type(self._definition['global_parameters']) is not dict:
            raise InvalidDataSetDefinition('INVALID_DATASET_DEFINITION', '"global_parameters" is not a dictionary')
        else:
            self._definition['global_parameters'] = InterfaceV6(interface=self._definition['global_parameters'], do_validate=do_validate).get_dict()
        rest_utils.strip_schema_version(self._definition['global_parameters'])

        if 'global_data' not in self._definition:
            self._definition['global_data'] = DataV6().get_dict()
        elif type(self._definition['global_data']) is not dict:
            raise InvalidDataSetDefinition('INVALID_DATASET_DEFINITION', '"global_data" is not a dictionary')
        else:
            self._definition['global_data'] = DataV6(data=self._definition['global_data'], do_validate=do_validate).get_dict()
        rest_utils.strip_schema_version(self._definition['global_data'])
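A minimal, framework-free sketch of the default-or-validate pattern above; normalize_field and the dict normalizers are hypothetical stand-ins for the InterfaceV6/DataV6 wrappers, not Scale APIs.

def normalize_field(definition, key, default, normalize):
    """Defaults, type-checks, and normalizes definition[key] in place"""
    if key not in definition:
        definition[key] = default()
    elif not isinstance(definition[key], dict):
        raise ValueError('"%s" is not a dictionary' % key)
    else:
        definition[key] = normalize(definition[key])

definition = {'parameters': {'files': [], 'json': []}}
normalize_field(definition, 'parameters', dict, dict)
normalize_field(definition, 'global_parameters', dict, dict)
print(definition)  # both keys are now present and normalized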
Example #2
    def __init__(self, definition=None):
        """Constructor

        :param definition: Parameters of the definition
        :type definition: dict
        """

        if not definition:
            definition = {}
        self._definition = definition
        self.param_names = set()
        self.parameters = {}
        if 'parameters' in self._definition:
            self.parameters = InterfaceV6(
                interface=definition['parameters']).get_interface()
            self.param_names = set(self.parameters.parameters.keys())

        self.global_parameters = {}
        if 'global_parameters' in self._definition:
            self.global_parameters = InterfaceV6(
                interface=definition['global_parameters']).get_interface()
            keys = self.global_parameters.parameters.keys()
            dupes = self.param_names.intersection(keys)
            if dupes:
                raise InvalidDataSetDefinition(
                    'DUPLICATE_PARAMETER',
                    'Invalid dataset definition: Names must be unique. %s defined more than once'
                    % dupes)
            self.param_names.update(keys)

        self.global_data = {}
        if 'global_data' in self._definition:
            self.global_data = DataV6(data=definition['global_data']).get_data()
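The duplicate-name check above reduces to set intersection across the parameter groups; a self-contained sketch (the sample names are hypothetical):

# Accumulate parameter names group by group; any overlap with an earlier
# group is a duplicate, which the real code reports as DUPLICATE_PARAMETER.
param_names = set()
for group in ({'input_a', 'input_b'}, {'input_b', 'input_c'}):
    dupes = param_names.intersection(group)
    if dupes:
        print('Names must be unique. %s defined more than once' % dupes)
    param_names.update(group)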
Example #3
def create_dataset_members(dataset=None, data_list=None):
    """Creates a datasetmember model

    :keyword dataset: The dataset the members are a part of
    :type dataset: :class:`data.models.DataSet`
    :keyword data_list: The data for the members
    :type data_list: [dict]
    """
    if not dataset:
        dataset = create_dataset()

    if not data_list:
        data_list = [{
            'version': '7',
            'files': {},
            'json': {
                'input_c': 999,
                'input_d': {
                    'greeting': 'hello'
                }
            }
        }]
    data_objs = []
    for d in data_list:
        data_objs.append(DataV6(data=d).get_data())

    dataset_members = DataSetMember.objects.create_dataset_members(
        dataset=dataset, data_list=data_objs)
    return dataset_members
Example #4
    @staticmethod
    def from_json(json_dict):
        """See :meth:`messaging.messages.message.CommandMessage.from_json`
        """

        message = CreateJobs()
        message.event_id = json_dict['event_id']
        message.create_jobs_type = json_dict['create_jobs_type']

        if message.create_jobs_type == INPUT_DATA_TYPE:
            message.job_type_name = json_dict['job_type_name']
            message.job_type_version = json_dict['job_type_version']
            message.job_type_rev_num = json_dict['job_type_rev_num']
            message.input_data = DataV6(json_dict['input_data']).get_data()
        elif message.create_jobs_type == RECIPE_TYPE:
            message.recipe_id = json_dict['recipe_id']
            if 'root_recipe_id' in json_dict:
                message.root_recipe_id = json_dict['root_recipe_id']
            if 'superseded_recipe_id' in json_dict:
                message.superseded_recipe_id = json_dict['superseded_recipe_id']
            if 'batch_id' in json_dict:
                message.batch_id = json_dict['batch_id']
            for job_dict in json_dict['recipe_jobs']:
                recipe_job = RecipeJob(job_dict['job_type_name'], job_dict['job_type_version'],
                                       job_dict['job_type_rev_num'], job_dict['node_name'], job_dict['process_input'])
                message.add_recipe_job(recipe_job)
            if 'recipe_config' in json_dict:
                message.recipe_config = json_dict['recipe_config']
            if 'ingest_event_id' in json_dict:
                message.ingest_event_id = json_dict['ingest_event_id']

        return message
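from_json is one half of a round-trip contract with to_json: every key to_json writes must be read back here, with optional keys guarded by membership checks. A self-contained sketch of that contract (TinyMessage is hypothetical; the field names echo the example above):

class TinyMessage(object):

    def __init__(self):
        self.event_id = None
        self.batch_id = None  # optional field

    def to_json(self):
        json_dict = {'event_id': self.event_id}
        if self.batch_id is not None:
            json_dict['batch_id'] = self.batch_id  # only written when set
        return json_dict

    @staticmethod
    def from_json(json_dict):
        message = TinyMessage()
        message.event_id = json_dict['event_id']
        if 'batch_id' in json_dict:  # optional, so guard the read
            message.batch_id = json_dict['batch_id']
        return message

message = TinyMessage()
message.event_id = 42
assert TinyMessage.from_json(message.to_json()).event_id == 42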
Example #5
    def _create_new_recipes(self):
        """Creates the recipe models for a new recipe run
        """
        rt = RecipeType.objects.get(name=self.recipe_type_name)
        if not rt.is_active:
            raise InactiveRecipeType("Recipe Type %s is inactive" % rt.name)

        recipe_type_rev = RecipeTypeRevision.objects.get_revision(
            self.recipe_type_name, self.recipe_type_rev_num)

        config = None
        if self.configuration:
            config = RecipeConfigurationV6(self.configuration)

        with transaction.atomic():
            recipe_input_data = DataV6(self.recipe_input_data).get_data()
            recipe = Recipe.objects.create_recipe_v6(
                recipe_type_rev=recipe_type_rev,
                event_id=self.event_id,
                ingest_id=self.ingest_event_id,
                input_data=recipe_input_data,
                batch_id=self.batch_id,
                recipe_config=config)
            recipe.save()
            self.new_recipes.append(recipe.id)

        recipes = []
        if recipe:
            recipes.append(recipe)

        return recipes
Example #6
    def test_convert_data_to_v6_json(self):
        """Tests calling convert_data_to_v6_json()"""

        # Try data with nothing set
        data = Data()
        json = convert_data_to_v6_json(data)
        DataV6(data=json.get_dict(), do_validate=True)  # Revalidate

        # Try data with a variety of values
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        data.add_value(FileValue('input_b', [1235, 1236]))
        data.add_value(JsonValue('input_c', 'hello'))
        data.add_value(JsonValue('input_d', 11.9))
        json = convert_data_to_v6_json(data)
        DataV6(data=json.get_dict(), do_validate=True)  # Revalidate
        self.assertSetEqual(set(json.get_data().values.keys()), {'input_a', 'input_b', 'input_c', 'input_d'})
Example #7
0
    def get_data(self):
        """Returns the data for this datasetmember

        :returns: The data for this datasetmember
        :rtype: :class:`data.data.data.Data`
        """

        return DataV6(data=self.data, do_validate=False).get_data()
Example #8
    def post(self, request):
        """Queue a recipe and returns the new job information in JSON form

        :param request: the HTTP POST request
        :type request: :class:`rest_framework.request.Request`
        :rtype: :class:`rest_framework.response.Response`
        :returns: the HTTP response to send back to the user
        """
        if request.version != 'v6':
            raise Http404

        recipe_type_id = rest_util.parse_int(request, 'recipe_type_id')
        recipe_data = rest_util.parse_dict(request, 'input', {})
        configuration_dict = rest_util.parse_dict(request,
                                                  'configuration',
                                                  required=False)
        configuration = None

        try:
            recipe_data_v6 = DataV6(recipe_data, do_validate=True)
        except InvalidData as ex:
            logger.exception('Unable to queue new recipe. Invalid input: %s',
                             recipe_data)
            raise BadParameter(unicode(ex))

        try:
            recipe_type = RecipeType.objects.get(pk=recipe_type_id)
        except RecipeType.DoesNotExist:
            raise Http404

        if configuration_dict:
            try:
                configuration = RecipeConfigurationV6(
                    configuration_dict, do_validate=True).get_configuration()
            except InvalidRecipeConfiguration as ex:
                message = 'Recipe configuration invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            recipe = Queue.objects.queue_new_recipe_for_user_v6(
                recipe_type,
                recipe_data_v6.get_data(),
                recipe_config=configuration)
        except (InvalidData, InvalidRecipeData) as err:
            return Response('Invalid recipe data: ' + unicode(err),
                            status=status.HTTP_400_BAD_REQUEST)
        except InactiveRecipeType as err:
            return Response('Inactive recipe type: ' + unicode(err),
                            status=status.HTTP_400_BAD_REQUEST)

        serializer = RecipeSerializerV6(recipe)
        recipe_url = reverse('recipe_details_view',
                             args=[recipe.id],
                             request=request)
        return Response(serializer.data,
                        status=status.HTTP_201_CREATED,
                        headers=dict(location=recipe_url))
Example #9
    def test_validate_dataset_members(self):
        """Tests calling DataSetManager.validate_data_list() """

        data_dict = copy.deepcopy(dataset_test_utils.DATA_DEFINITION)
        data_dict['files']['input_e'] = [self.file1.id]
        data_dict['files']['input_f'] = [self.file2.id, self.file3.id]
        data = DataV6(data=data_dict).get_data()
        validation = DataSetMember.objects.validate_data_list(dataset_def=self.dataset.get_definition(), data_list=[data])
        self.assertTrue(validation.is_valid)

        data_dict = copy.deepcopy(dataset_test_utils.DATA_DEFINITION)
        del data_dict['files']['input_e']
        data = DataV6(data=data_dict).get_data()

        # call test
        validation = DataSetMember.objects.validate_data_list(dataset_def=self.dataset.get_definition(), data_list=[data])
        self.assertFalse(validation.is_valid)
        self.assertEqual(validation.errors[0].name, 'PARAM_REQUIRED')
Example #10
    def test_convert_data_to_v1_json(self):
        """Tests calling convert_data_to_v1_json()"""

        # Try data with nothing set
        data = Data()
        interface = Interface()
        json = convert_data_to_v1_json(data, interface)
        DataV1(data=json.get_dict())  # Revalidate

        # Try data with a variety of values
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        data.add_value(FileValue('input_b', [1235, 1236]))
        data.add_value(JsonValue('input_c', 'hello'))
        data.add_value(JsonValue('input_d', 11.9))
        json = convert_data_to_v1_json(data, interface)
        self.assertDictEqual(
            json.get_dict(), {
                u'input_data': [{
                    u'name': u'input_d',
                    u'value': 11.9
                }, {
                    u'name': u'input_b',
                    u'file_ids': [1235, 1236]
                }, {
                    u'name': u'input_c',
                    u'value': u'hello'
                }, {
                    u'name': u'input_a',
                    u'file_id': 1234
                }],
                u'version': u'1.0'
            })
        DataV1(data=json.get_dict())  # Revalidate
        self.assertSetEqual(
            set(DataV6(json.get_dict()).get_data().values.keys()),
            {'input_a', 'input_b', 'input_c', 'input_d'})

        # Try data with a single file list that should be a directory
        data = Data()
        data.add_value(FileValue('input_a', [1234]))
        interface = Interface()
        file_param = FileParameter('input_a', [], True, True)
        interface.add_parameter(file_param)
        json = convert_data_to_v1_json(data, interface)

        self.assertDictEqual(
            json.get_dict(), {
                u'input_data': [{
                    u'name': u'input_a',
                    u'file_ids': [1234]
                }],
                u'version': u'1.0'
            })
Example #11
    def __init__(self, results_dict=None, do_validate=True):
        """Constructor

        :param results_dict: The dictionary representing the job results
        :type results_dict: dict
        :param do_validate: Whether to perform validation on the JSON schema
        :type do_validate: bool
        """

        if not results_dict:
            results_dict = {}

        self._results_data = DataV6(results_dict, do_validate=do_validate).get_data()
Example #12
    def test_init_validation(self):
        """Tests the validation done in __init__"""

        # Try minimal acceptable configuration
        DataV6(do_validate=True)

        # Invalid version
        data = {'version': 'BAD'}
        with self.assertRaises(InvalidData) as context:
            DataV6(data, do_validate=True)
        self.assertEqual(context.exception.error.name, 'INVALID_VERSION')

        # Valid v6 data
        data = {'version': '6', 'files': {'input_a': [1234], 'input_b': [1235, 1236]},
                'json': {'input_c': 999, 'input_d': {'greeting': 'hello'}}}
        DataV6(data=data, do_validate=True)

        # Conversion from v1 job data
        data = {'version': '1.0', 'input_data': [{'name': 'input_a', 'file_id': 1234},
                                                 {'name': 'input_b', 'file_ids': [1235, 1236]},
                                                 {'name': 'input_c', 'value': 'hello'}],
                'output_data': [{'name': 'geo_image', 'workspace_id': 12}]}
        DataV6(data=data, do_validate=True)

        # Conversion from v1 job results
        data = {'version': '1.0', 'output_data': [{'name': 'input_a', 'file_id': 1234},
                                                  {'name': 'input_b', 'file_ids': [1235, 1236]}]}
        DataV6(data=data, do_validate=True)

        # Conversion from v1 recipe data
        data = {'version': '1.0', 'input_data': [{'name': 'input_a', 'file_id': 1234},
                                                 {'name': 'input_b', 'file_ids': [1235, 1236]},
                                                 {'name': 'input_c', 'value': 'hello'}],
                'workspace_id': 12}
        DataV6(data=data, do_validate=True)
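For reference, the two shapes this test converts between, with the same logical content (values taken from the test above):

# v1 shape: a flat 'input_data' list with per-entry 'file_id'/'file_ids'/'value'
v1 = {'version': '1.0',
      'input_data': [{'name': 'input_a', 'file_id': 1234},
                     {'name': 'input_b', 'file_ids': [1235, 1236]},
                     {'name': 'input_c', 'value': 'hello'}]}

# v6 shape: 'files' and 'json' dicts keyed by parameter name
v6 = {'version': '6',
      'files': {'input_a': [1234], 'input_b': [1235, 1236]},
      'json': {'input_c': 'hello'}}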
Example #13
    def __init__(self, data=None):
        """Creates a job data object from the given dictionary. The general format is checked for correctness, but the
        actual input and output details are not checked for correctness against the job interface. If the data is
        invalid, a :class:`job.configuration.data.exceptions.InvalidData` will be thrown.

        :param data: The job data
        :type data: dict
        """

        if not data:
            data = {}

        self._new_data = DataV6(data, do_validate=True).get_data()
Example #14
    def post(self, request):
        """Creates a new job, places it on the queue, and returns the new job information in JSON form

        :param request: the HTTP POST request
        :type request: :class:`rest_framework.request.Request`
        :rtype: :class:`rest_framework.response.Response`
        :returns: the HTTP response to send back to the user
        """

        job_type_id = rest_util.parse_int(request, 'job_type_id')
        job_data = rest_util.parse_dict(request, 'input', {})
        configuration_dict = rest_util.parse_dict(request, 'configuration', required=False)
        configuration = None

        try:
            job_data_v6 = DataV6(job_data, do_validate=True)
        except InvalidData as ex:
            logger.exception('Unable to queue new job. Invalid input: %s', job_data)
            raise BadParameter(unicode(ex))

        try:
            job_type = JobType.objects.get(pk=job_type_id)
        except JobType.DoesNotExist:
            raise Http404

        if configuration_dict:
            try:
                existing = convert_config_to_v6_json(job_type.get_job_configuration())
                configuration = JobConfigurationV6(configuration_dict, existing=existing,
                                                   do_validate=True).get_configuration()
            except InvalidJobConfiguration as ex:
                message = 'Job type configuration invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            job_id = Queue.objects.queue_new_job_for_user_v6(job_type=job_type, job_data=job_data_v6.get_data(),
                                                             job_configuration=configuration)
            CommandMessageManager().send_messages(create_process_job_input_messages([job_id]))
        except InvalidData as err:
            logger.exception('Invalid job data.')
            return Response('Invalid job data: ' + unicode(err), status=status.HTTP_400_BAD_REQUEST)

        try:
            job_details = Job.objects.get_details(job_id)
        except Job.DoesNotExist:
            raise Http404

        serializer = JobDetailsSerializerV6(job_details)
        job_url = reverse('job_details_view', args=[job_id], request=request)
        return Response(serializer.data, status=status.HTTP_201_CREATED, headers=dict(location=job_url))
Example #15
    def _create_jobs(self, job_type_rev):
        """Creates the job models for the message

        :param job_type_rev: The job type revision with populated job_type model
        :type job_type_rev: :class:`job.models.JobTypeRevision`
        :returns: The list of job models created
        :rtype: list
        """

        from recipe.models import RecipeNode

        jobs = []

        # If this new job(s) is in a recipe that supersedes another recipe, find the corresponding superseded job(s)
        superseded_job = None
        if self.superseded_recipe_id:
            superseded_jobs = RecipeNode.objects.get_superseded_recipe_jobs(
                self.superseded_recipe_id, self.recipe_node_name)
            if len(superseded_jobs) == 1:
                superseded_job = superseded_jobs[0]

        try:
            with transaction.atomic():
                # Bulk create jobs
                for _ in xrange(self.count):
                    input_data = None
                    if self.input_data:
                        input_data = DataV6(self.input_data, do_validate=True).get_data()
                    job = Job.objects.create_job_v6(
                        job_type_rev,
                        self.event_id,
                        input_data=input_data,
                        root_recipe_id=self.root_recipe_id,
                        recipe_id=self.recipe_id,
                        batch_id=self.batch_id,
                        superseded_job=superseded_job)
                    jobs.append(job)
                Job.objects.bulk_create(jobs)

                if self.recipe_id:
                    # Bulk create recipe nodes
                    node_models = RecipeNode.objects.create_recipe_job_nodes(
                        self.recipe_id, self.recipe_node_name, jobs)
                    RecipeNode.objects.bulk_create(node_models)
        except InvalidData:
            msg = 'Job of type (%s, %s, %d) was given invalid input data. Message will not re-run.'
            logger.exception(msg, self.job_type_name, self.job_type_version,
                             self.job_type_rev_num)
            jobs = []

        return jobs
Example #16
    def add_global_value(self, value):
        """Adds the data value

        :param value: The data value to add
        :type value: :class:`data.data.value.DataValue`

        :raises :class:`data.data.exceptions.InvalidData`: If the value is a duplicate
        """

        if self.global_data:
            self.global_data.add_value(value)
        else:
            self.global_data = DataV6().get_data()
            self.global_data.add_value(value)
Example #17
    def test_create_dataset_members(self):
        """Tests calling DataSetManager.create_dataset_members() """

        data_dict = copy.deepcopy(dataset_test_utils.DATA_DEFINITION)
        data_dict['files']['input_e'] = [self.file1.id]
        data_dict['files']['input_f'] = [self.file2.id, self.file3.id]
        data = DataV6(data=data_dict).get_data()

        # call test
        dataset_members = DataSetMember.objects.create_dataset_members(dataset=self.dataset, data_list=[data])

        # Check results
        the_dataset_member = DataSetMember.objects.get(pk=dataset_members[0].id)
        self.assertDictEqual(the_dataset_member.data, data_dict)
Example #18
    def test_json(self):
        """Tests converting a ProcessJobInput message to and from JSON"""

        job = job_test_utils.create_job(num_exes=0,
                                        status='PENDING',
                                        input_file_size=None,
                                        input=DataV6().get_dict())

        # Create message
        message = ProcessJobInput()
        message.job_id = job.id

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = ProcessJobInput.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        job = Job.objects.get(id=job.id)
        self.assertEqual(len(new_message.new_messages), 1)
        self.assertEqual(new_message.new_messages[0].type, 'queued_jobs')
        self.assertFalse(new_message.new_messages[0].requeue)
        # Job should have input_file_size set to 0 (no input files)
        self.assertEqual(job.input_file_size, 0.0)
Example #19
    def test_json_new(self):
        """Tests coverting a CreateBatchRecipes message to and from JSON"""

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)
        sub_recipe = recipe_test_utils.create_recipe(
            recipe_type=sub_recipe_type)

        # Recipe with two jobs and one subrecipe (c -> d -> r)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type.name,
                        'recipe_type_revision': sub_recipe_type.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of two files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()
        src_file_a = storage_test_utils.create_file(file_name='input_a.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        src_file_b = storage_test_utils.create_file(file_name='input_b.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        data_list = []
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_a.id]
            },
            'json': {}
        }
        data_list.append(DataV6(data=data_dict).get_dict())
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_b.id]
            },
            'json': {}
        }
        data_list.append(DataV6(data=data_dict).get_dict())
        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)

        # Create the batch
        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition.forced_nodes = forced_nodes
        batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                              definition=batch_definition)

        # Create the message
        message = create_batch_recipes_message(batch.id)

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = CreateBatchRecipes.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        # Should be two create_recipes messages for the two files in the dataset
        self.assertEqual(len(new_message.new_messages), 2)

        # Verify each message has a different input
        src_ids = [src_file_a.id, src_file_b.id]
        for message in new_message.new_messages:
            self.assertEqual(message.type, 'create_recipes')
            self.assertEqual(message.create_recipes_type, 'new-recipe')
            file_id = DataV6(message.recipe_input_data).get_data().values['INPUT_IMAGE'].file_ids[0]
            self.assertTrue(file_id in src_ids)
            src_ids.remove(file_id)

        # Test re-processing existing recipes
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_a.id]
            },
            'json': {}
        }

        recipe_1 = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                   input=data_dict)
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_b.id]
            },
            'json': {}
        }

        recipe_2 = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                   input=data_dict)
        recipe_test_utils.process_recipe_inputs([recipe_1.id, recipe_2.id])

        batch_definition_2 = BatchDefinition()
        batch_definition_2.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition_2.forced_nodes = forced_nodes
        batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                definition=batch_definition_2)

        # Create the message
        message = create_batch_recipes_message(batch_2.id)

        # Convert message to JSON and back, and then execute
        message_json_dict_2 = message.to_json()
        new_message_2 = CreateBatchRecipes.from_json(message_json_dict_2)
        result_2 = new_message_2.execute()

        self.assertTrue(result_2)
        self.assertEqual(len(new_message_2.new_messages), 1)
        message = new_message_2.new_messages[0]
        self.assertEqual(message.type, 'create_recipes')
        self.assertEqual(message.create_recipes_type, 'reprocess')
        self.assertSetEqual(set(message.root_recipe_ids),
                            {recipe_1.id, recipe_2.id})
Example #20
    def build_data_list(self,
                        template,
                        data_started=None,
                        data_ended=None,
                        created_started=None,
                        created_ended=None,
                        source_started=None,
                        source_ended=None,
                        source_sensor_classes=None,
                        source_sensors=None,
                        source_collections=None,
                        source_tasks=None,
                        mod_started=None,
                        mod_ended=None,
                        job_type_ids=None,
                        job_type_names=None,
                        job_ids=None,
                        is_published=None,
                        is_superseded=None,
                        file_names=None,
                        file_name_search=None,
                        job_outputs=None,
                        recipe_ids=None,
                        recipe_type_ids=None,
                        recipe_nodes=None,
                        batch_ids=None,
                        order=None,
                        file_type=None,
                        media_type=None):
        """Builds a list of data dictionaries from a template and file filters

        :param template: The template to fill with files found through filters
        :type template: dict
        :param data_started: Query files where data started after this time.
        :type data_started: :class:`datetime.datetime`
        :param data_ended: Query files where data ended before this time.
        :type data_ended: :class:`datetime.datetime`
        :param created_started: Query files created after this time.
        :type created_started: :class:`datetime.datetime`
        :param created_ended: Query files created before this time.
        :type created_ended: :class:`datetime.datetime`
        :param source_started: Query files where source collection started after this time.
        :type source_started: :class:`datetime.datetime`
        :param source_ended: Query files where source collection ended before this time.
        :type source_ended: :class:`datetime.datetime`
        :param source_sensor_classes: Query files with the given source sensor classes.
        :type source_sensor_classes: :func:`list`
        :param source_sensors: Query files with the given source sensors.
        :type source_sensors: :func:`list`
        :param source_collections: Query files with the given source collections.
        :type source_collections: :func:`list`
        :param source_tasks: Query files with the given source tasks.
        :type source_tasks: :func:`list`
        :param mod_started: Query files where the last modified date is after this time.
        :type mod_started: :class:`datetime.datetime`
        :param mod_ended: Query files where the last modified date is before this time.
        :type mod_ended: :class:`datetime.datetime`
        :param job_type_ids: Query files with jobs with the given type identifier.
        :type job_type_ids: :func:`list`
        :param job_type_names: Query files with jobs with the given type name.
        :type job_type_names: :func:`list`
        :keyword job_ids: Query files with the given job ids
        :type job_ids: :func:`list`
        :param is_published: Query files flagged as currently exposed for publication.
        :type is_published: bool
        :param is_superseded: Query files that have/have not been superseded.
        :type is_superseded: bool
        :param file_names: Query files with the given file names.
        :type file_names: :func:`list`
        :param file_name_search: Query files with the given string in their file name.
        :type file_name_search: string
        :keyword job_outputs: Query files with the given job outputs
        :type job_outputs: :func:`list`
        :keyword recipe_ids: Query files with the given recipe ids
        :type recipe_ids: :func:`list`
        :keyword recipe_nodes: Query files with the given recipe nodes
        :type recipe_nodes: :func:`list`
        :keyword recipe_type_ids: Query files with the given recipe types
        :type recipe_type_ids: :func:`list`
        :keyword batch_ids: Query files with batches with the given identifiers.
        :type batch_ids: :func:`list`
        :param order: A list of fields to control the sort order.
        :type order: :func:`list`
        :keyword file_type: Query files with a given file type
        :type file_type: string
        :keyword media_type: Query files with a given media type
        :type media_type: string
        :returns: The list of data objects created from the template
        :rtype: :func:`list`
        """

        files = ScaleFile.objects.filter_files(
            data_started=data_started,
            data_ended=data_ended,
            created_started=created_started,
            created_ended=created_ended,
            source_started=source_started,
            source_ended=source_ended,
            source_sensor_classes=source_sensor_classes,
            source_sensors=source_sensors,
            source_collections=source_collections,
            source_tasks=source_tasks,
            mod_started=mod_started,
            mod_ended=mod_ended,
            job_type_ids=job_type_ids,
            job_type_names=job_type_names,
            job_ids=job_ids,
            file_names=file_names,
            file_name_search=file_name_search,
            job_outputs=job_outputs,
            recipe_ids=recipe_ids,
            recipe_type_ids=recipe_type_ids,
            recipe_nodes=recipe_nodes,
            batch_ids=batch_ids,
            order=order,
            file_type=file_type,
            media_type=media_type)

        data_list = []
        try:
            for f in files:
                entry = copy.deepcopy(template)
                file_params = entry['files']
                for p in file_params:
                    if file_params[p] == 'FILE_VALUE':
                        file_params[p] = [f.id]
                data_list.append(
                    DataV6(data=entry, do_validate=True).get_data())
        except (KeyError, TypeError) as ex:
            raise InvalidData('INVALID_TEMPLATE',
                              "Specified template is invalid: %s" % ex)

        return data_list
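A standalone illustration of the template mechanism at the end of build_data_list: each entry starts as a deep copy of the template, and every file parameter whose value is the literal 'FILE_VALUE' placeholder is replaced with a matched file's id. The template shape follows the DataV6 dicts used throughout these examples; the ids are made up.

import copy

template = {'version': '6', 'files': {'INPUT_IMAGE': 'FILE_VALUE'}, 'json': {}}

data_list = []
for file_id in [1234, 1235]:
    entry = copy.deepcopy(template)
    file_params = entry['files']
    for p in file_params:
        if file_params[p] == 'FILE_VALUE':
            file_params[p] = [file_id]  # one data dict per matched file
    data_list.append(entry)

print(data_list[0]['files'])  # {'INPUT_IMAGE': [1234]}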
Example #21
    def test_inputmap(self):
        """Tests validating a batch configuration inputMap against a dataset definition"""
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()
        src_file_a = storage_test_utils.create_file(file_name='input_a.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        src_file_b = storage_test_utils.create_file(file_name='input_b.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        data_list = []
        data_dict = {
            'version': '6',
            'files': {
                'FILE_INPUT': [src_file_a.id]
            },
            'json': {}
        }
        data_list.append(DataV6(data=data_dict).get_dict())
        data_dict = {
            'version': '6',
            'files': {
                'FILE_INPUT': [src_file_b.id]
            },
            'json': {}
        }
        data_list.append(DataV6(data=data_dict).get_dict())
        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)

        batch_def = BatchDefinition()
        batch_def.dataset = the_dataset.id
        batch = batch_test_utils.create_batch(definition=batch_def)

        json_dict = {
            'version': '6',
            'priority': 100,
            'inputMap': [{
                'input': 'FILE_INPUT',
                'datasetParameter': 'INPUT_IMAGE'
            }]
        }
        json = BatchConfigurationV6(configuration=json_dict)
        configuration = json.get_configuration()
        configuration.validate(batch)

        json_dict = {
            'version': '6',
            'priority': 100,
            'inputMap': [{
                'input': 'FILE_INPUT',
                'datasetParameter': 'FILE_INPUT'
            }]
        }
        json = BatchConfigurationV6(configuration=json_dict)
        configuration = json.get_configuration()
        self.assertRaises(InvalidConfiguration, configuration.validate, batch)
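What the inputMap validation above effectively enforces is that every mapped datasetParameter names a real parameter of the dataset; a minimal sketch of that rule (the check itself is an assumption, only the names come from the test):

# Parameters defined by the dataset above, and the two mappings from the test
dataset_params = {'INPUT_IMAGE'}
input_maps = [
    {'input': 'FILE_INPUT', 'datasetParameter': 'INPUT_IMAGE'},  # validates
    {'input': 'FILE_INPUT', 'datasetParameter': 'FILE_INPUT'},   # rejected
]
for entry in input_maps:
    if entry['datasetParameter'] not in dataset_params:
        print('Invalid configuration: dataset has no parameter named %s'
              % entry['datasetParameter'])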
Example #22
    def test_execute_reprocess(self):
        """Tests calling CreateBatchRecipes.execute() successfully when re-processing recipes"""

        # Importing module here to patch the max recipe num
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Recipe with two jobs and one subrecipe (c -> d -> r)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type.name,
                        'recipe_type_revision': sub_recipe_type.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()

        # Create 6 files & recipes to go along
        src_file_ids = []
        recipe_ids = []
        data_list = []
        for i in range(0, 6):
            file_name = 'input_%d.png' % i
            src_file = storage_test_utils.create_file(file_name=file_name,
                                                      file_type='SOURCE',
                                                      media_type='image/png',
                                                      file_size=10,
                                                      data_type_tags=['type'],
                                                      file_path='the_path',
                                                      workspace=workspace)
            src_file_ids.append(src_file.id)
            data_dict = {
                'version': '6',
                'files': {
                    'INPUT_IMAGE': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
            recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                     input=data_dict)
            recipe_ids.append(recipe.id)

        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)
        recipe_test_utils.process_recipe_inputs(recipe_ids)

        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        batch_definition.supersedes = True
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition.forced_nodes = forced_nodes
        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=batch_definition)

        # Create message
        message = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message.batch_id = new_batch.id

        # Execute message
        result = message.execute()
        self.assertTrue(result)
        self.assertEqual(len(message.new_messages), 2)

        batch_recipes_message = message.new_messages[0]
        create_recipes_message = message.new_messages[1]
        self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
        self.assertFalse(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(batch_recipes_message.current_recipe_id,
                         recipe_ids[1])

        # Test the create_recipes_message
        self.assertEqual(create_recipes_message.type, 'create_recipes')
        self.assertSetEqual(
            set(create_recipes_message.root_recipe_ids), {
                recipe_ids[5], recipe_ids[4], recipe_ids[3], recipe_ids[2],
                recipe_ids[1]
            })
        self.assertEqual(create_recipes_message.batch_id, new_batch.id)
        self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
        self.assertEqual(create_recipes_message.recipe_type_name,
                         new_batch.recipe_type.name)
        self.assertEqual(create_recipes_message.recipe_type_rev_num,
                         new_batch.recipe_type.revision_num)

        # Execute next create_batch_recipes messages
        result = batch_recipes_message.execute()
        self.assertTrue(result)

        # Should only have one last create_recipes message
        self.assertEqual(len(batch_recipes_message.new_messages), 1)
        create_recipes_message = batch_recipes_message.new_messages[0]
        self.assertTrue(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(create_recipes_message.type, 'create_recipes')
        self.assertSetEqual(set(create_recipes_message.root_recipe_ids),
                            {recipe_ids[0]})
        self.assertEqual(create_recipes_message.batch_id, new_batch.id)
        self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
        self.assertEqual(create_recipes_message.recipe_type_name,
                         new_batch.recipe_type.name)
        self.assertEqual(create_recipes_message.recipe_type_rev_num,
                         new_batch.recipe_type.revision_num)

        # Test setting supersedes to false and make sure we don't have any reprocess messages
        batch_definition_2 = BatchDefinition()
        batch_definition_2.dataset = the_dataset.id
        batch_definition_2.supersedes = False
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition_2.forced_nodes = forced_nodes
        new_batch_2 = batch_test_utils.create_batch(
            recipe_type=recipe_type, definition=batch_definition_2)

        # Create message
        message_2 = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message_2.batch_id = new_batch_2.id
        # Execute message
        result_2 = message_2.execute()
        self.assertTrue(result_2)
        self.assertEqual(len(message_2.new_messages), 6)

        batch_recipes_message_2 = message_2.new_messages[0]
        self.assertEqual(batch_recipes_message_2.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message_2.batch_id, new_batch_2.id)
        self.assertFalse(batch_recipes_message_2.is_prev_batch_done)

        # Make sure we've got 5 create-new-recipe messages
        for msg in message_2.new_messages[1:]:
            self.assertEqual(msg.create_recipes_type, 'new-recipe')
            self.assertEqual(msg.batch_id, new_batch_2.id)
            self.assertEqual(msg.event_id, new_batch_2.event_id)
            self.assertEqual(msg.recipe_type_name,
                             new_batch_2.recipe_type.name)
            self.assertEqual(msg.recipe_type_rev_num,
                             new_batch_2.recipe_type.revision_num)

        # Execute next create_batch_recipes messages
        result_3 = batch_recipes_message_2.execute()
        self.assertTrue(result_3)

        # Should only have one last create_recipes message
        self.assertEqual(len(batch_recipes_message_2.new_messages), 1)
        create_recipes_message_3 = batch_recipes_message_2.new_messages[0]
        self.assertTrue(batch_recipes_message_2.is_prev_batch_done)
        self.assertEqual(create_recipes_message_3.type, 'create_recipes')
        self.assertEqual(create_recipes_message_3.batch_id, new_batch_2.id)
        self.assertEqual(create_recipes_message_3.event_id,
                         new_batch_2.event_id)
        self.assertEqual(create_recipes_message_3.recipe_type_name,
                         new_batch_2.recipe_type.name)
        self.assertEqual(create_recipes_message_3.recipe_type_rev_num,
                         new_batch_2.recipe_type.revision_num)
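The message counts in this test follow from MAX_RECIPE_NUM = 5 applied to six dataset members: each execution covers at most five recipes and emits a continuation create_batch_recipes message until the batch is done. A pure-arithmetic sketch of that chunking (not Scale code):

MAX_RECIPE_NUM = 5
recipe_ids = list(range(6))  # six recipes, as in the test above

chunks = [recipe_ids[i:i + MAX_RECIPE_NUM]
          for i in range(0, len(recipe_ids), MAX_RECIPE_NUM)]
print([len(c) for c in chunks])  # [5, 1]: one full batch, then the remainder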
Example #23
    def test_execute_forced_nodes(self):
        """Tests calling CreateBatchRecipes.execute() when only specific nodes are forced"""

        # Importing module here to patch the max recipe num
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)
        sub_recipe = recipe_test_utils.create_recipe(
            recipe_type=sub_recipe_type)

        # Recipe with two jobs and one subrecipe (c -> d -> r)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type.name,
                        'recipe_type_revision': sub_recipe_type.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()

        # Create 6 files and matching recipes
        src_file_ids = []
        recipe_ids = []
        data_list = []
        for i in range(0, 6):
            file_name = 'input_%d.png' % i
            src_file = storage_test_utils.create_file(file_name=file_name,
                                                      file_type='SOURCE',
                                                      media_type='image/png',
                                                      file_size=10,
                                                      data_type_tags=['type'],
                                                      file_path='the_path',
                                                      workspace=workspace)
            src_file_ids.append(src_file.id)
            data_dict = {
                'version': '6',
                'files': {
                    'INPUT_IMAGE': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
            recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                     input=data_dict)
            recipe_ids.append(recipe.id)
        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)
        recipe_test_utils.process_recipe_inputs(recipe_ids)

        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.add_node('node_d')
        forced_nodes.all_nodes = False
        batch_definition.forced_nodes = forced_nodes

        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=batch_definition)
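        # The example ends here as captured. A hedged completion sketch,
        # following the message flow shown in Example #29 below: the exact
        # message counts expected when only node_d is forced are an
        # assumption, so only the generic execute checks are sketched.
        message = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message.batch_id = new_batch.id
        result = message.execute()
        self.assertTrue(result)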
Example #24
    def setUp(self):
        django.setup()

        self.workspace = storage_test_utils.create_workspace()
        self.file = storage_test_utils.create_file()

        configuration = {
            'version': '1.0',
            'condition': {
                'media_type': 'text/plain',
            },
            'data': {
                'input_data_name': 'Recipe Input',
                'workspace_name': self.workspace.name,
            },
        }
        self.event = trigger_test_utils.create_trigger_event(
            trigger_type='BATCH')

        manifest = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'test-job',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': 'My first job',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 3600,
                'interface': {
                    'command': 'my_command args',
                    'inputs': {
                        'files': [{
                            'name': 'TEST_INPUT_1',
                            'type': 'file',
                            'media_types': ['text/plain'],
                        }]
                    },
                    'outputs': {
                        'files': [{
                            'name': 'TEST_OUTPUT_1',
                            'type': 'files',
                            'media_type': 'image/tiff',
                        }],
                    }
                }
            }
        }
        self.seed_job_type = job_test_utils.create_seed_job_type(
            manifest=manifest)

        self.definition_v6 = {
            'version': '6',
            'input': {
                'files': [{
                    'name': 'TEST_INPUT_1',
                    'media_types': ['text/plain'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'job-a': {
                    'dependencies': [],
                    'input': {
                        'TEST_INPUT_1': {
                            'type': 'recipe',
                            'input': 'TEST_INPUT_1'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': self.seed_job_type.name,
                        'job_type_version': self.seed_job_type.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }

        self.recipe_type_v6 = recipe_test_utils.create_recipe_type_v6(
            definition=self.definition_v6)
        self.recipe_type_rev_v6 = RecipeTypeRevision.objects.get(
            recipe_type_id=self.recipe_type_v6.id)

        manifest_2 = {
            'seedVersion': '1.0.0',
            'job': {
                'name': 'test-job-2',
                'jobVersion': '1.0.0',
                'packageVersion': '1.0.0',
                'title': 'My second job',
                'maintainer': {
                    'name': 'John Doe',
                    'email': '*****@*****.**'
                },
                'timeout': 3600,
                'interface': {
                    'command': 'my_command args',
                    'inputs': {
                        'files': [{
                            'name': 'TEST_INPUT_2',
                            'type': 'file',
                            'media_types': ['image/tiff'],
                        }]
                    },
                    'outputs': {
                        'files': [{
                            'name': 'TEST_OUTPUT_2',
                            'type': 'file',
                            'media_type': 'image/png',
                        }],
                    }
                }
            }
        }
        self.seed_job_type_2 = job_test_utils.create_seed_job_type(
            manifest=manifest_2)

        self.definition_2_v6 = {
            'version': '6',
            'input': {
                'files': [{
                    'name': 'TEST_INPUT_1',
                    'media_types': ['text/plain'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'job-a': {
                    'dependencies': [],
                    'input': {
                        'TEST_INPUT_1': {
                            'type': 'recipe',
                            'input': 'TEST_INPUT_1'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': self.seed_job_type.name,
                        'job_type_version': self.seed_job_type.version,
                        'job_type_revision': 1,
                    }
                },
                'job-b': {
                    'dependencies': [{
                        'name': 'job-a'
                    }],
                    'input': {
                        'TEST_INPUT_2': {
                            'type': 'dependency',
                            'node': 'job-a',
                            'output': 'TEST_OUTPUT_1'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': self.seed_job_type_2.name,
                        'job_type_version': self.seed_job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }

        self.data_dict = {
            'version': '1.0',
            'input_data': [{
                'name': 'TEST_INPUT_1',
                'file_id': self.file.id,
            }],
            'workspace_id': self.workspace.id,
        }

        self.input_data = DataV6(self.data_dict, do_validate=False).get_data()
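For context, a minimal sketch of how a test might use these fixtures, mirroring the create_recipe_v6 call in Example #26; the test name and assertion are hypothetical, and Recipe is assumed to be imported from recipe.models as in Example #29.

    def test_create_recipe_v6_sketch(self):
        """Hypothetical illustration of how the fixtures above could be used"""
        # Build a recipe from the v6 recipe type revision and the parsed input data
        recipe = Recipe.objects.create_recipe_v6(self.recipe_type_rev_v6,
                                                 self.event.id,
                                                 input_data=self.input_data)
        recipe.save()
        self.assertIsNotNone(recipe.id)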
Example #25
    def setUp(self):
        django.setup()

        self.workspace = storage_test_utils.create_workspace()
        self.file = storage_test_utils.create_file()

        configuration = {
            'version': '1.0',
            'condition': {
                'media_type': 'text/plain',
            },
            'data': {
                'input_data_name': 'Recipe Input',
                'workspace_name': self.workspace.name,
            },
        }
        self.rule = trigger_test_utils.create_trigger_rule(configuration=configuration)
        self.event = trigger_test_utils.create_trigger_event(rule=self.rule)

        interface_1 = {
            'version': '1.0',
            'command': 'my_command',
            'command_arguments': 'args',
            'input_data': [{
                'name': 'Test Input 1',
                'type': 'file',
                'media_types': ['text/plain'],
            }],
            'output_data': [{
                'name': 'Test Output 1',
                'type': 'files',
                'media_type': 'image/png',
            }],
        }
        self.job_type_1 = job_test_utils.create_job_type(interface=interface_1)

        self.definition_1 = {
            'version': '1.0',
            'input_data': [{
                'name': 'Recipe Input',
                'type': 'file',
                'media_types': ['text/plain'],
            }],
            'jobs': [{
                'name': 'Job 1',
                'job_type': {
                    'name': self.job_type_1.name,
                    'version': self.job_type_1.version,
                },
                'recipe_inputs': [{
                    'recipe_input': 'Recipe Input',
                    'job_input': 'Test Input 1',
                }],
            }],
        }

        self.recipe_type = recipe_test_utils.create_recipe_type(definition=self.definition_1, trigger_rule=self.rule)
        self.recipe_type_rev = RecipeTypeRevision.objects.get(recipe_type_id=self.recipe_type.id)

        self.interface_2 = {
            'version': '1.0',
            'command': 'my_command',
            'command_arguments': 'args',
            'input_data': [{
                'name': 'Test Input 2',
                'type': 'files',
                'media_types': ['image/tiff'],
            }],
            'output_data': [{
                'name': 'Test Output 2',
                'type': 'file',
            }],
        }
        self.job_type_2 = job_test_utils.create_job_type(interface=self.interface_2)
        self.definition_2 = {
            'version': '1.0',
            'input_data': [{
                'name': 'Recipe Input',
                'type': 'file',
                'media_types': ['text/plain'],
            }],
            'jobs': [{
                'name': 'Job 1',
                'job_type': {
                    'name': self.job_type_1.name,
                    'version': self.job_type_1.version,
                },
                'recipe_inputs': [{
                    'recipe_input': 'Recipe Input',
                    'job_input': 'Test Input 1',
                }]
            }, {
                'name': 'Job 2',
                'job_type': {
                    'name': self.job_type_2.name,
                    'version': self.job_type_2.version,
                },
                'dependencies': [{
                    'name': 'Job 1',
                    'connections': [{
                        'output': 'Test Output 1',
                        'input': 'Test Input 2',
                    }],
                }],
            }],
        }

        self.data_dict = {
            'version': '1.0',
            'input_data': [{
                'name': 'Recipe Input',
                'file_id': self.file.id,
            }],
            'workspace_id': self.workspace.id,
        }

        self.input_data = DataV6(self.data_dict, do_validate=False).get_data()
Example #26
    def test_schedule_date_range_data_full(self, mock_msg_mgr):
        """Tests calling BatchManager.schedule_recipes() for a batch with a data date range restriction"""
        file1 = storage_test_utils.create_file()
        file1.data_started = datetime.datetime(2016, 1, 1, tzinfo=utc)
        file1.save()
        data1 = {
            'version': '1.0',
            'input_data': [{
                'name': 'Recipe Input',
                'file_id': file1.id,
            }],
            'workspace_id': self.workspace.id,
        }
        input_data_1 = DataV6(data1).get_data()
        recipe = Recipe.objects.create_recipe_v6(self.recipe_type_rev, self.event.id, input_data=input_data_1)
        recipe.save()
        recipe_file_1 = RecipeInputFile()
        recipe_file_1.recipe_id = recipe.id
        recipe_file_1.input_file_id = file1.id
        recipe_file_1.recipe_input = 'Recipe Input'
        recipe_file_1.created = recipe.created
        recipe_file_1.save()

        file2 = storage_test_utils.create_file()
        file2.data_started = datetime.datetime(2016, 2, 1, tzinfo=utc)
        file2.data_ended = datetime.datetime(2016, 2, 10, tzinfo=utc)
        file2.save()
        data2 = {
            'version': '1.0',
            'input_data': [{
                'name': 'Recipe Input',
                'file_id': file2.id,
            }],
            'workspace_id': self.workspace.id,
        }
        input_data_2 = DataV6(data2).get_data()
        recipe2 = Recipe.objects.create_recipe_v6(self.recipe_type_rev, self.event.id, input_data=input_data_2)
        recipe2.save()
        recipe_file_2 = RecipeInputFile()
        recipe_file_2.recipe_id = recipe2.id
        recipe_file_2.input_file_id = file2.id
        recipe_file_2.recipe_input = 'Recipe Input'
        recipe_file_2.created = recipe2.created
        recipe_file_2.save()

        file3 = storage_test_utils.create_file()
        file3.data_ended = datetime.datetime(2016, 3, 1, tzinfo=utc)
        file3.save()
        data3 = {
            'version': '1.0',
            'input_data': [{
                'name': 'Recipe Input',
                'file_id': file3.id,
            }],
            'workspace_id': self.workspace.id,
        }
        input_data_3 = DataV6(data3).get_data()
        recipe3 = Recipe.objects.create_recipe_v6(self.recipe_type_rev, self.event.id, input_data=input_data_3)
        recipe3.save()

        recipe_test_utils.edit_recipe_type(self.recipe_type, self.definition_2)

        definition = {
            'date_range': {
                'type': 'data',
                'started': '2016-02-01T00:00:00.000Z',
                'ended': '2016-02-10T00:00:00.000Z',
            },
        }
        batch = batch_test_utils.create_batch_old(recipe_type=self.recipe_type, definition=definition)

        Batch.objects.schedule_recipes(batch.id)

        batch = Batch.objects.get(pk=batch.id)
        self.assertEqual(batch.status, 'CREATED')
        self.assertEqual(batch.created_count, 1)
        self.assertEqual(batch.total_count, 1)
Example #27
    def create_all_v6(self, request):
        """Creates or edits a dataset - including the dataset members - and returns a link to the detail URL"""

        title = rest_util.parse_string(request, 'title', required=False)
        description = rest_util.parse_string(request,
                                             'description',
                                             required=False)
        definition = rest_util.parse_dict(request, 'definition', required=True)

        template = rest_util.parse_dict(request,
                                        'data_template',
                                        required=False)
        dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

        # file filters
        data_started = rest_util.parse_timestamp(request,
                                                 'data_started',
                                                 required=False)
        data_ended = rest_util.parse_timestamp(request,
                                               'data_ended',
                                               required=False)
        rest_util.check_time_range(data_started, data_ended)

        source_started = rest_util.parse_timestamp(request,
                                                   'source_started',
                                                   required=False)
        source_ended = rest_util.parse_timestamp(request,
                                                 'source_ended',
                                                 required=False)
        rest_util.check_time_range(source_started, source_ended)

        source_sensor_classes = rest_util.parse_string_list(
            request, 'source_sensor_class', required=False)
        source_sensors = rest_util.parse_string_list(request,
                                                     'source_sensor',
                                                     required=False)
        source_collections = rest_util.parse_string_list(request,
                                                         'source_collection',
                                                         required=False)
        source_tasks = rest_util.parse_string_list(request,
                                                   'source_task',
                                                   required=False)

        mod_started = rest_util.parse_timestamp(request,
                                                'modified_started',
                                                required=False)
        mod_ended = rest_util.parse_timestamp(request,
                                              'modified_ended',
                                              required=False)
        rest_util.check_time_range(mod_started, mod_ended)

        job_type_ids = rest_util.parse_int_list(request,
                                                'job_type_id',
                                                required=False)
        job_type_names = rest_util.parse_string_list(request,
                                                     'job_type_name',
                                                     required=False)
        job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
        file_names = rest_util.parse_string_list(request,
                                                 'file_name',
                                                 required=False)
        job_outputs = rest_util.parse_string_list(request,
                                                  'job_output',
                                                  required=False)
        recipe_ids = rest_util.parse_int_list(request,
                                              'recipe_id',
                                              required=False)
        recipe_type_ids = rest_util.parse_int_list(request,
                                                   'recipe_type_id',
                                                   required=False)
        recipe_nodes = rest_util.parse_string_list(request,
                                                   'recipe_node',
                                                   required=False)
        batch_ids = rest_util.parse_int_list(request,
                                             'batch_id',
                                             required=False)

        order = rest_util.parse_string_list(request, 'order', required=False)

        data = rest_util.parse_dict_list(request, 'data', required=False)
        data_list = []

        # validate the definition & create the dataset
        try:
            dataset_def = DataSetDefinitionV6(
                definition=definition, do_validate=True).get_definition()
        except InvalidDataSetDefinition as ex:
            message = 'DataSet definition is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.create_dataset_v6(
                dataset_def, title=title, description=description)
        except Exception as ex:
            message = 'Unable to create new dataset'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        try:
            dataset = DataSet.objects.get_details_v6(dataset.id)
        except DataSet.DoesNotExist:
            raise Http404

        if not data and not template:
            url = reverse('dataset_details_view',
                          args=[dataset.id],
                          request=request)
            serializer = DataSetDetailsSerializerV6(dataset)

            return Response(serializer.data,
                            status=status.HTTP_201_CREATED,
                            headers=dict(location=url))

        # Try to find the data
        if data:
            try:
                for d in data:
                    # Parse and validate each provided data dict; use a new
                    # name rather than rebinding 'data' inside its own loop
                    parsed_data = DataV6(data=d, do_validate=True).get_data()
                    data_list.append(parsed_data)
            except InvalidData as ex:
                message = 'Data is invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))
        elif template:
            try:
                data_list = DataSetMember.objects.build_data_list(
                    template=template,
                    data_started=data_started,
                    data_ended=data_ended,
                    source_started=source_started,
                    source_ended=source_ended,
                    source_sensor_classes=source_sensor_classes,
                    source_sensors=source_sensors,
                    source_collections=source_collections,
                    source_tasks=source_tasks,
                    mod_started=mod_started,
                    mod_ended=mod_ended,
                    job_type_ids=job_type_ids,
                    job_type_names=job_type_names,
                    job_ids=job_ids,
                    file_names=file_names,
                    job_outputs=job_outputs,
                    recipe_ids=recipe_ids,
                    recipe_type_ids=recipe_type_ids,
                    recipe_nodes=recipe_nodes,
                    batch_ids=batch_ids,
                    order=order)
            except InvalidData as ex:
                message = 'Data is invalid'
                logger.exception(message)
                raise BadParameter('%s: %s' % (message, unicode(ex)))

        if not data_list:
            resp_dict = {
                'No Results':
                'No files found from filters and/or no data provided'
            }
            return Response(resp_dict)

        validation = DataSetMember.objects.validate_data_list(
            dataset_def=dataset_def, data_list=data_list)
        members = []
        if validation.is_valid and not dry_run:

            members = DataSetMember.objects.create_dataset_members(
                dataset=dataset, data_list=data_list)
            dataset = DataSet.objects.get(id=dataset.id)

            serializer = DataSetDetailsSerializerV6(dataset)
            url = reverse('dataset_details_view',
                          args=[dataset.id],
                          request=request)
            return Response(serializer.data,
                            status=status.HTTP_201_CREATED,
                            headers=dict(location=url))
        elif not validation.is_valid:
            raise BadParameter('%s: %s' %
                               ('Error(s) validating data against dataset',
                                [e.to_dict() for e in validation.errors]))

        resp_dict = []
        for dl in data_list:
            resp_dict.append(convert_data_to_v6_json(dl).get_dict())
        return Response(resp_dict)
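For reference, a hedged sketch of the kind of request body this endpoint parses; the field names come from the rest_util calls above, while every value (including the file id) is an illustrative assumption.

    # Illustrative request body for create_all_v6; field names are taken from
    # the rest_util.parse_* calls above, and all values are made-up examples.
    example_body = {
        'title': 'my-dataset',
        'description': 'A dataset built from explicit data entries',
        'definition': {
            'parameters': {
                'files': [{'name': 'INPUT_IMAGE',
                           'media_types': ['image/png'],
                           'required': True,
                           'multiple': False}],
                'json': []
            }
        },
        'data': [{
            'version': '6',
            'files': {'INPUT_IMAGE': [1234]},  # hypothetical file id
            'json': {}
        }],
        'dry_run': True
    }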
Example #28
    def post_v6(self, request, dataset_id):
        """ Adds a datsetmember to the dataset

        :param request: the HTTP request
        :type request: :class:`rest_framework.request.Request`
        :param dataset_id: The id of the dataset
        :type dataset_id: int encoded as a str
        :rtype: :class:`rest_framework.response.Response`
        :returns: the HTTP response to send back to the user
        """

        template = rest_util.parse_dict(request,
                                        'data_template',
                                        required=False)
        dry_run = rest_util.parse_bool(request, 'dry_run', default_value=False)

        # file filters
        data_started = rest_util.parse_timestamp(request,
                                                 'data_started',
                                                 required=False)
        data_ended = rest_util.parse_timestamp(request,
                                               'data_ended',
                                               required=False)
        rest_util.check_time_range(data_started, data_ended)

        created_started = rest_util.parse_timestamp(request,
                                                    'created_started',
                                                    required=False)
        created_ended = rest_util.parse_timestamp(request,
                                                  'created_ended',
                                                  required=False)
        rest_util.check_time_range(created_started, created_ended)

        source_started = rest_util.parse_timestamp(request,
                                                   'source_started',
                                                   required=False)
        source_ended = rest_util.parse_timestamp(request,
                                                 'source_ended',
                                                 required=False)
        rest_util.check_time_range(source_started, source_ended)

        source_sensor_classes = rest_util.parse_string_list(
            request, 'source_sensor_class', required=False)
        source_sensors = rest_util.parse_string_list(request,
                                                     'source_sensor',
                                                     required=False)
        source_collections = rest_util.parse_string_list(request,
                                                         'source_collection',
                                                         required=False)
        source_tasks = rest_util.parse_string_list(request,
                                                   'source_task',
                                                   required=False)

        mod_started = rest_util.parse_timestamp(request,
                                                'modified_started',
                                                required=False)
        mod_ended = rest_util.parse_timestamp(request,
                                              'modified_ended',
                                              required=False)
        rest_util.check_time_range(mod_started, mod_ended)

        job_type_ids = rest_util.parse_int_list(request,
                                                'job_type_id',
                                                required=False)
        job_type_names = rest_util.parse_string_list(request,
                                                     'job_type_name',
                                                     required=False)
        job_ids = rest_util.parse_int_list(request, 'job_id', required=False)
        file_names = rest_util.parse_string_list(request,
                                                 'file_name',
                                                 required=False)
        job_outputs = rest_util.parse_string_list(request,
                                                  'job_output',
                                                  required=False)
        recipe_ids = rest_util.parse_int_list(request,
                                              'recipe_id',
                                              required=False)
        recipe_type_ids = rest_util.parse_int_list(request,
                                                   'recipe_type_id',
                                                   required=False)
        recipe_nodes = rest_util.parse_string_list(request,
                                                   'recipe_node',
                                                   required=False)
        batch_ids = rest_util.parse_int_list(request,
                                             'batch_id',
                                             required=False)

        order = rest_util.parse_string_list(request, 'order', required=False)

        data = rest_util.parse_dict_list(request, 'data', required=False)
        data_list = []

        try:
            if data:
                for d in data:
                    # Parse and validate each provided data dict; use a new
                    # name rather than rebinding 'data' inside its own loop
                    parsed_data = DataV6(data=d, do_validate=True).get_data()
                    data_list.append(parsed_data)
            else:
                data_list = DataSetMember.objects.build_data_list(
                    template=template,
                    data_started=data_started,
                    data_ended=data_ended,
                    created_started=created_started,
                    created_ended=created_ended,
                    source_started=source_started,
                    source_ended=source_ended,
                    source_sensor_classes=source_sensor_classes,
                    source_sensors=source_sensors,
                    source_collections=source_collections,
                    source_tasks=source_tasks,
                    mod_started=mod_started,
                    mod_ended=mod_ended,
                    job_type_ids=job_type_ids,
                    job_type_names=job_type_names,
                    job_ids=job_ids,
                    file_names=file_names,
                    job_outputs=job_outputs,
                    recipe_ids=recipe_ids,
                    recipe_type_ids=recipe_type_ids,
                    recipe_nodes=recipe_nodes,
                    batch_ids=batch_ids,
                    order=order)
        except InvalidData as ex:
            message = 'Data is invalid'
            logger.exception(message)
            raise BadParameter('%s: %s' % (message, unicode(ex)))

        if not data_list:
            resp_dict = {
                'No Results':
                'No files found from filters and/or no data provided'
            }
            return Response(resp_dict)

        try:
            dataset = DataSet.objects.get(pk=dataset_id)
        except DataSet.DoesNotExist:
            raise Http404

        validation = DataSetMember.objects.validate_data_list(
            dataset_def=dataset.get_definition(), data_list=data_list)
        members = []
        if validation.is_valid and not dry_run:
            members = DataSetMember.objects.create_dataset_members(
                dataset=dataset, data_list=data_list)
            serializer = DataSetMemberSerializerV6(members, many=True)
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        elif not validation.is_valid:
            raise BadParameter('%s: %s' %
                               ('Error(s) validating data against dataset',
                                [e.to_dict() for e in validation.errors]))

        resp_dict = []
        for dl in data_list:
            resp_dict.append(convert_data_to_v6_json(dl).get_dict())
        return Response(resp_dict)
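Similarly, a hedged sketch of a filter-driven body for post_v6; the data_template schema is not shown in the source, so it is left as an empty placeholder, and all filter values are illustrative.

    # Illustrative request body for post_v6 using file filters instead of
    # explicit data; all values are made-up, and the data_template contents
    # are omitted because its schema is not shown here.
    example_body = {
        'data_template': {},
        'data_started': '2016-01-01T00:00:00Z',
        'data_ended': '2016-02-01T00:00:00Z',
        'job_type_name': ['test-job'],
        'file_name': ['input_0.png'],
        'order': ['file_name'],
        'dry_run': True
    }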
Example #29
    def test_execute_new(self):
        """Tests calling CreateBatchRecipes.execute() successfully"""

        # Importing module here to patch the max recipe num
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type_1 = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        sub_recipe_type_2 = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        jt_5 = job_test_utils.create_seed_job_type()
        jt_6 = job_test_utils.create_seed_job_type()
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type_1.name,
                        'recipe_type_revision': sub_recipe_type_1.revision_num
                    }
                },
                'recipe_node_b': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type_2.name,
                        'recipe_type_revision': sub_recipe_type_2.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_5.name,
                        'job_type_version': jt_5.version,
                        'job_type_revision': jt_5.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_6.name,
                        'job_type_version': jt_6.version,
                        'job_type_revision': jt_6.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()

        # Create 6 files
        src_file_ids = []
        data_list = []
        for i in range(0, 6):
            file_name = 'input_%d.png' % i
            src_file = storage_test_utils.create_file(file_name=file_name,
                                                      file_type='SOURCE',
                                                      media_type='image/png',
                                                      file_size=10,
                                                      data_type_tags=['type'],
                                                      file_path='the_path',
                                                      workspace=workspace)
            src_file_ids.append(src_file.id)
            data_dict = {
                'version': '6',
                'files': {
                    'INPUT_IMAGE': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)

        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition.forced_nodes = forced_nodes

        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=batch_definition)

        # Create message
        message = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message.batch_id = new_batch.id

        # Copy the JSON so the same message can be run again later
        message_json = message.to_json()

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        # Should be 6 messages: one for the next create_batch_recipes and 5 for creating new recipes
        self.assertEqual(len(message.new_messages), 6)

        # Create batch message
        batch_recipes_message = message.new_messages[0]
        self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message.current_dataset_file_id,
                         src_file_ids[1])
        self.assertFalse(batch_recipes_message.is_prev_batch_done)

        from recipe.models import Recipe
        # Verify that each message has a different input, then execute it
        src_ids = copy.deepcopy(src_file_ids)
        for msg in message.new_messages[1:]:
            self.assertEqual(msg.type, 'create_recipes')
            self.assertEqual(msg.create_recipes_type, 'new-recipe')
            input_data = DataV6(msg.recipe_input_data).get_data()
            file_id = input_data.values['INPUT_IMAGE'].file_ids[0]
            self.assertTrue(file_id in src_ids)
            src_ids.remove(file_id)

            # Execute the create_recipes messages
            result = msg.execute()
            self.assertTrue(result)

        # Verify that 5 recipes have been created with the proper input files
        recipes = Recipe.objects.all()
        self.assertEqual(len(recipes), 5)
        src_ids = copy.deepcopy(src_file_ids)
        for recipe in recipes:
            self.assertEqual(recipe.recipe_type.name,
                             new_batch.recipe_type.name)
            file_id = recipe.get_input_data().values['INPUT_IMAGE'].file_ids[0]
            self.assertTrue(file_id in src_ids)
            src_ids.remove(file_id)

        # Execute next create_batch_recipes messages
        result = batch_recipes_message.execute()
        self.assertTrue(result)
        # Should have only one final create_recipes message
        self.assertEqual(len(batch_recipes_message.new_messages), 1)
        create_recipes_message = batch_recipes_message.new_messages[0]
        self.assertTrue(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(create_recipes_message.type, 'create_recipes')
        self.assertEqual(create_recipes_message.create_recipes_type,
                         'new-recipe')
        self.assertEqual(create_recipes_message.batch_id, new_batch.id)
        self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
        self.assertEqual(create_recipes_message.recipe_type_name,
                         new_batch.recipe_type.name)
        self.assertEqual(create_recipes_message.recipe_type_rev_num,
                         new_batch.recipe_type.revision_num)