Example 1
    def test_json(self):
        """Tests converting a CreateBatchRecipes message to and from JSON"""

        # Previous batch with three recipes
        recipe_type = recipe_test_utils.create_recipe_type()
        prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                   is_creation_done=True,
                                                   recipes_total=3)
        recipe_1 = recipe_test_utils.create_recipe(batch=prev_batch)
        recipe_2 = recipe_test_utils.create_recipe(batch=prev_batch)
        recipe_3 = recipe_test_utils.create_recipe(batch=prev_batch)

        # New batch in the same chain that re-processes the previous batch
        definition = BatchDefinition()
        definition.root_batch_id = prev_batch.root_batch_id
        batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                              definition=definition)

        # Create message
        message = create_batch_recipes_message(batch.id)

        # Convert message to JSON and back, and then execute
        message_json_dict = message.to_json()
        new_message = CreateBatchRecipes.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        # Should be one reprocess_recipes message for the three recipes
        self.assertEqual(len(new_message.new_messages), 1)
        message = new_message.new_messages[0]
        self.assertEqual(message.type, 'reprocess_recipes')
        self.assertSetEqual(set(message._root_recipe_ids),
                            {recipe_1.id, recipe_2.id, recipe_3.id})
Example 2
def create_batch(title=None,
                 description=None,
                 recipe_type=None,
                 definition=None,
                 configuration=None,
                 is_creation_done=False,
                 recipes_total=None):
    """Creates a batch model for unit testing

    :returns: The batch model
    :rtype: :class:`batch.models.Batch`
    """

    global BATCH_TITLE_COUNTER, BATCH_DESCRIPTION_COUNTER

    recipe_type = recipe_type or recipe_test_utils.create_recipe_type()

    if not definition:
        # No definition supplied: build one that points at a freshly-saved
        # previous batch so it passes validation
        # TODO: this can be replaced by a DataSet once they are implemented
        earlier_batch = Batch()
        earlier_batch.recipe_type = recipe_type
        earlier_batch.recipe_type_rev = RecipeTypeRevision.objects.get_revision(
            recipe_type.id, recipe_type.revision_num)
        earlier_batch.event = TriggerEvent.objects.create_trigger_event(
            'USER', None, {'user': '******'}, now())
        earlier_batch.is_creation_done = True
        earlier_batch.recipes_total = 10
        earlier_batch.save()  # first save assigns the primary key
        earlier_batch.root_batch_id = earlier_batch.id
        earlier_batch.save()
        definition = BatchDefinition()
        definition.root_batch_id = earlier_batch.root_batch_id

    configuration = configuration or BatchConfiguration()

    if not title:
        title = 'Test Batch Title %i' % BATCH_TITLE_COUNTER
        BATCH_TITLE_COUNTER += 1
    if not description:
        description = 'Test Batch Description %i' % BATCH_DESCRIPTION_COUNTER
        BATCH_DESCRIPTION_COUNTER += 1

    event = TriggerEvent.objects.create_trigger_event('USER', None,
                                                      {'user': '******'},
                                                      now())

    new_batch = Batch.objects.create_batch_v6(title, description, recipe_type,
                                              event, definition, configuration)
    if is_creation_done:
        new_batch.is_creation_done = True
    if recipes_total is not None:
        new_batch.recipes_total = recipes_total
    new_batch.save()
    return new_batch
Example 3
    def get_definition(self):
        """Returns the batch definition represented by this JSON

        :returns: The batch definition
        :rtype: :class:`batch.definition.definition.BatchDefinition`:
        """

        definition = BatchDefinition()
        if 'previous_batch' not in self._definition:
            # No previous batch configured; return an empty definition
            return definition

        prev_batch = self._definition['previous_batch']
        definition.root_batch_id = prev_batch['root_batch_id']
        if 'forced_nodes' in prev_batch:
            forced_nodes_json = ForcedNodesV6(prev_batch['forced_nodes'])
            definition.forced_nodes = forced_nodes_json.get_forced_nodes()
        return definition
Example 4
    def get_definition(self):
        """Returns the batch definition represented by this JSON

        :returns: The batch definition
        :rtype: :class:`batch.definition.definition.BatchDefinition`:
        """

        definition = BatchDefinition()
        if 'previous_batch' not in self._definition:
            # No previous batch configured; return an empty definition
            return definition

        prev_batch = self._definition['previous_batch']
        definition.root_batch_id = prev_batch['root_batch_id']
        # Copy the optional fields over only when present in the JSON
        for field in ('job_names', 'all_jobs'):
            if field in prev_batch:
                setattr(definition, field, prev_batch[field])
        return definition
Example 5
    def test_convert_definition_to_v6(self):
        """Tests calling convert_definition_to_v6()"""

        # Try definition with nothing set
        definition = BatchDefinition()
        # Renamed from `json` to avoid shadowing the stdlib json module
        definition_json = convert_definition_to_v6(definition)
        BatchDefinitionV6(definition=definition_json.get_dict(),
                          do_validate=True)  # Revalidate

        # Try definition with previous batch ID set
        definition = BatchDefinition()
        definition.root_batch_id = 1234
        definition_json = convert_definition_to_v6(definition)
        BatchDefinitionV6(definition=definition_json.get_dict(),
                          do_validate=True)  # Revalidate
        self.assertEqual(definition_json.get_definition().root_batch_id,
                         definition.root_batch_id)
Example 6
    def test_successful(self, mock_msg_mgr):
        """Tests successfully calling the v6 batch comparison view"""

        job_type_1 = job_test_utils.create_seed_job_type()
        job_type_2 = job_test_utils.create_seed_job_type()
        job_type_3 = job_test_utils.create_seed_job_type()

        # Recipe type revision 1: job_a feeds job_b
        rt_definition_1 = {
            'version': '6',
            'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                      'json': []},
            'nodes': {
                'job_a': {
                    'dependencies': [],
                    'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_1.name,
                        'job_type_version': job_type_1.version,
                        'job_type_revision': 1,
                    }
                },
                'job_b': {
                    'dependencies': [{'name': 'job_a'}],
                    'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_a',
                    'output': 'OUTPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_2.name,
                        'job_type_version': job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }

        # Recipe type revision 2: job_c replaces job_a and feeds job_b
        rt_definition_2 = {
            'version': '6',
            'input': {'files': [{'name': 'INPUT_IMAGE', 'media_types': ['image/png'], 'required': True, 'multiple': False}],
                      'json': []},
            'nodes': {
                'job_c': {
                    'dependencies': [],
                    'input': {'INPUT_IMAGE': {'type': 'recipe', 'input': 'INPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_3.name,
                        'job_type_version': job_type_3.version,
                        'job_type_revision': 1,
                    }
                },
                'job_b': {
                    'dependencies': [{'name': 'job_c'}],
                    'input': {'INPUT_IMAGE': {'type': 'dependency', 'node': 'job_c',
                    'output': 'OUTPUT_IMAGE'}},
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': job_type_2.name,
                        'job_type_version': job_type_2.version,
                        'job_type_revision': 1,
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(definition=rt_definition_1)

        # Create a chain of two batches
        batch_1 = batch_test_utils.create_batch(recipe_type=recipe_type, is_creation_done=True, recipes_total=2)
        # Right now test utils will automatically have batch_1 supersede another batch, so we reset this so batch_1 is
        # its own chain
        batch_1.root_batch_id = batch_1.id
        batch_1.superseded_batch = None
        batch_1.save()
        # Change recipe type to new revision
        recipe_test_utils.edit_recipe_type_v6(recipe_type=recipe_type, definition=rt_definition_2, auto_update=True)
        recipe_type = RecipeType.objects.get(id=recipe_type.id)
        # batch_2 re-processes batch_1 and so joins batch_1's chain
        definition_2 = BatchDefinition()
        definition_2.root_batch_id = batch_1.root_batch_id
        batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type, definition=definition_2)

        # Set metrics to test values
        Batch.objects.filter(id=batch_1.id).update(jobs_total=24, jobs_pending=0, jobs_blocked=10, jobs_queued=0,
                                                   jobs_running=0, jobs_failed=2, jobs_completed=12, jobs_canceled=0,
                                                   recipes_estimated=2, recipes_total=2, recipes_completed=1)
        Batch.objects.filter(id=batch_2.id).update(jobs_total=26, jobs_pending=2, jobs_blocked=6, jobs_queued=3,
                                                   jobs_running=5, jobs_failed=6, jobs_completed=3, jobs_canceled=1,
                                                   recipes_estimated=2, recipes_total=2, recipes_completed=0)
        # Per-job metrics for batch_1's job_a (all 12 jobs completed)
        min_seed_duration_1a = timedelta(seconds=43)
        avg_seed_duration_1a = timedelta(seconds=68)
        max_seed_duration_1a = timedelta(seconds=77)
        min_job_duration_1a = timedelta(seconds=45)
        avg_job_duration_1a = timedelta(seconds=70)
        max_job_duration_1a = timedelta(seconds=79)
        qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_a')
        qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=0, jobs_failed=0,
                   jobs_completed=12, jobs_canceled=0, min_seed_duration=min_seed_duration_1a,
                   avg_seed_duration=avg_seed_duration_1a, max_seed_duration=max_seed_duration_1a,
                   min_job_duration=min_job_duration_1a, avg_job_duration=avg_job_duration_1a,
                   max_job_duration=max_job_duration_1a)
        # Per-job metrics for batch_1's job_b
        min_seed_duration_1b = timedelta(seconds=15)
        avg_seed_duration_1b = timedelta(seconds=18)
        max_seed_duration_1b = timedelta(seconds=23)
        min_job_duration_1b = timedelta(seconds=18)
        avg_job_duration_1b = timedelta(seconds=21)
        max_job_duration_1b = timedelta(seconds=26)
        qry = BatchMetrics.objects.filter(batch_id=batch_1.id, job_name='job_b')
        qry.update(jobs_total=12, jobs_pending=0, jobs_blocked=10, jobs_queued=0, jobs_running=0, jobs_failed=2,
                   jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_1b,
                   avg_seed_duration=avg_seed_duration_1b, max_seed_duration=max_seed_duration_1b,
                   min_job_duration=min_job_duration_1b, avg_job_duration=avg_job_duration_1b,
                   max_job_duration=max_job_duration_1b)
        # Per-job metrics for batch_2's job_b
        min_seed_duration_2b = timedelta(seconds=9)
        avg_seed_duration_2b = timedelta(seconds=12)
        max_seed_duration_2b = timedelta(seconds=17)
        min_job_duration_2b = timedelta(seconds=12)
        avg_job_duration_2b = timedelta(seconds=15)
        max_job_duration_2b = timedelta(seconds=20)
        qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_b')
        qry.update(jobs_total=13, jobs_pending=0, jobs_blocked=0, jobs_queued=0, jobs_running=3, jobs_failed=6,
                   jobs_completed=3, jobs_canceled=1, min_seed_duration=min_seed_duration_2b,
                   avg_seed_duration=avg_seed_duration_2b, max_seed_duration=max_seed_duration_2b,
                   min_job_duration=min_job_duration_2b, avg_job_duration=avg_job_duration_2b,
                   max_job_duration=max_job_duration_2b)
        # Per-job metrics for batch_2's job_c
        min_seed_duration_2c = timedelta(seconds=101)
        avg_seed_duration_2c = timedelta(seconds=136)
        max_seed_duration_2c = timedelta(seconds=158)
        min_job_duration_2c = timedelta(seconds=111)
        avg_job_duration_2c = timedelta(seconds=146)
        max_job_duration_2c = timedelta(seconds=168)
        qry = BatchMetrics.objects.filter(batch_id=batch_2.id, job_name='job_c')
        qry.update(jobs_total=13, jobs_pending=2, jobs_blocked=6, jobs_queued=3, jobs_running=2, jobs_failed=0,
                   jobs_completed=0, jobs_canceled=0, min_seed_duration=min_seed_duration_2c,
                   avg_seed_duration=avg_seed_duration_2c, max_seed_duration=max_seed_duration_2c,
                   min_job_duration=min_job_duration_2c, avg_job_duration=avg_job_duration_2c,
                   max_job_duration=max_job_duration_2c)
        # Expected per-job comparison: each metric lists [batch_1 value, batch_2 value].
        # job_a exists only in batch_1 and job_c only in batch_2, so the other
        # batch's entry is None for those jobs.
        expected_job_metrics = {'job_a': {'jobs_total': [12, None], 'jobs_pending': [0, None],
                                          'jobs_blocked': [0, None], 'jobs_queued': [0, None],
                                          'jobs_running': [0, None], 'jobs_failed': [0, None],
                                          'jobs_completed': [12, None], 'jobs_canceled': [0, None],
                                          'min_seed_duration': [duration_to_string(min_seed_duration_1a), None],
                                          'avg_seed_duration': [duration_to_string(avg_seed_duration_1a), None],
                                          'max_seed_duration': [duration_to_string(max_seed_duration_1a), None],
                                          'min_job_duration': [duration_to_string(min_job_duration_1a), None],
                                          'avg_job_duration': [duration_to_string(avg_job_duration_1a), None],
                                          'max_job_duration': [duration_to_string(max_job_duration_1a), None]},
                                'job_b': {'jobs_total': [12, 13], 'jobs_pending': [0, 0],
                                          'jobs_blocked': [10, 0], 'jobs_queued': [0, 0],
                                          'jobs_running': [0, 3], 'jobs_failed': [2, 6],
                                          'jobs_completed': [0, 3], 'jobs_canceled': [0, 1],
                                          'min_seed_duration': [duration_to_string(min_seed_duration_1b),
                                                                duration_to_string(min_seed_duration_2b)],
                                          'avg_seed_duration': [duration_to_string(avg_seed_duration_1b),
                                                                duration_to_string(avg_seed_duration_2b)],
                                          'max_seed_duration': [duration_to_string(max_seed_duration_1b),
                                                                duration_to_string(max_seed_duration_2b)],
                                          'min_job_duration': [duration_to_string(min_job_duration_1b),
                                                               duration_to_string(min_job_duration_2b)],
                                          'avg_job_duration': [duration_to_string(avg_job_duration_1b),
                                                               duration_to_string(avg_job_duration_2b)],
                                          'max_job_duration': [duration_to_string(max_job_duration_1b),
                                                               duration_to_string(max_job_duration_2b)]},
                                'job_c': {'jobs_total': [None, 13], 'jobs_pending': [None, 2],
                                          'jobs_blocked': [None, 6], 'jobs_queued': [None, 3],
                                          'jobs_running': [None, 2], 'jobs_failed': [None, 0],
                                          'jobs_completed': [None, 0], 'jobs_canceled': [None, 0],
                                          'min_seed_duration': [None, duration_to_string(min_seed_duration_2c)],
                                          'avg_seed_duration': [None, duration_to_string(avg_seed_duration_2c)],
                                          'max_seed_duration': [None, duration_to_string(max_seed_duration_2c)],
                                          'min_job_duration': [None, duration_to_string(min_job_duration_2c)],
                                          'avg_job_duration': [None, duration_to_string(avg_job_duration_2c)],
                                          'max_job_duration': [None, duration_to_string(max_job_duration_2c)]}
                               }
        expected_result = {'batches': [{'id': batch_1.id, 'title': batch_1.title, 'description': batch_1.description,
                                        'created': datetime_to_string(batch_1.created)},
                                       {'id': batch_2.id, 'title': batch_2.title, 'description': batch_2.description,
                                        'created': datetime_to_string(batch_2.created)}],
                           'metrics': {'jobs_total': [24, 26], 'jobs_pending': [0, 2], 'jobs_blocked': [10, 6],
                                       'jobs_queued': [0, 3], 'jobs_running': [0, 5], 'jobs_failed': [2, 6],
                                       'jobs_completed': [12, 3], 'jobs_canceled': [0, 1], 'recipes_estimated': [2, 2],
                                       'recipes_total': [2, 2], 'recipes_completed': [1, 0],
                                       'job_metrics': expected_job_metrics}
                          }
        # The comparison view is addressed by the chain's root batch ID
        url = '/v6/batches/comparison/%d/' % batch_2.root_batch_id
        response = self.client.get(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)

        result = json.loads(response.content)
        self.assertDictEqual(result, expected_result)
Example 7
    def test_inputmap(self):
        """Tests validating a batch configuration input map against a dataset"""

        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()
        src_file_a = storage_test_utils.create_file(file_name='input_a.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        src_file_b = storage_test_utils.create_file(file_name='input_b.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        # Two dataset members, one per source file
        data_list = []
        for src_file in (src_file_a, src_file_b):
            data_dict = {
                'version': '6',
                'files': {
                    'FILE_INPUT': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
        data_test_utils.create_dataset_members(dataset=the_dataset,
                                               data_list=data_list)

        batch_def = BatchDefinition()
        batch_def.dataset = the_dataset.id
        batch = batch_test_utils.create_batch(definition=batch_def)

        # Valid map: batch input FILE_INPUT maps onto the dataset's
        # INPUT_IMAGE parameter, so validation should pass
        json_dict = {
            'version': '6',
            'priority': 100,
            'inputMap': [{
                'input': 'FILE_INPUT',
                'datasetParameter': 'INPUT_IMAGE'
            }]
        }
        # Renamed from `json` to avoid shadowing the stdlib json module
        config_v6 = BatchConfigurationV6(configuration=json_dict)
        configuration = config_v6.get_configuration()
        configuration.validate(batch)

        # Invalid map: FILE_INPUT is not a parameter defined by the dataset,
        # so validation should raise InvalidConfiguration
        json_dict = {
            'version': '6',
            'priority': 100,
            'inputMap': [{
                'input': 'FILE_INPUT',
                'datasetParameter': 'FILE_INPUT'
            }]
        }
        config_v6 = BatchConfigurationV6(configuration=json_dict)
        configuration = config_v6.get_configuration()
        self.assertRaises(InvalidConfiguration, configuration.validate, batch)
Example 8
    def test_execute(self):
        """Tests calling CreateBatchRecipes.execute() successfully"""

        # Importing module here to patch the max recipe num
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        # Previous batch containing six recipes (one more than MAX_RECIPE_NUM)
        recipe_type = recipe_test_utils.create_recipe_type()
        prev_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                   is_creation_done=True,
                                                   recipes_total=6)
        recipes = [recipe_test_utils.create_recipe(batch=prev_batch)
                   for _ in range(6)]
        last_five_ids = {recipe.id for recipe in recipes[1:]}

        definition = BatchDefinition()
        definition.root_batch_id = prev_batch.root_batch_id
        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=definition)

        # Create message
        message = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message.batch_id = new_batch.id

        # Copy JSON for running same message again later
        message_json = message.to_json()

        # Execute message
        self.assertTrue(message.execute())

        # Should be two messages, one for next create_batch_recipes and one for re-processing recipes
        self.assertEqual(len(message.new_messages), 2)
        batch_recipes_message, reprocess_message = message.new_messages
        self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
        self.assertFalse(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(batch_recipes_message.current_recipe_id,
                         recipes[1].id)
        self.assertEqual(reprocess_message.type, 'reprocess_recipes')
        self.assertSetEqual(set(reprocess_message._root_recipe_ids),
                            last_five_ids)

        # Test executing message again from the saved JSON
        message = batch.messages.create_batch_recipes.CreateBatchRecipes.from_json(
            message_json)
        self.assertTrue(message.execute())

        # Should have same messages returned
        self.assertEqual(len(message.new_messages), 2)
        batch_recipes_message, reprocess_message = message.new_messages
        self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
        self.assertFalse(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(batch_recipes_message.current_recipe_id,
                         recipes[1].id)
        self.assertEqual(reprocess_message.type, 'reprocess_recipes')
        self.assertSetEqual(set(reprocess_message._root_recipe_ids),
                            last_five_ids)

        # Execute next create_batch_recipes messages
        self.assertTrue(batch_recipes_message.execute())

        # Should only have one last reprocess message, covering the first recipe
        self.assertEqual(len(batch_recipes_message.new_messages), 1)
        reprocess_message = batch_recipes_message.new_messages[0]
        self.assertTrue(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(reprocess_message.type, 'reprocess_recipes')
        self.assertSetEqual(set(reprocess_message._root_recipe_ids),
                            {recipes[0].id})
Example 9
    def test_execute_forced_nodes(self):
        """Tests calling CreateBatchRecipes.execute() when only specific nodes are forced"""

        # Importing module here to patch the max recipe num
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        # Sub-recipe type with two independent jobs (node_a, node_b)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)
        # NOTE(review): sub_recipe is not referenced later in this test —
        # confirm whether this fixture is still needed
        sub_recipe = recipe_test_utils.create_recipe(
            recipe_type=sub_recipe_type)

        # Recipe with two jobs and one subrecipe (c -> d -> r)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type.name,
                        'recipe_type_revision': sub_recipe_type.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()

        # Create 6 files & recipes to go along
        src_file_ids = []
        recipe_ids = []
        data_list = []
        for i in range(0, 6):
            file_name = 'input_%d.png' % i
            src_file = storage_test_utils.create_file(file_name=file_name,
                                                      file_type='SOURCE',
                                                      media_type='image/png',
                                                      file_size=10,
                                                      data_type_tags=['type'],
                                                      file_path='the_path',
                                                      workspace=workspace)
            src_file_ids.append(src_file.id)
            data_dict = {
                'version': '6',
                'files': {
                    'INPUT_IMAGE': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
            recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                     input=data_dict)
            recipe_ids.append(recipe.id)
        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)
        recipe_test_utils.process_recipe_inputs(recipe_ids)

        # Batch over the dataset that forces only node_d to re-run
        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.add_node('node_d')
        forced_nodes.all_nodes = False
        batch_definition.forced_nodes = forced_nodes

        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=batch_definition)
# Esempio n. 10 (0)
    def test_execute_reprocess(self):
        """Tests calling CreateBatchRecipes.execute() successfully when re-processing recipes

        Builds a recipe type containing a sub-recipe node, creates 6 recipes
        over a 6-file dataset, and patches MAX_RECIPE_NUM to 5 so the first
        execute() must page: it should emit 5 reprocess (create_recipes)
        messages plus a follow-up create_batch_recipes message, and the
        follow-up should handle the one remaining recipe. Also verifies that
        a batch with supersedes=False produces new-recipe messages instead
        of reprocess messages.
        """

        # Importing module here to patch the max recipe num
        # (forces pagination with only 6 recipes in the test fixture)
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        # Sub-recipe with two independent jobs (a, b) fed from the recipe input
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Recipe with two jobs and one subrecipe (c -> d -> r)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type.name,
                        'recipe_type_revision': sub_recipe_type.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()

        # Create 6 files & recipes to go along
        src_file_ids = []
        recipe_ids = []
        data_list = []
        for i in range(0, 6):
            file_name = 'input_%d.png' % i
            src_file = storage_test_utils.create_file(file_name=file_name,
                                                      file_type='SOURCE',
                                                      media_type='image/png',
                                                      file_size=10,
                                                      data_type_tags=['type'],
                                                      file_path='the_path',
                                                      workspace=workspace)
            src_file_ids.append(src_file.id)
            data_dict = {
                'version': '6',
                'files': {
                    'INPUT_IMAGE': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
            recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                     input=data_dict)
            recipe_ids.append(recipe.id)

        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)
        recipe_test_utils.process_recipe_inputs(recipe_ids)

        # Batch that supersedes (re-processes) all nodes of the existing recipes
        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        batch_definition.supersedes = True
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition.forced_nodes = forced_nodes
        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=batch_definition)

        # Create message
        message = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message.batch_id = new_batch.id

        # Execute message
        result = message.execute()
        self.assertTrue(result)
        # One follow-up create_batch_recipes message plus one reprocess
        # (create_recipes) message covering the first 5 recipes
        self.assertEqual(len(message.new_messages), 2)

        batch_recipes_message = message.new_messages[0]
        create_recipes_message = message.new_messages[1]
        self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message.batch_id, new_batch.id)
        self.assertFalse(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(batch_recipes_message.current_recipe_id,
                         recipe_ids[1])

        # Test the create_recipes_message
        self.assertEqual(create_recipes_message.type, 'create_recipes')
        self.assertSetEqual(
            set(create_recipes_message.root_recipe_ids), {
                recipe_ids[5], recipe_ids[4], recipe_ids[3], recipe_ids[2],
                recipe_ids[1]
            })
        self.assertEqual(create_recipes_message.batch_id, new_batch.id)
        self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
        self.assertEqual(create_recipes_message.recipe_type_name,
                         new_batch.recipe_type.name)
        self.assertEqual(create_recipes_message.recipe_type_rev_num,
                         new_batch.recipe_type.revision_num)

        # Execute next create_batch_recipes messages
        result = batch_recipes_message.execute()
        self.assertTrue(result)

        # Should only have one last create_recipes message
        self.assertEqual(len(batch_recipes_message.new_messages), 1)
        create_recipes_message = batch_recipes_message.new_messages[0]
        self.assertTrue(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(create_recipes_message.type, 'create_recipes')
        self.assertSetEqual(set(create_recipes_message.root_recipe_ids),
                            {recipe_ids[0]})
        self.assertEqual(create_recipes_message.batch_id, new_batch.id)
        self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
        self.assertEqual(create_recipes_message.recipe_type_name,
                         new_batch.recipe_type.name)
        self.assertEqual(create_recipes_message.recipe_type_rev_num,
                         new_batch.recipe_type.revision_num)

        # Test setting supersedes to false and make sure we don't have any reprocess messages
        batch_definition_2 = BatchDefinition()
        batch_definition_2.dataset = the_dataset.id
        batch_definition_2.supersedes = False
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition_2.forced_nodes = forced_nodes
        new_batch_2 = batch_test_utils.create_batch(
            recipe_type=recipe_type, definition=batch_definition_2)

        # Create message
        message_2 = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message_2.batch_id = new_batch_2.id
        # Execute message
        result_2 = message_2.execute()
        self.assertTrue(result_2)
        # One follow-up create_batch_recipes message plus 5 new-recipe messages
        self.assertEqual(len(message_2.new_messages), 6)

        batch_recipes_message_2 = message_2.new_messages[0]
        self.assertEqual(batch_recipes_message_2.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message_2.batch_id, new_batch_2.id)
        self.assertFalse(batch_recipes_message_2.is_prev_batch_done)

        # Make sure we've got 5 create-new-recipe messages
        for msg in message_2.new_messages[1:]:
            self.assertEqual(msg.create_recipes_type, 'new-recipe')
            self.assertEqual(msg.batch_id, new_batch_2.id)
            self.assertEqual(msg.event_id, new_batch_2.event_id)
            self.assertEqual(msg.recipe_type_name,
                             new_batch_2.recipe_type.name)
            self.assertEqual(msg.recipe_type_rev_num,
                             new_batch_2.recipe_type.revision_num)

        # Execute next create_batch_recipes messages
        result_3 = batch_recipes_message_2.execute()
        self.assertTrue(result_3)

        # Should only have one last create_recipes message
        self.assertEqual(len(batch_recipes_message_2.new_messages), 1)
        create_recipes_message_3 = batch_recipes_message_2.new_messages[0]
        self.assertTrue(batch_recipes_message_2.is_prev_batch_done)
        self.assertEqual(create_recipes_message_3.type, 'create_recipes')
        self.assertEqual(create_recipes_message_3.batch_id, new_batch_2.id)
        self.assertEqual(create_recipes_message_3.event_id,
                         new_batch_2.event_id)
        self.assertEqual(create_recipes_message_3.recipe_type_name,
                         new_batch_2.recipe_type.name)
        self.assertEqual(create_recipes_message_3.recipe_type_rev_num,
                         new_batch_2.recipe_type.revision_num)
# Esempio n. 11 (0)
    def test_execute_new(self):
        """Tests calling CreateBatchRecipes.execute() successfully

        Builds a recipe type with two sub-recipe nodes and two jobs, creates
        a 6-file dataset (with no pre-existing recipes), and patches
        MAX_RECIPE_NUM to 5 so the first execute() must page: it should emit
        5 new-recipe (create_recipes) messages plus a follow-up
        create_batch_recipes message, each create_recipes message carrying a
        distinct dataset file as input. The follow-up should handle the one
        remaining file.
        """

        # Importing module here to patch the max recipe num
        # (forces pagination with only 6 files in the dataset)
        import batch.messages.create_batch_recipes
        batch.messages.create_batch_recipes.MAX_RECIPE_NUM = 5

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        # First sub-recipe: two independent jobs fed from the recipe input
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type_1 = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Second sub-recipe: same shape as the first, different job types
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        sub_recipe_type_2 = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Top-level recipe: jobs c -> d, sub-recipe a from the input,
        # sub-recipe b fed from node_d's output
        jt_5 = job_test_utils.create_seed_job_type()
        jt_6 = job_test_utils.create_seed_job_type()
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type_1.name,
                        'recipe_type_revision': sub_recipe_type_1.revision_num
                    }
                },
                'recipe_node_b': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type_2.name,
                        'recipe_type_revision': sub_recipe_type_2.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_5.name,
                        'job_type_version': jt_5.version,
                        'job_type_revision': jt_5.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_6.name,
                        'job_type_version': jt_6.version,
                        'job_type_revision': jt_6.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        workspace = storage_test_utils.create_workspace()

        # Create 6 files (no recipes yet — this test exercises new-recipe creation)
        src_file_ids = []
        data_list = []
        for i in range(0, 6):
            file_name = 'input_%d.png' % i
            src_file = storage_test_utils.create_file(file_name=file_name,
                                                      file_type='SOURCE',
                                                      media_type='image/png',
                                                      file_size=10,
                                                      data_type_tags=['type'],
                                                      file_path='the_path',
                                                      workspace=workspace)
            src_file_ids.append(src_file.id)
            data_dict = {
                'version': '6',
                'files': {
                    'INPUT_IMAGE': [src_file.id]
                },
                'json': {}
            }
            data_list.append(DataV6(data=data_dict).get_dict())
        members = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                         data_list=data_list)

        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition.forced_nodes = forced_nodes

        new_batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                  definition=batch_definition)

        # Create message
        message = batch.messages.create_batch_recipes.CreateBatchRecipes()
        message.batch_id = new_batch.id

        # Copy JSON for running same message again later
        message_json = message.to_json()

        # Execute message
        result = message.execute()
        self.assertTrue(result)

        # Should be 6 messages, one for next create_batch_recipes and 5 for creating new recipes
        self.assertEqual(len(message.new_messages), 6)

        # Create batch message
        batch_recipes_message = message.new_messages[0]
        self.assertEqual(batch_recipes_message.type, 'create_batch_recipes')
        self.assertEqual(batch_recipes_message.current_dataset_file_id,
                         src_file_ids[1])
        self.assertFalse(batch_recipes_message.is_prev_batch_done)

        from recipe.models import Recipe
        # Verify each message has a different input and execute
        src_ids = copy.deepcopy(src_file_ids)
        for msg in message.new_messages[1:]:
            self.assertEqual(msg.type, 'create_recipes')
            self.assertEqual(msg.create_recipes_type, 'new-recipe')
            # Each message should consume a distinct dataset file
            file_id = DataV6(msg.recipe_input_data).get_data(
            ).values['INPUT_IMAGE'].file_ids[0]
            self.assertTrue(file_id in src_ids)
            src_ids.remove(file_id)

            # Execute the create_recipes messages
            result = msg.execute()
            self.assertTrue(result)

        # Verify 5 recipes have been created and they have the proper input files:
        recipes = Recipe.objects.all()
        self.assertEqual(len(recipes), 5)
        src_ids = copy.deepcopy(src_file_ids)
        for recipe in recipes:
            self.assertEqual(recipe.recipe_type.name,
                             new_batch.recipe_type.name)
            file_id = recipe.get_input_data().values['INPUT_IMAGE'].file_ids[0]
            self.assertTrue(file_id in src_ids)
            src_ids.remove(file_id)

        # Execute next create_batch_recipes messages
        result = batch_recipes_message.execute()
        self.assertTrue(result)
        # Should only have one last create_recipes message
        self.assertEqual(len(batch_recipes_message.new_messages), 1)
        create_recipes_message = batch_recipes_message.new_messages[0]
        self.assertTrue(batch_recipes_message.is_prev_batch_done)
        self.assertEqual(create_recipes_message.type, 'create_recipes')
        self.assertEqual(create_recipes_message.create_recipes_type,
                         'new-recipe')
        self.assertEqual(create_recipes_message.batch_id, new_batch.id)
        self.assertEqual(create_recipes_message.event_id, new_batch.event_id)
        self.assertEqual(create_recipes_message.recipe_type_name,
                         new_batch.recipe_type.name)
        self.assertEqual(create_recipes_message.recipe_type_rev_num,
                         new_batch.recipe_type.revision_num)
# Esempio n. 12 (0)
    def test_json_new(self):
        """Tests coverting a CreateBatchRecipes message to and from JSON"""

        jt_1 = job_test_utils.create_seed_job_type()
        jt_2 = job_test_utils.create_seed_job_type()
        jt_3 = job_test_utils.create_seed_job_type()
        jt_4 = job_test_utils.create_seed_job_type()

        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'node_a': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_1.name,
                        'job_type_version': jt_1.version,
                        'job_type_revision': jt_1.revision_num
                    }
                },
                'node_b': {
                    'dependencies': [],
                    'input': {
                        'input_a': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_2.name,
                        'job_type_version': jt_2.version,
                        'job_type_revision': jt_2.revision_num
                    }
                }
            }
        }
        sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)
        sub_recipe = recipe_test_utils.create_recipe(
            recipe_type=sub_recipe_type)

        # Recipe with two jobs and one subrecipe (c -> d -> r)
        recipe_def = {
            'version': '7',
            'input': {
                'files': [{
                    'name': 'INPUT_IMAGE',
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False
                }],
                'json': []
            },
            'nodes': {
                'recipe_node': {
                    'dependencies': [{
                        'name': 'node_d',
                        'acceptance': True
                    }],
                    'input': {
                        'input_a': {
                            'type': 'dependency',
                            'node': 'node_d',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'recipe',
                        'recipe_type_name': sub_recipe_type.name,
                        'recipe_type_revision': sub_recipe_type.revision_num
                    }
                },
                'node_c': {
                    'dependencies': [],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'recipe',
                            'input': 'INPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_3.name,
                        'job_type_version': jt_3.version,
                        'job_type_revision': jt_3.revision_num
                    }
                },
                'node_d': {
                    'dependencies': [{
                        'name': 'node_c',
                        'acceptance': True
                    }],
                    'input': {
                        'INPUT_IMAGE': {
                            'type': 'dependency',
                            'node': 'node_c',
                            'output': 'OUTPUT_IMAGE'
                        }
                    },
                    'node_type': {
                        'node_type': 'job',
                        'job_type_name': jt_4.name,
                        'job_type_version': jt_4.version,
                        'job_type_revision': jt_4.revision_num
                    }
                }
            }
        }
        recipe_type = recipe_test_utils.create_recipe_type_v6(
            definition=recipe_def)

        # Create a dataset of 6 files
        dataset_def = {
            'parameters': {
                'files': [{
                    'media_types': ['image/png'],
                    'required': True,
                    'multiple': False,
                    'name': 'INPUT_IMAGE'
                }],
                'json': []
            }
        }
        the_dataset = data_test_utils.create_dataset(definition=dataset_def)
        # Set up two SOURCE files in one workspace; each will become the
        # 'INPUT_IMAGE' input of a separate dataset member below.
        workspace = storage_test_utils.create_workspace()
        src_file_a = storage_test_utils.create_file(file_name='input_a.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        src_file_b = storage_test_utils.create_file(file_name='input_b.PNG',
                                                    file_type='SOURCE',
                                                    media_type='image/png',
                                                    file_size=10,
                                                    data_type_tags=['type'],
                                                    file_path='the_path',
                                                    workspace=workspace)
        # Build one v6 data dict per source file and add both as members of
        # the dataset (the_dataset is created earlier in this test method).
        data_list = []
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_a.id]
            },
            'json': {}
        }
        data_list.append(DataV6(data=data_dict).get_dict())
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_b.id]
            },
            'json': {}
        }
        data_list.append(DataV6(data=data_dict).get_dict())
        # NOTE(review): member_2 is not referenced again — creation is the
        # only effect this test relies on.
        member_2 = data_test_utils.create_dataset_members(dataset=the_dataset,
                                                          data_list=data_list)

        # Create the batch over the dataset, forcing all nodes so a recipe is
        # created for every dataset member
        batch_definition = BatchDefinition()
        batch_definition.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition.forced_nodes = forced_nodes
        batch = batch_test_utils.create_batch(recipe_type=recipe_type,
                                              definition=batch_definition)

        # Create the message
        message = create_batch_recipes_message(batch.id)

        # Convert message to JSON and back, and then execute, which verifies
        # the round-trip serialization as well as the execution result
        message_json_dict = message.to_json()
        new_message = CreateBatchRecipes.from_json(message_json_dict)
        result = new_message.execute()

        self.assertTrue(result)
        # Should be two create_recipes message for the two files in the dataset
        self.assertEqual(len(new_message.new_messages), 2)

        # Verify each message has a different input: every source file should
        # appear exactly once across the generated 'new-recipe' messages
        src_ids = [src_file_a.id, src_file_b.id]
        for message in new_message.new_messages:
            self.assertEqual(message.type, 'create_recipes')
            self.assertEqual(message.create_recipes_type, 'new-recipe')
            file_id = DataV6(message.recipe_input_data).get_data(
            ).values['INPUT_IMAGE'].file_ids[0]
            self.assertTrue(file_id in src_ids)
            # Remove so a duplicate input would fail the assertTrue above
            src_ids.remove(file_id)

        # Test re-processing existing recipes: create one recipe per source
        # file with matching input data, then run a second batch over the
        # same dataset and expect a single 'reprocess' message instead of
        # new-recipe messages
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_a.id]
            },
            'json': {}
        }

        recipe_1 = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                   input=data_dict)
        data_dict = {
            'version': '6',
            'files': {
                'INPUT_IMAGE': [src_file_b.id]
            },
            'json': {}
        }

        recipe_2 = recipe_test_utils.create_recipe(recipe_type=recipe_type,
                                                   input=data_dict)
        # Process the recipe inputs so the recipes are linked to their input
        # files and can be matched against the dataset for reprocessing
        recipe_test_utils.process_recipe_inputs([recipe_1.id, recipe_2.id])

        batch_definition_2 = BatchDefinition()
        batch_definition_2.dataset = the_dataset.id
        forced_nodes = ForcedNodes()
        forced_nodes.all_nodes = True
        batch_definition_2.forced_nodes = forced_nodes
        batch_2 = batch_test_utils.create_batch(recipe_type=recipe_type,
                                                definition=batch_definition_2)

        # Create the message
        message = create_batch_recipes_message(batch_2.id)

        # Convert message to JSON and back, and then execute
        message_json_dict_2 = message.to_json()
        new_message_2 = CreateBatchRecipes.from_json(message_json_dict_2)
        result_2 = new_message_2.execute()

        self.assertTrue(result_2)
        # Both existing recipes should be covered by one 'reprocess' message
        self.assertEqual(len(new_message_2.new_messages), 1)
        message = new_message_2.new_messages[0]
        self.assertEqual(message.type, 'create_recipes')
        self.assertEqual(message.create_recipes_type, 'reprocess')
        self.assertSetEqual(set(message.root_recipe_ids),
                            {recipe_1.id, recipe_2.id})