def test_get_nodes_to_create(self):
    """Tests calling Recipe.get_nodes_to_create()"""

    job_type = job_test_utils.create_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type()

    # Create recipe
    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_condition_node('B', Interface(), DataFilter(True))
    definition.add_condition_node('C', Interface(), DataFilter(True))
    definition.add_condition_node('D', Interface(), DataFilter(False))
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_recipe_node('H', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('A', 'D')
    definition.add_dependency('A', 'E')
    definition.add_dependency('B', 'E')
    definition.add_dependency('B', 'F')
    definition.add_dependency('C', 'F')
    definition.add_dependency('D', 'G')
    definition.add_dependency('E', 'G')
    definition.add_dependency('E', 'H')
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)

    # Nodes A, B, and D already exist
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', save=True)
    condition_b = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=True, save=False)
    condition_d = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=False)
    RecipeCondition.objects.bulk_create([condition_b, condition_d])
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', condition=condition_b, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', condition=condition_d, save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_d])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    nodes_to_create = recipe_instance.get_nodes_to_create()

    self.assertSetEqual(set(nodes_to_create.keys()), {'C', 'E', 'H'})
def get_filter(self):
    """Returns the data filter represented by this JSON

    :returns: The data filter
    :rtype: :class:`data.filter.filter.DataFilter`
    """

    data_filter = DataFilter([], self._data_filter['all'])
    for f in self._data_filter['filters']:
        data_filter.add_filter(f)
    return data_filter
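# Hedged usage sketch (example values only, not from the source): reconstructing a DataFilter
# from a stored v6 dict via get_filter(); the filter entry mirrors the filter dicts used in the
# tests in this section.
v6 = DataFilterV6(data_filter={'filters': [{'name': 'input_a', 'type': 'media-type',
                                            'condition': '==', 'values': ['application/json']}],
                               'all': False})
data_filter = v6.get_filter()  # DataFilter with all=False and the single media-type filter added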
def test_convert_recipe_definition_to_v6_json_full(self):
    """Tests calling convert_recipe_definition_to_v6_json() with a full definition"""

    interface = Interface()
    interface.add_parameter(FileParameter('file_param_a', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_a', 'object'))
    interface.add_parameter(JsonParameter('json_param_b', 'object', required=False))

    definition = RecipeDefinition(interface)
    definition.add_job_node('A', 'job_type_1', '1.0', 1)
    definition.add_job_node('B', 'job_type_2', '2.0', 1)
    definition.add_job_node('C', 'job_type_3', '1.0', 2)
    definition.add_recipe_node('D', 'recipe_type_1', 1)
    definition.add_condition_node('E', Interface(), DataFilter())
    definition.add_job_node('F', 'job_type_4', '1.0', 1)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_dependency('E', 'F')
    definition.add_recipe_input_connection('A', 'input_a', 'file_param_a')
    definition.add_dependency_input_connection('B', 'b_input_a', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_a', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_a', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_b', 'json_param_a')

    json = convert_recipe_definition_to_v6_json(definition)
    RecipeDefinitionV6(definition=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E', 'F'})
def test_json(self):
    """Tests converting a ProcessCondition message to and from JSON"""

    definition = RecipeDefinition(Interface())
    # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs
    definition.add_condition_node('node_a', Interface(), DataFilter(True))
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=definition_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True)

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = ProcessCondition.from_json(message_json_dict)
    result = new_message.execute()

    self.assertTrue(result)
    condition = RecipeCondition.objects.get(id=condition.id)
    self.assertEqual(len(new_message.new_messages), 1)
    self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
    self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully"""

    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()
    job_type_4 = job_test_utils.create_seed_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type_v6()

    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))
    df1 = DataFilter(filter_list=[{'name': 'file_param_1', 'type': 'media-type', 'condition': '==', 'values': ['image/gif']},
                                  {'name': 'json_param_1', 'type': 'object', 'condition': 'superset of', 'values': [{}]}],
                     all=False)

    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_condition_node('F', interface, df1)  # False
    definition.add_job_node('G', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_dependency('A', 'F')
    definition.add_dependency('F', 'G')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    condition_f = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=True)
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b, save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', sub_recipe=recipe_d, save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', condition=condition_f, save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e, recipe_node_f]

    recipe_instance = RecipeInstance(definition, recipe, recipe_nodes)
    json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E', 'F'})
def test_convert_filter_to_v6_json(self):
    """Tests calling convert_filter_to_v6_json()"""

    # Try interface with nothing set
    filter = DataFilter()
    json = convert_filter_to_v6_json(filter)
    DataFilterV6(data_filter=json.get_dict(), do_validate=True)  # Revalidate

    # Try data with a variety of values
    filter_dict = {
        'version': '6',
        'filters': [
            {'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']},
            {'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']},
            {'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']},
            {'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']}
        ]
    }
    data_filter = DataFilter(filter_list=filter_dict['filters'])
    json = convert_filter_to_v6_json(data_filter)
    DataFilterV6(data_filter=json.get_dict(), do_validate=True)  # Revalidate

    filter_dict['filters'][2]['values'] = [0]
    filter_dict['filters'][3]['values'] = [0, 100]
    self.assertItemsEqual(json.get_filter().filter_list, filter_dict['filters'])
def get_filter(self):
    """Returns the data filter represented by this JSON

    :returns: The data filter
    :rtype: :class:`data.filter.filter.DataFilter`
    """

    data_filter = DataFilter(True)
    return data_filter
def test_json(self):
    """Tests converting a ProcessCondition message to and from JSON"""

    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    df1 = DataFilter(filter_list=[{'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0]}])

    definition = RecipeDefinition(cond_interface_1)
    definition.add_condition_node('node_a', cond_interface_1, df1)
    definition.add_recipe_input_connection('node_a', 'cond_int', 'cond_int')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)

    data_1 = Data()
    data_1.add_value(JsonValue('cond_int', 0))
    data_1_dict = convert_data_to_v6_json(data_1).get_dict()

    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_1_dict)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', condition=condition, save=True)

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Convert message to JSON and back, and then execute
    message_json_dict = message.to_json()
    new_message = ProcessCondition.from_json(message_json_dict)
    result = new_message.execute()

    self.assertTrue(result)
    condition = RecipeCondition.objects.get(id=condition.id)
    self.assertEqual(len(new_message.new_messages), 1)
    self.assertEqual(new_message.new_messages[0].type, 'update_recipe')
    self.assertEqual(new_message.new_messages[0].root_recipe_id, recipe.id)
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
def __init__(self, data_filter=None, do_validate=False):
    """Creates a v6 data filter JSON object from the given dictionary

    :param data_filter: The data filter JSON dict
    :type data_filter: dict
    :param do_validate: Whether to perform validation on the JSON schema
    :type do_validate: bool

    :raises :class:`data.filter.exceptions.InvalidDataFilter`: If the given data filter is invalid
    """

    if not data_filter:
        data_filter = {'filters': [], 'all': True}
    self._data_filter = data_filter

    if 'version' not in self._data_filter:
        self._data_filter['version'] = SCHEMA_VERSION
    if self._data_filter['version'] not in SCHEMA_VERSIONS:
        msg = '%s is an unsupported version number'
        raise InvalidDataFilter('INVALID_VERSION', msg % self._data_filter['version'])

    if 'all' not in self._data_filter:
        self._data_filter['all'] = True
    if 'filters' not in self._data_filter:
        self._data_filter['filters'] = []

    try:
        if do_validate:
            validate(self._data_filter, DATA_FILTER_SCHEMA)
            for f in data_filter['filters']:
                DataFilter.validate_filter(f)
    except ValidationError as ex:
        raise InvalidDataFilter('INVALID_DATA_FILTER', 'Invalid data filter: %s' % unicode(ex))
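# Hedged usage sketch (example values are assumptions): missing keys are defaulted and an
# unsupported version raises InvalidDataFilter even without do_validate, per __init__ above.
filter_v6 = DataFilterV6()  # defaults to {'filters': [], 'all': True} plus the current SCHEMA_VERSION
try:
    DataFilterV6(data_filter={'version': '1.0'})
except InvalidDataFilter:
    pass  # raised with ('INVALID_VERSION', '1.0 is an unsupported version number')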
def get_definition(self):
    """Returns the recipe definition represented by this JSON

    :returns: The recipe definition
    :rtype: :class:`recipe.definition.definition.RecipeDefinition`
    """

    interface_json = InterfaceV6(self._definition['input'], do_validate=False)
    interface = interface_json.get_interface()
    definition = RecipeDefinition(interface)

    # Add all nodes to definition first
    for node_name, node_dict in self._definition['nodes'].items():
        node_type_dict = node_dict['node_type']
        if node_type_dict['node_type'] == 'condition':
            # TODO: complete recipe condition implementation
            cond_interface_json = InterfaceV6(node_type_dict['interface'], do_validate=False)
            definition.add_condition_node(node_name, cond_interface_json.get_interface(), DataFilter(True))
        elif node_type_dict['node_type'] == 'job':
            definition.add_job_node(node_name, node_type_dict['job_type_name'], node_type_dict['job_type_version'],
                                    node_type_dict['job_type_revision'])
        elif node_type_dict['node_type'] == 'recipe':
            definition.add_recipe_node(node_name, node_type_dict['recipe_type_name'],
                                       node_type_dict['recipe_type_revision'])

    # Now add dependencies and connections
    for node_name, node_dict in self._definition['nodes'].items():
        for dependency_dict in node_dict['dependencies']:
            definition.add_dependency(dependency_dict['name'], node_name)
        for conn_name, conn_dict in node_dict['input'].items():
            if conn_dict['type'] == 'recipe':
                definition.add_recipe_input_connection(node_name, conn_name, conn_dict['input'])
            elif conn_dict['type'] == 'dependency':
                definition.add_dependency_input_connection(node_name, conn_name, conn_dict['node'],
                                                           conn_dict['output'])

    return definition
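# Hedged usage sketch: round-tripping a definition through the v6 JSON wrapper, assuming the
# constructor signature shown in the tests above (RecipeDefinitionV6(definition=..., do_validate=...)).
definition = RecipeDefinition(Interface())
definition.add_job_node('A', 'job_type_1', '1.0', 1)
v6_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
same_definition = RecipeDefinitionV6(definition=v6_dict, do_validate=True).get_definition()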
def test_validate_successful(self):
    """Tests calling RecipeDefinition.validate() successfully"""

    recipe_interface = Interface()
    recipe_interface.add_parameter(JsonParameter('recipe_input_1', 'integer'))

    definition = RecipeDefinition(recipe_interface)
    definition.add_job_node('A', 'job_type_1', '1.0', 1)
    condition_interface = Interface()
    condition_interface.add_parameter(JsonParameter('cond_param', 'integer'))
    definition.add_condition_node('B', condition_interface, DataFilter(True))
    definition.add_recipe_node('C', 'recipe_type_1', 1)
    definition.add_dependency('A', 'B')
    definition.add_dependency('B', 'C')
    definition.add_recipe_input_connection('A', 'a_input_1', 'recipe_input_1')
    definition.add_dependency_input_connection('B', 'cond_param', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_2', 'B', 'cond_param')

    job_input_interface = Interface()
    job_input_interface.add_parameter(JsonParameter('a_input_1', 'integer'))
    job_output_interface = Interface()
    job_output_interface.add_parameter(JsonParameter('a_output_1', 'integer'))
    recipe_input_interface = Interface()
    recipe_input_interface.add_parameter(JsonParameter('c_input_1', 'integer'))
    recipe_input_interface.add_parameter(JsonParameter('c_input_2', 'integer'))

    input_interfaces = {'A': job_input_interface, 'C': recipe_input_interface}
    output_interfaces = {'A': job_output_interface, 'C': Interface()}

    warnings = definition.validate(input_interfaces, output_interfaces)
    self.assertListEqual(warnings, [])
def test_condition_hit(self):
    """Tests a recipe where the condition node accepts ('hits' on) the parse job output

    Job -> Condition -> Recipe
    parse-job -> condition-node -> recipe-node
    """

    manifest_1 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'parse-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Parse Job',
            'description': 'Test Parse job',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{'name': 'INPUT_FILE', 'mediaTypes': ['image/x-hdf5-image'], 'required': True}],
                    'json': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_A', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest_1)

    input_interface = Interface()
    input_interface.add_parameter(FileParameter('INPUT_FILE', ['image/x-hdf5-image'], multiple=False))
    definition = RecipeDefinition(input_interface)
    definition.add_job_node('parse-job', job_type_1.name, job_type_1.version, job_type_1.revision_num)

    cond_interface = Interface()
    cond_interface.add_parameter(FileParameter('INPUT_FILE', ['image/png']))
    df = DataFilter(filter_list=[{'name': 'cond', 'type': 'media-type', 'condition': '==', 'values': ['image/png']}])
    definition.add_condition_node('condition-node', cond_interface, df)

    sub_job_manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'recipe-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Recipe Job',
            'description': 'Test Recipe job',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{'name': 'INPUT_FILE', 'mediaTypes': ['image/png'], 'required': True}],
                    'json': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_A', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    sub_job = job_test_utils.create_seed_job_type(manifest=sub_job_manifest)

    sub_interface = Interface()
    sub_interface.add_parameter(FileParameter('INPUT_FILE', ['image/png']))
    definition_b = RecipeDefinition(sub_interface)
    definition_b.add_job_node('job_b', sub_job.name, sub_job.version, sub_job.revision_num)
    definition_b.add_recipe_input_connection('job_b', 'INPUT_FILE', 'INPUT_FILE')
    definition_b_dict = convert_recipe_definition_to_v6_json(definition_b).get_dict()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_b_dict)
    definition.add_recipe_node('recipe-node', sub_recipe_type.name, sub_recipe_type.revision_num)

    # Connect the recipe input to the parse job
    definition.add_recipe_input_connection('parse-job', 'INPUT_FILE', 'INPUT_FILE')
    # Connect the condition node to the parse job output
    definition.add_dependency_input_connection('condition-node', 'cond', 'parse-job', 'OUTPUT_A')
    # Connect the sub recipe to the condition output
    definition.add_dependency_input_connection('recipe-node', 'INPUT_FILE', 'condition-node', 'cond')
def test_has_completed_true(self):
    """Tests calling Recipe.has_completed() when an entire recipe has completed"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    df2 = DataFilter(filter_list=[{'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0]},
                                  {'name': 'cond_int', 'type': 'integer', 'condition': '!=', 'values': [0]}],
                     all=True)  # always False

    definition = RecipeDefinition(Interface())
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('B', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('C', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('D', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_job_node('H', job_type.name, job_type.version, job_type.revision_num)
    definition.add_condition_node('I', cond_interface_1, df2)  # False
    definition.add_job_node('J', job_type.name, job_type.version, job_type.revision_num)
    definition.add_dependency('A', 'C')
    definition.add_dependency('A', 'E')
    definition.add_dependency('A', 'H')
    definition.add_dependency('C', 'D')
    definition.add_dependency('G', 'H')
    definition.add_dependency('A', 'I')
    definition.add_dependency('I', 'J')

    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_c = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_d = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_e = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_f = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    job_h = job_test_utils.create_job(job_type=job_type, status='COMPLETED', output=data_dict, save=False)
    Job.objects.bulk_create([job_a, job_c, job_d, job_e, job_f, job_h])
    condition_i = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=True)
    recipe_b = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_b.is_completed = True
    recipe_g = recipe_test_utils.create_recipe(recipe_type=sub_recipe_type, save=False)
    recipe_g.is_completed = True
    Recipe.objects.bulk_create([recipe_b, recipe_g])

    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False, is_original=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False, is_original=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', job=job_d, save=False, is_original=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', job=job_f, save=False)
    recipe_node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='H', job=job_h, save=False)
    recipe_node_i = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='I', condition=condition_i, save=False)
    recipe_node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='G', sub_recipe=recipe_g, save=False,
                                                         is_original=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', sub_recipe=recipe_b, save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e,
                                    recipe_node_f, recipe_node_g, recipe_node_h, recipe_node_i])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    self.assertTrue(recipe_instance.has_completed())
def test_get_nodes_to_process_input(self):
    """Tests calling Recipe.get_nodes_to_process_input()"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_type = job_test_utils.create_seed_job_type()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6()

    # Create recipe
    definition = RecipeDefinition(Interface())
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(JsonParameter('cond_int', 'integer'))
    definition.add_job_node('A', job_type.name, job_type.version, job_type.revision_num)
    df1 = DataFilter(filter_list=[{'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0]},
                                  {'name': 'cond_int', 'type': 'integer', 'condition': '!=', 'values': [0]}],
                     all=False)  # always True
    df2 = DataFilter(filter_list=[{'name': 'cond_int', 'type': 'integer', 'condition': '==', 'values': [0]},
                                  {'name': 'cond_int', 'type': 'integer', 'condition': '!=', 'values': [0]}],
                     all=True)  # always False
    definition.add_condition_node('B', cond_interface_1, df1)  # True
    definition.add_condition_node('C', cond_interface_1, df1)  # True
    definition.add_condition_node('D', cond_interface_1, df2)  # False
    definition.add_job_node('E', job_type.name, job_type.version, job_type.revision_num)
    definition.add_job_node('F', job_type.name, job_type.version, job_type.revision_num)
    definition.add_recipe_node('G', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_recipe_node('H', sub_recipe_type.name, sub_recipe_type.revision_num)
    definition.add_dependency('A', 'D')
    definition.add_dependency('A', 'E')
    definition.add_dependency('B', 'E')
    definition.add_dependency('B', 'F')
    definition.add_dependency('C', 'F')
    definition.add_dependency('D', 'G')
    definition.add_dependency('E', 'G')
    definition.add_dependency('E', 'H')
    definition_json_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_json_dict)
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)

    # Nodes A, B, and D already exist
    job_a = job_test_utils.create_job(job_type=job_type, status='COMPLETED', input=data_dict, output=data_dict, save=True)
    condition_b = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=True, save=False)
    condition_d = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=False)
    RecipeCondition.objects.bulk_create([condition_b, condition_d])
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', condition=condition_b, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', condition=condition_d, save=False)
    RecipeNode.objects.bulk_create([recipe_node_a, recipe_node_b, recipe_node_d])

    recipe_instance = Recipe.objects.get_recipe_instance(recipe.id)
    nodes_to_process = recipe_instance.get_nodes_to_process_input()

    self.assertSetEqual(set(nodes_to_process.keys()), {'C', 'E'})
def test_validate(self):
    """Tests calling DataFilter.validate()"""

    data_filter = DataFilter(all=False)
    data_filter.add_filter({'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']})
    data_filter.add_filter({'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']})
    data_filter.add_filter({'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']})
    data_filter.add_filter({'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']})
    data_filter.add_filter({'name': 'input_f', 'type': 'meta-data', 'condition': 'in', 'values': [['foo', 'baz']],
                            'fields': [['a', 'b']]})

    interface = Interface()
    interface.add_parameter(FileParameter('input_a', ['application/json']))
    warnings = data_filter.validate(interface)
    self.assertEqual(len(warnings), 4)
    self.assertEqual(warnings[0].name, 'UNMATCHED_FILTER')

    interface.add_parameter(JsonParameter('input_e', 'integer'))
    warnings = data_filter.validate(interface)
    self.assertEqual(len(warnings), 5)
    self.assertEqual(warnings[3].name, 'UNMATCHED_FILTER')
    self.assertEqual(warnings[4].name, 'UNMATCHED_PARAMETERS')

    interface.add_parameter(JsonParameter('input_b', 'integer'))
    with self.assertRaises(InvalidDataFilter) as context:
        data_filter.validate(interface)
    self.assertEqual(context.exception.error.name, 'MISMATCHED_TYPE')

    interface2 = Interface()
    interface2.add_parameter(FileParameter('input_a', ['application/json']))
    interface2.add_parameter(JsonParameter('input_b', 'string'))
    interface2.add_parameter(JsonParameter('input_c', 'integer'))
    interface2.add_parameter(JsonParameter('input_d', 'integer'))
    interface2.add_parameter(FileParameter('input_f', ['integer']))
    warnings = data_filter.validate(interface2)
    self.assertEqual(len(warnings), 0)
def test_is_data_accepted(self):
    """Tests calling DataFilter.is_data_accepted()"""

    data_filter = DataFilter(all=False)
    data_filter.add_filter({'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']})
    data_filter.add_filter({'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']})
    data_filter.add_filter({'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']})
    data_filter.add_filter({'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']})
    data_filter.add_filter({'name': 'input_f', 'type': 'meta-data', 'condition': 'in', 'values': [['foo', 'baz']],
                            'fields': [['a', 'b']]})

    data = Data()
    file_value = FileValue('input_a', [self.file1.id])
    data.add_value(file_value)

    # first filter passes, so data is accepted if all is set to false
    self.assertTrue(data_filter.is_data_accepted(data))

    # other filters fail so data is not accepted
    data_filter.all = True
    self.assertFalse(data_filter.is_data_accepted(data))

    # get other filters to pass
    json_value = JsonValue('input_b', 'abcdefg')
    data.add_value(json_value)
    json_value = JsonValue('input_c', '10')
    data.add_value(json_value)
    json_value = JsonValue('input_d', 50)
    data.add_value(json_value)
    file_value = FileValue('input_f', [self.file2.id])
    data.add_value(file_value)

    self.assertTrue(data_filter.is_data_accepted(data))
def test_add_filter(self):
    """Tests calling DataFilter.add_filter()"""

    data_filter = DataFilter()
    data_filter.add_filter({'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']})
    data_filter.add_filter({'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']})
    data_filter.add_filter({'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']})
    data_filter.add_filter({'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']})

    filter_dict = {'version': '6', 'filters': [
        {'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']},
        {'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']},
        {'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': [0]},
        {'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': [0, 100]}
    ]}
    self.assertItemsEqual(data_filter.filter_list, filter_dict['filters'])

    with self.assertRaises(InvalidDataFilter) as context:
        data_filter.add_filter({})
    self.assertEqual(context.exception.error.name, 'MISSING_NAME')

    with self.assertRaises(InvalidDataFilter) as context:
        data_filter.add_filter({'name': 'input_a'})
    self.assertEqual(context.exception.error.name, 'MISSING_TYPE')

    with self.assertRaises(InvalidDataFilter) as context:
        data_filter.add_filter({'name': 'input_a', 'type': 'integer'})
    self.assertEqual(context.exception.error.name, 'MISSING_CONDITION')
def test_validate_filter(self):
    """Tests calling DataFilter.validate_filter()"""

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({})
    self.assertEqual(context.exception.error.name, 'MISSING_NAME')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a'})
    self.assertEqual(context.exception.error.name, 'MISSING_TYPE')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'integer'})
    self.assertEqual(context.exception.error.name, 'MISSING_CONDITION')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'integer', 'condition': '>'})
    self.assertEqual(context.exception.error.name, 'MISSING_VALUES')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'integer', 'condition': 'BAD', 'values': [0]})
    self.assertEqual(context.exception.error.name, 'INVALID_CONDITION')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'string', 'condition': 'between', 'values': ['0']})
    self.assertEqual(context.exception.error.name, 'INVALID_CONDITION')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'integer', 'condition': 'contains', 'values': [0]})
    self.assertEqual(context.exception.error.name, 'INVALID_CONDITION')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'boolean', 'condition': 'contains', 'values': [0]})
    self.assertEqual(context.exception.error.name, 'INVALID_CONDITION')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'bad', 'condition': 'contains', 'values': [0]})
    self.assertEqual(context.exception.error.name, 'INVALID_TYPE')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'integer', 'condition': '<', 'values': ['not a number']})
    self.assertEqual(context.exception.error.name, 'VALUE_ERROR')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'object', 'condition': '<', 'values': [['not a number']]})
    self.assertEqual(context.exception.error.name, 'INVALID_CONDITION')

    with self.assertRaises(InvalidDataFilter) as context:
        DataFilter.validate_filter({'name': 'input_a', 'type': 'object', 'condition': '<', 'values': [['not a number']],
                                    'fields': [['a'], ['b']]})
    self.assertEqual(context.exception.error.name, 'INVALID_FIELDS')
def test_execute(self):
    """Tests calling UpdateRecipe.execute() successfully"""

    data_dict = convert_data_to_v6_json(Data()).get_dict()
    job_a = job_test_utils.create_job(status='COMPLETED', input=data_dict, output=data_dict)
    recipe_b = recipe_test_utils.create_recipe()
    job_c = job_test_utils.create_job(status='PENDING')
    job_d = job_test_utils.create_job(status='BLOCKED')
    recipe_type_e = recipe_test_utils.create_recipe_type_v6()
    job_type_f = job_test_utils.create_seed_job_type()
    job_type_k = job_test_utils.create_seed_job_type()
    job_g = job_test_utils.create_job(status='FAILED', input=data_dict)
    job_h = job_test_utils.create_job(status='PENDING')
    condition_i = recipe_test_utils.create_recipe_condition(save=True)

    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_a.job_type.name, job_a.job_type.version, job_a.job_type_rev.revision_num)
    definition.add_recipe_node('node_b', recipe_b.recipe_type.name, recipe_b.recipe_type.revision_num)
    definition.add_job_node('node_c', job_c.job_type.name, job_c.job_type.version, job_c.job_type_rev.revision_num)
    definition.add_job_node('node_d', job_d.job_type.name, job_d.job_type.version, job_d.job_type_rev.revision_num)
    definition.add_recipe_node('node_e', recipe_type_e.name, recipe_type_e.revision_num)
    definition.add_job_node('node_f', job_type_f.name, job_type_f.version, job_type_f.revision_num)
    definition.add_job_node('node_g', job_g.job_type.name, job_g.job_type.version, job_g.job_type_rev.revision_num)
    definition.add_job_node('node_h', job_h.job_type.name, job_h.job_type.version, job_h.job_type_rev.revision_num)
    definition.add_condition_node('node_i', Interface(), DataFilter())  # True
    definition.add_condition_node('node_j', Interface(), DataFilter())  # True
    definition.add_job_node('node_k', job_type_k.name, job_type_k.version, job_type_k.revision_num)
    definition.add_dependency('node_a', 'node_c')
    definition.add_dependency('node_a', 'node_e')
    definition.add_dependency('node_a', 'node_g')
    definition.add_dependency('node_c', 'node_d')
    definition.add_dependency('node_e', 'node_f')
    definition.add_dependency('node_g', 'node_h')
    definition.add_dependency('node_a', 'node_i')
    definition.add_dependency('node_a', 'node_j')
    definition.add_dependency('node_i', 'node_k')
    definition.add_dependency('node_j', 'node_k')
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)

    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)
    node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', job=job_a, save=False)
    node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_b', sub_recipe=recipe_b, save=False)
    node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_c', job=job_c, save=False)
    node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_d', job=job_d, save=False)
    node_g = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_g', job=job_g, save=False)
    node_h = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_h', job=job_h, save=False)
    node_i = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_i', condition=condition_i, save=False)
    RecipeNode.objects.bulk_create([node_a, node_b, node_c, node_d, node_g, node_h, node_i])

    forced_nodes = ForcedNodes()
    forced_nodes_e = ForcedNodes()
    forced_nodes_e.set_all_nodes()
    forced_nodes.add_subrecipe('node_e', forced_nodes_e)

    # Create and execute message
    message = create_update_recipe_message(recipe.id, forced_nodes=forced_nodes)
    result = message.execute()
    self.assertTrue(result)
    self.assertEqual(len(message.new_messages), 8)

    # Check messages
    blocked_jobs_msg = None
    pending_jobs_msg = None
    create_cond_msg = None
    create_jobs_msg = None
    create_recipes_msg = None
    process_condition_msg = None
    process_job_input_msg = None
    process_recipe_input_msg = None
    for msg in message.new_messages:
        if msg.type == 'blocked_jobs':
            blocked_jobs_msg = msg
        elif msg.type == 'pending_jobs':
            pending_jobs_msg = msg
        elif msg.type == 'create_conditions':
            create_cond_msg = msg
        elif msg.type == 'create_jobs':
            create_jobs_msg = msg
        elif msg.type == 'create_recipes':
            create_recipes_msg = msg
        elif msg.type == 'process_condition':
            process_condition_msg = msg
        elif msg.type == 'process_job_input':
            process_job_input_msg = msg
        elif msg.type == 'process_recipe_input':
            process_recipe_input_msg = msg
    self.assertIsNotNone(blocked_jobs_msg)
    self.assertIsNotNone(pending_jobs_msg)
    self.assertIsNotNone(create_cond_msg)
    self.assertIsNotNone(create_jobs_msg)
    self.assertIsNotNone(create_recipes_msg)
    self.assertIsNotNone(process_condition_msg)
    self.assertIsNotNone(process_job_input_msg)
    self.assertIsNotNone(process_recipe_input_msg)
    # Check message to change jobs to BLOCKED
    self.assertListEqual(blocked_jobs_msg._blocked_job_ids, [job_h.id])
    # Check message to change jobs to PENDING
    self.assertListEqual(pending_jobs_msg._pending_job_ids, [job_d.id])
    # Check message to create conditions
    self.assertEqual(create_cond_msg.recipe_id, recipe.id)
    self.assertEqual(create_cond_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    condition = Condition('node_j', True)
    self.assertListEqual(create_cond_msg.conditions, [condition])
    # Check message to create jobs
    self.assertEqual(create_jobs_msg.event_id, recipe.event_id)
    self.assertEqual(create_jobs_msg.create_jobs_type, RECIPE_TYPE)
    self.assertEqual(create_jobs_msg.recipe_id, recipe.id)
    self.assertEqual(create_jobs_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_jobs_msg.superseded_recipe_id)
    recipe_job = RecipeJob(job_type_f.name, job_type_f.version, job_type_f.revision_num, 'node_f', False)
    self.assertListEqual(create_jobs_msg.recipe_jobs, [recipe_job])
    # Check message to create sub-recipes
    self.assertEqual(create_recipes_msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(create_recipes_msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(create_recipes_msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(create_recipes_msg.recipe_id, recipe.id)
    self.assertEqual(create_recipes_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_recipes_msg.superseded_recipe_id)
    sub = SubRecipe(recipe_type_e.name, recipe_type_e.revision_num, 'node_e', True)
    self.assertListEqual(create_recipes_msg.sub_recipes, [sub])
    # Check message to process condition
    self.assertEqual(process_condition_msg.condition_id, condition_i.id)
    # Check message to process job input
    self.assertEqual(process_job_input_msg.job_id, job_c.id)
    # Check message to process recipe input
    self.assertEqual(process_recipe_input_msg.recipe_id, recipe_b.id)

    # Test executing message again
    message_json_dict = message.to_json()
    message = UpdateRecipe.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Make sure the same messages are returned
    self.assertEqual(len(message.new_messages), 8)
    blocked_jobs_msg = None
    pending_jobs_msg = None
    create_cond_msg = None
    create_jobs_msg = None
    create_recipes_msg = None
    process_condition_msg = None
    process_job_input_msg = None
    process_recipe_input_msg = None
    for msg in message.new_messages:
        if msg.type == 'blocked_jobs':
            blocked_jobs_msg = msg
        elif msg.type == 'pending_jobs':
            pending_jobs_msg = msg
        elif msg.type == 'create_conditions':
            create_cond_msg = msg
        elif msg.type == 'create_jobs':
            create_jobs_msg = msg
        elif msg.type == 'create_recipes':
            create_recipes_msg = msg
        elif msg.type == 'process_condition':
            process_condition_msg = msg
        elif msg.type == 'process_job_input':
            process_job_input_msg = msg
        elif msg.type == 'process_recipe_input':
            process_recipe_input_msg = msg
    self.assertIsNotNone(blocked_jobs_msg)
    self.assertIsNotNone(pending_jobs_msg)
    self.assertIsNotNone(create_cond_msg)
    self.assertIsNotNone(create_jobs_msg)
    self.assertIsNotNone(create_recipes_msg)
    self.assertIsNotNone(process_condition_msg)
    self.assertIsNotNone(process_job_input_msg)
    self.assertIsNotNone(process_recipe_input_msg)
    # Check message to change jobs to BLOCKED
    self.assertListEqual(blocked_jobs_msg._blocked_job_ids, [job_h.id])
    # Check message to change jobs to PENDING
    self.assertListEqual(pending_jobs_msg._pending_job_ids, [job_d.id])
    # Check message to create conditions
    self.assertEqual(create_cond_msg.recipe_id, recipe.id)
    self.assertEqual(create_cond_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    condition = Condition('node_j', True)
    self.assertListEqual(create_cond_msg.conditions, [condition])
    # Check message to create jobs
    self.assertEqual(create_jobs_msg.event_id, recipe.event_id)
    self.assertEqual(create_jobs_msg.create_jobs_type, RECIPE_TYPE)
    self.assertEqual(create_jobs_msg.recipe_id, recipe.id)
    self.assertEqual(create_jobs_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_jobs_msg.superseded_recipe_id)
    recipe_job = RecipeJob(job_type_f.name, job_type_f.version, job_type_f.revision_num, 'node_f', False)
    self.assertListEqual(create_jobs_msg.recipe_jobs, [recipe_job])
    # Check message to create sub-recipes
    self.assertEqual(create_recipes_msg.event_id, recipe.event_id)
    msg_forced_nodes_dict = convert_forced_nodes_to_v6(create_recipes_msg.forced_nodes).get_dict()
    expected_forced_nodes_dict = convert_forced_nodes_to_v6(forced_nodes).get_dict()
    self.assertDictEqual(msg_forced_nodes_dict, expected_forced_nodes_dict)
    self.assertEqual(create_recipes_msg.create_recipes_type, SUB_RECIPE_TYPE)
    self.assertEqual(create_recipes_msg.recipe_id, recipe.id)
    self.assertEqual(create_recipes_msg.root_recipe_id, recipe.root_superseded_recipe_id)
    self.assertIsNone(create_recipes_msg.superseded_recipe_id)
    sub = SubRecipe(recipe_type_e.name, recipe_type_e.revision_num, 'node_e', True)
    self.assertListEqual(create_recipes_msg.sub_recipes, [sub])
    # Check message to process condition
    self.assertEqual(process_condition_msg.condition_id, condition_i.id)
    # Check message to process job input
    self.assertEqual(process_job_input_msg.job_id, job_c.id)
    # Check message to process recipe input
    self.assertEqual(process_recipe_input_msg.recipe_id, recipe_b.id)
def test_init_identical(self):
    """Tests creating a RecipeDiff between two identical recipe definitions"""

    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(FileParameter('cond_file', ['image/gif']))
    filter_1 = DataFilter(filter_list=[{'name': 'cond_file', 'type': 'filename', 'condition': 'contains',
                                        'values': ['good_name.gif']}])
    cond_interface_2 = Interface()
    cond_interface_2.add_parameter(FileParameter('cond_file', ['image/gif']))
    filter_2 = DataFilter(filter_list=[{'name': 'cond_file', 'type': 'filename', 'condition': 'contains',
                                        'values': ['good_name.gif']}])

    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_condition_node('E', cond_interface_1, filter_1)
    definition_1.add_job_node('F', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('C', 'D')
    definition_1.add_dependency('A', 'E')
    definition_1.add_dependency('E', 'F')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_1.add_dependency_input_connection('E', 'cond_file', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('F', 'f_input_1', 'E', 'cond_file')

    definition_2 = RecipeDefinition(interface_2)
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_condition_node('E', cond_interface_2, filter_2)
    definition_2.add_job_node('F', 'job_type_4', '1.0', 1)
    definition_2.add_dependency('A', 'B')
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('A', 'E')
    definition_2.add_dependency('E', 'F')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_dependency_input_connection('E', 'cond_file', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('F', 'f_input_1', 'E', 'cond_file')

    diff = RecipeDiff(definition_1, definition_2)

    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])
    # Every node should be unchanged and all should be copied during a reprocess
    nodes_to_copy = diff.get_nodes_to_copy()
    self.assertSetEqual(set(nodes_to_copy.keys()), {'A', 'B', 'C', 'D', 'E', 'F'})
    for node_diff in nodes_to_copy.values():
        self.assertEqual(node_diff.status, NodeDiff.UNCHANGED)
        self.assertFalse(node_diff.reprocess_new_node)
        self.assertListEqual(node_diff.changes, [])
    self.assertDictEqual(diff.get_nodes_to_supersede(), {})
    self.assertDictEqual(diff.get_nodes_to_unpublish(), {})
def test_init_changes(self):
    """Tests creating a RecipeDiff when the newer definition has a variety of changes in it"""

    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2.add_parameter(JsonParameter('json_param_2', 'object', required=False))
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(FileParameter('cond_file', ['image/gif']))
    filter_1 = DataFilter(all=True)
    cond_interface_2 = Interface()
    cond_interface_2.add_parameter(FileParameter('cond_file', ['image/gif']))
    filter_2 = DataFilter(all=False)

    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_1.add_condition_node('G', cond_interface_1, filter_1)
    definition_1.add_job_node('H', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('B', 'E')
    definition_1.add_dependency('C', 'D')
    definition_1.add_dependency('A', 'G')
    definition_1.add_dependency('G', 'H')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_1.add_dependency_input_connection('G', 'cond_file', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('H', 'h_input_1', 'G', 'cond_file')

    definition_2 = RecipeDefinition(interface_2)
    # Nodes B and E are deleted
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '2.1', 1)  # Change to job type version and revision
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_condition_node('G', cond_interface_2, filter_2)
    definition_2.add_job_node('H', 'job_type_4', '1.0', 1)
    definition_2.add_recipe_node('F', 'recipe_type_2', 5)  # New node
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('D', 'F')
    definition_2.add_dependency('A', 'G')
    definition_2.add_dependency('G', 'H', False)  # Change to fire when condition is False, not True
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_recipe_input_connection('F', 'f_input_1', 'json_param_2')
    definition_2.add_dependency_input_connection('G', 'cond_file', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('H', 'h_input_1', 'G', 'cond_file')

    diff = RecipeDiff(definition_1, definition_2)

    # Non-breaking recipe input changes so recipe can be reprocessed
    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])
    # Check each node for correct fields
    node_a = diff.graph['A']
    self.assertEqual(node_a.status, NodeDiff.UNCHANGED)
    self.assertFalse(node_a.reprocess_new_node)
    self.assertListEqual(node_a.changes, [])
    node_b = diff.graph['B']
    self.assertEqual(node_b.status, NodeDiff.DELETED)
    self.assertFalse(node_b.reprocess_new_node)
    self.assertListEqual(node_b.changes, [])
    node_c = diff.graph['C']
    self.assertEqual(node_c.status, NodeDiff.CHANGED)
    self.assertTrue(node_c.reprocess_new_node)
    self.assertEqual(len(node_c.changes), 2)
    self.assertEqual(node_c.changes[0].name, 'JOB_TYPE_VERSION_CHANGE')
    self.assertEqual(node_c.changes[1].name, 'JOB_TYPE_REVISION_CHANGE')
    node_d = diff.graph['D']
    self.assertEqual(node_d.status, NodeDiff.CHANGED)
    self.assertTrue(node_d.reprocess_new_node)
    self.assertEqual(len(node_d.changes), 1)
    self.assertEqual(node_d.changes[0].name, 'PARENT_CHANGED')
    node_e = diff.graph['E']
    self.assertEqual(node_e.status, NodeDiff.DELETED)
    self.assertFalse(node_e.reprocess_new_node)
    self.assertListEqual(node_e.changes, [])
    node_f = diff.graph['F']
    self.assertEqual(node_f.status, NodeDiff.NEW)
    self.assertTrue(node_f.reprocess_new_node)
    self.assertListEqual(node_f.changes, [])
    node_g = diff.graph['G']
    self.assertEqual(node_g.status, NodeDiff.CHANGED)
    self.assertTrue(node_g.reprocess_new_node)
    self.assertEqual(len(node_g.changes), 1)
    self.assertEqual(node_g.changes[0].name, 'FILTER_CHANGE')
    node_h = diff.graph['H']
    self.assertEqual(node_h.status, NodeDiff.CHANGED)
    self.assertTrue(node_h.reprocess_new_node)
    self.assertEqual(len(node_h.changes), 2)
    self.assertEqual(node_h.changes[0].name, 'PARENT_CHANGED')
    self.assertEqual(node_h.changes[1].name, 'PARENT_ACCEPTANCE_CHANGED')
    # Check nodes to copy, supersede, and unpublish
    self.assertSetEqual(set(diff.get_nodes_to_copy().keys()), {'A'})
    self.assertSetEqual(set(diff.get_nodes_to_supersede().keys()), {'B', 'C', 'D', 'E', 'G', 'H'})
    self.assertSetEqual(set(diff.get_nodes_to_unpublish().keys()), {'B', 'E'})
def test_convert_recipe_diff_to_v6_json_with_changes(self):
    """Tests calling convert_recipe_diff_to_v6_json() with a diff containing a variety of changes"""

    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2.add_parameter(JsonParameter('json_param_2', 'object', required=False))
    cond_interface_1 = Interface()
    cond_interface_1.add_parameter(FileParameter('cond_file', ['image/gif']))
    # TODO: eventually implement two "real" and different filters
    filter_1 = DataFilter(False)
    cond_interface_2 = Interface()
    cond_interface_2.add_parameter(FileParameter('cond_file', ['image/gif']))
    filter_2 = DataFilter(True)

    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_1.add_condition_node('G', cond_interface_1, filter_1)
    definition_1.add_job_node('H', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('B', 'E')
    definition_1.add_dependency('C', 'D')
    definition_1.add_dependency('A', 'G')
    definition_1.add_dependency('G', 'H')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_1.add_dependency_input_connection('G', 'cond_file', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('H', 'h_input_1', 'G', 'cond_file')

    definition_2 = RecipeDefinition(interface_2)
    # Nodes B and E are deleted
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '2.1', 1)  # Change to job type version and revision
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_condition_node('G', cond_interface_2, filter_2)
    definition_2.add_job_node('H', 'job_type_4', '1.0', 1)
    definition_2.add_recipe_node('F', 'recipe_type_2', 5)  # New node
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('D', 'F')
    definition_2.add_dependency('A', 'G')
    definition_2.add_dependency('G', 'H')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_recipe_input_connection('F', 'f_input_1', 'json_param_2')
    definition_2.add_dependency_input_connection('G', 'cond_file', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('H', 'h_input_1', 'G', 'cond_file')

    diff = RecipeDiff(definition_1, definition_2)
    json = convert_recipe_diff_to_v6_json(diff)
    RecipeDiffV6(diff=json.get_dict(), do_validate=True)  # Revalidate
    self.assertTrue(json.get_dict()['can_be_reprocessed'])
def test_execute(self):
    """Tests calling ProcessCondition.execute() successfully"""

    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=65456.0)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=24564165456.0)

    manifest_1 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-a',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_A', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    job_type_1 = job_test_utils.create_job_type(interface=manifest_1)
    manifest_2 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-b',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': []
                },
                'outputs': {
                    'files': [{'name': 'OUTPUT_B', 'pattern': '*.png', 'multiple': True}]
                }
            }
        }
    }
    job_type_2 = job_test_utils.create_job_type(interface=manifest_2)
    output_1_dict = {
        'version': '1.0',
        'output_data': [{'name': 'OUTPUT_A', 'file_ids': [file_1.id, file_2.id]}]
    }
    output_2_dict = {
        'version': '1.0',
        'output_data': [{'name': 'OUTPUT_B', 'file_ids': [file_3.id, file_4.id]}]
    }

    cond_interface = Interface()
    cond_interface.add_parameter(FileParameter('INPUT_C_1', [], multiple=True))
    cond_interface.add_parameter(FileParameter('INPUT_C_2', [], multiple=True))
    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('node_b', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs
    definition.add_condition_node('node_c', cond_interface, DataFilter(True))
    definition.add_dependency('node_a', 'node_c')
    definition.add_dependency('node_b', 'node_c')
    definition.add_dependency_input_connection('node_c', 'INPUT_C_1', 'node_a', 'OUTPUT_A')
    definition.add_dependency_input_connection('node_c', 'INPUT_C_2', 'node_b', 'OUTPUT_B')
    def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)

    recipe_data_dict = {'version': '1.0', 'input_data': [], 'workspace_id': workspace.id}
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=recipe_data_dict)
    job_1 = job_test_utils.create_job(job_type=job_type_1, num_exes=1, status='COMPLETED', output=output_1_dict,
                                      recipe=recipe)
    job_2 = job_test_utils.create_job(job_type=job_type_2, num_exes=1, status='COMPLETED', output=output_2_dict,
                                      recipe=recipe)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', job=job_1, save=False)
    node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_b', job=job_2, save=False)
    node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_c', condition=condition, save=False)
    RecipeNode.objects.bulk_create([node_a, node_b, node_c])

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    condition = RecipeCondition.objects.get(id=condition.id)

    # Check for update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)
    # Check condition flags
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)
    # Check condition for expected data
    self.assertSetEqual(set(condition.get_data().values.keys()), {'INPUT_C_1', 'INPUT_C_2'})
    self.assertListEqual(condition.get_data().values['INPUT_C_1'].file_ids, [file_1.id, file_2.id])
    self.assertListEqual(condition.get_data().values['INPUT_C_2'].file_ids, [file_3.id, file_4.id])

    # Test executing message again
    message_json_dict = message.to_json()
    message = ProcessCondition.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)