def test_validate_connection(self):
    """Tests calling Interface.validate_connection()"""
    receiving = Interface()
    connecting = Interface()

    # Both sides share the same two parameters, so the connection is valid
    shared_params = [FileParameter('input_1', ['application/json']),
                     JsonParameter('input_2', 'integer')]
    for param in shared_params:
        receiving.add_parameter(param)
        connecting.add_parameter(param)
    receiving.validate_connection(connecting)

    # A required parameter that exists only on the receiving side breaks the connection
    required_param = FileParameter('input_3', ['image/gif'], required=True)
    receiving.add_parameter(required_param)
    with self.assertRaises(InvalidInterfaceConnection) as context:
        receiving.validate_connection(connecting)
    self.assertEqual(context.exception.error.name, 'PARAM_REQUIRED')
    connecting.add_parameter(required_param)  # Required input now satisfied

    # A parameter whose own connection validation fails invalidates the whole connection
    failing_param = MagicMock()
    failing_param.name = 'input_4'
    failing_param.validate_connection.side_effect = InvalidInterfaceConnection('MOCK', '')
    receiving.add_parameter(failing_param)
    connecting.add_parameter(failing_param)
    with self.assertRaises(InvalidInterfaceConnection) as context:
        receiving.validate_connection(connecting)
    self.assertEqual(context.exception.error.name, 'MOCK')
def test_validate(self):
    """Tests calling Data.validate()"""
    interface = Interface()
    interface.add_parameter(FileParameter('input_1', ['application/json']))
    interface.add_parameter(JsonParameter('input_2', 'integer'))

    data = Data()
    for value in [FileValue('input_1', [123]), JsonValue('input_2', 100),
                  JsonValue('extra_input_1', 'hello'), JsonValue('extra_input_2', 'there')]:
        data.add_value(value)

    # Valid data; validation should strip values the interface does not declare
    data.validate(interface)
    self.assertSetEqual(set(data.values.keys()), {'input_1', 'input_2'})

    # Data is missing newly-added required input 3
    interface.add_parameter(FileParameter('input_3', ['image/gif'], required=True))
    with self.assertRaises(InvalidData) as context:
        data.validate(interface)
    self.assertEqual(context.exception.error.name, 'PARAM_REQUIRED')
    data.add_value(FileValue('input_3', [999]))  # Input 3 taken care of now

    # A value whose own validation fails makes the data invalid
    interface.add_parameter(JsonParameter('input_4', 'string'))
    bad_value = MagicMock()
    bad_value.name = 'input_4'
    bad_value.validate.side_effect = InvalidData('MOCK', '')
    data.add_value(bad_value)
    with self.assertRaises(InvalidData) as context:
        data.validate(interface)
    self.assertEqual(context.exception.error.name, 'MOCK')
def test_convert_definition_to_v6_json(self):
    """Tests calling convert_data_to_v6_json()"""
    # An empty definition should round-trip through the v6 JSON cleanly
    empty = DataSetDefinitionV6()
    json = convert_definition_to_v6_json(empty.get_definition())
    DataSetDefinitionV6(definition=json.get_dict(), do_validate=True)  # Revalidate

    # A definition with global parameters/values plus regular parameters
    definition = DataSetDefinition(definition={})
    definition.add_global_parameter(FileParameter('input_a', ['application/json']))
    definition.add_global_parameter(JsonParameter('input_b', 'integer'))
    definition.add_global_value(FileValue('input_a', [123]))
    definition.add_global_value(JsonValue('input_b', 100))
    definition.add_parameter(FileParameter('input_c', ['application/json']))
    definition.add_parameter(JsonParameter('input_d', 'integer'))
    json = convert_definition_to_v6_json(definition)
    DataSetDefinitionV6(definition=json.get_dict(), do_validate=True)  # Revalidate
    # Both global and regular parameters appear in the converted definition
    self.assertSetEqual(set(json.get_definition().get_parameters()),
                        {'input_a', 'input_b', 'input_c', 'input_d'})
def test_convert_recipe_diff_to_v6_json_with_changes(self):
    """Tests calling convert_recipe_diff_to_v6_json() with a diff containing a variety of changes"""
    # Old recipe interface
    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    # New recipe interface adds an OPTIONAL parameter, which is a non-breaking change
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2.add_parameter(JsonParameter('json_param_2', 'object', required=False))
    # Old definition: A -> {B, C}, B -> E, C -> D
    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('B', 'E')
    definition_1.add_dependency('C', 'D')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # New definition: A -> C -> D -> F
    definition_2 = RecipeDefinition(interface_2)
    # Nodes B and E are deleted
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '2.1', 1)  # Change to job type version and revision
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_recipe_node('F', 'recipe_type_2', 5)  # New node
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('D', 'F')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_recipe_input_connection('F', 'f_input_1', 'json_param_2')
    # Convert the diff and make sure the JSON round-trips through the v6 schema
    diff = RecipeDiff(definition_1, definition_2)
    json = convert_recipe_diff_to_v6_json(diff)
    RecipeDiffV6(diff=json.get_dict(), do_validate=True)  # Revalidate
    # Only an optional input was added, so reprocessing remains possible
    self.assertTrue(json.get_dict()['can_be_reprocessed'])
def test_convert_recipe_diff_to_v6_json_new_required_input(self):
    """Tests calling convert_recipe_diff_to_v6_json() with a diff where there is a breaking recipe interface change
    """
    # Old recipe interface
    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    # New recipe interface adds a REQUIRED parameter, which breaks reprocessing
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2.add_parameter(JsonParameter('json_param_2', 'object', required=True))
    # Old definition: A -> {B, C}, C -> D
    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('C', 'D')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # New definition: same graph shape, C at a newer job type version
    definition_2 = RecipeDefinition(interface_2)
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '1.1', 1)  # Change to job type version and revision
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_dependency('A', 'B')
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # Convert the diff and make sure the JSON round-trips through the v6 schema
    diff = RecipeDiff(definition_1, definition_2)
    json = convert_recipe_diff_to_v6_json(diff)
    RecipeDiffV6(diff=json.get_dict(), do_validate=True)  # Revalidate
    # The new required input means existing recipes cannot be reprocessed
    self.assertFalse(json.get_dict()['can_be_reprocessed'])
def test_init_new_required_input(self):
    """Tests creating a RecipeDiff when the newer definition has a new required input parameter"""
    # Old recipe interface
    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    # New recipe interface adds a REQUIRED parameter, which breaks reprocessing
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2.add_parameter(JsonParameter('json_param_2', 'object', required=True))
    # Old definition: A -> {B, C}, C -> D
    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('C', 'D')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # New definition: same graph shape, C at a newer job type version
    definition_2 = RecipeDefinition(interface_2)
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '1.1', 1)  # Change to job type version and revision
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_dependency('A', 'B')
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # The diff should flag the new required input as blocking reprocessing
    diff = RecipeDiff(definition_1, definition_2)
    self.assertFalse(diff.can_be_reprocessed)
    self.assertEqual(len(diff.reasons), 1)
    self.assertEqual(diff.reasons[0].name, 'INPUT_CHANGE')
    # Cannot be reprocessed, so no nodes to copy, supersede, or unpublish
    self.assertDictEqual(diff.get_nodes_to_copy(), {})
    self.assertDictEqual(diff.get_nodes_to_supersede(), {})
    self.assertDictEqual(diff.get_nodes_to_unpublish(), {})
    # Ensure no nodes have reprocess_new_node set to true
    for node_diff in diff.graph.values():
        self.assertFalse(node_diff.reprocess_new_node)
def test_validate(self):
    """Tests calling FileValue.validate()"""
    file_param = FileParameter('input_1', ['application/json'])
    json_param = JsonParameter('input_1', 'string')

    # A file value validated against a JSON parameter is a type mismatch
    value = FileValue('input_1', [1234, 1235])
    with self.assertRaises(InvalidData) as context:
        value.validate(json_param)
    self.assertEqual(context.exception.error.name, 'MISMATCHED_PARAM_TYPE')

    # Zero files not accepted
    value = FileValue('input_1', [])
    with self.assertRaises(InvalidData) as context:
        value.validate(file_param)
    self.assertEqual(context.exception.error.name, 'NO_FILES')

    # Multiple files not accepted
    value = FileValue('input_1', [1234, 1235])
    with self.assertRaises(InvalidData) as context:
        value.validate(file_param)
    self.assertEqual(context.exception.error.name, 'MULTIPLE_FILES')

    # A single file passes validation with no warnings
    value = FileValue('input_1', [1234])
    self.assertListEqual(value.validate(file_param), [])
def test_validate_connection(self):
    """Tests calling JsonParameter.validate_connection()"""
    json_param = JsonParameter('input_1', 'string')

    # Connecting a file parameter to a JSON parameter is a type mismatch
    candidate = FileParameter('input_1', ['application/json'])
    with self.assertRaises(InvalidInterfaceConnection) as context:
        json_param.validate_connection(candidate)
    self.assertEqual(context.exception.error.name, 'MISMATCHED_PARAM_TYPE')

    # An optional parameter cannot satisfy a required one
    candidate = JsonParameter('input_1', 'string', required=False)
    with self.assertRaises(InvalidInterfaceConnection) as context:
        json_param.validate_connection(candidate)
    self.assertEqual(context.exception.error.name, 'PARAM_REQUIRED')

    # The JSON types must agree
    candidate = JsonParameter('input_1', 'integer')
    with self.assertRaises(InvalidInterfaceConnection) as context:
        json_param.validate_connection(candidate)
    self.assertEqual(context.exception.error.name, 'MISMATCHED_JSON_TYPE')

    # A matching JSON parameter connects cleanly with no warnings
    candidate = JsonParameter('input_1', 'string')
    self.assertListEqual(json_param.validate_connection(candidate), [])
def test_convert_recipe_definition_to_v6_json_full(self):
    """Tests calling convert_recipe_definition_to_v6_json() with a full definition"""
    interface = Interface()
    interface.add_parameter(FileParameter('file_param_a', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_a', 'object'))
    interface.add_parameter(JsonParameter('json_param_b', 'object', required=False))

    # Build a definition with job nodes, a recipe node, and a mix of connections
    definition = RecipeDefinition(interface)
    for name, job_type, version, revision in [('A', 'job_type_1', '1.0', 1),
                                              ('B', 'job_type_2', '2.0', 1),
                                              ('C', 'job_type_3', '1.0', 2)]:
        definition.add_job_node(name, job_type, version, revision)
    definition.add_recipe_node('D', 'recipe_type_1', 1)
    definition.add_job_node('E', 'job_type_4', '1.0', 1)
    for parent, child in [('A', 'B'), ('A', 'C'), ('B', 'E'), ('C', 'D')]:
        definition.add_dependency(parent, child)
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_a')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_a')

    # Convert and make sure the JSON round-trips through the v6 schema
    json = convert_recipe_definition_to_v6_json(definition)
    RecipeDefinitionV6(definition=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E'})
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully"""
    # Create persisted job/recipe types to reference from the definition
    job_type_1 = job_test_utils.create_seed_job_type()
    job_type_2 = job_test_utils.create_seed_job_type()
    job_type_3 = job_test_utils.create_seed_job_type()
    job_type_4 = job_test_utils.create_seed_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type_v6()
    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))
    # Data filter for the condition node F
    df1 = DataFilter(filter_list=[{'name': 'file_param_1', 'type': 'media-type', 'condition': '==',
                                   'values': ['image/gif']},
                                  {'name': 'json_param_1', 'type': 'object', 'condition': 'superset of',
                                   'values': [{}]}], all=False)
    # Graph: A -> {B, C, F}, B -> E, C -> D, F -> G
    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_condition_node('F', interface, df1) #False
    definition.add_job_node('G', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_dependency('A', 'F')
    definition.add_dependency('F', 'G')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # Create the recipe and its node models in various execution states
    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    # Condition F was processed but NOT accepted, so its child G never ran
    condition_f = recipe_test_utils.create_recipe_condition(is_processed=True, is_accepted=False, save=True)
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b, save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', sub_recipe=recipe_d, save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_node_f = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='F', condition=condition_f, save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e, recipe_node_f]
    recipe_instance = RecipeInstance(definition, recipe, recipe_nodes)
    # Convert and make sure the JSON round-trips through the v6 schema
    json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    # G is absent: no recipe node model exists for it (it is downstream of the
    # unaccepted condition F) — presumably why it is excluded; confirm against
    # RecipeInstance semantics
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E', 'F'})
def test_add_parameter(self):
    """Tests calling Interface.add_parameter()"""
    interface = Interface()
    interface.add_parameter(FileParameter('input_1', ['application/json']))
    interface.add_parameter(JsonParameter('input_2', 'integer'))
    self.assertSetEqual(set(interface.parameters.keys()), {'input_1', 'input_2'})

    # A second parameter with an already-used name is rejected
    duplicate = FileParameter('input_1', [], required=False)
    with self.assertRaises(InvalidInterface) as context:
        interface.add_parameter(duplicate)
    self.assertEqual(context.exception.error.name, 'DUPLICATE_INPUT')
def test_convert_data_to_v1_json(self): """Tests calling convert_data_to_v1_json()""" # Try interface with nothing set data = Data() interface = Interface() json = convert_data_to_v1_json(data, interface) DataV1(data=json.get_dict()) # Revalidate # Try data with a variety of values data = Data() data.add_value(FileValue('input_a', [1234])) data.add_value(FileValue('input_b', [1235, 1236])) data.add_value(JsonValue('input_c', 'hello')) data.add_value(JsonValue('input_d', 11.9)) json = convert_data_to_v1_json(data, interface) self.assertDictEqual( json.get_dict(), { u'input_data': [{ u'name': u'input_d', u'value': 11.9 }, { u'name': u'input_b', u'file_ids': [1235, 1236] }, { u'name': u'input_c', u'value': u'hello' }, { u'name': u'input_a', u'file_id': 1234 }], u'version': u'1.0' }) DataV1(data=json.get_dict()) # Revalidate self.assertSetEqual( set(DataV6(json.get_dict()).get_data().values.keys()), {'input_a', 'input_b', 'input_c', 'input_d'}) # Try data with a single file list that should be a directory data = Data() data.add_value(FileValue('input_a', [1234])) interface = Interface() file_param = FileParameter('input_a', [], True, True) interface.add_parameter(file_param) json = convert_data_to_v1_json(data, interface) self.assertDictEqual( json.get_dict(), { u'input_data': [{ u'name': u'input_a', u'file_ids': [1234] }], u'version': u'1.0' })
def test_generate_node_input_data(self):
    """Tests calling RecipeDefinition.generate_node_input_data()"""
    # Recipe-level input interface
    input_interface = Interface()
    input_interface.add_parameter(FileParameter('recipe_input_1', ['image/gif'], multiple=True))
    input_interface.add_parameter(JsonParameter('recipe_input_2', 'string'))
    # node_c depends on node_a and node_b and pulls inputs from the recipe
    # input and from both dependencies' outputs
    definition = RecipeDefinition(input_interface)
    definition.add_job_node('node_a', 'job_type_1', '1.0', 1)
    definition.add_job_node('node_b', 'job_type_2', '1.0', 1)
    definition.add_job_node('node_c', 'job_type_3', '1.0', 1)
    definition.add_dependency('node_c', 'node_b')
    definition.add_dependency('node_c', 'node_a')
    definition.add_recipe_input_connection('node_c', 'input_1', 'recipe_input_1')
    definition.add_recipe_input_connection('node_c', 'input_2', 'recipe_input_2')
    definition.add_dependency_input_connection('node_c', 'input_3', 'node_a', 'output_a_1')
    definition.add_dependency_input_connection('node_c', 'input_4', 'node_a', 'output_a_2')
    definition.add_dependency_input_connection('node_c', 'input_5', 'node_b', 'output_b_1')
    # Recipe-level input data
    recipe_data = Data()
    recipe_data.add_value(FileValue('recipe_input_1', [1, 2, 3, 4, 5]))
    recipe_data.add_value(JsonValue('recipe_input_2', 'Scale is awesome!'))
    # Outputs produced by node_a and node_b
    a_output_data = Data()
    a_output_data.add_value(FileValue('output_a_1', [1234]))
    a_output_data.add_value(JsonValue('output_a_2', {'foo': 'bar'}))
    b_output_data = Data()
    b_output_data.add_value(JsonValue('output_b_1', 12.34))
    node_outputs = {'node_a': RecipeNodeOutput('node_a', 'job', 1, a_output_data),
                    'node_b': RecipeNodeOutput('node_b', 'job', 1, b_output_data)}
    # Generate node_c's input data and verify every connection was resolved
    node_data = definition.generate_node_input_data('node_c', recipe_data, node_outputs)
    self.assertSetEqual(set(node_data.values.keys()),
                        {'input_1', 'input_2', 'input_3', 'input_4', 'input_5'})
    self.assertListEqual(node_data.values['input_1'].file_ids, [1, 2, 3, 4, 5])
    self.assertEqual(node_data.values['input_2'].value, 'Scale is awesome!')
    self.assertListEqual(node_data.values['input_3'].file_ids, [1234])
    self.assertDictEqual(node_data.values['input_4'].value, {'foo': 'bar'})
    self.assertEqual(node_data.values['input_5'].value, 12.34)
def test_validate(self):
    """Tests calling JsonParameter.validate()"""
    # (removed an unused FileParameter local — validate() takes no connecting
    # parameter, so it was never exercised)

    # Invalid JSON type
    json_param = JsonParameter('input_1', 'BAD')
    with self.assertRaises(InvalidInterface) as context:
        json_param.validate()
    self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

    # Valid JSON parameter produces no warnings
    json_param = JsonParameter('input_1', 'string')
    warnings = json_param.validate()
    self.assertListEqual(warnings, [])
def test_validate(self):
    """Tests calling DataFilter.validate()"""
    # Build a filter over five inputs (any-match, since all=False)
    data_filter = DataFilter(all=False)
    data_filter.add_filter({'name': 'input_a', 'type': 'media-type', 'condition': '==', 'values': ['application/json']})
    data_filter.add_filter({'name': 'input_b', 'type': 'string', 'condition': 'contains', 'values': ['abcde']})
    data_filter.add_filter({'name': 'input_c', 'type': 'integer', 'condition': '>', 'values': ['0']})
    data_filter.add_filter({'name': 'input_d', 'type': 'integer', 'condition': 'between', 'values': ['0', '100']})
    data_filter.add_filter({'name': 'input_f', 'type': 'meta-data', 'condition': 'in', 'values': [['foo','baz']], 'fields': [['a','b']]})
    # Interface only declares input_a: the other four filters are unmatched
    interface = Interface()
    interface.add_parameter(FileParameter('input_a', ['application/json']))
    warnings = data_filter.validate(interface)
    self.assertEqual(len(warnings), 4)
    self.assertEqual(warnings[0].name, 'UNMATCHED_FILTER')
    # input_e has no filter, adding an UNMATCHED_PARAMETERS warning
    interface.add_parameter(JsonParameter('input_e', 'integer'))
    warnings = data_filter.validate(interface)
    self.assertEqual(len(warnings), 5)
    self.assertEqual(warnings[3].name, 'UNMATCHED_FILTER')
    self.assertEqual(warnings[4].name, 'UNMATCHED_PARAMETERS')
    # input_b is declared as integer but filtered as string: hard error
    interface.add_parameter(JsonParameter('input_b', 'integer'))
    with self.assertRaises(InvalidDataFilter) as context:
        data_filter.validate(interface)
    self.assertEqual(context.exception.error.name, 'MISMATCHED_TYPE')
    # A fully-matching interface validates with no warnings
    interface2 = Interface()
    interface2.add_parameter(FileParameter('input_a', ['application/json']))
    interface2.add_parameter(JsonParameter('input_b', 'string'))
    interface2.add_parameter(JsonParameter('input_c', 'integer'))
    interface2.add_parameter(JsonParameter('input_d', 'integer'))
    interface2.add_parameter(FileParameter('input_f', ['integer']))
    warnings = data_filter.validate(interface2)
    self.assertEqual(len(warnings), 0)
def test_add_parameter_from_output_interface(self):
    """Tests calling Interface.add_parameter_from_output_interface()"""
    output_interface = Interface()
    output_interface.add_parameter(FileParameter('input_1', ['application/json']))
    output_interface.add_parameter(JsonParameter('input_2', 'integer'))

    # Pull one parameter across from the output interface
    interface = Interface()
    interface.add_parameter_from_output_interface('input_1', 'input_1', output_interface)
    self.assertSetEqual(set(interface.parameters.keys()), {'input_1'})

    # Pulling the same parameter a second time is a duplicate input
    with self.assertRaises(InvalidInterfaceConnection) as context:
        interface.add_parameter_from_output_interface('input_1', 'input_1', output_interface)
    self.assertEqual(context.exception.error.name, 'DUPLICATE_INPUT')
def merge_parameter_map(self, batch, dataset): """Returns the dataset parameters merged with the batch configuration input map :param batch: The batch :type batch: :class:`batch.models.Batch` :param dataset: The dataset of the batch :type dataset: :class:`data.models.DataSet` :returns: The map of datasest parameters :rtype: :class:`data.interface.Interface` """ # combine the parameters dataset_definition = dataset.get_definition() dataset_parameters = dataset_definition.global_parameters for param in dataset_definition.parameters.parameters: dataset_parameters.add_parameter( dataset_definition.parameters.parameters[param]) # map dataset param to inputs if applicable if batch.get_configuration().input_map: from data.interface.interface import Interface from data.interface.parameter import FileParameter, JsonParameter parameters = Interface() for param_name in dataset_parameters.parameters: param = dataset_parameters.parameters[param_name] for map_param in batch.get_configuration().input_map: if param_name == map_param['datasetParameter']: if param.PARAM_TYPE == 'file': parameters.add_parameter( FileParameter(map_param['input'], param.media_types, required=param.required, multiple=param.multiple)) elif param.PARAM_TYPE == 'json': parameters.add_parameter( JsonParameter(map_param['input'], param.json_type, required=param.required, multiple=param.multiple)) else: parameters.add_parameter(param) dataset_parameters = parameters return dataset_parameters
def get_interface(self):
    """Returns the interface represented by this JSON

    :returns: The interface
    :rtype: :class:`data.interface.interface.Interface`:
    """

    interface = Interface()
    # File parameters carry media types and the required/multiple flags
    for file_dict in self._interface['files']:
        interface.add_parameter(FileParameter(file_dict['name'], file_dict['media_types'],
                                              file_dict['required'], file_dict['multiple']))
    # JSON parameters carry a JSON type and the required flag
    for json_dict in self._interface['json']:
        interface.add_parameter(JsonParameter(json_dict['name'], json_dict['type'],
                                              json_dict['required']))
    return interface
def test_validate(self):
    """Tests calling Interface.validate()"""
    interface = Interface()
    interface.add_parameter(FileParameter('input_1', ['application/json']))
    interface.add_parameter(JsonParameter('input_2', 'integer'))
    # Two well-formed parameters validate with no warnings
    self.assertListEqual(interface.validate(), [])

    # A parameter whose own validation fails makes the interface invalid
    failing_param = MagicMock()
    failing_param.name = 'input_3'
    failing_param.validate.side_effect = InvalidInterface('MOCK', '')
    interface.add_parameter(failing_param)
    with self.assertRaises(InvalidInterface) as context:
        interface.validate()
    self.assertEqual(context.exception.error.name, 'MOCK')
def test_convert_recipe_to_v6_json(self):
    """Tests calling convert_recipe_to_v6_json() successfully"""
    # Create persisted job/recipe types to reference from the definition
    job_type_1 = job_test_utils.create_job_type()
    job_type_2 = job_test_utils.create_job_type()
    job_type_3 = job_test_utils.create_job_type()
    job_type_4 = job_test_utils.create_job_type()
    recipe_type_1 = recipe_test_utils.create_recipe_type()
    interface = Interface()
    interface.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface.add_parameter(JsonParameter('json_param_1', 'object'))
    # Graph: A -> {B, C}, B -> E, C -> D
    definition = RecipeDefinition(interface)
    definition.add_job_node('A', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('B', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    definition.add_job_node('C', job_type_3.name, job_type_3.version, job_type_3.revision_num)
    definition.add_recipe_node('D', recipe_type_1.name, recipe_type_1.revision_num)
    definition.add_job_node('E', job_type_4.name, job_type_4.version, job_type_4.revision_num)
    definition.add_dependency('A', 'B')
    definition.add_dependency('A', 'C')
    definition.add_dependency('B', 'E')
    definition.add_dependency('C', 'D')
    definition.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    # Create the recipe and its node models in various execution states
    recipe = recipe_test_utils.create_recipe()
    job_a = job_test_utils.create_job(job_type=job_type_1, status='COMPLETED', save=False)
    job_b = job_test_utils.create_job(job_type=job_type_2, status='RUNNING', save=False)
    job_c = job_test_utils.create_job(job_type=job_type_3, status='COMPLETED', save=False)
    job_e = job_test_utils.create_job(job_type=job_type_4, status='PENDING', num_exes=0, save=False)
    Job.objects.bulk_create([job_a, job_b, job_c, job_e])
    recipe_d = recipe_test_utils.create_recipe(recipe_type=recipe_type_1)
    recipe_node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='A', job=job_a, save=False)
    recipe_node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='B', job=job_b, save=False)
    recipe_node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='C', job=job_c, save=False)
    recipe_node_d = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='D', sub_recipe=recipe_d, save=False)
    recipe_node_e = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='E', job=job_e, save=False)
    recipe_nodes = [recipe_node_a, recipe_node_b, recipe_node_c, recipe_node_d, recipe_node_e]
    # NOTE(review): RecipeInstance is called here with two arguments; an
    # apparent newer variant of this test passes (definition, recipe,
    # recipe_nodes) — confirm which signature this version expects
    recipe_instance = RecipeInstance(definition, recipe_nodes)
    # Convert and make sure the JSON round-trips through the v6 schema
    json = convert_recipe_to_v6_json(recipe_instance)
    RecipeInstanceV6(json=json.get_dict(), do_validate=True)  # Revalidate
    self.assertSetEqual(set(json.get_dict()['nodes'].keys()), {'A', 'B', 'C', 'D', 'E'})
def test_execute_with_data(self):
    """Tests calling ProcessRecipeInput.execute() successfully when the recipe
    already has data populated

    Verifies the update_recipes follow-on message, the computed input file
    size, creation of RecipeInputFile models, and that re-executing the
    message (after a to_json/from_json round trip) is idempotent.
    """

    # Three input files; file_size values are in bytes
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=10485760.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)

    # Recipe interface: single file input_a plus multi-file input_b
    recipe_interface = Interface()
    recipe_interface.add_parameter(FileParameter('input_a', ['text/plain']))
    recipe_interface.add_parameter(
        FileParameter('input_b', ['text/plain'], multiple=True))
    definition = RecipeDefinition(recipe_interface)
    definition_dict = convert_recipe_definition_to_v6_json(
        definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(
        definition=definition_dict)

    # Recipe is created with its input data already populated
    data = Data()
    data.add_value(FileValue('input_a', [file_1.id]))
    data.add_value(FileValue('input_b', [file_2.id, file_3.id]))
    data_dict = convert_data_to_v6_json(data).get_dict()
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_dict)

    # Create message
    message = ProcessRecipeInput()
    message.recipe_id = recipe.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    recipe = Recipe.objects.get(id=recipe.id)

    # Check for update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Check recipe for expected input_file_size
    # NOTE(review): 1052.0 looks like the byte sizes above summed in MiB
    # (10 + 100 + ~942) — confirm the unit against process_recipe_input()
    self.assertEqual(recipe.input_file_size, 1052.0)

    # Make sure recipe input file models are created, one per input file,
    # each recording which recipe input the file was supplied to
    recipe_input_files = RecipeInputFile.objects.filter(
        recipe_id=recipe.id)
    self.assertEqual(len(recipe_input_files), 3)
    for recipe_input_file in recipe_input_files:
        if recipe_input_file.input_file_id == file_1.id:
            self.assertEqual(recipe_input_file.recipe_input, 'input_a')
        elif recipe_input_file.input_file_id == file_2.id:
            self.assertEqual(recipe_input_file.recipe_input, 'input_b')
        elif recipe_input_file.input_file_id == file_3.id:
            self.assertEqual(recipe_input_file.recipe_input, 'input_b')
        else:
            self.fail('Invalid input file ID: %s' % recipe_input_file.input_file_id)

    # Test executing message again (round-tripped through JSON)
    message_json_dict = message.to_json()
    message = ProcessRecipeInput.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Make sure recipe input file models are unchanged
    recipe_input_files = RecipeInputFile.objects.filter(
        recipe_id=recipe.id)
    self.assertEqual(len(recipe_input_files), 3)
def test_process_recipe_input(self):
    """Tests calling RecipeManager.process_recipe_input()

    Three recipes are processed: two with input files carrying various
    source metadata (started/ended times, sensor fields) and one with empty
    input. Verifies the aggregated input_file_size, the min/max source time
    roll-ups, the propagated source sensor fields, and the RecipeInputFile
    models created for each recipe.
    """

    # Time bounds: recipe 1's files span +/-200 days, recipe 2's +/-500 days
    date_1 = now()
    min_src_started_recipe_1 = date_1 - datetime.timedelta(days=200)
    max_src_ended_recipe_1 = date_1 + datetime.timedelta(days=200)
    date_2 = date_1 + datetime.timedelta(minutes=30)
    date_3 = date_1 + datetime.timedelta(minutes=40)
    date_4 = date_1 + datetime.timedelta(minutes=50)
    min_src_started_recipe_2 = date_1 - datetime.timedelta(days=500)
    max_src_ended_recipe_2 = date_1 + datetime.timedelta(days=500)

    # Source sensor metadata shared by the files that carry it
    s_class = 'A'
    s_sensor = '1'
    collection = '12345'
    task = 'abcd'

    workspace = storage_test_utils.create_workspace()
    # Files 1-5 feed recipe 1, files 6-10 feed recipe 2; each file carries a
    # different subset of source metadata so the roll-up logic is exercised
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=10485760.0, source_sensor_class=s_class,
                                            source_sensor=s_sensor, source_collection=collection, source_task=task)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0, source_started=date_2,
                                            source_ended=date_3, source_sensor_class=s_class, source_sensor=s_sensor,
                                            source_collection=collection, source_task=task)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0,
                                            source_started=min_src_started_recipe_1, source_ended=date_4)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=46546.0,
                                            source_ended=max_src_ended_recipe_1)
    file_5 = storage_test_utils.create_file(workspace=workspace, file_size=83457.0, source_started=date_2)
    file_6 = storage_test_utils.create_file(workspace=workspace, file_size=42126588636633.0, source_ended=date_4)
    file_7 = storage_test_utils.create_file(workspace=workspace, file_size=76645464662354.0)
    file_8 = storage_test_utils.create_file(workspace=workspace, file_size=4654.0,
                                            source_started=min_src_started_recipe_2)
    file_9 = storage_test_utils.create_file(workspace=workspace, file_size=545.0, source_started=date_3,
                                            source_ended=max_src_ended_recipe_2)
    file_10 = storage_test_utils.create_file(workspace=workspace, file_size=0.154, source_ended=date_4,
                                             source_sensor_class=s_class, source_sensor=s_sensor,
                                             source_collection=collection, source_task=task)

    # Recipe interface: single file input_a plus multi-file input_b
    recipe_interface = Interface()
    recipe_interface.add_parameter(FileParameter('input_a', ['text/plain']))
    recipe_interface.add_parameter(FileParameter('input_b', ['text/plain'], multiple=True))
    definition = RecipeDefinition(recipe_interface)
    definition_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type_v6(definition=definition_dict)

    # Recipe 1 and 2 get populated inputs; recipe 3 gets empty data
    data_1 = Data()
    data_1.add_value(FileValue('input_a', [file_1.id]))
    data_1.add_value(FileValue('input_b', [file_2.id, file_3.id, file_4.id, file_5.id]))
    data_1_dict = convert_data_to_v6_json(data_1).get_dict()
    data_2 = Data()
    data_2.add_value(FileValue('input_a', [file_6.id]))
    data_2.add_value(FileValue('input_b', [file_7.id, file_8.id, file_9.id, file_10.id]))
    data_2_dict = convert_data_to_v6_json(data_2).get_dict()
    data_3 = Data()
    data_3_dict = convert_data_to_v6_json(data_3).get_dict()
    recipe_1 = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_1_dict)
    recipe_2 = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_2_dict)
    recipe_3 = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=data_3_dict)

    # Execute method
    Recipe.objects.process_recipe_input(recipe_1)
    Recipe.objects.process_recipe_input(recipe_2)
    Recipe.objects.process_recipe_input(recipe_3)

    # Retrieve updated recipe models
    # (ordering by id matches the creation order of recipe_1..recipe_3)
    recipes = Recipe.objects.filter(id__in=[recipe_1.id, recipe_2.id, recipe_3.id]).order_by('id')
    recipe_1 = recipes[0]
    recipe_2 = recipes[1]
    recipe_3 = recipes[2]

    # Check recipes for expected fields
    # NOTE(review): input_file_size values (1053.0, 113269857.0) appear to be
    # the byte sizes above converted to MiB — confirm unit against the manager
    self.assertEqual(recipe_1.input_file_size, 1053.0)
    self.assertEqual(recipe_1.source_started, min_src_started_recipe_1)
    self.assertEqual(recipe_1.source_ended, max_src_ended_recipe_1)
    self.assertEqual(recipe_1.source_sensor_class, s_class)
    self.assertEqual(recipe_1.source_sensor, s_sensor)
    self.assertEqual(recipe_1.source_collection, collection)
    self.assertEqual(recipe_1.source_task, task)
    self.assertEqual(recipe_2.input_file_size, 113269857.0)
    self.assertEqual(recipe_2.source_started, min_src_started_recipe_2)
    self.assertEqual(recipe_2.source_ended, max_src_ended_recipe_2)
    self.assertEqual(recipe_2.source_sensor_class, s_class)
    self.assertEqual(recipe_2.source_sensor, s_sensor)
    self.assertEqual(recipe_2.source_collection, collection)
    self.assertEqual(recipe_2.source_task, task)
    # Empty input data -> zero size and no source time roll-up
    self.assertEqual(recipe_3.input_file_size, 0.0)
    self.assertIsNone(recipe_3.source_started)
    self.assertIsNone(recipe_3.source_ended)

    # Make sure recipe input file models are created
    recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe_1.id)
    self.assertEqual(len(recipe_input_files), 5)
    input_files_dict = {'input_a': set(), 'input_b': set()}
    for recipe_input_file in recipe_input_files:
        input_files_dict[recipe_input_file.recipe_input].add(recipe_input_file.input_file_id)
    self.assertDictEqual(input_files_dict, {'input_a': {file_1.id},
                                            'input_b': {file_2.id, file_3.id, file_4.id, file_5.id}})
    recipe_input_files = RecipeInputFile.objects.filter(recipe_id=recipe_2.id)
    self.assertEqual(len(recipe_input_files), 5)
    input_files_dict = {'input_a': set(), 'input_b': set()}
    for recipe_input_file in recipe_input_files:
        input_files_dict[recipe_input_file.recipe_input].add(recipe_input_file.input_file_id)
    self.assertDictEqual(input_files_dict, {'input_a': {file_6.id},
                                            'input_b': {file_7.id, file_8.id, file_9.id, file_10.id}})
    # Recipe 3 had no input files, so no models should exist for it
    self.assertEqual(RecipeInputFile.objects.filter(recipe_id=recipe_3.id).count(), 0)
def test_set_force_reprocess(self):
    """Tests calling RecipeDiff.set_force_reprocess()

    Diffs two identical definitions, then forces node C (and sub-recipe D
    with its own forced nodes) to reprocess. C and D must be marked for
    reprocess directly, E must be marked because it depends on C, and the
    sub-recipe forced-nodes object must be passed through to node D.
    """

    # Two identical interfaces, one per definition
    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))

    # Definition 1: A -> B, A -> C, C -> D (sub-recipe), C -> E
    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('C', 'D')
    definition_1.add_dependency('C', 'E')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_1.add_dependency_input_connection('E', 'e_input_1', 'C', 'c_output_1')

    # No changes in definition 2
    definition_2 = RecipeDefinition(interface_2)
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_2.add_dependency('A', 'B')
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('C', 'E')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_dependency_input_connection('E', 'e_input_1', 'C', 'c_output_1')

    # Force node C, plus nodes '1' and '2' inside sub-recipe D
    recipe_d_forced_nodes = ForcedNodes()
    recipe_d_forced_nodes.add_node('1')
    recipe_d_forced_nodes.add_node('2')
    top_forced_nodes = ForcedNodes()
    top_forced_nodes.add_node('C')
    top_forced_nodes.add_subrecipe('D', recipe_d_forced_nodes)

    diff = RecipeDiff(definition_1, definition_2)
    diff.set_force_reprocess(top_forced_nodes)

    # No recipe input changes so recipe can be reprocessed
    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])

    # Check each node for correct fields
    node_a = diff.graph['A']
    self.assertEqual(node_a.status, NodeDiff.UNCHANGED)
    self.assertFalse(node_a.reprocess_new_node)
    self.assertListEqual(node_a.changes, [])
    node_b = diff.graph['B']
    self.assertEqual(node_b.status, NodeDiff.UNCHANGED)
    self.assertFalse(node_b.reprocess_new_node)
    self.assertListEqual(node_b.changes, [])
    node_c = diff.graph['C']
    self.assertEqual(node_c.status, NodeDiff.UNCHANGED)
    self.assertTrue(node_c.reprocess_new_node)  # Force reprocess
    self.assertListEqual(node_c.changes, [])
    node_d = diff.graph['D']
    self.assertEqual(node_d.status, NodeDiff.UNCHANGED)
    self.assertTrue(node_d.reprocess_new_node)  # Force reprocess
    self.assertListEqual(node_d.changes, [])
    # Check forced nodes object that got passed to recipe node D
    self.assertEqual(node_d.force_reprocess_nodes, recipe_d_forced_nodes)
    node_e = diff.graph['E']
    self.assertEqual(node_e.status, NodeDiff.UNCHANGED)
    self.assertTrue(
        node_e.reprocess_new_node)  # Force reprocess due to C being forced
    self.assertListEqual(node_e.changes, [])

    # Check nodes to copy, supersede, and unpublish
    self.assertSetEqual(set(diff.get_nodes_to_copy().keys()), {'A', 'B'})
    self.assertSetEqual(set(diff.get_nodes_to_supersede().keys()), {'C', 'D', 'E'})
    self.assertSetEqual(set(diff.get_nodes_to_unpublish().keys()), set())
def test_init_changes_in_middle_of_chains(self):
    """Tests creating a RecipeDiff where nodes are deleted from and inserted into
    the middle of a chain"""

    def build_interface():
        # Both recipe versions expose the same two recipe inputs
        iface = Interface()
        iface.add_parameter(FileParameter('file_param_1', ['image/gif']))
        iface.add_parameter(JsonParameter('json_param_1', 'object'))
        return iface

    # Old definition: A -> B and chain A -> C -> D
    old_def = RecipeDefinition(build_interface())
    old_def.add_job_node('A', 'job_type_1', '1.0', 1)
    old_def.add_job_node('B', 'job_type_2', '2.0', 1)
    old_def.add_job_node('C', 'job_type_3', '1.0', 2)
    old_def.add_recipe_node('D', 'recipe_type_1', 1)
    for parent, child in (('A', 'B'), ('A', 'C'), ('C', 'D')):
        old_def.add_dependency(parent, child)
    old_def.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    old_def.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    old_def.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    old_def.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    old_def.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    # New definition: C is deleted, new node E is inserted between A and B,
    # and D now hangs directly off A
    new_def = RecipeDefinition(build_interface())
    new_def.add_job_node('A', 'job_type_1', '1.0', 1)
    new_def.add_job_node('B', 'job_type_2', '2.0', 1)  # Node C is deleted
    new_def.add_recipe_node('D', 'recipe_type_1', 1)
    new_def.add_job_node('E', 'job_type_4', '5.0', 2)  # New node inbetween A and B
    for parent, child in (('A', 'E'), ('E', 'B'), ('A', 'D')):
        new_def.add_dependency(parent, child)
    new_def.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    new_def.add_dependency_input_connection('E', 'e_input_1', 'A', 'a_output_1')
    new_def.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    new_def.add_dependency_input_connection('D', 'd_input_1', 'A', 'a_output_2')
    new_def.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    diff = RecipeDiff(old_def, new_def)

    # No recipe input changes so recipe can be reprocessed
    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])

    # Expected outcome per node: (status, reprocess flag, ordered change names)
    expected = {
        'A': (NodeDiff.UNCHANGED, False, []),
        'B': (NodeDiff.CHANGED, True, ['PARENT_NEW', 'PARENT_REMOVED']),
        'C': (NodeDiff.DELETED, False, []),
        'D': (NodeDiff.CHANGED, True, ['PARENT_NEW', 'PARENT_REMOVED', 'INPUT_CHANGE']),
        'E': (NodeDiff.NEW, True, []),
    }
    for node_name, (status, reprocess, change_names) in expected.items():
        node = diff.graph[node_name]
        self.assertEqual(node.status, status)
        self.assertEqual(bool(node.reprocess_new_node), reprocess)
        self.assertListEqual([change.name for change in node.changes], change_names)

    # Check nodes to copy, supersede, and unpublish
    self.assertSetEqual(set(diff.get_nodes_to_copy().keys()), {'A'})
    self.assertSetEqual(set(diff.get_nodes_to_supersede().keys()), {'B', 'C', 'D'})
    self.assertSetEqual(set(diff.get_nodes_to_unpublish().keys()), {'C'})
def test_init_identical(self):
    """Tests creating a RecipeDiff between two identical recipe definitions"""
    # Build two separate but structurally identical definitions:
    # graph A -> B, A -> C, C -> D (D is a sub-recipe node)
    definitions = []
    for _ in range(2):
        iface = Interface()
        iface.add_parameter(FileParameter('file_param_1', ['image/gif']))
        iface.add_parameter(JsonParameter('json_param_1', 'object'))
        recipe_def = RecipeDefinition(iface)
        recipe_def.add_job_node('A', 'job_type_1', '1.0', 1)
        recipe_def.add_job_node('B', 'job_type_2', '2.0', 1)
        recipe_def.add_job_node('C', 'job_type_3', '1.0', 2)
        recipe_def.add_recipe_node('D', 'recipe_type_1', 1)
        recipe_def.add_dependency('A', 'B')
        recipe_def.add_dependency('A', 'C')
        recipe_def.add_dependency('C', 'D')
        recipe_def.add_recipe_input_connection('A', 'input_1', 'file_param_1')
        recipe_def.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
        recipe_def.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
        recipe_def.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
        recipe_def.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
        definitions.append(recipe_def)

    diff = RecipeDiff(definitions[0], definitions[1])

    # Identical inputs -> reprocessing is allowed with no reasons recorded
    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])

    # Every node should be unchanged and all should be copied during a reprocess
    nodes_to_copy = diff.get_nodes_to_copy()
    self.assertSetEqual(set(nodes_to_copy.keys()), {'A', 'B', 'C', 'D'})
    for node_diff in nodes_to_copy.values():
        self.assertEqual(node_diff.status, NodeDiff.UNCHANGED)
        self.assertFalse(node_diff.reprocess_new_node)
        self.assertListEqual(node_diff.changes, [])
    self.assertDictEqual(diff.get_nodes_to_supersede(), {})
    self.assertDictEqual(diff.get_nodes_to_unpublish(), {})
def test_init_changes(self):
    """Tests creating a RecipeDiff when the newer definition has a variety of
    changes in it

    Changes between the two definitions: nodes B and E deleted, node C's job
    type version/revision changed, new sub-recipe node F added, and a new
    optional recipe input (json_param_2) added — a non-breaking input change.
    """

    interface_1 = Interface()
    interface_1.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_1.add_parameter(JsonParameter('json_param_1', 'object'))
    # Interface 2 adds an optional input, which must not block reprocessing
    interface_2 = Interface()
    interface_2.add_parameter(FileParameter('file_param_1', ['image/gif']))
    interface_2.add_parameter(JsonParameter('json_param_1', 'object'))
    interface_2.add_parameter(
        JsonParameter('json_param_2', 'object', required=False))

    # Definition 1: A -> B -> E and A -> C -> D
    definition_1 = RecipeDefinition(interface_1)
    definition_1.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_1.add_job_node('B', 'job_type_2', '2.0', 1)
    definition_1.add_job_node('C', 'job_type_3', '1.0', 2)
    definition_1.add_recipe_node('D', 'recipe_type_1', 1)
    definition_1.add_job_node('E', 'job_type_4', '1.0', 1)
    definition_1.add_dependency('A', 'B')
    definition_1.add_dependency('A', 'C')
    definition_1.add_dependency('B', 'E')
    definition_1.add_dependency('C', 'D')
    definition_1.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_1.add_dependency_input_connection('B', 'b_input_1', 'A', 'a_output_1')
    definition_1.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_1.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_1.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')

    # Definition 2: A -> C -> D -> F
    definition_2 = RecipeDefinition(interface_2)
    # Nodes B and E are deleted
    definition_2.add_job_node('A', 'job_type_1', '1.0', 1)
    definition_2.add_job_node('C', 'job_type_3', '2.1', 1)  # Change to job type version and revision
    definition_2.add_recipe_node('D', 'recipe_type_1', 1)
    definition_2.add_recipe_node('F', 'recipe_type_2', 5)  # New node
    definition_2.add_dependency('A', 'C')
    definition_2.add_dependency('C', 'D')
    definition_2.add_dependency('D', 'F')
    definition_2.add_recipe_input_connection('A', 'input_1', 'file_param_1')
    definition_2.add_dependency_input_connection('C', 'c_input_1', 'A', 'a_output_2')
    definition_2.add_dependency_input_connection('D', 'd_input_1', 'C', 'c_output_1')
    definition_2.add_recipe_input_connection('D', 'd_input_2', 'json_param_1')
    definition_2.add_recipe_input_connection('F', 'f_input_1', 'json_param_2')

    diff = RecipeDiff(definition_1, definition_2)

    # Non-breaking recipe input changes so recipe can be reprocessed
    self.assertTrue(diff.can_be_reprocessed)
    self.assertListEqual(diff.reasons, [])

    # Check each node for correct fields
    node_a = diff.graph['A']
    self.assertEqual(node_a.status, NodeDiff.UNCHANGED)
    self.assertFalse(node_a.reprocess_new_node)
    self.assertListEqual(node_a.changes, [])
    node_b = diff.graph['B']
    self.assertEqual(node_b.status, NodeDiff.DELETED)
    self.assertFalse(node_b.reprocess_new_node)
    self.assertListEqual(node_b.changes, [])
    node_c = diff.graph['C']
    self.assertEqual(node_c.status, NodeDiff.CHANGED)
    self.assertTrue(node_c.reprocess_new_node)
    self.assertEqual(len(node_c.changes), 2)
    self.assertEqual(node_c.changes[0].name, 'JOB_TYPE_VERSION_CHANGE')
    self.assertEqual(node_c.changes[1].name, 'JOB_TYPE_REVISION_CHANGE')
    node_d = diff.graph['D']
    self.assertEqual(node_d.status, NodeDiff.CHANGED)
    self.assertTrue(node_d.reprocess_new_node)
    self.assertEqual(len(node_d.changes), 1)
    self.assertEqual(node_d.changes[0].name, 'PARENT_CHANGED')
    node_e = diff.graph['E']
    self.assertEqual(node_e.status, NodeDiff.DELETED)
    self.assertFalse(node_e.reprocess_new_node)
    self.assertListEqual(node_e.changes, [])
    node_f = diff.graph['F']
    self.assertEqual(node_f.status, NodeDiff.NEW)
    self.assertTrue(node_f.reprocess_new_node)
    self.assertListEqual(node_f.changes, [])

    # Check nodes to copy, supersede, and unpublish
    self.assertSetEqual(set(diff.get_nodes_to_copy().keys()), {'A'})
    self.assertSetEqual(set(diff.get_nodes_to_supersede().keys()), {'B', 'C', 'D', 'E'})
    self.assertSetEqual(set(diff.get_nodes_to_unpublish().keys()), {'B', 'E'})
def test_execute(self):
    """Tests calling ProcessCondition.execute() successfully

    Builds a recipe with two completed jobs feeding a condition node,
    executes the process_condition message, and verifies the condition is
    processed/accepted, that its data was assembled from both job outputs,
    and that re-executing the message is idempotent.
    """

    # Four output files; file_size values are in bytes
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=65456.0)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=24564165456.0)

    # Seed manifest for job-a, which produces multi-file output OUTPUT_A
    manifest_1 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-a',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'OUTPUT_A',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    job_type_1 = job_test_utils.create_job_type(interface=manifest_1)

    # Seed manifest for job-b, which produces multi-file output OUTPUT_B
    manifest_2 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-b',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': []
                },
                'outputs': {
                    'files': [{
                        'name': 'OUTPUT_B',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    job_type_2 = job_test_utils.create_job_type(interface=manifest_2)

    # Completed job outputs (legacy v1.0 results format)
    output_1_dict = {
        'version': '1.0',
        'output_data': [{
            'name': 'OUTPUT_A',
            'file_ids': [file_1.id, file_2.id]
        }]
    }
    output_2_dict = {
        'version': '1.0',
        'output_data': [{
            'name': 'OUTPUT_B',
            'file_ids': [file_3.id, file_4.id]
        }]
    }

    # Condition node interface: two multi-file inputs, one fed by each job
    cond_interface = Interface()
    cond_interface.add_parameter(
        FileParameter('INPUT_C_1', [], multiple=True))
    cond_interface.add_parameter(
        FileParameter('INPUT_C_2', [], multiple=True))

    # Graph: node_a -> node_c <- node_b, where node_c is the condition node
    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_type_1.name, job_type_1.version, job_type_1.revision_num)
    definition.add_job_node('node_b', job_type_2.name, job_type_2.version, job_type_2.revision_num)
    # TODO: once DataFilter is implemented, create a DataFilter object here that accepts the inputs
    definition.add_condition_node('node_c', cond_interface, DataFilter(True))
    definition.add_dependency('node_a', 'node_c')
    definition.add_dependency('node_b', 'node_c')
    definition.add_dependency_input_connection('node_c', 'INPUT_C_1', 'node_a', 'OUTPUT_A')
    definition.add_dependency_input_connection('node_c', 'INPUT_C_2', 'node_b', 'OUTPUT_B')
    def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)

    # Recipe with empty legacy input data, both jobs COMPLETED with outputs
    recipe_data_dict = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': workspace.id
    }
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=recipe_data_dict)
    job_1 = job_test_utils.create_job(job_type=job_type_1, num_exes=1, status='COMPLETED',
                                      output=output_1_dict, recipe=recipe)
    job_2 = job_test_utils.create_job(job_type=job_type_2, num_exes=1, status='COMPLETED',
                                      output=output_2_dict, recipe=recipe)
    condition = recipe_test_utils.create_recipe_condition(recipe=recipe, save=True)
    node_a = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_a', job=job_1, save=False)
    node_b = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_b', job=job_2, save=False)
    node_c = recipe_test_utils.create_recipe_node(recipe=recipe, node_name='node_c', condition=condition, save=False)
    RecipeNode.objects.bulk_create([node_a, node_b, node_c])

    # Create message
    message = create_process_condition_messages([condition.id])[0]

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    condition = RecipeCondition.objects.get(id=condition.id)

    # Check for update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)

    # Check condition flags
    self.assertTrue(condition.is_processed)
    self.assertIsNotNone(condition.processed)
    self.assertTrue(condition.is_accepted)

    # Check condition for expected data assembled from both job outputs
    self.assertSetEqual(set(condition.get_data().values.keys()), {'INPUT_C_1', 'INPUT_C_2'})
    self.assertListEqual(condition.get_data().values['INPUT_C_1'].file_ids, [file_1.id, file_2.id])
    self.assertListEqual(condition.get_data().values['INPUT_C_2'].file_ids, [file_3.id, file_4.id])

    # Test executing message again (round-tripped through JSON)
    message_json_dict = message.to_json()
    message = ProcessCondition.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipe message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipe')
    self.assertEqual(message.new_messages[0].root_recipe_id, recipe.id)
def test_execute_with_recipe_legacy(self):
    """Tests calling ProcessRecipeInput.execute() successfully when a legacy
    sub-recipe has to get its data from its recipe

    Two completed jobs feed a legacy (v1 definition) sub-recipe; executing
    the message must assemble the sub-recipe's input from the job outputs,
    store it in the legacy v1.0 format with a workspace ID, create the
    RecipeInputFile models, and be idempotent on re-execution.
    """

    # Four output files; file_size values are in bytes
    workspace = storage_test_utils.create_workspace()
    file_1 = storage_test_utils.create_file(workspace=workspace, file_size=104857600.0)
    file_2 = storage_test_utils.create_file(workspace=workspace, file_size=987654321.0)
    file_3 = storage_test_utils.create_file(workspace=workspace, file_size=65456.0)
    file_4 = storage_test_utils.create_file(workspace=workspace, file_size=24564165456.0)

    # Seed manifest for job-a, which produces single-file output output_a
    manifest_a = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-a',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'output_a',
                        'pattern': '*.png'
                    }]
                }
            }
        }
    }
    job_type_a = job_test_utils.create_job_type(interface=manifest_a)
    output_data_a = Data()
    output_data_a.add_value(FileValue('output_a', [file_1.id]))
    output_data_a_dict = convert_data_to_v6_json(output_data_a).get_dict()

    # Seed manifest for job-b, which produces multi-file output output_b
    manifest_b = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'job-b',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': '',
            'description': '',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'output_b',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    job_type_b = job_test_utils.create_job_type(interface=manifest_b)
    output_data_b = Data()
    output_data_b.add_value(
        FileValue('output_b', [file_2.id, file_3.id, file_4.id]))
    output_data_b_dict = convert_data_to_v6_json(output_data_b).get_dict()

    # Both parent jobs are COMPLETED with output data populated
    job_a = job_test_utils.create_job(job_type=job_type_a, num_exes=1,
                                      status='COMPLETED', output=output_data_a_dict)
    job_b = job_test_utils.create_job(job_type=job_type_b, num_exes=1,
                                      status='COMPLETED', output=output_data_b_dict)

    # Legacy sub-recipe type (definition serialized in the v1 format)
    sub_recipe_interface_c = Interface()
    sub_recipe_interface_c.add_parameter(
        FileParameter('input_a', ['image/png']))
    sub_recipe_interface_c.add_parameter(
        FileParameter('input_b', ['image/png'], multiple=True))
    sub_recipe_def_c = RecipeDefinition(sub_recipe_interface_c)
    sub_recipe_def_dict_c = convert_recipe_definition_to_v1_json(
        sub_recipe_def_c).get_dict()
    sub_recipe_type_c = recipe_test_utils.create_recipe_type(
        definition=sub_recipe_def_dict_c)
    sub_recipe_c = recipe_test_utils.create_recipe(
        recipe_type=sub_recipe_type_c)

    # Top-level definition wiring node_a/node_b outputs into sub-recipe node_c
    # NOTE(review): add_dependency is called here as ('node_c', 'node_a'),
    # the reverse argument order from the other tests in this file
    # (e.g. add_dependency('node_a', 'node_c')) — confirm against the
    # RecipeDefinition.add_dependency signature which order is intended
    definition = RecipeDefinition(Interface())
    definition.add_job_node('node_a', job_type_a.name, job_type_a.version, job_type_a.revision_num)
    definition.add_job_node('node_b', job_type_b.name, job_type_b.version, job_type_b.revision_num)
    definition.add_recipe_node('node_c', sub_recipe_type_c.name, sub_recipe_type_c.revision_num)
    definition.add_dependency('node_c', 'node_a')
    definition.add_dependency_input_connection('node_c', 'input_a', 'node_a', 'output_a')
    definition.add_dependency('node_c', 'node_b')
    definition.add_dependency_input_connection('node_c', 'input_b', 'node_b', 'output_b')
    def_dict = convert_recipe_definition_to_v6_json(definition).get_dict()
    recipe_type = recipe_test_utils.create_recipe_type(definition=def_dict)

    # Top-level recipe with empty legacy input data
    recipe_data_dict = {
        'version': '1.0',
        'input_data': [],
        'workspace_id': workspace.id
    }
    recipe = recipe_test_utils.create_recipe(recipe_type=recipe_type, input=recipe_data_dict)
    recipe_node_a = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='node_a', job=job_a)
    recipe_node_b = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='node_b', job=job_b)
    recipe_node_c = recipe_test_utils.create_recipe_node(
        recipe=recipe, node_name='node_c', sub_recipe=sub_recipe_c)
    RecipeNode.objects.bulk_create(
        [recipe_node_a, recipe_node_b, recipe_node_c])
    job_a.recipe = recipe
    job_a.save()
    job_b.recipe = recipe
    job_b.save()
    sub_recipe_c.recipe = recipe
    sub_recipe_c.save()

    # Create message
    message = ProcessRecipeInput()
    message.recipe_id = sub_recipe_c.id

    # Execute message
    result = message.execute()
    self.assertTrue(result)
    sub_recipe_c = Recipe.objects.get(id=sub_recipe_c.id)

    # Check for update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Check sub-recipe for expected input_file_size
    # NOTE(review): 24469.0 presumably is the four byte sizes above in MiB — confirm unit
    self.assertEqual(sub_recipe_c.input_file_size, 24469.0)

    # Check sub-recipe for expected input data
    self.assertEqual(
        sub_recipe_c.input['version'], '1.0')  # Should be legacy input data with workspace ID
    self.assertEqual(sub_recipe_c.input['workspace_id'], workspace.id)
    self.assertSetEqual(set(sub_recipe_c.get_input_data().values.keys()),
                        {'input_a', 'input_b'})
    self.assertListEqual(
        sub_recipe_c.get_input_data().values['input_a'].file_ids,
        [file_1.id])
    self.assertListEqual(
        sub_recipe_c.get_input_data().values['input_b'].file_ids,
        [file_2.id, file_3.id, file_4.id])

    # Make sure sub-recipe input file models are created
    input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
    self.assertEqual(len(input_files), 4)
    file_ids = {input_file.input_file_id for input_file in input_files}
    self.assertSetEqual(file_ids, {file_1.id, file_2.id, file_3.id, file_4.id})

    # Test executing message again (round-tripped through JSON)
    message_json_dict = message.to_json()
    message = ProcessRecipeInput.from_json(message_json_dict)
    result = message.execute()
    self.assertTrue(result)

    # Still should have update_recipes message
    self.assertEqual(len(message.new_messages), 1)
    self.assertEqual(message.new_messages[0].type, 'update_recipes')

    # Make sure recipe input file models are unchanged
    input_files = RecipeInputFile.objects.filter(recipe_id=sub_recipe_c.id)
    self.assertEqual(len(input_files), 4)
def test_condition_hit(self):
    """Tests defining a recipe in which a condition node gates a sub-recipe.

    Topology: parse-job -> condition-node -> recipe-node

    The parse job's PNG outputs feed the condition node, whose filter accepts
    'image/png', and the condition's output feeds the sub-recipe's input.
    """

    # Seed manifest for the parse job: one required HDF5 input, PNG outputs
    manifest_1 = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'parse-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Parse Job',
            'description': 'Test Parse job',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{
                        'name': 'INPUT_FILE',
                        'mediaTypes': ['image/x-hdf5-image'],
                        'required': True
                    }],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'OUTPUT_A',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    job_type_1 = job_test_utils.create_seed_job_type(manifest=manifest_1)

    # Top-level recipe definition: a single HDF5 file input feeding the parse job
    input_interface = Interface()
    input_interface.add_parameter(
        FileParameter('INPUT_FILE', ['image/x-hdf5-image'], multiple=False))
    definition = RecipeDefinition(input_interface)
    definition.add_job_node('parse-job', job_type_1.name, job_type_1.version,
                            job_type_1.revision_num)

    # Condition node: its 'cond' filter matches only the 'image/png' media type
    cond_interface = Interface()
    cond_interface.add_parameter(FileParameter('INPUT_FILE', ['image/png']))
    df = DataFilter(filter_list=[{
        'name': 'cond',
        'type': 'media-type',
        'condition': '==',
        'value': ['image/png']
    }])
    definition.add_condition_node('condition-node', cond_interface, df)

    # Seed manifest for the job inside the sub-recipe: PNG in, PNG out
    sub_job_manifest = {
        'seedVersion': '1.0.0',
        'job': {
            'name': 'recipe-job',
            'jobVersion': '1.0.0',
            'packageVersion': '1.0.0',
            'title': 'Test Recipe Job',
            'description': 'Test Recipe job',
            'maintainer': {
                'name': 'John Doe',
                'email': '*****@*****.**'
            },
            'timeout': 10,
            'interface': {
                'command': '',
                'inputs': {
                    'files': [{
                        'name': 'INPUT_FILE',
                        'mediaTypes': ['image/png'],
                        'required': True
                    }],
                    'json': []
                },
                'outputs': {
                    'files': [{
                        'name': 'OUTPUT_A',
                        'pattern': '*.png',
                        'multiple': True
                    }]
                }
            }
        }
    }
    sub_job = job_test_utils.create_seed_job_type(manifest=sub_job_manifest)

    # Sub-recipe definition: one PNG input wired straight into its only job
    sub_interface = Interface()
    sub_interface.add_parameter(FileParameter('INPUT_FILE', ['image/png']))
    definition_b = RecipeDefinition(sub_interface)
    definition_b.add_job_node('job_b', sub_job.name, sub_job.version,
                              sub_job.revision_num)
    definition_b.add_recipe_input_connection('job_b', 'INPUT_FILE', 'INPUT_FILE')
    definition_b_dict = convert_recipe_definition_to_v6_json(
        definition_b).get_dict()
    sub_recipe_type = recipe_test_utils.create_recipe_type_v6(
        definition=definition_b_dict)
    definition.add_recipe_node('recipe-node', sub_recipe_type.name,
                               sub_recipe_type.revision_num)

    # Connect the recipe input to the parse job
    definition.add_recipe_input_connection('parse-job', 'INPUT_FILE',
                                           'INPUT_FILE')
    # Connect the condition node to the parse job output
    definition.add_dependency_input_connection('condition-node', 'cond',
                                               'parse-job', 'OUTPUT_A')
    # Connect the sub recipe to the condition output
    definition.add_dependency_input_connection('recipe-node', 'INPUT_FILE',
                                               'condition-node', 'cond')
def test_validate(self):
    """Tests calling JsonValue.validate()

    Covers the parameter-type mismatch error, an INVALID_JSON_TYPE error for
    each JSON type given a non-matching value, and a clean (no warnings)
    validation for each JSON type given a matching value.
    """

    # A JSON value validated against a *file* parameter must be rejected
    file_param = FileParameter('input_1', ['application/json'])
    json_value = JsonValue('input_1', 'hello')
    with self.assertRaises(InvalidData) as context:
        json_value.validate(file_param)
    self.assertEqual(context.exception.error.name, 'MISMATCHED_PARAM_TYPE')

    # Each JSON type paired with a value that does not match that type
    invalid_cases = [('array', 123), ('boolean', 123), ('integer', 123.5),
                     ('number', 'foo'), ('object', 123), ('string', 123)]
    for json_type, bad_value in invalid_cases:
        json_param = JsonParameter('input_1', json_type)
        json_value = JsonValue('input_1', bad_value)
        with self.assertRaises(InvalidData) as context:
            json_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

    # Each JSON type paired with a matching value validates with no warnings
    valid_cases = [('array', [1, 2, 3]), ('boolean', True), ('integer', 1234),
                   ('number', 1234.5), ('object', {'foo': 'bar'}),
                   ('string', 'hello')]
    for json_type, good_value in valid_cases:
        json_param = JsonParameter('input_1', json_type)
        json_value = JsonValue('input_1', good_value)
        warnings = json_value.validate(json_param)
        self.assertListEqual(warnings, [])