def test_get_inputs_for_op_group(self):
    """
    Tests method get_datatypes_inputs_for_operation_group.
    The DataType inputs will be from a DataType group.
    """
    project, dt_group_id, first_dt, second_dt = self._create_datatype_group()
    first_dt.visible = False
    dao.store_entity(first_dt)
    second_dt.visible = False
    dao.store_entity(second_dt)

    op_group = model.OperationGroup(project.id, "group", "range1[1..2]")
    op_group = dao.store_entity(op_group)
    params_1 = json.dumps({"param_5": "1", "param_1": first_dt.gid, "param_6": "2"})
    params_2 = json.dumps({"param_5": "1", "param_4": second_dt.gid, "param_6": "5"})

    algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

    op1 = model.Operation(self.test_user.id, project.id, algo.id, params_1, op_group_id=op_group.id)
    op2 = model.Operation(self.test_user.id, project.id, algo.id, params_2, op_group_id=op_group.id)
    dao.store_entities([op1, op2])

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    self.assertEqual(len(inputs), 0)

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
    self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
    self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
    self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
    self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

    first_dt.visible = True
    dao.store_entity(first_dt)

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
    self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
    self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
    self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
    self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
    self.assertFalse(first_dt.id == inputs[0].id, "Retrieved wrong dataType.")
    self.assertFalse(second_dt.id == inputs[0].id, "Retrieved wrong dataType.")
    self.assertTrue(dt_group_id == inputs[0].id, "Retrieved wrong dataType.")
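# A minimal sketch, not part of the original suite: ``self.relevant_filter``
# and ``self.full_filter`` are defined in setup code that is not shown here.
# Assuming they follow the FilterChain semantics used in
# ``test_get_filtered_datatypes`` below, the pair could be built roughly like
# this (the ``visible``-based condition is an assumption, not the confirmed
# implementation).
def _build_visibility_filters_sketch():
    # Hypothetical helper: "relevant" keeps only visible dataTypes,
    # while ``None`` stands for "no filtering" (the full view).
    relevant_filter = FilterChain(fields=[FilterChain.datatype + ".visible"],
                                  operations=["=="], values=[True])
    full_filter = None
    return relevant_filter, full_filter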
def test_get_inputs_for_operation(self):
    """
    Tests method get_datatype_and_datatypegroup_inputs_for_operation.
    Verifies that the filters' influence over the results is as expected.
    """
    algo = dao.get_algorithm_by_module('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')

    array_wrappers = self._create_mapped_arrays(self.test_project.id)
    ids = []
    for datatype in array_wrappers:
        ids.append(datatype[0])

    datatype = dao.get_datatype_by_id(ids[0])
    datatype.visible = False
    dao.store_entity(datatype)

    parameters = json.dumps({"param_5": "1", "param_1": array_wrappers[0][2],
                             "param_2": array_wrappers[1][2], "param_3": array_wrappers[2][2], "param_6": "0"})
    operation = model.Operation(self.test_user.id, self.test_project.id, algo.id, parameters)
    operation = dao.store_entity(operation)

    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                      self.relevant_filter)
    self.assertEqual(len(inputs), 2)
    self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
    self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
    self.assertFalse(ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                      self.full_filter)
    self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
    self.assertTrue(ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
    self.assertTrue(ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")
    self.assertTrue(ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id], "Retrieved wrong dataType.")

    project, dt_group_id, first_dt, _ = self._create_datatype_group()
    first_dt.visible = False
    dao.store_entity(first_dt)

    parameters = json.dumps({"other_param": "_", "param_1": first_dt.gid})
    operation = model.Operation(self.test_user.id, project.id, algo.id, parameters)
    operation = dao.store_entity(operation)

    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                      self.relevant_filter)
    self.assertEqual(len(inputs), 0, "Incorrect number of dataTypes.")

    inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(operation.gid,
                                                                                      self.full_filter)
    self.assertEqual(len(inputs), 1, "Incorrect number of dataTypes.")
    self.assertEqual(inputs[0].id, dt_group_id, "Wrong dataType.")
    self.assertTrue(inputs[0].id != first_dt.id, "Wrong dataType.")
def test_reduce_dimension_component(self):
    """
    Tests that the data passed to the launch method of the
    NDimensionArrayAdapter is correct. The passed data should be a list
    of arrays, each with one dimension.
    """
    inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                          "tvb.datatypes.arrays.MappedArray")
    self.assertEqual(len(inserted_data), 0, "Expected to find no data.")

    # Create an operation.
    algorithm_id = FlowService().get_algorithm_by_module_and_class('tvb_test.adapters.ndimensionarrayadapter',
                                                                   'NDimensionArrayAdapter')[0].id
    operation = model.Operation(self.test_user.id, self.test_project.id, algorithm_id, 'test params',
                                meta=json.dumps({DataTypeMetaData.KEY_STATE: "RAW"}),
                                status="FINISHED", method_name=ABCAdapter.LAUNCH_METHOD)
    operation = dao.store_entity(operation)

    # Save the array wrapper in DB.
    adapter_instance = NDimensionArrayAdapter()
    PARAMS = {}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    inserted_data = FlowService().get_available_datatypes(self.test_project.id,
                                                          "tvb.datatypes.arrays.MappedArray")
    self.assertEqual(len(inserted_data), 1, "Problems when inserting data")
    gid = inserted_data[0][2]
    entity = dao.get_datatype_by_gid(gid)

    # From the 3D array do not select any sub-array.
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": "requiredDim_1",
              "input_data_dimensions_1": "",
              "input_data_dimensions_2": ""}
    try:
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        self.fail("Test should not pass. The resulting array should be a 1D array.")
    except Exception:
        # OK, do nothing; we were expecting to produce a 1D array.
        pass

    # From the 3D array select only a 1D array.
    first_dim = [gid + '_1_0', 'requiredDim_1']
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": first_dim,
              "input_data_dimensions_1": gid + "_2_1"}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    expected_result = entity.array_data[:, 0, 1]
    actual_result = adapter_instance.launch_param
    self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
    self.assertTrue(numpy.equal(actual_result, expected_result).all())

    # From the 3D array select a 2D array.
    first_dim = [gid + '_1_0', gid + '_1_1', 'requiredDim_2']
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": first_dim,
              "input_data_dimensions_1": gid + "_2_1"}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    expected_result = entity.array_data[slice(0, None), [0, 1], 1]
    actual_result = adapter_instance.launch_param
    self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
    self.assertTrue(numpy.equal(actual_result, expected_result).all())

    # From the 3D array select a 1D array by applying the SUM function on
    # the first dimension and the AVERAGE function on the second one.
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": ["requiredDim_1", "func_sum"],
              "input_data_dimensions_1": "func_average",
              "input_data_dimensions_2": ""}
    self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
    aux = numpy.sum(entity.array_data, axis=0)
    expected_result = numpy.average(aux, axis=0)
    actual_result = adapter_instance.launch_param
    self.assertEqual(len(actual_result), len(expected_result), "Not the same size for results!")
    self.assertTrue(numpy.equal(actual_result, expected_result).all())
    # From the 3D array select a 2D array and apply an operation on the second dimension.
    PARAMS = {"python_method": "reduce_dimension", "input_data": gid,
              "input_data_dimensions_0": ["requiredDim_2", "func_sum",
                                          "expected_shape_x,512", "operations_x,>"],
              "input_data_dimensions_1": "",
              "input_data_dimensions_2": ""}
    try:
        self.operation_service.initiate_prelaunch(operation, adapter_instance, {}, **PARAMS)
        self.fail("Test should not pass! The second dimension of the array should be > 512.")
    except Exception:
        # OK, do nothing; the shape constraint is expected to be violated.
        pass
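# A plain-numpy illustration, not part of the original suite: it mirrors the
# expected results asserted above, so the index semantics of the dimension
# selections are easier to follow (the 2x3x4 shape is an arbitrary stand-in
# for ``entity.array_data``).
def _reduce_dimension_numpy_sketch():
    import numpy
    data = numpy.arange(24).reshape((2, 3, 4))
    # Fixing two of the three axes yields a 1D array, as in the first selection.
    one_d = data[:, 0, 1]
    # Keeping two entries on the second axis yields a 2D array.
    two_d = data[:, [0, 1], 1]
    # func_sum on the first dimension, then func_average on the second.
    reduced = numpy.average(numpy.sum(data, axis=0), axis=0)
    assert one_d.ndim == 1 and two_d.ndim == 2 and reduced.ndim == 1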
def create_datatype_group(self, subject=USER_FULL_NAME, state=DATATYPE_STATE):
    """
    Creates, stores and returns a DataTypeGroup entity, together with a
    second operation group holding the corresponding datatype measures.
    """
    group = model.OperationGroup(self.project.id, ranges=[json.dumps(self.RANGE_1), json.dumps(self.RANGE_2)])
    group = dao.store_entity(group)
    group_ms = model.OperationGroup(self.project.id, ranges=[json.dumps(self.RANGE_1), json.dumps(self.RANGE_2)])
    group_ms = dao.store_entity(group_ms)

    datatype_group = model.DataTypeGroup(group, subject=subject, state=state, operation_id=self.operation.id)
    # Set storage path, before setting data.
    datatype_group.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
    datatype_group = dao.store_entity(datatype_group)

    dt_group_ms = model.DataTypeGroup(group_ms, subject=subject, state=state, operation_id=self.operation.id)
    # Set storage path, before setting data.
    dt_group_ms.storage_path = self.files_helper.get_project_folder(self.project, str(self.operation.id))
    dao.store_entity(dt_group_ms)

    # Now create some data types and add them to the group.
    for range_val1 in self.RANGE_1[1]:
        for range_val2 in self.RANGE_2[1]:
            operation = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                        meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                        range_values=json.dumps({self.RANGE_1[0]: range_val1,
                                                                 self.RANGE_2[0]: range_val2}))
            operation.fk_operation_group = group.id
            operation = dao.store_entity(operation)

            datatype = self.create_datatype_with_storage(operation_id=operation.id)
            datatype.number1 = range_val1
            datatype.number2 = range_val2
            datatype.fk_datatype_group = datatype_group.id
            datatype.set_operation_id(operation.id)
            dao.store_entity(datatype)

            op_ms = model.Operation(self.user.id, self.project.id, self.algorithm.id, 'test parameters',
                                    meta=json.dumps(self.meta), status=model.STATUS_FINISHED,
                                    range_values=json.dumps({self.RANGE_1[0]: range_val1,
                                                             self.RANGE_2[0]: range_val2}))
            op_ms.fk_operation_group = group_ms.id
            op_ms = dao.store_entity(op_ms)
            self.create_datatype_measure(datatype, op_ms,
                                         FilesHelper().get_project_folder(self.project, str(op_ms.id)))

    return datatype_group
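# A minimal sketch, not part of the original suite: the nested loop above
# stores one datatype per point of the RANGE_1 x RANGE_2 grid. The values
# below are illustrative only; the real RANGE_1/RANGE_2 constants are
# defined elsewhere in the class.
def _range_grid_sketch():
    range_1 = ["row1", [1, 2, 3]]
    range_2 = ["row2", [0.1, 0.5]]
    # One dict of range values per stored operation/datatype pair.
    grid = [{range_1[0]: v1, range_2[0]: v2}
            for v1 in range_1[1] for v2 in range_2[1]]
    assert len(grid) == 6  # 3 x 2 grid points -> 6 datatypes in the group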
def test_get_inputs_for_op_group_simple_inputs(self):
    """
    Tests method get_datatypes_inputs_for_operation_group.
    The dataType inputs will not be part of a dataType group.
    """
    # _create_mapped_arrays returns a list of 3 elements.
    array_wrappers = self._create_mapped_arrays(self.test_project.id)
    array_wrapper_ids = []
    for datatype in array_wrappers:
        array_wrapper_ids.append(datatype[0])

    datatype = dao.get_datatype_by_id(array_wrapper_ids[0])
    datatype.visible = False
    dao.store_entity(datatype)

    op_group = model.OperationGroup(self.test_project.id, "group", "range1[1..2]")
    op_group = dao.store_entity(op_group)
    params_1 = json.dumps({"param_5": "2", "param_1": array_wrappers[0][2],
                           "param_2": array_wrappers[1][2], "param_6": "7"})
    params_2 = json.dumps({"param_5": "5", "param_3": array_wrappers[2][2],
                           "param_2": array_wrappers[1][2], "param_6": "6"})

    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    algo = dao.get_algorithm_by_group(algo_group.id)

    op1 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_1, op_group_id=op_group.id)
    op2 = model.Operation(self.test_user.id, self.test_project.id, algo.id, params_2, op_group_id=op_group.id)
    dao.store_entities([op1, op2])

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.relevant_filter)
    self.assertEqual(len(inputs), 2)
    self.assertFalse(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
    self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")
    self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id], "Retrieved wrong dataType.")

    inputs = self.project_service.get_datatypes_inputs_for_operation_group(op_group.id, self.full_filter)
    self.assertEqual(len(inputs), 3, "Incorrect number of dataTypes.")
    self.assertTrue(array_wrapper_ids[0] in [inputs[0].id, inputs[1].id, inputs[2].id])
    self.assertTrue(array_wrapper_ids[1] in [inputs[0].id, inputs[1].id, inputs[2].id])
    self.assertTrue(array_wrapper_ids[2] in [inputs[0].id, inputs[1].id, inputs[2].id])
def test_get_filtered_datatypes(self):
    """
    Test the filter function when retrieving dataTypes.
    """
    # Create some test operations.
    start_dates = [datetime.now(),
                   datetime.strptime("08-06-2010", "%m-%d-%Y"),
                   datetime.strptime("07-21-2010", "%m-%d-%Y"),
                   datetime.strptime("05-06-2010", "%m-%d-%Y"),
                   datetime.strptime("07-21-2011", "%m-%d-%Y")]
    end_dates = [datetime.now(),
                 datetime.strptime("08-12-2010", "%m-%d-%Y"),
                 datetime.strptime("08-12-2010", "%m-%d-%Y"),
                 datetime.strptime("08-12-2011", "%m-%d-%Y"),
                 datetime.strptime("08-12-2011", "%m-%d-%Y")]

    for i in range(5):
        operation = model.Operation(self.test_user.id, self.test_project.id, self.algorithm1.id, 'test params',
                                    status=model.STATUS_FINISHED, start_date=start_dates[i],
                                    completion_date=end_dates[i])
        operation = dao.store_entity(operation)
        storage_path = FilesHelper().get_project_folder(self.test_project, str(operation.id))
        if i < 4:
            datatype_inst = Datatype1()
            datatype_inst.type = "Datatype1"
            datatype_inst.subject = "John Doe" + str(i)
            datatype_inst.state = "RAW"
            datatype_inst.set_operation_id(operation.id)
            dao.store_entity(datatype_inst)
        else:
            for _ in range(2):
                datatype_inst = Datatype2()
                datatype_inst.storage_path = storage_path
                datatype_inst.type = "Datatype2"
                datatype_inst.subject = "John Doe" + str(i)
                datatype_inst.state = "RAW"
                datatype_inst.string_data = ["data"]
                datatype_inst.set_operation_id(operation.id)
                dao.store_entity(datatype_inst)

    # Without a filter, all four Datatype1 instances should be returned.
    returned_data = self.flow_service.get_available_datatypes(self.test_project.id, Datatype1)[0]
    for row in returned_data:
        if row[1] != 'Datatype1':
            self.fail("Some invalid data was returned!")
    self.assertEqual(4, len(returned_data), "Invalid length of result")

    # Filter on state and start date: only operations started after
    # 08-01-2010 should pass.
    filter_op = FilterChain(fields=[FilterChain.datatype + ".state", FilterChain.operation + ".start_date"],
                            values=["RAW", datetime.strptime("08-01-2010", "%m-%d-%Y")],
                            operations=["==", ">"])
    returned_data = self.flow_service.get_available_datatypes(self.test_project.id, Datatype1, filter_op)[0]
    returned_subjects = [one_data[3] for one_data in returned_data]

    if "John Doe0" not in returned_subjects or "John Doe1" not in returned_subjects \
            or len(returned_subjects) != 2:
        self.fail("DataTypes were not filtered properly!")
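# A plain-Python illustration, not part of the original suite: the FilterChain
# above is expected to behave like this predicate over the four Datatype1
# entities created in the test (all have state == "RAW", so only the start
# date matters).
def _date_filter_sketch():
    from datetime import datetime
    start_dates = [datetime.now(),
                   datetime.strptime("08-06-2010", "%m-%d-%Y"),
                   datetime.strptime("07-21-2010", "%m-%d-%Y"),
                   datetime.strptime("05-06-2010", "%m-%d-%Y")]
    threshold = datetime.strptime("08-01-2010", "%m-%d-%Y")
    kept = [i for i, start in enumerate(start_dates) if start > threshold]
    assert kept == [0, 1]  # -> subjects "John Doe0" and "John Doe1"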