def test_get_filtered_by_column(self):
    """
    Test the filter function when retrieving dataTypes with a filter
    after a column from a class specific table (e.g. DATA_arraywrapper).

    Stores three MappedArray instances (two 1-D, one 2-D) and checks that
    FilterChain filters on `_nr_dimensions` and `_length_1d` select the
    expected subsets.
    """
    operation_1 = TestFactory.create_operation(test_user=self.test_user,
                                               test_project=self.test_project)
    operation_2 = TestFactory.create_operation(test_user=self.test_user,
                                               test_project=self.test_project)
    one_dim_array = numpy.arange(5)
    two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
    self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
    self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
    self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

    count = self.flow_service.get_available_datatypes(
        self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
    self.assertEqual(count, 3, "Problems with inserting data")

    first_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                               operations=["=="], values=[1])
    count = self.flow_service.get_available_datatypes(
        self.test_project.id, "tvb.datatypes.arrays.MappedArray", first_filter)[1]
    self.assertEqual(count, 2, "Data was not filtered")

    second_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                operations=["=="], values=[2])
    filtered_data = self.flow_service.get_available_datatypes(
        self.test_project.id, "tvb.datatypes.arrays.MappedArray", second_filter)[0]
    self.assertEqual(len(filtered_data), 1, "Data was not filtered")
    self.assertEqual(filtered_data[0][3], "John Doe 3")

    third_filter = FilterChain(fields=[FilterChain.datatype + '._length_1d'],
                               operations=["=="], values=[3])
    filtered_data = self.flow_service.get_available_datatypes(
        self.test_project.id, "tvb.datatypes.arrays.MappedArray", third_filter)[0]
    self.assertEqual(len(filtered_data), 1, "Data was not filtered correct")
    self.assertEqual(filtered_data[0][3], "John Doe 3")

    # Best-effort cleanup of files left on disk by the storage step.
    # Loop over the names instead of repeating the code three times, and
    # catch only OSError (what os.remove raises) rather than swallowing
    # every exception, so real bugs in the test are not hidden.
    for leftover in ('One_dim.txt', 'Two_dim.txt', 'One_dim-1.txt'):
        try:
            if os.path.exists(leftover):
                os.remove(leftover)
        except OSError:
            pass
def test_get_linkable_projects(self):
    """ Test for retrieving the projects for a given user. """
    existing = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(existing), 0, "Database was not reset!")
    other_user = TestFactory.create_user("another_user")
    # Projects 0..2 belong to the test user, project 3 to a different user.
    created = [TestFactory.create_project(self.test_user if idx < 3 else other_user,
                                          'test_proj' + str(idx))
               for idx in range(4)]
    folder = self.structure_helper.get_project_folder(created[0])
    operation = TestFactory.create_operation(test_user=self.test_user,
                                             test_project=created[0])
    os.makedirs(os.path.join(folder, str(operation.id)))
    datatype = dao.store_entity(model.DataType(module="test_data", subject="subj1",
                                               state="test_state",
                                               operation_id=operation.id))
    linkable = self.project_service.get_linkable_projects_for_user(self.test_user.id,
                                                                   str(datatype.id))[0]
    self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
    names = [prj.name for prj in linkable]
    self.assertTrue(created[1].name in names)
    self.assertTrue(created[2].name in names)
    self.assertFalse(created[3].name in names)
def test_bct_all(self):
    """
    Iterate all BCT algorithms and execute them.

    Each adapter is launched once; algorithms listed in
    BCTTest.EXPECTED_TO_FAIL_VALIDATION must raise InvalidParameterException,
    all others must finish successfully and produce at least one DataType.
    """
    for adapter_instance in self.bct_adapters:
        algorithm = adapter_instance.stored_adapter
        operation = TestFactory.create_operation(algorithm=algorithm, test_user=self.test_user,
                                                 test_project=self.test_project,
                                                 operation_status=model.STATUS_STARTED)
        self.assertEqual(model.STATUS_STARTED, operation.status)
        ### Launch BCT algorithm
        submit_data = {algorithm.parameter_name: self.connectivity.gid}
        try:
            OperationService().initiate_prelaunch(operation, adapter_instance, {}, **submit_data)
            if algorithm.classname in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                raise Exception("Algorithm %s was expected to throw input validation "
                                "exception, but did not!" % (algorithm.classname,))
            operation = dao.get_operation_by_id(operation.id)
            ### Check that operation status after execution is success.
            self.assertEqual(STATUS_FINISHED, operation.status)
            ### Make sure at least one result exists for each BCT algorithm
            results = dao.get_generic_entity(model.DataType, operation.id, 'fk_from_operation')
            self.assertTrue(len(results) > 0)
        # NOTE: replaced the removed-in-Py3 `except X, e` spelling; a bare
        # `raise` re-raises with the original traceback (`raise excep` would
        # reset it under Python 2).
        except InvalidParameterException:
            ## Some algorithms are expected to throw validation exception.
            if algorithm.classname not in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                raise
def test_bct_all(self):
    """
    Iterate all BCT algorithms and execute them.

    Algorithms in BCTTest.EXPECTED_TO_FAIL_VALIDATION must raise
    InvalidParameterException; all the others must finish with
    STATUS_FINISHED and store at least one result DataType.
    """
    for adapter_instance in self.bct_adapters:
        algorithm = adapter_instance.stored_adapter
        operation = TestFactory.create_operation(
            algorithm=algorithm,
            test_user=self.test_user,
            test_project=self.test_project,
            operation_status=model.STATUS_STARTED)
        self.assertEqual(model.STATUS_STARTED, operation.status)
        ### Launch BCT algorithm
        submit_data = {algorithm.parameter_name: self.connectivity.gid}
        try:
            OperationService().initiate_prelaunch(operation, adapter_instance, {}, **submit_data)
            if algorithm.classname in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                raise Exception(
                    "Algorithm %s was expected to throw input validation "
                    "exception, but did not!" % (algorithm.classname, ))
            operation = dao.get_operation_by_id(operation.id)
            ### Check that operation status after execution is success.
            self.assertEqual(STATUS_FINISHED, operation.status)
            ### Make sure at least one result exists for each BCT algorithm
            results = dao.get_generic_entity(model.DataType, operation.id, 'fk_from_operation')
            self.assertTrue(len(results) > 0)
        # NOTE: `except X, e` is Python-2-only syntax; the bound exception was
        # only re-raised, so a bare `raise` (which also preserves the original
        # traceback) replaces `raise excep`.
        except InvalidParameterException:
            ## Some algorithms are expected to throw validation exception.
            if algorithm.classname not in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                raise
def test_write_operation_metadata(self):
    """ Test that a correct XML is created for an operation. """
    operation = TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project)
    meta_file = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME, operation.id)
    self.assertFalse(os.path.exists(meta_file))
    self.files_helper.write_operation_metadata(operation)
    self.assertTrue(os.path.exists(meta_file))
    # Reload the operation from the written XML and compare field by field.
    stored_meta = XMLReader(meta_file).read_metadata()
    reloaded = model.Operation(None, None, None, None)
    reloaded.from_dict(stored_meta, dao)
    original_fields = operation.to_dict()[1]
    reloaded_fields = reloaded.to_dict()[1]
    for field_name, field_value in original_fields.iteritems():
        self.assertEqual(str(field_value), str(reloaded_fields[field_name]))
    # Now validate that operation metaData can be also updated
    self.assertNotEqual("new_group_name", reloaded_fields['user_group'])
    self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name", operation.id)
    reloaded_fields = XMLReader(meta_file).read_metadata()
    self.assertEqual("new_group_name", reloaded_fields['user_group'])
def test_get_linkable_projects(self):
    """ Test for retrieving the projects for a given user. """
    found = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(found), 0, "Database was not reset!")
    second_user = TestFactory.create_user("another_user")
    all_projects = []
    for idx in range(4):
        # The last project belongs to a different user and must not link.
        owner = self.test_user if idx < 3 else second_user
        all_projects.append(TestFactory.create_project(owner, 'test_proj' + str(idx)))
    base_folder = self.structure_helper.get_project_folder(all_projects[0])
    operation = TestFactory.create_operation(test_user=self.test_user,
                                             test_project=all_projects[0])
    op_folder = os.path.join(base_folder, str(operation.id))
    os.makedirs(op_folder)
    stored_dt = dao.store_entity(model.DataType(module="test_data",
                                                subject="subj1",
                                                state="test_state",
                                                operation_id=operation.id))
    linkable = self.project_service.get_linkable_projects_for_user(
        self.test_user.id, str(stored_dt.id))[0]
    self.assertEqual(len(linkable), 2, "Wrong count of link-able projects!")
    linkable_names = [entry.name for entry in linkable]
    self.assertTrue(all_projects[1].name in linkable_names)
    self.assertTrue(all_projects[2].name in linkable_names)
    self.assertFalse(all_projects[3].name in linkable_names)
def setUp(self):
    """
    Prepare the test environment: a `FigureController` plus one stored operation.
    """
    self.init()
    self.figure_c = FigureController()
    self.operation = TestFactory.create_operation(
        test_user=self.test_user, test_project=self.test_project)
def setUp(self):
    """
    Build the fixtures used by every test here: a `FigureController`
    and an operation owned by the test user/project.
    """
    self.init()
    self.figure_c = FigureController()
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
def setUp(self):
    """
    Prepare a clean database plus the user/project/operation fixtures.
    """
    self.clean_database()
    super(GenshiTestNDimensionArray, self).setUp()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    self.operation = TestFactory.create_operation(
        test_user=self.test_user, test_project=self.test_project)
def setUp(self):
    """
    Wipe the database, then create the service and the user/project/operation
    entities every test relies on.
    """
    self.clean_database()
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)
    self.operation = TestFactory.create_operation(
        test_user=self.test_user, test_project=self.test_project)
def setUp(self):
    """
    Start each test from a clean database with fresh user, project
    and operation fixtures, plus a `FlowService` instance.
    """
    self.clean_database()
    self.flow_service = FlowService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(admin=self.test_user)
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
def test_viewoperations(self):
    """ Test the viewoperations from projectcontroller. """
    stored_op = TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project)
    page = self.project_c.viewoperations(self.test_project.id)
    listed_ops = page["operationsList"]
    # Exactly the one operation created above should be listed.
    self.assertEqual(len(listed_ops), 1)
    self.assertEqual(listed_ops[0]["id"], str(stored_op.id))
    self.assertTrue("no_filter_selected" in page)
    self.assertTrue("total_op_count" in page)
def test_store_image_from_operation(self):
    # test that image can be retrieved from operation
    parent_op = TestFactory.create_operation(test_user=self.user,
                                             test_project=self.project)
    self.figure_service.store_result_figure(self.project, self.user, "png",
                                            IMG_DATA, operation_id=parent_op.id)
    stored_figures = dao.get_figures_for_operation(parent_op.id)
    self.assertEqual(1, len(stored_figures))
    images_folder = self.files_helper.get_images_folder(self.project.name)
    full_path = os.path.join(images_folder, stored_figures[0].file_path)
    self.assertCanReadImage(full_path)
def test_get_filtered_by_column(self):
    """
    Test the filter function when retrieving dataTypes with a filter
    after a column from a class specific table (e.g. DATA_arraywrapper).

    Inserts two 1-D and one 2-D MappedArray, then verifies FilterChain
    selection on `_nr_dimensions` and `_length_1d`.
    """
    operation_1 = TestFactory.create_operation(test_user=self.test_user,
                                               test_project=self.test_project)
    operation_2 = TestFactory.create_operation(test_user=self.test_user,
                                               test_project=self.test_project)
    one_dim_array = numpy.arange(5)
    two_dim_array = numpy.array([[1, 2], [2, 3], [1, 4]])
    self._store_float_array(one_dim_array, "John Doe 1", operation_1.id)
    self._store_float_array(one_dim_array, "John Doe 2", operation_1.id)
    self._store_float_array(two_dim_array, "John Doe 3", operation_2.id)

    count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                      "tvb.datatypes.arrays.MappedArray")[1]
    self.assertEqual(count, 3, "Problems with inserting data")

    first_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                               operations=["=="], values=[1])
    count = self.flow_service.get_available_datatypes(self.test_project.id,
                                                      "tvb.datatypes.arrays.MappedArray",
                                                      first_filter)[1]
    self.assertEqual(count, 2, "Data was not filtered")

    second_filter = FilterChain(fields=[FilterChain.datatype + '._nr_dimensions'],
                                operations=["=="], values=[2])
    filtered_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray",
                                                              second_filter)[0]
    self.assertEqual(len(filtered_data), 1, "Data was not filtered")
    self.assertEqual(filtered_data[0][3], "John Doe 3")

    third_filter = FilterChain(fields=[FilterChain.datatype + '._length_1d'],
                               operations=["=="], values=[3])
    filtered_data = self.flow_service.get_available_datatypes(self.test_project.id,
                                                              "tvb.datatypes.arrays.MappedArray",
                                                              third_filter)[0]
    self.assertEqual(len(filtered_data), 1, "Data was not filtered correct")
    self.assertEqual(filtered_data[0][3], "John Doe 3")

    # Best-effort cleanup of the temporary files created while storing the
    # arrays.  A loop replaces three copies of the same code, and only
    # OSError (raised by os.remove) is swallowed rather than every
    # exception, so genuine failures still surface.
    for leftover in ('One_dim.txt', 'Two_dim.txt', 'One_dim-1.txt'):
        try:
            if os.path.exists(leftover):
                os.remove(leftover)
        except OSError:
            pass
def test_viewoperations(self):
    """ Test the viewoperations from projectcontroller. """
    created_op = TestFactory.create_operation(test_user=self.test_user,
                                              test_project=self.test_project)
    response = self.project_c.viewoperations(self.test_project.id)
    # The page must list exactly the operation just created, plus
    # the filtering metadata keys.
    shown = response['operationsList']
    self.assertEqual(len(shown), 1)
    self.assertEqual(shown[0]['id'], str(created_op.id))
    self.assertTrue('no_filter_selected' in response)
    self.assertTrue('total_op_count' in response)
def setUp(self):
    """
    Start every test with fresh services, a generated project populated
    from the default CFF, and one operation plus an adapter instance.
    """
    self.import_service = ImportService()
    self.flow_service = FlowService()
    self.project_service = ProjectService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user,
                                                   name="GeneratedProject",
                                                   description="test_desc")
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
    TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
    self.zip_path = None
def setUp(self):
    """
    Prepare storage and build the simulator fixtures: user, project,
    connectivity, adapter and a started operation carrying the
    simulator parameters.
    """
    initialize_storage()
    self.datatypes_factory = DatatypesFactory()
    self.test_user = self.datatypes_factory.get_user()
    self.test_project = self.datatypes_factory.get_project()
    self.connectivity = self.datatypes_factory.create_connectivity(self.CONNECTIVITY_NODES)[1]
    sim_algorithm = dao.get_algorithm_by_module(SIMULATOR_MODULE, SIMULATOR_CLASS)
    self.simulator_adapter = ABCAdapter.build_adapter(sim_algorithm)
    self.operation = TestFactory.create_operation(sim_algorithm,
                                                  self.test_user,
                                                  self.test_project,
                                                  model.STATUS_STARTED,
                                                  json.dumps(SIMULATOR_PARAMETERS))
    SIMULATOR_PARAMETERS['connectivity'] = self.connectivity.gid
def test_get_operation_details(self):
    """
    Verifies result dictionary has the expected keys / values after call to
    `get_operation_details(...`
    """
    created_op = TestFactory.create_operation(test_user=self.test_user,
                                              test_project=self.test_project,
                                              parameters='{"test" : "test"}')
    details = self.project_c.get_operation_details(created_op.gid)
    self.assertEqual(details["entity_gid"], created_op.gid)
    self.assertEqual(details["nodeType"], "operation")
    node_fields = details["nodeFields"][0]
    self.assertEqual(node_fields["burst_name"]["value"], "")
    self.assertEqual(node_fields["count"]["value"], 1)
    self.assertEqual(node_fields["gid"]["value"], created_op.gid)
    self.assertEqual(node_fields["operation_id"]["value"], created_op.id)
def test_get_operation_details(self):
    """
    Verifies result dictionary has the expected keys / values after call to
    `get_operation_details(...`
    """
    op = TestFactory.create_operation(
        test_user=self.test_user,
        test_project=self.test_project,
        parameters='{"test" : "test"}')
    details = self.project_c.get_operation_details(op.gid)
    self.assertEqual(details['entity_gid'], op.gid)
    self.assertEqual(details['nodeType'], 'operation')
    # All displayed fields come packed in the first nodeFields entry.
    fields = details['nodeFields'][0]
    self.assertEqual(fields['burst_name']['value'], '')
    self.assertEqual(fields['count']['value'], 1)
    self.assertEqual(fields['gid']['value'], op.gid)
    self.assertEqual(fields['operation_id']['value'], op.id)
def test_get_operation_details(self):
    """
    Verifies result dictionary has the expected keys / values after call to
    `get_operation_details(...`
    """
    operation = TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project,
                                             parameters='{"test" : "test"}')
    result = self.project_c.get_operation_details(operation.gid)
    self.assertEqual(result['entity_gid'], operation.gid)
    self.assertEqual(result['nodeType'], 'operation')
    displayed = result['nodeFields'][0]
    expected_values = {'burst_name': '', 'count': 1,
                       'gid': operation.gid, 'operation_id': operation.id}
    for field_name in ('burst_name', 'count', 'gid', 'operation_id'):
        self.assertEqual(displayed[field_name]['value'], expected_values[field_name])
def setUp(self):
    """
    Reset services and fixtures before each test: fresh user, generated
    project (with imported CFF data), one operation and an adapter.
    """
    self.import_service = ImportService()
    self.flow_service = FlowService()
    self.project_service = ProjectService()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user,
                                                   name="GeneratedProject",
                                                   description="test_desc")
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter()
    TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
    self.zip_path = None
def setUp(self):
    """
    Verify the database starts empty, then import the default project
    and prepare one operation and adapter instance on it.
    """
    self.import_service = ImportService()
    self.flow_service = FlowService()
    self.project_service = ProjectService()
    self.test_user = TestFactory.create_user()
    self.delete_project_folders()
    self.assertEqual(0, self.count_all_entities(DataType),
                     "There should be no data type in DB")
    self.assertEqual(0, self.count_all_entities(Project))
    self.test_project = TestFactory.import_default_project(self.test_user)
    self.operation = TestFactory.create_operation(test_user=self.test_user,
                                                  test_project=self.test_project)
    self.adapter_instance = TestFactory.create_adapter(test_project=self.test_project)
def _create_operations_with_inputs(self, is_group_parent=False):
    """
    Method used for creating a complex tree of operations.
    If 'if_group_parent' is True then a new group will be created and one of
    its entries it will be used as input for the returned operations.
    """
    group_dts, root_op_group_id = TestFactory.create_group(self.test_user, self.test_project)
    if is_group_parent:
        datatype_gid = group_dts[0].gid
    else:
        datatype_gid = ProjectServiceTest._create_value_wrapper(self.test_user,
                                                                self.test_project)[1]
    parameters = json.dumps({"param_name": datatype_gid})

    # Four plain operations referencing the datatype; two of them hidden.
    ops = []
    for idx in range(4):
        ops.append(TestFactory.create_operation(test_user=self.test_user,
                                                test_project=self.test_project))
        if idx in [1, 3]:
            ops[idx].visible = False
        ops[idx].parameters = parameters
        ops[idx] = dao.store_entity(ops[idx])

    #groups
    _, ops_group = TestFactory.create_group(self.test_user, self.test_project)
    ops_group = dao.get_operations_in_group(ops_group)
    self.assertEqual(2, len(ops_group))
    ops_group[0].parameters = parameters
    ops_group[0] = dao.store_entity(ops_group[0])
    ops_group[1].visible = False
    ops_group[1].parameters = parameters
    ops_group[1] = dao.store_entity(ops_group[1])
    ops.extend(ops_group)

    if is_group_parent:
        dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
        return ops, dt_group.id
    return ops, datatype_gid
def test_prepare_inputs_datatype(self):
    """
    Test for ABCAdapter.prepare_ui_inputs method when submitting DataType with sub-attributes.
    """
    parent_op = TestFactory.create_operation()
    stored_dt = dao.store_entity(model.DataType(operation_id=parent_op.id))
    # Substitute the $GID$ placeholder in both keys and values with the
    # gid of the freshly stored entity.
    submit_data = {}
    for raw_key, raw_value in self.SUBMIT_DATASET_3.iteritems():
        submit_data[raw_key.replace("$GID$", stored_dt.gid)] = \
            raw_value.replace("$GID$", stored_dt.gid)
    kwargs = self.test_adapter.prepare_ui_inputs(submit_data)
    for expected_name, expected_type in self.EXPECTED_FILTERED_SET3.iteritems():
        self.assertTrue(expected_name in kwargs)
        self.assertTrue(isinstance(kwargs[expected_name], expected_type))
    self.assertEqual(len(self.EXPECTED_FILTERED_SET3), len(kwargs))
    self.assertEqual(2, len(kwargs["surface_parameters"]))
    self.assertTrue(isinstance(kwargs["surface_parameters"]["att1"], int))
    self.assertTrue(isinstance(kwargs["surface_parameters"]["att2"], float))
def test_write_operation_metadata(self):
    """ Test that a correct XML is created for an operation. """
    operation = TestFactory.create_operation(test_user=self.test_user,
                                             test_project=self.test_project)
    xml_path = self.files_helper.get_operation_meta_file_path(self.PROJECT_NAME,
                                                              operation.id)
    self.assertFalse(os.path.exists(xml_path))
    self.files_helper.write_operation_metadata(operation)
    self.assertTrue(os.path.exists(xml_path))
    # Round-trip: read the XML back into a new Operation and compare fields.
    operation_meta = XMLReader(xml_path).read_metadata()
    round_tripped = model.Operation(None, None, None, None)
    round_tripped.from_dict(operation_meta, dao)
    expected = operation.to_dict()[1]
    actual = round_tripped.to_dict()[1]
    for key, value in expected.iteritems():
        self.assertEqual(str(value), str(actual[key]))
    # Now validate that operation metaData can be also updated
    self.assertNotEqual("new_group_name", actual['user_group'])
    self.files_helper.update_operation_metadata(self.PROJECT_NAME, "new_group_name",
                                                operation.id)
    actual = XMLReader(xml_path).read_metadata()
    self.assertEqual("new_group_name", actual['user_group'])
def _create_value_wrapper(test_user, test_project=None):
    """
    Creates a ValueWrapper dataType, and the associated parent Operation.
    This is also used in ProjectStructureTest.
    """
    if test_project is None:
        test_project = TestFactory.create_project(test_user, 'test_proj')
    operation = TestFactory.create_operation(test_user=test_user, test_project=test_project)
    wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
    wrapper.type = "ValueWrapper"
    wrapper.module = "tvb.datatypes.mapped_values"
    wrapper.subject = "John Doe"
    wrapper.state = "RAW_STATE"
    wrapper.set_operation_id(operation.id)
    # Persist the wrapper through a store adapter launched on the operation.
    OperationService().initiate_prelaunch(operation, StoreAdapter([wrapper]), {})
    stored = FlowService().get_available_datatypes(
        test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")[0]
    if len(stored) != 1:
        raise Exception("Should be only one value wrapper.")
    result_vw = ABCAdapter.load_entity_by_gid(stored[0][2])
    return test_project, result_vw.gid, operation.gid
def _create_operations_with_inputs(self, is_group_parent=False):
    """
    Method used for creating a complex tree of operations.
    If 'if_group_parent' is True then a new group will be created and one of
    its entries it will be used as input for the returned operations.
    """
    group_dts, root_op_group_id = TestFactory.create_group(self.test_user,
                                                           self.test_project)
    if is_group_parent:
        input_gid = group_dts[0].gid
    else:
        input_gid = ProjectServiceTest._create_value_wrapper(self.test_user,
                                                             self.test_project)[1]
    op_params = json.dumps({"param_name": input_gid})

    # Four stand-alone operations pointing at the input; 1 and 3 are hidden.
    created_ops = []
    for position in range(4):
        created_ops.append(TestFactory.create_operation(test_user=self.test_user,
                                                        test_project=self.test_project))
        if position in [1, 3]:
            created_ops[position].visible = False
        created_ops[position].parameters = op_params
        created_ops[position] = dao.store_entity(created_ops[position])

    #groups
    _, grp = TestFactory.create_group(self.test_user, self.test_project)
    grp = dao.get_operations_in_group(grp)
    self.assertEqual(2, len(grp))
    grp[0].parameters = op_params
    grp[0] = dao.store_entity(grp[0])
    grp[1].visible = False
    grp[1].parameters = op_params
    grp[1] = dao.store_entity(grp[1])
    created_ops.extend(grp)

    if is_group_parent:
        dt_group = dao.get_datatypegroup_by_op_group_id(root_op_group_id)
        return created_ops, dt_group.id
    return created_ops, input_gid
def _create_value_wrapper(test_user, test_project=None):
    """
    Creates a ValueWrapper dataType, and the associated parent Operation.
    This is also used in ProjectStructureTest.
    """
    if test_project is None:
        test_project = TestFactory.create_project(test_user, 'test_proj')
    parent_op = TestFactory.create_operation(test_user=test_user,
                                             test_project=test_project)
    value_wrapper = ValueWrapper(data_value=5.0, data_name="my_value")
    value_wrapper.type = "ValueWrapper"
    value_wrapper.module = "tvb.datatypes.mapped_values"
    value_wrapper.subject = "John Doe"
    value_wrapper.state = "RAW_STATE"
    value_wrapper.set_operation_id(parent_op.id)
    store_adapter = StoreAdapter([value_wrapper])
    OperationService().initiate_prelaunch(parent_op, store_adapter, {})
    # Exactly one wrapper should now be retrievable from the project.
    found = FlowService().get_available_datatypes(
        test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")[0]
    if len(found) != 1:
        raise Exception("Should be only one value wrapper.")
    loaded = ABCAdapter.load_entity_by_gid(found[0][2])
    return test_project, loaded.gid, parent_op.gid