def test_datatypes_groups(self):
    """
    Verify that all DataTypes produced by a single ranged launch are linked
    to the DataTypeGroup created for that operation group.
    """
    service = FlowService()
    ops = dao.get_filtered_operations(self.test_project.id, None)
    assert len(ops) == 0, "There should be no operation"

    adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    launch_args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    ## Create Group of operations
    service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **launch_args)

    ops = dao.get_filtered_operations(self.test_project.id, None)
    assert len(ops) == 1, "Expected one operation group"
    assert ops[0][2] == 2, "Expected 2 operations in group"
    group_id = ops[0][3]
    assert group_id != None, "The operation should be part of a group."

    self.operation_service.stop_operation(ops[0][0])
    self.operation_service.stop_operation(ops[0][1])
    ## Make sure operations are executed
    self.operation_service.launch_operation(ops[0][0], False)
    self.operation_service.launch_operation(ops[0][1], False)

    group_datatypes = dao.get_datatype_in_group(operation_group_id=group_id)
    assert len(group_datatypes) >= 2, "Expected at least 2, but: " + str(len(group_datatypes))

    sample_dt = dao.get_datatype_by_id(group_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(group_id)
    assert sample_dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
def test_datatypes_groups(self):
    """
    Tests if the dataType group is set correct on the dataTypes resulted
    from the same operation group.
    """
    flow_service = FlowService()
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(all_operations), 0, "There should be no operation")
    algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
    group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
    adapter_instance = flow_service.build_adapter_instance(group)
    data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
    ## Create Group of operations
    flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(all_operations), 1, "Expected one operation group")
    self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
    operation_group_id = all_operations[0][3]
    # assertNotEquals is a deprecated unittest alias; assertIsNotNone states the intent directly
    self.assertIsNotNone(operation_group_id, "The operation should be part of a group.")
    self.operation_service.stop_operation(all_operations[0][0])
    self.operation_service.stop_operation(all_operations[0][1])
    ## Make sure operations are executed
    self.operation_service.launch_operation(all_operations[0][0], False)
    self.operation_service.launch_operation(all_operations[0][1], False)
    resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
    self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))
    dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
    self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
def test_datatypes_groups(self):
    """
    Tests if the dataType group is set correct on the dataTypes resulted
    from the same operation group.
    """
    flow_service = FlowService()
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(all_operations), 0, "There should be no operation")
    algogroup = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
    adapter_instance = flow_service.build_adapter_instance(group)
    data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    ## Create Group of operations
    flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(all_operations), 1, "Expected one operation group")
    self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
    operation_group_id = all_operations[0][3]
    # assertNotEquals is a deprecated unittest alias; assertIsNotNone states the intent directly
    self.assertIsNotNone(operation_group_id, "The operation should be part of a group.")
    self.operation_service.stop_operation(all_operations[0][0])
    self.operation_service.stop_operation(all_operations[0][1])
    ## Make sure operations are executed
    self.operation_service.launch_operation(all_operations[0][0], False)
    self.operation_service.launch_operation(all_operations[0][1], False)
    resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
    self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))
    dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
    self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")
def test_datatypes_groups(self, test_adapter_factory):
    """
    Tests if the dataType group is set correct on the dataTypes resulted
    from the same operation group.
    """
    # TODO: re-write this to use groups correctly
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    assert len(all_operations) == 0, "There should be no operation"
    algo = test_adapter_factory(TestAdapter3)
    adapter_instance = ABCAdapter.build_adapter(algo)
    data = {model_burst.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    ## Create Group of operations
    # Fix: the range specification in `data` was built but never forwarded, so no
    # operation group could be created and the group assertions below could not hold.
    FlowService().fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
    all_operations = dao.get_filtered_operations(self.test_project.id, None)
    assert len(all_operations) == 1, "Expected one operation group"
    assert all_operations[0][2] == 2, "Expected 2 operations in group"
    operation_group_id = all_operations[0][3]
    # `is not None` is the idiomatic None check (was `!= None`)
    assert operation_group_id is not None, "The operation should be part of a group."
    self.operation_service.stop_operation(all_operations[0][0])
    self.operation_service.stop_operation(all_operations[0][1])
    ## Make sure operations are executed
    self.operation_service.launch_operation(all_operations[0][0], False)
    self.operation_service.launch_operation(all_operations[0][1], False)
    resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
    assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))
    dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
    assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
def test_import_export(self):
    """
    Test the import/export mechanism for a project structure.
    The project contains the following data types: Connectivity, Surface,
    MappedArray and ValueWrapper.
    """
    # Snapshot all existing datatypes (gid -> (module, type)) so we can verify
    # they survive the export / import round-trip unchanged.
    result = self.get_all_datatypes()
    expected_results = {}
    for one_data in result:
        expected_results[one_data.gid] = (one_data.module, one_data.type)
    #create an array mapped in DB
    data = {'param_1': 'some value'}
    OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
    inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                         "tvb.datatypes.arrays.MappedArray")[1]
    self.assertEqual(1, inserted, "Problems when inserting data")
    #create a value wrapper
    value_wrapper = self._create_value_wrapper()
    count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    self.assertEqual(2, count_operations, "Invalid ops number before export!")
    # Export project as ZIP
    self.zip_path = ExportManager().export_project(self.test_project)
    self.assertTrue(self.zip_path is not None, "Exported file is none")
    # Remove the original project
    self.project_service.remove_project(self.test_project.id)
    result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
    self.assertEqual(0, len(result), "Project Not removed!")
    self.assertEqual(0, lng_, "Project Not removed!")
    # Now try to import again project
    self.import_service.import_project_structure(self.zip_path, self.test_user.id)
    result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(result), 1, "There should be only one project.")
    self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
    self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
    self.test_project = result[0]
    count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    #1 op. - import cff; 2 op. - save the array wrapper;
    self.assertEqual(2, count_operations, "Invalid ops number after export and import !")
    # Every previously known datatype must be restored with identical module/type
    for gid in expected_results:
        datatype = dao.get_datatype_by_gid(gid)
        self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
        self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')
    #check the value wrapper
    new_val = self.flow_service.get_available_datatypes(self.test_project.id,
                                                        "tvb.datatypes.mapped_values.ValueWrapper")[0]
    self.assertEqual(1, len(new_val), "One !=" + str(len(new_val)))
    # NOTE(review): index [2] appears to be the gid column of the datatypes row — confirm
    new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
    self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
    self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
    self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
def test_datatypes_groups(self, test_adapter_factory, datatype_group_factory):
    """
    Verify that every DataType produced by one operation group is attached
    to the matching DataTypeGroup entity.
    """
    initial_ops = dao.get_filtered_operations(self.test_project.id, None)
    assert len(initial_ops) == 0, "There should be no operation"

    dt_group = datatype_group_factory(project=self.test_project)
    view_model = TestModel()
    test_adapter_factory()
    adapter = TestFactory.create_adapter("tvb.tests.framework.adapters.testadapter1", "TestAdapter1")

    # Give every operation of the group a stored view-model and an algorithm
    for operation in dao.get_operations_in_group(dt_group.id):
        view_model.gid = uuid.uuid4()
        operation_folder = StorageInterface().get_project_folder(self.test_project.name, str(operation.id))
        operation.view_model_gid = view_model.gid.hex
        operation.algorithm = adapter.stored_adapter
        h5.store_view_model(view_model, operation_folder)
        dao.store_entity(operation)

    all_ops = dao.get_filtered_operations(self.test_project.id, None)
    assert len(all_ops) == 2, "Expected two operation groups"
    assert all_ops[0][2] == 6, "Expected 6 operations in one group"
    group_id = all_ops[0][3]
    assert group_id != None, "The operation should be part of a group."

    self.operation_service.stop_operation(all_ops[1][0])
    self.operation_service.stop_operation(all_ops[1][1])
    # Make sure operations are executed
    self.operation_service.launch_operation(all_ops[1][0], False)
    self.operation_service.launch_operation(all_ops[1][1], False)

    group_datatypes = dao.get_datatype_in_group(operation_group_id=group_id)
    assert len(group_datatypes) >= 2, "Expected at least 2, but: " + str(len(group_datatypes))

    sample_dt = dao.get_datatype_by_id(group_datatypes[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(group_id)
    assert sample_dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
def test_import_export_existing(self):
    """
    Test the import/export mechanism for a project structure.
    Importing the exported archive while the project still exists must fail.
    """
    result = self.get_all_datatypes()
    expected_results = {}
    for one_data in result:
        expected_results[one_data.gid] = (one_data.module, one_data.type)
    #create an array mapped in DB
    data = {'param_1': 'some value'}
    OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
    inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                         "tvb.datatypes.arrays.MappedArray")
    self.assertEqual(len(inserted), 2, "Problems when inserting data")
    #create a value wrapper
    self._create_value_wrapper()
    result = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(result), 2,
                     "Should be two operations before export and not " + str(result) + " !"
                     if False else "Should be two operations before export and not " + str(len(result)) + " !")
    self.zip_path = ExportManager().export_project(self.test_project)
    self.assertTrue(self.zip_path is not None, "Exported file is none")
    # Importing over an existing project must raise; assertRaises replaces the
    # try / self.fail / except-pass idiom (consistent with the other tests here).
    self.assertRaises(ProjectImportException, self.import_service.import_project_structure,
                      self.zip_path, self.test_user.id)
def test_import_export(self, user_factory, project_factory, value_wrapper_factory):
    """
    Round-trip a project (Connectivity + ValueWrapper) through export and
    import, checking that project metadata, operations and datatypes survive.
    """
    test_user = user_factory()
    test_project = project_factory(test_user, "TestImportExport", "test_desc")
    connectivity_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    TestFactory.import_zip_connectivity(test_user, test_project, connectivity_zip)
    value_wrapper = value_wrapper_factory(test_user, test_project)

    # Remember every datatype (gid -> (module, type)) for later comparison
    expected_results = {}
    for datatype in self.get_all_datatypes():
        expected_results[datatype.gid] = (datatype.module, datatype.type)

    # Export project as ZIP
    self.zip_path = ExportManager().export_project(test_project)
    assert self.zip_path is not None, "Exported file is none"

    # Remove the original project
    self.project_service.remove_project(test_project.id)
    projects, projects_count = self.project_service.retrieve_projects_for_user(test_user.id)
    assert 0 == len(projects), "Project Not removed!"
    assert 0 == projects_count, "Project Not removed!"

    # Now try to import again project
    self.import_service.import_project_structure(self.zip_path, test_user.id)
    projects = self.project_service.retrieve_projects_for_user(test_user.id)[0]
    assert len(projects) == 1, "There should be only one project."
    assert projects[0].name == "TestImportExport", "The project name is not correct."
    assert projects[0].description == "test_desc", "The project description is not correct."
    test_project = projects[0]

    count_operations = dao.get_filtered_operations(test_project.id, None, is_count=True)
    # 1 op. - import conn; 2 op. - BCT Analyzer
    assert 2 == count_operations, "Invalid ops number after export and import !"

    for gid in expected_results:
        datatype = dao.get_datatype_by_gid(gid)
        assert datatype.module == expected_results[gid][0], 'DataTypes not imported correctly'
        assert datatype.type == expected_results[gid][1], 'DataTypes not imported correctly'

    # check the value wrapper
    new_val = try_get_last_datatype(test_project.id, ValueWrapperIndex)
    assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
    assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
    assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
def test_datatypes_groups(self):
    """
    Checks that datatypes generated by one ranged launch all belong to the
    DataTypeGroup created for that operation group.
    """
    flow_service = FlowService()
    before = dao.get_filtered_operations(self.test_project.id, None)
    assert len(before) == 0, "There should be no operation"

    adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    range_data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
    ## Create Group of operations
    flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **range_data)

    operations = dao.get_filtered_operations(self.test_project.id, None)
    assert len(operations) == 1, "Expected one operation group"
    first_row = operations[0]
    assert first_row[2] == 2, "Expected 2 operations in group"
    operation_group_id = first_row[3]
    assert operation_group_id != None, "The operation should be part of a group."

    for op_id in (first_row[0], first_row[1]):
        self.operation_service.stop_operation(op_id)
    ## Make sure operations are executed
    for op_id in (first_row[0], first_row[1]):
        self.operation_service.launch_operation(op_id, False)

    datatypes_in_group = dao.get_datatype_in_group(operation_group_id=operation_group_id)
    assert len(datatypes_in_group) >= 2, "Expected at least 2, but: " + str(len(datatypes_in_group))

    sample_dt = dao.get_datatype_by_id(datatypes_in_group[0].id)
    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
    assert sample_dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"
def _create_datatype_group(self):
    """
    Build a fresh project holding one DataTypeGroup with two DataTypes.
    Returns (project, datatype_group_id, first_datatype, second_datatype).
    """
    project = TestFactory.create_project(self.test_user, "NewProject")
    operations_count = dao.get_filtered_operations(project.id, None, is_count=True)
    assert 0 == operations_count, "There should be no operation."
    datatypes, op_group_id = TestFactory.create_group(self.test_user, project)
    group = dao.get_datatypegroup_by_op_group_id(op_group_id)
    return project, group.id, datatypes[0], datatypes[1]
def test_import_export_existing(self):
    """
    Exporting a project and importing the archive while the original project
    still exists must raise ProjectImportException.
    """
    ops_before_export = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    self.assertEqual(2, ops_before_export, "Invalid ops before export!")
    self.zip_path = ExportManager().export_project(self.test_project)
    self.assertTrue(self.zip_path is not None, "Exported file is none")
    self.assertRaises(ProjectImportException, self.import_service.import_project_structure,
                      self.zip_path, self.test_user.id)
def _create_datatype_group(self):
    """
    Build a fresh project holding one DataTypeGroup with two DataTypes.
    Returns (project, datatype_group_id, first_datatype, second_datatype).
    """
    project = TestFactory.create_project(self.test_user, "NewProject")
    operations_count = dao.get_filtered_operations(project.id, None, is_count=True)
    self.assertEqual(0, operations_count, "There should be no operation.")
    datatypes, op_group_id = TestFactory.create_group(self.test_user, project)
    group = dao.get_datatypegroup_by_op_group_id(op_group_id)
    return project, group.id, datatypes[0], datatypes[1]
def test_import_export_existing(self):
    """
    Importing an exported archive while the original project still exists
    must raise ProjectImportException.
    """
    ops_before_export = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    assert 2 == ops_before_export, "Invalid ops before export!"
    self.zip_path = ExportManager().export_project(self.test_project)
    assert self.zip_path is not None, "Exported file is none"
    with pytest.raises(ProjectImportException):
        self.import_service.import_project_structure(self.zip_path, self.test_user.id)
def test_export_import_figures(self, user_factory, project_factory):
    """
    Test that ResultFigure instances are correctly restored after an
    export + import of the project.
    """
    # Prepare data: a project with one connectivity and two stored figures
    user = user_factory()
    project = project_factory(user, "TestImportExportFigures")
    connectivity_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'paupau.zip')
    TestFactory.import_zip_connectivity(user, project, connectivity_zip)

    figure_service = FigureService()
    figure_service.store_result_figure(project, user, "png", IMG_DATA, "bla")
    figure_service.store_result_figure(project, user, "png", IMG_DATA, "bla")
    stored_figures = list(figure_service.retrieve_result_figures(project, user)[0].values())[0]
    assert 2 == len(stored_figures)

    # export, delete and then import the project
    self.zip_path = ExportManager().export_project(project)
    assert self.zip_path is not None, "Exported file is none"
    self.project_service.remove_project(project.id)
    self.import_service.import_project_structure(self.zip_path, user.id)

    # Check that state is as before export: one operation, one DT, 2 figures
    retrieved_project = self.project_service.retrieve_projects_for_user(user.id)[0][0]
    ops_count = dao.get_filtered_operations(retrieved_project.id, None, is_count=True)
    assert 1 == ops_count
    dt_count = dao.count_datatypes(retrieved_project.id, DataType)
    assert 1 == dt_count

    restored_figures = list(figure_service.retrieve_result_figures(retrieved_project, user)[0].values())[0]
    assert 2 == len(restored_figures)
    assert "bla" in restored_figures[0].name
    assert "bla" in restored_figures[1].name

    # The figure image file must be readable again after the import
    image_path = utils.url2path(restored_figures[0].file_path)
    img_data = Image.open(image_path).load()
    assert img_data is not None
def test_import_export_existing(self, user_factory, project_factory):
    """
    Importing an exported archive while the source project still exists
    must raise ImportException.
    """
    test_user = user_factory()
    test_project = project_factory(test_user, "TestImportExport2")
    connectivity_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
    TestFactory.import_zip_connectivity(test_user, test_project, connectivity_zip)

    ops_count = dao.get_filtered_operations(test_project.id, None, is_count=True)
    assert 1 == ops_count, "Invalid ops before export!"

    self.zip_path = ExportManager().export_project(test_project)
    assert self.zip_path is not None, "Exported file is none"
    with pytest.raises(ImportException):
        self.import_service.import_project_structure(self.zip_path, test_user.id)
def test_import_export(self):
    """
    Test the import/export mechanism for a project structure.
    The project contains the following data types: Connectivity, Surface,
    MappedArray and ValueWrapper.
    """
    # Snapshot all existing datatypes (gid -> (module, type)) so the round-trip
    # can be verified after re-import.
    result = self.get_all_datatypes()
    expected_results = {}
    for one_data in result:
        expected_results[one_data.gid] = (one_data.module, one_data.type)
    #create an array mapped in DB
    data = {'param_1': 'some value'}
    OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
    inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                         "tvb.datatypes.arrays.MappedArray")[1]
    assert 1 == inserted, "Problems when inserting data"
    #create a value wrapper
    value_wrapper = self._create_value_wrapper()
    count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    assert 2 == count_operations, "Invalid ops number before export!"
    # Export project as ZIP
    self.zip_path = ExportManager().export_project(self.test_project)
    assert self.zip_path is not None, "Exported file is none"
    # Remove the original project
    self.project_service.remove_project(self.test_project.id)
    result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
    assert 0 == len(result), "Project Not removed!"
    assert 0 == lng_, "Project Not removed!"
    # Now try to import again project
    self.import_service.import_project_structure(self.zip_path, self.test_user.id)
    result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    assert len(result) == 1, "There should be only one project."
    assert result[0].name == "GeneratedProject", "The project name is not correct."
    assert result[0].description == "test_desc", "The project description is not correct."
    self.test_project = result[0]
    count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    #1 op. - import cff; 2 op. - save the array wrapper;
    assert 2 == count_operations, "Invalid ops number after export and import !"
    # Every previously known datatype must be restored with identical module/type
    for gid in expected_results:
        datatype = dao.get_datatype_by_gid(gid)
        assert datatype.module == expected_results[gid][0], 'DataTypes not imported correctly'
        assert datatype.type == expected_results[gid][1], 'DataTypes not imported correctly'
    #check the value wrapper
    new_val = self.flow_service.get_available_datatypes(self.test_project.id,
                                                        "tvb.datatypes.mapped_values.ValueWrapper")[0]
    assert 1 == len(new_val), "One !=" + str(len(new_val))
    # NOTE(review): index [2] appears to be the gid column of the datatypes row — confirm
    new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
    assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
    assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
    assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.
    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    :return: (project, total_operations_count, operations_page, pages_count)
    """
    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Ceiling division: one extra page when a partial page remains
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0
    operations = []
    view_categ_id = dao.get_visualisers_categories()[0].id
    for one_op in current_ops:
        # one_op is a raw result row; indices map to operation columns
        # (0/1 = id range, 2 = count, 3 = group id, 4 = algorithm id, ...).
        try:
            result = {}
            if one_op[0] != one_op[1]:
                # A range of operation ids collapsed into one display row
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[14]
            if one_op[3] is not None and one_op[3]:
                try:
                    operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name
                    result["group"] = result["group"].replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    result["datatype_group_gid"] = datatype_group.gid
                    result["gid"] = operation_group.gid
                    ## Filter only viewers for current DataTypeGroup entity:
                    launcher = self.retrieve_launchers(datatype_group.gid,
                                                       include_categories=[view_categ_id]).values()[0]
                    view_groups = []
                    # NOTE(review): the loop variable shadows the outer `launcher` dict — works,
                    # but worth renaming when this legacy code is next touched.
                    for launcher in launcher.values():
                        url = '/flow/' + str(launcher['category']) + '/' + str(launcher['id'])
                        if launcher['part_of_group']:
                            url = '/flow/prepare_group_launch/' + datatype_group.gid + '/' + \
                                  str(launcher['category']) + '/' + str(launcher['id'])
                        view_groups.append(dict(name=launcher["displayName"], url=url,
                                                param_name=launcher['children'][0]['param_name'],
                                                part_of_group=launcher['part_of_group']))
                    result["view_groups"] = view_groups
                except Exception:
                    # Group metadata is best-effort: log and continue with a plain row
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["method"] = one_op[5]
            result["user"] = dao.get_user_by_id(one_op[6])
            # Date columns may come back as strings (e.g. from SQLite) — Python 2
            # code, hence the `unicode` checks below.
            if type(one_op[7]) in (str, unicode):
                result["create"] = string2date(str(one_op[7]))
            else:
                result["create"] = one_op[7]
            if type(one_op[8]) in (str, unicode):
                result["start"] = string2date(str(one_op[8]))
            else:
                result["start"] = one_op[8]
            if type(one_op[9]) in (str, unicode):
                result["complete"] = string2date(str(one_op[9]))
            else:
                result["complete"] = one_op[9]
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[10]
            result["additional"] = one_op[11]
            result["visible"] = True if one_op[12] > 0 else False
            result['operation_tag'] = one_op[13]
            result['figures'] = None
            if not result['group']:
                # Plain (non-group) operation: attach its result datatypes and figures
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = [dao.get_generic_entity(dt.module + '.' + dt.type, dt.gid, 'gid')[0]
                                     for dt in datatype_results]
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def count_filtered_operations(project_id, filters=None):
    """Count the operations in the given project matching *filters* (delegated to the DAO)."""
    operations_count = dao.get_filtered_operations(project_id, filters, is_count=True)
    return operations_count
def test_import_export(self):
    """
    Test the import/export mechanism for a project structure.
    The project contains the following data types: Connectivity, Surface,
    MappedArray and ValueWrapper.
    """
    # Snapshot all existing datatypes (gid -> (module, type)) so the round-trip
    # can be verified after re-import.
    result = self.get_all_datatypes()
    expected_results = {}
    for one_data in result:
        expected_results[one_data.gid] = (one_data.module, one_data.type)
    #create an array mapped in DB
    data = {'param_1': 'some value'}
    OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
    inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                         "tvb.datatypes.arrays.MappedArray")
    self.assertEqual(len(inserted), 2, "Problems when inserting data")
    #create a value wrapper
    value_wrapper = self._create_value_wrapper()
    result = dao.get_filtered_operations(self.test_project.id, None)
    self.assertEqual(len(result), 2,
                     "Should be two operations before export and not " + str(len(result)) + " !")
    self.zip_path = ExportManager().export_project(self.test_project)
    self.assertTrue(self.zip_path is not None, "Exported file is none")
    # Now remove the original project
    self.project_service.remove_project(self.test_project.id)
    result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
    self.assertEqual(0, len(result), "Project Not removed!")
    self.assertEqual(0, lng_, "Project Not removed!")
    # Now try to import again project
    self.import_service.import_project_structure(self.zip_path, self.test_user.id)
    result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(result), 1, "There should be only one project.")
    self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
    self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
    self.test_project = result[0]
    result = dao.get_filtered_operations(self.test_project.id, None)
    #1 op. - import project; 1 op. - save the array wrapper
    self.assertEqual(len(result), 2,
                     "Should be two operations after export and not " + str(len(result)) + " !")
    # Every previously known datatype must be restored with identical module/type
    for gid in expected_results:
        datatype = dao.get_datatype_by_gid(gid)
        self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
        self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')
    #check the value wrapper
    new_val = self.flow_service.get_available_datatypes(self.test_project.id,
                                                        "tvb.datatypes.mapped_values.ValueWrapper")
    self.assertEqual(len(new_val), 1, "One !=" + str(len(new_val)))
    # NOTE(review): index [2] appears to be the gid column of the datatypes row — confirm
    new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
    self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
    self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
    self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.

    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    :returns: tuple of (project entity, total operations count, list of
        per-operation display dicts, number of pages)
    """

    def _as_date(value):
        # Some DB drivers return timestamp columns as raw strings; normalize to datetime.
        return string2date(str(value)) if isinstance(value, str) else value

    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Ceiling division: a partially filled last page still counts as a page.
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0

    operations = []
    for one_op in current_ops:
        try:
            result = {}
            # one_op[0]/one_op[1] are the first/last operation ids of a grouped row;
            # they differ only when the row aggregates an operation range.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[13]
            operation_group_id = one_op[3]
            if operation_group_id:
                try:
                    operation_group = dao.get_generic_entity(OperationGroup, operation_group_id)[0]
                    result["group"] = operation_group.name.replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
                    result["datatype_group_gid"] = datatype_group.gid if datatype_group is not None else None
                    result["gid"] = operation_group.gid
                    # Filter only viewers for current DataTypeGroup entity:
                    if datatype_group is None:
                        view_groups = None
                    else:
                        view_groups = AlgorithmService().get_visualizers_for_group(datatype_group.gid)
                    result["view_groups"] = view_groups
                except Exception:
                    # Best-effort: a broken group must not hide the operation row itself.
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["user"] = dao.get_user_by_id(one_op[5])
            result["create"] = _as_date(one_op[6])
            result["start"] = _as_date(one_op[7])
            result["complete"] = _as_date(one_op[8])
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[9]
            result["additional"] = one_op[10]
            result["visible"] = one_op[11] > 0
            result['operation_tag'] = one_op[12]
            # Only stand-alone operations expose their result datatypes directly.
            if not result['group']:
                result['results'] = dao.get_results_for_operation(result['id'])
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            # We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1):
    """
    Return a Tuple with Project entity and Operations for current Project.

    :param project_id: Current Project Identifier
    :param applied_filters: Filters to apply on Operations
    :param current_page: Number for current page in operations
    :returns: tuple of (project entity, total operations count, list of
        per-operation display dicts, number of pages)
    """

    def _as_date(value):
        # Some DB drivers return timestamp columns as raw strings; normalize to datetime.
        return string2date(str(value)) if isinstance(value, str) else value

    selected_project = self.find_project(project_id)
    total_filtered = self.count_filtered_operations(project_id, applied_filters)
    # Ceiling division: a partially filled last page still counts as a page.
    pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0)
    total_ops_nr = self.count_filtered_operations(project_id)
    start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1)
    current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, OPERATIONS_PAGE_SIZE)
    if current_ops is None:
        return selected_project, 0, [], 0

    # NOTE: the former `view_categ_id = dao.get_visualisers_categories()[0].id`
    # was never read afterwards — dropped to save a DB query per call.
    operations = []
    for one_op in current_ops:
        try:
            result = {}
            # one_op[0]/one_op[1] are the first/last operation ids of a grouped row;
            # they differ only when the row aggregates an operation range.
            if one_op[0] != one_op[1]:
                result["id"] = str(one_op[0]) + "-" + str(one_op[1])
            else:
                result["id"] = str(one_op[0])
            burst = dao.get_burst_for_operation_id(one_op[0])
            result["burst_name"] = burst.name if burst else '-'
            result["count"] = one_op[2]
            result["gid"] = one_op[13]
            if one_op[3]:
                try:
                    operation_group = dao.get_generic_entity(OperationGroup, one_op[3])[0]
                    result["group"] = operation_group.name.replace("_", " ")
                    result["operation_group_id"] = operation_group.id
                    datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3])
                    result["datatype_group_gid"] = datatype_group.gid
                    result["gid"] = operation_group.gid
                    ## Filter only viewers for current DataTypeGroup entity:
                    result["view_groups"] = FlowService().get_visualizers_for_group(datatype_group.gid)
                except Exception:
                    # Best-effort: a broken group must not hide the operation row itself.
                    self.logger.exception("We will ignore group on entity:" + str(one_op))
                    result["datatype_group_gid"] = None
            else:
                result['group'] = None
                result['datatype_group_gid'] = None
            result["algorithm"] = dao.get_algorithm_by_id(one_op[4])
            result["user"] = dao.get_user_by_id(one_op[5])
            result["create"] = _as_date(one_op[6])
            result["start"] = _as_date(one_op[7])
            result["complete"] = _as_date(one_op[8])
            if result["complete"] is not None and result["start"] is not None:
                result["duration"] = format_timedelta(result["complete"] - result["start"])
            result["status"] = one_op[9]
            result["additional"] = one_op[10]
            result["visible"] = one_op[11] > 0
            result['operation_tag'] = one_op[12]
            result['figures'] = None
            # Only stand-alone operations expose their result datatypes and figures.
            if not result['group']:
                datatype_results = dao.get_results_for_operation(result['id'])
                result['results'] = []
                for dt in datatype_results:
                    dt_loaded = ABCAdapter.load_entity_by_gid(dt.gid)
                    if dt_loaded:
                        result['results'].append(dt_loaded)
                    else:
                        self.logger.warning("Could not retrieve datatype %s" % str(dt))
                operation_figures = dao.get_figures_for_operation(result['id'])
                # Compute the full path to the figure / image on disk
                for figure in operation_figures:
                    figures_folder = self.structure_helper.get_images_folder(figure.project.name)
                    figure_full_path = os.path.join(figures_folder, figure.file_path)
                    # Compute the path available from browser
                    figure.figure_path = utils.path2url_part(figure_full_path)
                result['figures'] = operation_figures
            else:
                result['results'] = None
            operations.append(result)
        except Exception:
            ## We got an exception when processing one Operation Row. We will continue with the rest of the rows.
            self.logger.exception("Could not prepare operation for display:" + str(one_op))
    return selected_project, total_ops_nr, operations, pages_no
def retrieve_project_full(self, project_id, applied_filters=None, current_page=1): """ Return a Tuple with Project entity and Operations for current Project. :param project_id: Current Project Identifier :param applied_filters: Filters to apply on Operations :param current_page: Number for current page in operations """ selected_project = self.find_project(project_id) total_filtered = self.count_filtered_operations(project_id, applied_filters) start_idx = OPERATIONS_PAGE_SIZE * (current_page - 1) if total_filtered >= start_idx + OPERATIONS_PAGE_SIZE: end_idx = OPERATIONS_PAGE_SIZE else: end_idx = total_filtered - start_idx pages_no = total_filtered // OPERATIONS_PAGE_SIZE + (1 if total_filtered % OPERATIONS_PAGE_SIZE else 0) total_ops_nr = self.count_filtered_operations(project_id) current_ops = dao.get_filtered_operations(project_id, applied_filters, start_idx, end_idx) started_ops = 0 if current_ops is None: return selected_project, [], 0 operations = [] for one_op in current_ops: try: result = {} if one_op[0] != one_op[1]: result["id"] = str(one_op[0]) + "-" + str(one_op[1]) else: result["id"] = str(one_op[0]) burst = dao.get_burst_for_operation_id(one_op[0]) result["burst_name"] = burst.name if burst else '-' result["count"] = one_op[2] result["gid"] = one_op[14] if one_op[3] is not None and one_op[3]: try: operation_group = dao.get_generic_entity(model.OperationGroup, one_op[3])[0] result["group"] = operation_group.name result["group"] = result["group"].replace("_", " ") result["operation_group_id"] = operation_group.id datatype_group = dao.get_datatypegroup_by_op_group_id(one_op[3]) datatype = dao.get_datatype_by_id(datatype_group.id) result["datatype_group_gid"] = datatype.gid result["gid"] = operation_group.gid all_categs = dao.get_algorithm_categories() view_categ = dao.get_visualisers_categories()[0] excludes = [categ.id for categ in all_categs if categ.id != view_categ.id] algo = self.retrieve_launchers("DataTypeGroup", datatype.gid, 
exclude_categories=excludes).values()[0] view_groups = [] for algo in algo.values(): url = '/flow/' + str(algo['category']) + '/' + str(algo['id']) if algo['part_of_group']: url = '/flow/prepare_group_launch/' + datatype.gid + '/' + \ str(algo['category']) + '/' + str(algo['id']) view_groups.append(dict(name=algo["displayName"], url=url, param_name=algo['children'][0]['param_name'], part_of_group=algo['part_of_group'])) result["view_groups"] = view_groups except Exception, excep: self.logger.error(excep) self.logger.warning("Will ignore group on entity:" + str(one_op)) result["datatype_group_gid"] = None else: result['group'] = None result['datatype_group_gid'] = None result["algorithm"] = dao.get_algorithm_by_id(one_op[4]) result["method"] = one_op[5] result["user"] = dao.get_user_by_id(one_op[6]) if type(one_op[7]) in (str, unicode): result["create"] = string2date(str(one_op[7])) else: result["create"] = one_op[7] if type(one_op[8]) in (str, unicode): result["start"] = string2date(str(one_op[8])) else: result["start"] = one_op[8] if type(one_op[9]) in (str, unicode): result["complete"] = string2date(str(one_op[9])) else: result["complete"] = one_op[9] if result["complete"] is not None and result["start"] is not None: result["duration"] = timedelta2string(result["complete"] - result["start"]) result["status"] = one_op[10] if result["status"] == model.STATUS_STARTED: started_ops += 1 result["additional"] = one_op[11] result["visible"] = True if one_op[12] > 0 else False result['operation_tag'] = one_op[13] result['figures'] = None if not result['group']: datatype_results = dao.get_results_for_operation(result['id']) result['results'] = [dao.get_generic_entity(dt.module + '.' 
+ dt.type, dt.gid, 'gid')[0] for dt in datatype_results] operation_figures = dao.get_figures_for_operation(result['id']) # Compute the full path to the figure / image on disk for figure in operation_figures: figures_folder = self.structure_helper.get_images_folder(figure.project.name, figure.operation.id) figure_full_path = os.path.join(figures_folder, figure.file_path) # Compute the path available from browser figure.figure_path = utils.path2url_part(figure_full_path) result['figures'] = operation_figures else: result['results'] = None operations.append(result)