class PSETest(TransactionalTestCase):
    """
    Unit-tests for the PSE (Parameter Space Exploration) viewer adapters.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a datatype group
        """
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()

    def test_launch_discrete(self):
        """
        Check that all required keys are present in output from PSE Discrete Adapter launch.
        """
        adapter = DiscretePSEAdapter()
        launch_result = adapter.launch(self.group)
        required_keys = ('status', 'size_metric', 'series_array', 'min_shape_size',
                         'min_color', 'data', 'max_shape_size', 'max_color',
                         'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                         'has_started_ops', 'datatype_group_gid', 'datatypes_dict',
                         'color_metric')
        for required_key in required_keys:
            self.assertTrue(required_key in launch_result)
        self.assertEqual(self.group.gid, launch_result["datatype_group_gid"])
        # Freshly created group: no operations have been started yet.
        self.assertEqual('false', launch_result["has_started_ops"])

    def test_launch_isocline(self):
        """
        Check that all required keys are present in output from PSE Isocline Adapter launch.
        """
        adapter = IsoclinePSEAdapter()
        launch_result = adapter.launch(self.group)
        self.assertEqual(adapter._ui_name, launch_result["title"])
        self.assertEqual(TvbProfile.current.web.MPLH5_SERVER_URL, launch_result["mplh5ServerURL"])
        self.assertEqual(1, len(launch_result["figureNumbers"]))
        self.assertEqual(1, len(launch_result["metrics"]))
class PSETest(TransactionalTestCase):
    """
    Unit-tests for the PSE viewer adapters (discrete and isocline).
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a datatype group
        """
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()

    def test_launch_discrete(self):
        """
        Check that all required keys are present in output from PSE Discrete Adapter launch.
        """
        discrete_viewer = DiscretePSEAdapter()
        launch_params = discrete_viewer.launch(self.group)
        for mandatory_key in ['status', 'size_metric', 'series_array', 'min_shape_size',
                              'min_color', 'd3_data', 'max_shape_size', 'max_color',
                              'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                              'has_started_ops', 'datatype_group_gid', 'datatypes_dict',
                              'color_metric']:
            self.assertTrue(mandatory_key in launch_params)
        self.assertEqual(self.group.gid, launch_params["datatype_group_gid"])
        # A newly created group has no running operations.
        self.assertEqual('false', launch_params["has_started_ops"])

    def test_launch_isocline(self):
        """
        Check that all required keys are present in output from PSE Isocline Adapter launch.
        """
        isocline_viewer = IsoclinePSEAdapter()
        launch_params = isocline_viewer.launch(self.group)
        self.assertEqual(isocline_viewer._ui_name, launch_params["title"])
        self.assertEqual(TvbProfile.current.web.MPLH5_SERVER_URL, launch_params["mplh5ServerURL"])
        self.assertEqual(1, len(launch_params["figureNumbers"]))
        self.assertEqual(1, len(launch_params["metrics"]))
class TestPSE(TransactionalTestCase):
    """
    Unit-tests for the PSE viewer adapters.
    """

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a datatype group
        """
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()

    def test_launch_discrete(self):
        """
        Check that all required keys are present in output from PSE Discrete Adapter launch.
        """
        discrete_adapter = DiscretePSEAdapter()
        launch_result = discrete_adapter.launch(self.group)
        for expected_key in ('status', 'size_metric', 'series_array', 'min_shape_size',
                             'min_color', 'd3_data', 'max_shape_size', 'max_color',
                             'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                             'has_started_ops', 'datatype_group_gid', 'datatypes_dict',
                             'color_metric'):
            assert expected_key in launch_result
        assert self.group.gid == launch_result["datatype_group_gid"]
        # Group is freshly created, so no operations have started.
        assert 'false' == launch_result["has_started_ops"]

    def test_launch_isocline(self):
        """
        Check that all required keys are present in output from PSE Isocline Adapter launch.
        """
        isocline_adapter = IsoclinePSEAdapter()
        launch_result = isocline_adapter.launch(self.group)
        assert isocline_adapter._ui_name == launch_result["title"]
        assert 1 == len(launch_result["available_metrics"])
class TestPSE(TransactionalTestCase):
    """
    Unit-tests for the PSE viewer adapters (discrete and isocline variants).
    """

    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a datatype group
        """
        self.datatypeFactory = DatatypesFactory()
        self.group = self.datatypeFactory.create_datatype_group()

    def test_launch_discrete(self):
        """
        Check that all required keys are present in output from PSE Discrete Adapter launch.
        """
        viewer_under_test = DiscretePSEAdapter()
        launch_output = viewer_under_test.launch(self.group)
        mandatory_keys = ['status', 'size_metric', 'series_array', 'min_shape_size',
                          'min_color', 'd3_data', 'max_shape_size', 'max_color',
                          'mainContent', 'labels_y', 'labels_x', 'isAdapter',
                          'has_started_ops', 'datatype_group_gid', 'datatypes_dict',
                          'color_metric']
        for mandatory_key in mandatory_keys:
            assert mandatory_key in launch_output
        assert self.group.gid == launch_output["datatype_group_gid"]
        # No operations should be reported as started for a new group.
        assert 'false' == launch_output["has_started_ops"]

    def test_launch_isocline(self):
        """
        Check that all required keys are present in output from PSE Isocline Adapter launch.
        """
        viewer_under_test = IsoclinePSEAdapter()
        launch_output = viewer_under_test.launch(self.group)
        assert viewer_under_test._ui_name == launch_output["title"]
        assert 1 == len(launch_output["available_metrics"])
class ExportersTest(TransactionalTestCase):
    """
    Test export functionality.
    """
    TVB_EXPORTER = "TVBExporter"
    CIFTI_EXPORTER = "CIFTIExporter"

    def setUp(self):
        # Fresh export manager plus a project populated by the datatype factory.
        self.export_manager = ExportManager()
        self.datatypeFactory = DatatypesFactory()
        self.project = self.datatypeFactory.get_project()

    def tearDown(self):
        """
        Clean-up tests data
        """
        project = self.datatypeFactory.get_project()
        FilesHelper().remove_project_structure(project.name)
        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE,
                                     ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)

    def test_get_exporters_for_data(self):
        """
        Test retrieval of exporters that can be used for a given data.
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        exporters = self.export_manager.get_exporters_for_data(datatype)
        # Only TVB export can export any type of data type
        self.assertEqual(1, len(exporters), "Incorrect number of exporters.")

    def test_get_exporters_for_data_with_no_data(self):
        """
        Test retrieval of exporters when data == None.
        """
        self.assertRaises(InvalidExportDataException,
                          self.export_manager.get_exporters_for_data, None)

    def test_tvb_export_of_simple_datatype(self):
        """
        Test export of a data type which has no data stored on file system
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

    def test_tvb_export_of_datatype_with_storage(self):
        """
        Test export of a data type which has data stored on file system
        """
        # NOTE: docstring fixed — this test exercises a datatype WITH storage.
        datatype = self.datatypeFactory.create_datatype_with_storage()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

    def test_tvb_export_for_datatype_group(self):
        """
        This method checks export of a data type group
        """
        datatype_group = self.datatypeFactory.create_datatype_group()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype_group, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(file_path), "Generated file is not a valid ZIP file")

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()
            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)
            # Check if ZIP files contains files for data types + operation
            self.assertEqual(count_datatypes * 2, len(list_of_files),
                             "Should have 2 x nr datatypes files, one for operations one for datatypes")

    def test_export_with_invalid_data(self):
        """
        Test scenarios when data provided to export method is invalid
        """
        # Test with no datatype
        self.assertRaises(InvalidExportDataException, self.export_manager.export_data,
                          None, self.TVB_EXPORTER, self.project)
        # Test with no exporter
        datatype = self.datatypeFactory.create_datatype_with_storage()
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, None, self.project)
        # test with wrong exporter
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, "wrong_exporter", self.project)
        # test with no project folder
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, self.TVB_EXPORTER, None)

    def test_export_project_failure(self):
        """
        This method tests export of project with None data
        """
        self.assertRaises(ExportException, self.export_manager.export_project, None)

    def test_export_project(self):
        """
        Test export of a project
        """
        # BUGFIX: method was named 'tet_export_project', so the test runner
        # never discovered or executed it; renamed with the 'test_' prefix.
        project = self.datatypeFactory.get_project()
        export_file = self.export_manager.export_project(project)

        self.assertTrue(export_file is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(export_file),
                        "Could not find export file: %s on disk." % export_file)
        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file")
class TVBImporterTest(TransactionalTestCase):
    """
    Unit-tests for TVB importer.
    """
    TVB_EXPORTER = "TVBExporter"

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(
            self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, h5_file_name)
        self.assertTrue(os.path.exists(self.h5_file_path),
                        "Simple data type was not exported correct")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(
            self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path),
                        "Data type group was not exported correct")

        # Wipe the original project so imports land in a clean one
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

    def test_zip_import(self):
        """
        This method tests import of TVB data in zip format (which imply multiple data types
        in the same zip file - exported from a group)
        """
        self._import(self.zip_file_path)
        count = FlowService().get_available_datatypes(
            self.test_project.id, self.datatype.module + "." + self.datatype.type)[1]
        self.assertEqual(9, count, "9 datatypes should have been imported from group.")

    def test_h5_import(self):
        """
        This method tests import of TVB data in h5 format. Single data type / import
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, self.datatype.module + "." + self.datatype.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None, "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid,
                         "Imported datatype should have the same gid")

    def test_import_invalid_file(self):
        """
        This method tests import of a file which does not exists or does not
        have a supported format.
        """
        # Idiom fix: use assertRaises instead of try / self.fail / except.
        # Non-existent path must be rejected by the import operation.
        self.assertRaises(OperationException, self._import, "invalid_path")

        # Now try to generate a file on disk with wrong format and import that
        file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, "dummy_file.txt")
        with open(file_path, "w") as f:
            f.write("dummy text")
        # A file with an unsupported format must also be rejected.
        self.assertRaises(OperationException, self._import, file_path)
class TVBImporterTest(TransactionalTestCase):
    """
    Unit-tests for TVB importer.
    """
    TVB_EXPORTER = "TVBExporter"

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(
            self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, h5_file_name)
        self.assertTrue(os.path.exists(self.h5_file_path),
                        "Simple data type was not exported correct")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(
            self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path),
                        "Data type group was not exported correct")

        # Drop the original project so the import targets a clean one
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

    def _import(self, import_file_path=None):
        """
        This method is used for importing data in TVB format
        :param import_file_path: absolute path of the file to be imported
        """
        ### Retrieve Adapter instance
        importer = TestFactory.create_adapter(
            'tvb.adapters.uploaders.tvb_importer', 'TVBImporter')
        args = {'data_file': import_file_path}

        ### Launch import Operation
        FlowService().fire_operation(importer, self.test_user, self.test_project.id, **args)

    def test_zip_import(self):
        """
        This method tests import of TVB data in zip format (which imply multiple data types
        in the same zip file - exported from a group)
        """
        self._import(self.zip_file_path)
        count = FlowService().get_available_datatypes(
            self.test_project.id, self.datatype.module + "." + self.datatype.type)[1]
        self.assertEqual(9, count, "9 datatypes should have been imported from group.")

    def test_h5_import(self):
        """
        This method tests import of TVB data in h5 format. Single data type / import
        """
        self._import(self.h5_file_path)

        data_types = FlowService().get_available_datatypes(
            self.test_project.id, self.datatype.module + "." + self.datatype.type)[0]
        self.assertEqual(1, len(data_types), "Project should contain only one data type.")

        data_type_entity = ABCAdapter.load_entity_by_gid(data_types[0][2])
        self.assertTrue(data_type_entity is not None, "Datatype should not be none")
        self.assertEqual(self.datatype.gid, data_type_entity.gid,
                         "Imported datatype should have the same gid")

    def test_import_invalid_file(self):
        """
        This method tests import of a file which does not exists or does not
        have a supported format.
        """
        # Idiom fix: assertRaises replaces the try / self.fail / except pattern.
        # Importing a non-existent path must fail with OperationException.
        self.assertRaises(OperationException, self._import, "invalid_path")

        # Now try to generate a file on disk with wrong format and import that
        file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, "dummy_file.txt")
        with open(file_path, "w") as f:
            f.write("dummy text")
        # Importing a file with an unsupported format must also fail.
        self.assertRaises(OperationException, self._import, file_path)
class ExportersTest(TransactionalTestCase):
    """
    Test export functionality.
    """
    TVB_EXPORTER = "TVBExporter"
    CIFTI_EXPORTER = "CIFTIExporter"

    def setUp(self):
        # Fresh export manager plus a project populated by the datatype factory.
        self.export_manager = ExportManager()
        self.datatypeFactory = DatatypesFactory()
        self.project = self.datatypeFactory.get_project()

    def tearDown(self):
        """
        Clean-up tests data
        """
        project = self.datatypeFactory.get_project()
        FilesHelper().remove_project_structure(project.name)
        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE,
                                     ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)

    def test_get_exporters_for_data(self):
        """
        Test retrieval of exporters that can be used for a given data.
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        exporters = self.export_manager.get_exporters_for_data(datatype)
        # Only TVB export can export any type of data type
        self.assertEqual(1, len(exporters), "Incorrect number of exporters.")

    def test_get_exporters_for_data_with_no_data(self):
        """
        Test retrieval of exporters when data == None.
        """
        self.assertRaises(InvalidExportDataException,
                          self.export_manager.get_exporters_for_data, None)

    def test_tvb_export_of_simple_datatype(self):
        """
        Test export of a data type which has no data stored on file system
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

    def test_tvb_export_of_datatype_with_storage(self):
        """
        Test export of a data type which has data stored on file system
        """
        # NOTE: docstring fixed — this test exercises a datatype WITH storage.
        datatype = self.datatypeFactory.create_datatype_with_storage()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

    def test_tvb_export_for_datatype_group(self):
        """
        This method checks export of a data type group
        """
        datatype_group = self.datatypeFactory.create_datatype_group()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype_group, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(file_path), "Generated file is not a valid ZIP file")

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()
            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)
            # Check if ZIP files contains files for data types + operation
            self.assertEqual(count_datatypes * 2, len(list_of_files),
                             "Should have 2 x nr datatypes files, one for operations one for datatypes")

    def test_export_with_invalid_data(self):
        """
        Test scenarios when data provided to export method is invalid
        """
        # Test with no datatype
        self.assertRaises(InvalidExportDataException, self.export_manager.export_data,
                          None, self.TVB_EXPORTER, self.project)
        # Test with no exporter
        datatype = self.datatypeFactory.create_datatype_with_storage()
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, None, self.project)
        # test with wrong exporter
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, "wrong_exporter", self.project)
        # test with no project folder
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, self.TVB_EXPORTER, None)

    def test_export_project_failure(self):
        """
        This method tests export of project with None data
        """
        self.assertRaises(ExportException, self.export_manager.export_project, None)

    def test_export_project(self):
        """
        Test export of a project
        """
        # BUGFIX: method was named 'tet_export_project', so the test runner
        # never discovered or executed it; renamed with the 'test_' prefix.
        project = self.datatypeFactory.get_project()
        export_file = self.export_manager.export_project(project)

        self.assertTrue(export_file is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(export_file),
                        "Could not find export file: %s on disk." % export_file)
        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file")