Example #1
0
def run_export(project_id):
    """Export the project identified by ``project_id`` and print the file path."""
    project_service = ProjectService()
    export_manager = ExportManager()

    project = project_service.find_project(project_id)
    export_file = export_manager.export_project(project)
    print("Check the exported file: %s" % export_file)
Example #2
0
    def test_export_datatype_with_links(self, region_mapping_index_factory,
                                        user_factory, project_factory):
        """
        This is a test for exporting region mapping with links, that results in importing:
        connectivity, surface and region mapping all from one zip.
        """
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)

        region_mapping_index = region_mapping_index_factory()

        export_manager = ExportManager()
        _, exported_h5_file, _ = export_manager.export_data(
            region_mapping_index, self.TVB_LINKED_EXPORTER, self.test_project)
        assert zipfile.is_zipfile(
            exported_h5_file), "Generated file is not a valid ZIP file"

        with zipfile.ZipFile(exported_h5_file, 'r') as zipObj:
            # Get list of files names in zip
            dts_in_zip = len(zipObj.namelist())
            assert 3 == dts_in_zip

            has_conn = False
            has_surface = False
            for dt_file in zipObj.namelist():
                if dt_file.find(region_mapping_index.fk_connectivity_gid) > -1:
                    has_conn = True
                # BUG FIX: str.find() returns -1 (truthy) when not found, so the
                # bare truthiness test set has_surface even on a miss; compare
                # against -1 explicitly, same as the connectivity check above.
                if dt_file.find(region_mapping_index.fk_surface_gid) > -1:
                    has_surface = True

        # Failure messages describe what is MISSING when the assert fires.
        assert has_conn is True, "Connectivity was not exported in zip"
        assert has_surface is True, "Surface was not exported in zip"
    def prepare_importer_data(self, user_factory, project_factory, operation_factory,
                              connectivity_index_factory, datatype_group_factory):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        exports both (H5 for the simple datatype, ZIP for the group), keeps the
        exported files alive, wipes the database, then recreates a clean
        user/project so import tests start from an empty project.
        """
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)
        operation = operation_factory(test_project=self.test_project)

        # Generate simple data type and export it to H5 file
        self.datatype = connectivity_index_factory(op=operation)

        export_manager = ExportManager()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)

        # Copy H5 file to another location since the original one / exported will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, h5_file_name)
        assert os.path.exists(self.h5_file_path), "Simple data type was not exported correct"

        # Generate data type group and export it to ZIP file
        datatype_group, _ = datatype_group_factory(project=self.test_project, store_vm=True)
        _, self.zip_file_path, _ = export_manager.export_data(datatype_group, self.TVB_EXPORTER, self.test_project)
        assert os.path.exists(self.zip_file_path), "Data type group was not exported correct"

        # Folders are kept so the copied export files above survive the cleanup
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)
Example #4
0
    def export(self, burst_id):
        """Serve the simulator configuration for *burst_id* as a ZIP download."""
        manager = ExportManager()
        zip_path = manager.export_simulator_configuration(burst_id)

        download_name = "tvb_simulation_%s.zip" % burst_id
        return serve_file(zip_path, "application/x-download", "attachment",
                          download_name)
Example #5
0
    def transactional_setup_fixture(self, datatype_store_factory, datatype_group_factory):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        exports both, preserves the export files outside the project folder,
        then wipes disk structure and DB and recreates a clean user/project
        into which the import tests can load the preserved files.
        """

        export_manager = ExportManager()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)

        # Generate simple data type and export it to H5 file
        self.datatype = datatype_store_factory
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, h5_file_name)

        assert os.path.exists(self.h5_file_path), "Simple data type was not exported correct"

        # Generate data type group and export it to ZIP file
        datatype_group = datatype_group_factory
        _, self.zip_file_path, _ = export_manager.export_data(datatype_group, self.TVB_EXPORTER, self.test_project)
        assert os.path.exists(self.zip_file_path), "Data type group was not exported correct"

        # Remove the on-disk project; keep other folders so the copies above survive
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a datatype and a datatype_group;
        exports both, copies the exported H5 out of the project folder, then
        wipes disk/DB state and recreates a clean project to import into.
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, TvbProfile.current.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(TvbProfile.current.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correct")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correct")

        # Drop the exported project on disk; folders are kept for the copies above
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
Example #7
0
def run_export(project_id, loose_irrelevant=False):
    """Export one project to a file, optionally dropping irrelevant data."""
    service = ProjectService()
    manager = ExportManager()

    target_project = service.find_project(project_id)
    export_file = manager.export_project(target_project, loose_irrelevant)
    print("Check the exported file: %s" % export_file)
    def export(self, burst_id):
        """Serve the burst identified by *burst_id* as a JSON download."""
        exported_json = ExportManager().export_burst(burst_id)

        download_name = "tvb_simulation_%s.json" % burst_id
        return serve_fileobj(exported_json, "application/x-download",
                             "attachment", download_name)
Example #9
0
    def setUp(self):
        """
        Sets up the environment for running the tests: creates a test
        project with a simple datatype and a datatype group, exports both,
        preserves the exported H5 file, then cleans everything and recreates
        a fresh project to import into.
        """
        export_manager = ExportManager()

        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()

        # Generate simple data type and export it to H5 file
        self.datatype = self.datatypeFactory.create_datatype_with_storage()
        _, exported_h5_file, _ = export_manager.export_data(self.datatype, self.TVB_EXPORTER, self.test_project)
        # Copy H5 file to another location since the original one / exported
        # will be deleted with the project
        _, h5_file_name = os.path.split(exported_h5_file)
        shutil.copy(exported_h5_file, cfg.TVB_TEMP_FOLDER)
        self.h5_file_path = os.path.join(cfg.TVB_TEMP_FOLDER, h5_file_name)

        self.assertTrue(os.path.exists(self.h5_file_path), "Simple data type was not exported correct")

        # Generate data type group and export it to ZIP file
        self.datatype_group = self.datatypeFactory.create_datatype_group()
        _, self.zip_file_path, _ = export_manager.export_data(self.datatype_group, self.TVB_EXPORTER, self.test_project)
        self.assertTrue(os.path.exists(self.zip_file_path), "Data type group was not exported correct")

        # Remove the exported project from disk and DB (folders kept for the copies)
        FilesHelper().remove_project_structure(self.test_project.name)
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()
def run_export(project_id, loose_irrelevant=False):
    """Find a project by id, export it, and print the resulting file path."""
    found = ProjectService().find_project(project_id)
    export_file = ExportManager().export_project(found, loose_irrelevant)
    print("Check the exported file: %s" % export_file)
Example #11
0
    def initialize_linked_projects(self, initialize_two_projects):
        """
        Adds to the _BaseLinksTest setup the following
        2 links from src to dest project
        Import/export services
        """

        destination_id = self.dest_project.id
        for linked_dt in (self.red_datatype, self.blue_datatype):
            self.flow_service.create_link([linked_dt.id], destination_id)
        self.export_mng = ExportManager()
    def downloadproject(self, project_id):
        """
        Export the data from a whole project.
        """
        project = self.project_service.find_project(project_id)
        exported_path = ExportManager().export_project(project)

        # Register export file for delete when download complete
        # We force parent folder deletion because export process generated it.
        self.mark_file_for_delete(exported_path, True)

        return serve_file(exported_path, "application/x-download", "attachment")
Example #13
0
    def downloadproject(self, project_id):
        """
        Export the data from a whole project.
        """
        target = self.project_service.find_project(project_id)
        export_manager = ExportManager()
        archive_path = export_manager.export_project(target)

        # Schedule the archive for removal once the download finishes;
        # the parent folder is deleted too, since the export created it.
        self.mark_file_for_delete(archive_path, True)

        return serve_file(archive_path, "application/x-download", "attachment")
Example #14
0
    def setUp(self):
        """
        Adds to the _BaseLinksTest setup the following
        2 links from src to dest project
        Import/export services
        """
        self.clean_database(delete_folders=True)
        self._initialize_two_projects()

        destination_id = self.dest_project.id
        for source_dt in (self.red_datatype, self.blue_datatype):
            self.flow_service.create_link([source_dt.id], destination_id)
        self.export_mng = ExportManager()
    def downloaddata(self, data_gid, export_module):
        """ Export the data to a default path of TVB_STORAGE/PROJECTS/project_name """
        project = common.get_current_project()
        # Resolve the datatype entity from its GID
        entity = ABCAdapter.load_entity_by_gid(data_gid)
        # Perform the actual export
        name, path, should_delete = ExportManager().export_data(entity, export_module, project)
        if should_delete:
            # We force parent folder deletion because export process generated it.
            self.mark_file_for_delete(path, True)

        self.logger.debug("Data exported in file: " + str(path))
        return serve_file(path, "application/x-download", "attachment", name)
    def downloaddata(self, data_gid, export_module):
        """ Export the data to a default path of TVB_STORAGE/PROJECTS/project_name """
        active_project = common.get_current_project()
        # Load the datatype entity behind the requested GID
        entity = ABCAdapter.load_entity_by_gid(data_gid)
        # Run the requested exporter against it
        export_manager = ExportManager()
        exported_name, exported_path, delete_after = export_manager.export_data(
            entity, export_module, active_project)
        if delete_after:
            # The exporter created the parent folder, so remove it as well.
            self.mark_file_for_delete(exported_path, True)

        self.logger.debug("Data exported in file: " + str(exported_path))
        return serve_file(exported_path, "application/x-download", "attachment", exported_name)
Example #17
0
    def test_import_export_existing(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        Importing the exported archive while the project still exists must
        raise ProjectImportException.
        """
        # Snapshot the datatypes present before export (gid -> (module, type))
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        #create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        inserted = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted), 2, "Problems when inserting data")

        #create a value wrapper
        self._create_value_wrapper()
        result = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(
            len(result), 2, "Should be two operations before export and not " +
            str(len(result)) + " !")
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")

        try:
            self.import_service.import_project_structure(
                self.zip_path, self.test_user.id)
            self.fail("Invalid import as the project already exists!")
        except ProjectImportException:
            #OK, do nothing. The project already exists.
            pass
Example #18
0
    def test_export_import_burst(self, user_factory, project_factory,
                                 simulation_launch):
        """
        Test that fk_parent_burst is correctly preserved after export/import
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestIESim")
        sim_op = simulation_launch(test_user,
                                   test_project,
                                   simulation_length=10)
        # Poll the asynchronous simulation: at most 5 checks, 5s apart
        tries = 5
        while not sim_op.has_finished and tries > 0:
            sleep(5)
            tries = tries - 1
            sim_op = dao.get_operation_by_id(sim_op.id)
        assert sim_op.has_finished, "Simulation did not finish in the given time"

        # Export the project, drop the original, then import it back
        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"
        self.project_service.remove_project(test_project.id)

        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        retrieved_project = self.project_service.retrieve_projects_for_user(
            test_user.id)[0][0]
        # The imported time series must still reference the single burst
        ts = try_get_last_datatype(retrieved_project.id, TimeSeriesRegionIndex)
        bursts = dao.get_bursts_for_project(retrieved_project.id)
        assert 1 == len(bursts)
        assert ts.fk_parent_burst == bursts[0].gid
Example #19
0
    def test_import_export(self, user_factory, project_factory,
                           value_wrapper_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        Exports the project, removes it, re-imports the archive and checks
        that operations, datatypes and the value wrapper are restored.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport",
                                       "test_desc")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)
        value_wrapper = value_wrapper_factory(test_user, test_project)

        # Snapshot all datatypes before export (gid -> (module, type))
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[
            0].name == "TestImportExport", "The project name is not correct."
        assert result[
            0].description == "test_desc", "The project description is not correct."
        test_project = result[0]

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)

        # 1 op. - import conn; 2 op. - BCT Analyzer
        assert 2 == count_operations, "Invalid ops number after export and import !"
        # Every pre-export datatype must come back with the same module/type
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][
                0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][
                1], 'DataTypes not imported correctly'
        # check the value wrapper
        new_val = try_get_last_datatype(test_project.id, ValueWrapperIndex)
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
Example #20
0
 def setUpTVB(self):
     """
     Adds to the _BaseLinksTest setup the following
     2 links from src to dest project
     Import/export services
     """
     _BaseLinksTest.setUpTVB(self)
     destination_id = self.dest_project.id
     for linked_dt in (self.red_datatype, self.blue_datatype):
         self.flow_service.create_link([linked_dt.id], destination_id)
     self.export_mng = ExportManager()
     self.import_service = ImportService()
Example #21
0
    def test_import_datatype_with_links(self, region_mapping_index_factory, user_factory, project_factory):
        """
        This is a test for importing region mapping with links, that results in importing:
        connectivity, surface and region mapping all from one zip.
        """
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)

        region_mapping_index = region_mapping_index_factory()

        # Export the region mapping together with its linked datatypes
        export_manager = ExportManager()
        _, exported_h5_file, _ = export_manager.export_data(region_mapping_index, self.TVB_LINKED_EXPORTER, self.test_project)

        # Clean DB
        self.clean_database(delete_folders=False)

        # Recreate project, but a clean one where to import data
        self.test_user = user_factory()
        self.test_project = project_factory(self.test_user)

        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 0 == len(datatypes), "There are no DT's in DB before import."

        self._import(exported_h5_file)

        # Region mapping + its two linked datatypes (connectivity, surface)
        datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert 3 == len(datatypes), "Project should contain 3 data types."

        has_conn = False
        has_surface = False
        for dt in datatypes:
            if dt.gid == region_mapping_index.fk_connectivity_gid:
                has_conn = True
            if dt.gid == region_mapping_index.fk_surface_gid:
                has_surface = True

        assert has_conn is True, "Connectivity was imported as linked"
        assert has_surface is True, "Surface was imported as linked"
    def test_import_export_existing(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        ops_count = dao.get_filtered_operations(self.test_project.id,
                                                None,
                                                is_count=True)
        assert 2 == ops_count, "Invalid ops before export!"

        self.zip_path = ExportManager().export_project(self.test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Importing while the project still exists has to be rejected
        with pytest.raises(ProjectImportException):
            self.import_service.import_project_structure(
                self.zip_path, self.test_user.id)
Example #23
0
    def test_export_import_figures(self, user_factory, project_factory):
        """
        Test that ResultFigure instances are correctly restores after an export+import project
        """
        # Prepare data
        user = user_factory()
        project = project_factory(user, "TestImportExportFigures")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'paupau.zip')
        TestFactory.import_zip_connectivity(user, project, zip_path)

        # Store two identical figures so both should survive the round-trip
        figure_service = FigureService()
        figure_service.store_result_figure(project, user, "png", IMG_DATA,
                                           "bla")
        figure_service.store_result_figure(project, user, "png", IMG_DATA,
                                           "bla")
        figures = list(
            figure_service.retrieve_result_figures(project,
                                                   user)[0].values())[0]
        assert 2 == len(figures)

        # export, delete and the import project
        self.zip_path = ExportManager().export_project(project)
        assert self.zip_path is not None, "Exported file is none"
        self.project_service.remove_project(project.id)

        self.import_service.import_project_structure(self.zip_path, user.id)

        # Check that state is as before export: one operation, one DT, 2 figures
        retrieved_project = self.project_service.retrieve_projects_for_user(
            user.id)[0][0]
        count_operations = dao.get_filtered_operations(retrieved_project.id,
                                                       None,
                                                       is_count=True)
        assert 1 == count_operations
        count_datatypes = dao.count_datatypes(retrieved_project.id, DataType)
        assert 1 == count_datatypes

        figures = list(
            figure_service.retrieve_result_figures(retrieved_project,
                                                   user)[0].values())[0]
        assert 2 == len(figures)
        assert "bla" in figures[0].name
        assert "bla" in figures[1].name
        # The figure image file itself must be restored and loadable
        image_path = utils.url2path(figures[0].file_path)
        img_data = Image.open(image_path).load()
        assert img_data is not None
    def test_import_export_existing(self, user_factory, project_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        user = user_factory()
        project = project_factory(user, "TestImportExport2")
        conn_zip = os.path.join(os.path.dirname(tvb_data.__file__), 'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(user, project, conn_zip)

        ops_count = dao.get_filtered_operations(project.id, None, is_count=True)
        assert 1 == ops_count, "Invalid ops before export!"

        self.zip_path = ExportManager().export_project(project)
        assert self.zip_path is not None, "Exported file is none"

        # Importing over the still-existing project must raise
        with pytest.raises(ImportException):
            self.import_service.import_project_structure(self.zip_path, user.id)
Example #25
0
 def transactional_setup_method(self):
     """Prepare an export manager plus a dedicated test user and project."""
     self.export_manager = ExportManager()
     user = TestFactory.create_user('Exporter_Tests_User1')
     self.test_user = user
     self.test_project = TestFactory.create_project(user, 'Exporter_Tests_Project1')
Example #26
0
class TestExporters(TransactionalTestCase):
    """
    Test export functionality.
    """
    TVB_EXPORTER = "TVBExporter"
    CIFTI_EXPORTER = "CIFTIExporter"

    def transactional_setup_method(self):
        self.export_manager = ExportManager()
        self.test_user = TestFactory.create_user('Exporter_Tests_User1')
        self.test_project = TestFactory.create_project(
            self.test_user, 'Exporter_Tests_Project1')

    def transactional_teardown_method(self):
        """
        Clean-up tests data
        """
        user = TestFactory.create_user('Exporter_Tests_User2')
        project = TestFactory.create_project(user, 'Exporter_Tests_Project2')
        StorageInterface().remove_project_structure(project.name)

        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE,
                                     ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)

    def test_get_exporters_for_data(self, dummy_datatype_index_factory):
        """
        Test retrieval of exporters that can be used for a given data.
        """
        datatype = dummy_datatype_index_factory()
        exporters = self.export_manager.get_exporters_for_data(datatype)

        # Only TVB export can export any type of data type
        assert 1, len(exporters) == "Incorrect number of exporters."

    def test_get_exporters_for_data_with_no_data(self):
        """
        Test retrieval of exporters when data == None.
        """
        with pytest.raises(InvalidExportDataException):
            self.export_manager.get_exporters_for_data(None)

    def test_tvb_export_of_simple_datatype(self, dummy_datatype_index_factory):
        """
        Test export of a data type which has no data stored on file system
        """
        datatype = dummy_datatype_index_factory()
        _, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.test_project)

        assert file_path is not None, "Export process should return path to export file"
        assert os.path.exists(
            file_path), "Could not find export file: %s on disk." % file_path

    def test_tvb_export_of_datatype_with_storage(self,
                                                 dummy_datatype_index_factory):
        """
        Test export of a data type which has no data stored on file system
        """
        datatype = dummy_datatype_index_factory()
        _, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.test_project)

        assert file_path is not None, "Export process should return path to export file"
        assert os.path.exists(
            file_path), "Could not find export file: %s on disk." % file_path

    def test_tvb_export_for_datatype_group(self, datatype_group_factory):
        """
        This method checks export of a data type group
        """
        datatype_group = datatype_group_factory(project=self.test_project,
                                                store_vm=True)
        file_name, file_path, _ = self.export_manager.export_data(
            datatype_group, self.TVB_EXPORTER, self.test_project)

        assert file_name is not None, "Export process should return a file name"
        assert file_path is not None, "Export process should return path to export file"
        assert os.path.exists(
            file_path), "Could not find export file: %s on disk." % file_path

        # Now check if the generated file is a correct ZIP file
        assert zipfile.is_zipfile(
            file_path), "Generated file is not a valid ZIP file"

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            list_of_folders = []
            for file in list_of_files:
                dir_name = os.path.dirname(file)
                if dir_name not in list_of_folders:
                    list_of_folders.append(dir_name)

            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)

            # Check if ZIP files contains files for data types and view models (multiple H5 files in case of a Sim)
            assert count_datatypes == len(list_of_folders)
            assert count_datatypes * 6 == len(list_of_files)

    def test_export_with_invalid_data(self, dummy_datatype_index_factory):
        """
        Test scenarios when data provided to export method is invalid
        """
        # Test with no datatype
        with pytest.raises(InvalidExportDataException):
            self.export_manager.export_data(None, self.TVB_EXPORTER,
                                            self.test_project)
        # Test with no exporter
        datatype = dummy_datatype_index_factory()
        with pytest.raises(ExportException):
            self.export_manager.export_data(datatype, None, self.test_project)

        # test with wrong exporter
        with pytest.raises(ExportException):
            self.export_manager.export_data(datatype, "wrong_exporter",
                                            self.test_project)

        # test with no project folder
        with pytest.raises(ExportException):
            self.export_manager.export_data(datatype, self.TVB_EXPORTER, None)

    def test_export_project_failure(self):
        """
        This method tests export of project with None data
        """
        with pytest.raises(ExportException):
            self.export_manager.export_project(None)

    def test_export_project(self, project_factory, user_factory):
        """
        Export a project and check that a valid ZIP archive is produced on disk.
        """
        owner = user_factory(username='******')
        project_to_export = project_factory(owner)

        zip_path = self.export_manager.export_project(project_to_export)

        assert zip_path is not None, "Export process should return path to export file"
        assert os.path.exists(zip_path), "Could not find export file: %s on disk." % zip_path
        # The exported archive must be a well-formed ZIP
        assert zipfile.is_zipfile(zip_path), "Generated file is not a valid ZIP file"

    def test_export_simulator_configuration(self, operation_factory,
                                            connectivity_index_factory):
        """
        Export the simulator configuration of a stored burst and validate
        that the result is an existing, well-formed ZIP file.
        """
        # Build a simulation operation over a freshly created connectivity
        connectivity_gid = uuid.UUID(connectivity_index_factory().gid)
        simulation_op = operation_factory(is_simulation=True,
                                          store_vm=True,
                                          test_project=self.test_project,
                                          conn_gid=connectivity_gid)

        # Persist a burst configuration pointing at that simulation
        burst = BurstConfiguration(self.test_project.id)
        burst.fk_simulation = simulation_op.id
        burst.simulator_gid = simulation_op.view_model_gid
        burst.name = "Test_burst"
        burst = dao.store_entity(burst)

        operation_folder = StorageInterface().get_project_folder(
            self.test_project.name, str(simulation_op.id))
        BurstService().store_burst_configuration(burst, operation_folder)

        export_file = self.export_manager.export_simulator_configuration(burst.id)

        assert export_file is not None, "Export process should return path to export file"
        assert os.path.exists(export_file), "Could not find export file: %s on disk." % export_file
        assert zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file"
    def export(self, burst_id):
        """
        Serve the JSON export of the given burst as a file download.
        """
        exported_json = ExportManager().export_burst(burst_id)
        download_name = "tvb_simulation_%s.json" % burst_id
        return serve_fileobj(exported_json, "application/x-download", "attachment", download_name)
    def get_datatype_details(self,
                             entity_gid,
                             back_page='burst',
                             exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.

        :param entity_gid: GID of the DataType whose details overlay is built
        :param back_page: page identifier to return to when the overlay closes
        :param exclude_tabs: optional list of overlay tab names to omit
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = bc.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(
            entity_gid)

        ### Load DataType categories; only valid entities can have algorithms applied
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories = {}
        if not entity.invalid:
            categories = self.getalgorithmsfordatatype(str(current_type),
                                                       str(datatype_gid))
            categories = json.loads(categories)

        datatype_id = datatype_details.data_type_id
        is_group = False
        if datatype_details.operation_group_id is not None:
            ## Is a DataTypeGroup: link retrieval below uses the group id instead
            datatype_id = datatype_details.operation_group_id
            is_group = True

        ### Retrieve links
        linkable_projects_dict = self._get_linkable_projects_dict(datatype_id)
        ### Load all exporters
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        # Assemble the template context consumed by the overlay page
        template_specification = dict()
        template_specification["entity_gid"] = entity_gid
        template_specification["nodeFields"] = datatype_details.get_ui_fields()
        template_specification["allStates"] = states
        template_specification["project"] = selected_project
        template_specification["categories"] = categories
        template_specification["exporters"] = exporters
        template_specification["datatype_id"] = datatype_id
        template_specification["isGroup"] = is_group
        template_specification["isRelevant"] = is_relevant
        template_specification["nodeType"] = 'datatype'
        template_specification["backPageIdentifier"] = back_page
        template_specification.update(linkable_projects_dict)

        # CSS class / title of the overlay header; relevance toggles node styling
        overlay_class = "can-browse editor-node node-type-" + str(
            current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        # Build overlay tabs in a fixed order; overlay_indexes must stay
        # aligned with the positions expected by the overlay template.
        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Analyzers",
                                     "analyzers",
                                     enabled=categories
                                     and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Visualizers",
                                     "visualizers",
                                     enabled=categories
                                     and 'View' in categories))
            overlay_indexes.append(2)

        # "Links" tab is enabled only when the entity is valid and there is at
        # least one project to link to, or one project already linked.
        # NOTE(review): PRROJECTS_* attribute names carry a pre-existing typo
        # declared elsewhere in this class; kept as-is.
        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            if self.PRROJECTS_FOR_LINK_KEY in linkable_projects_dict:
                projects_for_link = linkable_projects_dict[
                    self.PRROJECTS_FOR_LINK_KEY]
                if projects_for_link is not None and len(
                        projects_for_link) > 0:
                    enable_link_tab = True
            if self.PRROJECTS_LINKED_KEY in linkable_projects_dict:
                projects_linked = linkable_projects_dict[
                    self.PRROJECTS_LINKED_KEY]
                if projects_linked is not None and len(projects_linked) > 0:
                    enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Links",
                                     "link_to",
                                     enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Export",
                                     "export",
                                     enabled=(exporters
                                              and len(exporters) > 0)))
            overlay_indexes.append(4)
        if "Resulted Datatypes" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition(
                    "Resulted Datatypes",
                    "result_dts",
                    enabled=self.project_service.
                    count_datatypes_generated_from(entity_gid)))
            overlay_indexes.append(5)
        template_specification = self.fill_overlay_attributes(
            template_specification, "DataType Details", overlay_title,
            "project/details_datatype_overlay", overlay_class, tabs,
            overlay_indexes)
        template_specification['baseUrl'] = cfg.BASE_URL
        #template_specification[bc.KEY_OVERLAY_PAGINATION] = True
        #template_specification[bc.KEY_OVERLAY_PREVIOUS] = "alert(1);"
        #template_specification[bc.KEY_OVERLAY_NEXT] = "alert(2);"
        return FlowController().fill_default_attributes(template_specification)
class ExportersTest(TransactionalTestCase):
    """
    Test export functionality (single datatypes, datatype groups and projects).
    """
    TVB_EXPORTER = "TVBExporter"
    CIFTI_EXPORTER = "CIFTIExporter"

    def setUp(self):
        # One manager/factory pair per test; the factory also owns the
        # project used as export source.
        self.export_manager = ExportManager()
        self.datatypeFactory = DatatypesFactory()
        self.project = self.datatypeFactory.get_project()

    def tearDown(self):
        """
        Clean-up tests data
        """
        project = self.datatypeFactory.get_project()
        FilesHelper().remove_project_structure(project.name)

        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE,
                                     ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)

    def _assert_exported_file(self, file_name, file_path):
        # Shared checks: export_data returned a name and an existing file on disk.
        self.assertTrue(file_name is not None,
                        "Export process should return a file name")
        self.assertTrue(file_path is not None,
                        "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path),
                        "Could not find export file: %s on disk." % file_path)

    def test_get_exporters_for_data(self):
        """
        Test retrieval of exporters that can be used for a given data.
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        exporters = self.export_manager.get_exporters_for_data(datatype)

        # Only TVB export can export any type of data type
        self.assertEqual(1, len(exporters), "Incorrect number of exporters.")

    def test_get_exporters_for_data_with_no_data(self):
        """
        Test retrieval of exporters when data == None.
        """
        self.assertRaises(InvalidExportDataException,
                          self.export_manager.get_exporters_for_data, None)

    def test_tvb_export_of_simple_datatype(self):
        """
        Test export of a data type which has no data stored on file system
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.project)
        self._assert_exported_file(file_name, file_path)

    def test_tvb_export_of_datatype_with_storage(self):
        """
        Test export of a data type which has data stored on file system
        """
        # NOTE: docstring fixed; the original said "has no data stored",
        # a copy-paste from the simple-datatype test above.
        datatype = self.datatypeFactory.create_datatype_with_storage()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype, self.TVB_EXPORTER, self.project)
        self._assert_exported_file(file_name, file_path)

    def test_tvb_export_for_datatype_group(self):
        """
        This method checks export of a data type group
        """
        datatype_group = self.datatypeFactory.create_datatype_group()
        file_name, file_path, _ = self.export_manager.export_data(
            datatype_group, self.TVB_EXPORTER, self.project)
        self._assert_exported_file(file_name, file_path)

        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(file_path),
                        "Generated file is not a valid ZIP file")

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)

            # Check if ZIP files contains files for data types + operation
            self.assertEqual(
                count_datatypes * 2, len(list_of_files),
                "Should have 2 x nr datatypes files, one for operations one for datatypes"
            )

    def test_export_with_invalid_data(self):
        """
        Test scenarios when data provided to export method is invalid
        """
        # Test with no datatype
        self.assertRaises(InvalidExportDataException,
                          self.export_manager.export_data, None,
                          self.TVB_EXPORTER, self.project)

        # Test with no exporter
        datatype = self.datatypeFactory.create_datatype_with_storage()
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, None, self.project)

        # test with wrong exporter
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, "wrong_exporter", self.project)

        # test with no project folder
        self.assertRaises(ExportException, self.export_manager.export_data,
                          datatype, self.TVB_EXPORTER, None)

    def test_export_project_failure(self):
        """
        This method tests export of project with None data
        """
        self.assertRaises(ExportException, self.export_manager.export_project,
                          None)

    def test_export_project(self):
        """
        Test export of a project
        """
        # NOTE: renamed from the original `tet_export_project`; the typo
        # prevented the test runner from ever discovering/running this test.
        project = self.datatypeFactory.get_project()
        export_file = self.export_manager.export_project(project)

        self.assertTrue(export_file is not None,
                        "Export process should return path to export file")
        self.assertTrue(
            os.path.exists(export_file),
            "Could not find export file: %s on disk." % export_file)
        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(export_file),
                        "Generated file is not a valid ZIP file")
 def setUp(self):
     """
     Prepare the export manager, the datatype factory and the project used
     by the tests in this class.
     """
     self.export_manager = ExportManager()
     self.datatypeFactory = DatatypesFactory()
     self.project = self.datatypeFactory.get_project()
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        The project is exported to ZIP, removed, re-imported, and the restored
        datatypes/operations are compared against the originals.
        """
        # Snapshot (module, type) for every existing datatype so we can verify
        # they survive the export/import round-trip.
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        inserted = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")[1]
        assert 1 == inserted, "Problems when inserting data"

        # create a value wrapper
        value_wrapper = self._create_value_wrapper()
        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)
        assert 2 == count_operations, "Invalid ops number before export!"

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path,
                                                     self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[
            0].name == "GeneratedProject", "The project name is not correct."
        assert result[
            0].description == "test_desc", "The project description is not correct."
        self.test_project = result[0]

        count_operations = dao.get_filtered_operations(self.test_project.id,
                                                       None,
                                                       is_count=True)

        # 1 op. - import cff; 2 op. - save the array wrapper;
        assert 2 == count_operations, "Invalid ops number after export and import !"
        # Every originally-present datatype must be restored with its exact module/type
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][
                0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][
                1], 'DataTypes not imported correctly'
        # check the value wrapper survived the round-trip with identical content
        new_val = self.flow_service.get_available_datatypes(
            self.test_project.id,
            "tvb.datatypes.mapped_values.ValueWrapper")[0]
        assert 1 == len(new_val), "One !=" + str(len(new_val))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
Exemple #32
0
class TestImportExportProjectWithLinksTest(_BaseLinksTest):
    """
    Tests that links between projects are correctly preserved, or materialized
    as real datatypes, when projects are exported and re-imported.
    """

    @pytest.fixture()
    def initialize_linked_projects(self, initialize_two_projects):
        """
        Adds to the _BaseLinksTest setup the following
        2 links from src to dest project
        Import/export services
        """

        dest_id = self.dest_project.id
        self.flow_service.create_link([self.red_datatype.id], dest_id)
        self.flow_service.create_link([self.blue_datatype.id], dest_id)
        self.export_mng = ExportManager()

    def test_export(self, initialize_linked_projects):
        # The export archive must contain a dedicated Operation.xml describing
        # the links to external projects.
        export_file = self.export_mng.export_project(self.dest_project)
        with TvbZip(export_file) as z:
            assert 'links-to-external-projects/Operation.xml' in z.namelist()

    def _export_and_remove(self, project):
        """export the project and remove it"""
        export_file = self.export_mng.export_project(project)
        self.project_service.remove_project(project.id)
        return export_file

    def _import(self, export_file, user_id):
        """ import a project zip for a user """
        # instantiated for every use because it is stateful
        import_service = ImportService()
        import_service.import_project_structure(export_file, user_id)
        return import_service.created_projects[0].id

    def test_links_recreated_on_import(self, initialize_linked_projects):
        # Source project still exists, so both datatypes come back as links
        export_file = self._export_and_remove(self.dest_project)
        imported_proj_id = self._import(export_file, self.dst_usr_id)
        assert 1 == self.red_datatypes_in(imported_proj_id)
        assert 1 == self.blue_datatypes_in(imported_proj_id)
        links = dao.get_linked_datatypes_in_project(imported_proj_id)
        assert 2 == len(links)

    def test_datatypes_recreated_on_import(self, initialize_linked_projects):
        export_file = self._export_and_remove(self.dest_project)
        self.project_service.remove_project(self.src_project.id)
        # both projects have been deleted
        # import should recreate links as datatypes
        imported_proj_id = self._import(export_file, self.dst_usr_id)
        assert 1 == self.red_datatypes_in(imported_proj_id)
        assert 1 == self.blue_datatypes_in(imported_proj_id)
        links = dao.get_linked_datatypes_in_project(imported_proj_id)
        assert 0 == len(links)

    def test_datatypes_and_links_recreated_on_import(
            self, initialize_linked_projects):
        export_file = self._export_and_remove(self.dest_project)
        # remove datatype 2 from source project
        self.project_service.remove_datatype(self.src_project.id,
                                             self.blue_datatype.gid)
        imported_proj_id = self._import(export_file, self.dst_usr_id)
        # both datatypes should be recreated
        assert 1 == self.red_datatypes_in(imported_proj_id)
        assert 1 == self.blue_datatypes_in(imported_proj_id)
        # only datatype 1 should be a link
        links = dao.get_linked_datatypes_in_project(imported_proj_id)
        assert 1 == len(links)
        assert self.red_datatype.gid == links[0].gid

    @pytest.fixture()
    def create_interlinked_projects(self):
        # Returned as a builder so tests can combine it with other fixtures
        # before triggering the (expensive) setup.
        def build():
            """
            Project src will have 3 datatypes, and a link to the VW from the dest project.
            Project dest will have the derived VW and links
            """
            # add a connectivity to src project and link it to dest project
            zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                    'connectivity', 'connectivity_96.zip')
            conn = TestFactory.import_zip_connectivity(self.dst_user,
                                                       self.src_project,
                                                       zip_path, "John")
            self.flow_service.create_link([conn.id], self.dest_project.id)

            # in dest derive a ValueWrapper from the linked conn
            vw_gid = TestFactory.create_value_wrapper(self.dst_user,
                                                      self.dest_project)[1]
            vw = dao.get_datatype_by_gid(vw_gid)
            # then link the value wrapper back into the src project
            self.flow_service.create_link([vw.id], self.src_project.id)

            assert 3 == len(dao.get_datatypes_in_project(self.src_project.id))
            assert 1 == len(
                dao.get_linked_datatypes_in_project(self.src_project.id))
            assert 1 == len(dao.get_datatypes_in_project(self.dest_project.id))
            assert 3 == len(
                dao.get_linked_datatypes_in_project(self.dest_project.id))

        return build

    def test_create_interlinked_projects(self, initialize_linked_projects,
                                         create_interlinked_projects):
        create_interlinked_projects()

    def test_linked_datatype_dependencies_restored_on_import(
            self, initialize_linked_projects, create_interlinked_projects):
        create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        assert 4 == len(dao.get_datatypes_in_project(self.dest_project.id))
        assert 0 == len(
            dao.get_linked_datatypes_in_project(self.dest_project.id))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing both projects should work
        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        assert 4 == len(dao.get_datatypes_in_project(imported_id_1))
        assert 0 == len(dao.get_linked_datatypes_in_project(imported_id_1))

        imported_id_2 = self._import(export_file_dest, self.dst_usr_id)
        assert 0 == len(dao.get_datatypes_in_project(imported_id_2))
        assert 4 == len(dao.get_linked_datatypes_in_project(imported_id_2))

    def test_linked_datatype_dependencies_restored_on_import_inverse_order(
            self, initialize_linked_projects, create_interlinked_projects):
        create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        assert 4 == len(dao.get_datatypes_in_project(self.dest_project.id))
        assert 0 == len(
            dao.get_linked_datatypes_in_project(self.dest_project.id))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing dest before src should work
        imported_id_2 = self._import(export_file_dest, self.dst_usr_id)
        assert 4 == len(dao.get_datatypes_in_project(imported_id_2))
        assert 0 == len(dao.get_linked_datatypes_in_project(imported_id_2))

        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        assert 0 == len(dao.get_datatypes_in_project(imported_id_1))
        assert 4 == len(dao.get_linked_datatypes_in_project(imported_id_1))
Exemple #33
0
 def transactional_setup_method(self):
     """
     Prepare the export manager, the datatype factory and the project used
     by the tests in this class.
     """
     self.export_manager = ExportManager()
     self.datatypeFactory = DatatypesFactory()
     self.project = self.datatypeFactory.get_project()
Exemple #34
0
    def get_datatype_details(self,
                             entity_gid,
                             back_page='null',
                             exclude_tabs=None):
        """
        Returns the HTML which contains the details for the given dataType.
        :param entity_gid: GID of the DataType whose details overlay is built
        :param back_page: if different from 'null' (the default) it will redirect to it after saving metedata changes
        :param exclude_tabs: optional list of overlay tab names to omit
        """
        if exclude_tabs is None:
            exclude_tabs = []
        selected_project = common.get_current_project()
        datatype_details, states, entity = self.project_service.get_datatype_details(
            entity_gid)

        # Load DataType categories; only valid entities can have algorithms launched
        current_type = datatype_details.data_type
        datatype_gid = datatype_details.gid
        categories, has_operations_warning = {}, False
        if not entity.invalid:
            categories, has_operations_warning = self.algorithm_service.get_launchable_algorithms(
                datatype_gid)

        is_group = False
        if datatype_details.operation_group_id is not None:
            is_group = True

        # Retrieve links
        linkable_projects_dict = self._get_linkable_projects_dict(entity.id)
        # Load all exporters
        exporters = {}
        if not entity.invalid:
            exporters = ExportManager().get_exporters_for_data(entity)
        is_relevant = entity.visible

        # Template context consumed by the details overlay page
        template_specification = {
            "entity_gid": entity_gid,
            "nodeFields": datatype_details.get_ui_fields(),
            "allStates": states,
            "project": selected_project,
            "categories": categories,
            "exporters": exporters,
            "datatype_id": entity.id,
            "isGroup": is_group,
            "isRelevant": is_relevant,
            "nodeType": 'datatype',
            "backPageIdentifier": back_page
        }
        template_specification.update(linkable_projects_dict)

        # CSS class / title of the overlay header; relevance toggles node styling
        overlay_class = "can-browse editor-node node-type-" + str(
            current_type).lower()
        if is_relevant:
            overlay_class += " node-relevant"
        else:
            overlay_class += " node_irrelevant"
        overlay_title = current_type
        if datatype_details.datatype_tag_1:
            overlay_title += " " + datatype_details.datatype_tag_1

        # Build overlay tabs in a fixed order; overlay_indexes must stay
        # aligned with the positions expected by the overlay template.
        tabs = []
        overlay_indexes = []
        if "Metadata" not in exclude_tabs:
            tabs.append(OverlayTabDefinition("Metadata", "metadata"))
            overlay_indexes.append(0)
        if "Analyzers" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Analyzers",
                                     "analyzers",
                                     enabled=categories
                                     and 'Analyze' in categories))
            overlay_indexes.append(1)
        if "Visualizers" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Visualizers",
                                     "visualizers",
                                     enabled=categories
                                     and 'View' in categories))
            overlay_indexes.append(2)

        # "Links" tab is enabled only for valid entities with at least one
        # project to link to, or one project already linked.
        # NOTE(review): PRROJECTS_* attribute names carry a pre-existing typo
        # declared elsewhere in this class; kept as-is.
        enable_link_tab = False
        if (not entity.invalid) and (linkable_projects_dict is not None):
            projects_for_link = linkable_projects_dict.get(
                self.PRROJECTS_FOR_LINK_KEY)
            if projects_for_link is not None and len(projects_for_link) > 0:
                enable_link_tab = True
            projects_linked = linkable_projects_dict.get(
                self.PRROJECTS_LINKED_KEY)
            if projects_linked is not None and len(projects_linked) > 0:
                enable_link_tab = True
        if "Links" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Links",
                                     "link_to",
                                     enabled=enable_link_tab))
            overlay_indexes.append(3)
        if "Export" not in exclude_tabs:
            tabs.append(
                OverlayTabDefinition("Export",
                                     "export",
                                     enabled=(exporters
                                              and len(exporters) > 0)))
            overlay_indexes.append(4)
        template_specification = self.fill_overlay_attributes(
            template_specification, "DataType Details", overlay_title,
            "project/details_datatype_overlay", overlay_class, tabs,
            overlay_indexes)
        template_specification = self.flow_controller.fill_default_attributes(
            template_specification)
        # Surface a warning when some launchable operations failed to load
        if has_operations_warning:
            template_specification[common.KEY_MESSAGE] = 'Not all operations could be loaded for this input DataType.' \
                                                         ' Contact the admin to check the logs!'
            template_specification[common.KEY_MESSAGE_TYPE] = "warningMessage"
        return template_specification
Exemple #35
0
    def test_import_export(self):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        The project is exported to ZIP, removed, re-imported, and the restored
        datatypes/operations are compared against the originals.
        """
        # Snapshot (module, type) for every existing datatype so we can verify
        # they survive the export/import round-trip.
        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # create an array mapped in DB
        data = {'param_1': 'some value'}
        OperationService().initiate_prelaunch(self.operation,
                                              self.adapter_instance, {},
                                              **data)
        inserted = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.arrays.MappedArray")
        self.assertEqual(len(inserted), 2, "Problems when inserting data")

        # create a value wrapper
        value_wrapper = self._create_value_wrapper()
        result = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(
            len(result), 2, "Should be two operations before export and not " +
            str(len(result)) + " !")
        # Export project as ZIP
        self.zip_path = ExportManager().export_project(self.test_project)
        self.assertTrue(self.zip_path is not None, "Exported file is none")

        # Now remove the original project
        self.project_service.remove_project(self.test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            self.test_user.id)
        self.assertEqual(0, len(result), "Project Not removed!")
        self.assertEqual(0, lng_, "Project Not removed!")

        # Now try to import again project
        self.import_service.import_project_structure(self.zip_path,
                                                     self.test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            self.test_user.id)[0]
        self.assertEqual(len(result), 1, "There should be only one project.")
        self.assertEqual(result[0].name, "GeneratedProject",
                         "The project name is not correct.")
        self.assertEqual(result[0].description, "test_desc",
                         "The project description is not correct.")
        self.test_project = result[0]

        result = dao.get_filtered_operations(self.test_project.id, None)

        # 1 op. - import project; 1 op. - save the array wrapper
        self.assertEqual(
            len(result), 2, "Should be two operations after export and not " +
            str(len(result)) + " !")
        # Every originally-present datatype must be restored with its exact module/type
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            self.assertEqual(datatype.module, expected_results[gid][0],
                             'DataTypes not imported correctly')
            self.assertEqual(datatype.type, expected_results[gid][1],
                             'DataTypes not imported correctly')
        # check the value wrapper survived the round-trip with identical content
        new_val = self.flow_service.get_available_datatypes(
            self.test_project.id, "tvb.datatypes.mapped_values.ValueWrapper")
        self.assertEqual(len(new_val), 1, "One !=" + str(len(new_val)))
        new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
        self.assertEqual(value_wrapper.data_value, new_val.data_value,
                         "Data value incorrect")
        self.assertEqual(value_wrapper.data_type, new_val.data_type,
                         "Data type incorrect")
        self.assertEqual(value_wrapper.data_name, new_val.data_name,
                         "Data name incorrect")
class ExportersTest(TransactionalTestCase):
    """
    Test export functionality.
    """
    TVB_EXPORTER = "TVBExporter"
    CIFTI_EXPORTER = "CIFTIExporter"

    def setUp(self):
        """Prepare an export manager and a test project to export from."""
        self.export_manager = ExportManager()
        self.datatypeFactory = DatatypesFactory()
        self.project = self.datatypeFactory.get_project()


    def tearDown(self):
        """
        Clean-up tests data: project folder structure and the EXPORT folder.
        """
        project = self.datatypeFactory.get_project()
        FilesHelper().remove_project_structure(project.name)

        # Remove EXPORT folder
        export_folder = os.path.join(TvbProfile.current.TVB_STORAGE, ExportManager.EXPORT_FOLDER_NAME)
        if os.path.exists(export_folder):
            shutil.rmtree(export_folder)


    def test_get_exporters_for_data(self):
        """
        Test retrieval of exporters that can be used for a given data.
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        exporters = self.export_manager.get_exporters_for_data(datatype)

        # Only TVB export can export any type of data type
        self.assertEqual(1, len(exporters), "Incorrect number of exporters.")


    def test_get_exporters_for_data_with_no_data(self):
        """
        Test retrieval of exporters when data == None.
        """
        self.assertRaises(InvalidExportDataException, self.export_manager.get_exporters_for_data, None)


    def test_tvb_export_of_simple_datatype(self):
        """
        Test export of a data type which has no data stored on file system
        """
        datatype = self.datatypeFactory.create_simple_datatype()
        file_name, file_path, _ = self.export_manager.export_data(datatype, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path), "Could not find export file: %s on disk." % file_path)


    def test_tvb_export_of_datatype_with_storage(self):
        """
        Test export of a data type which does have data stored on file system.
        """
        datatype = self.datatypeFactory.create_datatype_with_storage()
        file_name, file_path, _ = self.export_manager.export_data(datatype, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path), "Could not find export file: %s on disk." % file_path)


    def test_tvb_export_for_datatype_group(self):
        """
        This method checks export of a data type group
        """
        datatype_group = self.datatypeFactory.create_datatype_group()
        file_name, file_path, _ = self.export_manager.export_data(datatype_group, self.TVB_EXPORTER, self.project)

        self.assertTrue(file_name is not None, "Export process should return a file name")
        self.assertTrue(file_path is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(file_path), "Could not find export file: %s on disk." % file_path)

        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(file_path), "Generated file is not a valid ZIP file")

        with closing(zipfile.ZipFile(file_path)) as zip_file:
            list_of_files = zip_file.namelist()

            count_datatypes = dao.count_datatypes_in_group(datatype_group.id)

            # Check if ZIP files contains files for data types + operation
            self.assertEqual(count_datatypes * 2, len(list_of_files),
                             "Should have 2 x nr datatypes files, one for operations one for datatypes")


    def test_export_with_invalid_data(self):
        """
        Test scenarios when data provided to export method is invalid
        """
        # Test with no datatype
        self.assertRaises(InvalidExportDataException, self.export_manager.export_data,
                          None, self.TVB_EXPORTER, self.project)

        # Test with no exporter
        datatype = self.datatypeFactory.create_datatype_with_storage()
        self.assertRaises(ExportException, self.export_manager.export_data, datatype, None, self.project)

        # test with wrong exporter
        self.assertRaises(ExportException, self.export_manager.export_data, datatype, "wrong_exporter", self.project)

        # test with no project folder
        self.assertRaises(ExportException, self.export_manager.export_data, datatype, self.TVB_EXPORTER, None)


    def test_export_project_failure(self):
        """
        This method tests export of project with None data
        """
        self.assertRaises(ExportException, self.export_manager.export_project, None)


    def test_export_project(self):
        """
        Test export of a project.
        Renamed from the typo 'tet_export_project': without the 'test' prefix
        the test runner never collected/ran this method.
        """
        project = self.datatypeFactory.get_project()
        export_file = self.export_manager.export_project(project)

        self.assertTrue(export_file is not None, "Export process should return path to export file")
        self.assertTrue(os.path.exists(export_file), "Could not find export file: %s on disk." % export_file)
        # Now check if the generated file is a correct ZIP file
        self.assertTrue(zipfile.is_zipfile(export_file), "Generated file is not a valid ZIP file")
# Example #37
class ImportExportProjectWithLinksTest(_BaseLinksTest):
    """
    Export/import round-trip tests for projects containing links to
    datatypes owned by another project.
    """

    def setUp(self):
        """
        Adds to the _BaseLinksTest setup the following
        2 links from src to dest project
        Import/export services
        """
        self.clean_database(delete_folders=True)
        self._initialize_two_projects()

        # link both datatypes from the src project into the dest project
        dest_id = self.dest_project.id
        self.flow_service.create_link([self.red_datatype.id], dest_id)
        self.flow_service.create_link([self.blue_datatype.id], dest_id)
        self.export_mng = ExportManager()


    def test_export(self):
        """An exported project with links must contain the external-links descriptor."""
        export_file = self.export_mng.export_project(self.dest_project)
        with TvbZip(export_file) as z:
            # assertIn gives a clearer failure message than assertTrue(x in y)
            self.assertIn('links-to-external-projects/Operation.xml', z.namelist())


    def _export_and_remove(self, project):
        """export the project and remove it"""
        export_file = self.export_mng.export_project(project)
        self.project_service.remove_project(project.id)
        return export_file


    def _import(self, export_file, user_id):
        """ import a project zip for a user """
        # instantiated for every use because it is stateful
        import_service = ImportService()
        import_service.import_project_structure(export_file, user_id)
        return import_service.created_projects[0].id


    def test_links_recreated_on_import(self):
        """When linked-to datatypes still exist, import recreates them as links."""
        export_file = self._export_and_remove(self.dest_project)
        imported_proj_id = self._import(export_file, self.dest_usr_id)
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        links = dao.get_linked_datatypes_in_project(imported_proj_id)
        self.assertEqual(2, len(links))


    def test_datatypes_recreated_on_import(self):
        """When the source project is gone, links are imported as real datatypes."""
        export_file = self._export_and_remove(self.dest_project)
        self.project_service.remove_project(self.src_project.id)
        # both projects have been deleted
        # import should recreate links as datatypes
        imported_proj_id = self._import(export_file, self.dest_usr_id)
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        links = dao.get_linked_datatypes_in_project(imported_proj_id)
        self.assertEqual(0, len(links))


    def test_datatypes_and_links_recreated_on_import(self):
        """Mixed case: one link target removed, the other still present."""
        export_file = self._export_and_remove(self.dest_project)
        # remove datatype 2 from source project
        self.project_service.remove_datatype(self.src_project.id, self.blue_datatype.gid)
        imported_proj_id = self._import(export_file, self.dest_usr_id)
        # both datatypes should be recreated
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        # only datatype 1 should be a link
        links = dao.get_linked_datatypes_in_project(imported_proj_id)
        self.assertEqual(1, len(links))
        # assertEquals is a deprecated alias; use assertEqual
        self.assertEqual(self.red_datatype.gid, links[0].gid)


    def _create_interlinked_projects(self):
        """
        Extend the two projects created in setup.
        Project src will have 3 datatypes, one a connectivity, and a link to the time series from the dest project.
        Project dest will have 3 links to the datatypes in src and a time series derived from the linked connectivity
        """
        # add a connectivity to src project and link it to dest project
        _, conn = self.datatype_factory_src.create_connectivity()
        self.flow_service.create_link([conn.id], self.dest_project.id)
        # in dest derive a time series from the linked conn
        ts = self.datatype_factory_dest.create_timeseries(conn)
        # then link the time series in the src project
        self.flow_service.create_link([ts.id], self.src_project.id)

        self.assertEqual(3, len(dao.get_datatypes_in_project(self.src_project.id)))
        self.assertEqual(1, len(dao.get_linked_datatypes_in_project(self.src_project.id)))
        self.assertEqual(1, len(dao.get_datatypes_in_project(self.dest_project.id)))
        self.assertEqual(3, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))


    def test_create_interlinked_projects(self):
        """Sanity check: the interlinked fixture itself must build correctly."""
        self._create_interlinked_projects()


    def test_linked_datatype_dependencies_restored_on_import(self):
        """Importing src then dest restores all datatypes and links."""
        self._create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        self.assertEqual(4, len(dao.get_datatypes_in_project(self.dest_project.id)))
        self.assertEqual(0, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing both projects should work
        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        self.assertEqual(4, len(dao.get_datatypes_in_project(imported_id_1)))
        self.assertEqual(0, len(dao.get_linked_datatypes_in_project(imported_id_1)))

        imported_id_2 = self._import(export_file_dest, self.dest_usr_id)
        self.assertEqual(0, len(dao.get_datatypes_in_project(imported_id_2)))
        self.assertEqual(4, len(dao.get_linked_datatypes_in_project(imported_id_2)))


    def test_linked_datatype_dependencies_restored_on_import_inverse_order(self):
        """Importing dest before src must also restore all dependencies."""
        self._create_interlinked_projects()
        # export both then remove them
        export_file_src = self._export_and_remove(self.src_project)
        self.assertEqual(4, len(dao.get_datatypes_in_project(self.dest_project.id)))
        self.assertEqual(0, len(dao.get_linked_datatypes_in_project(self.dest_project.id)))
        export_file_dest = self._export_and_remove(self.dest_project)

        # importing dest before src should work
        imported_id_2 = self._import(export_file_dest, self.dest_usr_id)
        self.assertEqual(4, len(dao.get_datatypes_in_project(imported_id_2)))
        self.assertEqual(0, len(dao.get_linked_datatypes_in_project(imported_id_2)))

        imported_id_1 = self._import(export_file_src, self.src_usr_id)
        self.assertEqual(0, len(dao.get_datatypes_in_project(imported_id_1)))
        self.assertEqual(4, len(dao.get_linked_datatypes_in_project(imported_id_1)))
 def setUp(self):
     """
     Prepare an export manager, a datatype factory and a test project.
     NOTE(review): this looks like a duplicated fragment of ExportersTest.setUp
     with non-standard (1-space) indentation — likely a paste artifact; verify
     which class it belongs to.
     """
     self.export_manager = ExportManager()
     self.datatypeFactory = DatatypesFactory()
     self.project = self.datatypeFactory.get_project()
# Example #39
class ImportExportProjectWithLinksTest(_BaseLinksTest):
    def setUpTVB(self):
        """
        Adds to the _BaseLinksTest setup the following
        2 links from src to dest project
        Import/export services
        """
        # run the base fixture first: it creates the two projects and datatypes
        _BaseLinksTest.setUpTVB(self)
        dest_id = self.dest_project.id
        # link both datatypes from the source project into the destination project
        self.flow_service.create_link([self.red_datatype.id], dest_id)
        self.flow_service.create_link([self.blue_datatype.id], dest_id)
        # services used by the export/import round-trip tests below
        self.export_mng = ExportManager()
        self.import_service = ImportService()

    def test_export(self):
        """An exported project with links must contain the external-links descriptor."""
        export_file = self.export_mng.export_project(self.dest_project)
        with TvbZip(export_file) as z:
            # assertIn gives a clearer failure message than assertTrue(x in y)
            self.assertIn('links-to-external-projects/Operation.xml', z.namelist())

    def _export_and_remove_dest(self):
        """Export the destination project, delete it, and return the zip path."""
        # cache the id before removal, since the project row is deleted below
        project = self.dest_project
        project_id = project.id
        zip_path = self.export_mng.export_project(project)
        self.project_service.remove_project(project_id)
        return zip_path

    def _import_dest(self, export_file):
        """Import *export_file* for the test user and return the new project id."""
        self.import_service.import_project_structure(export_file, self.user.id)
        created = self.import_service.created_projects
        return created[0].id

    def test_links_recreated_on_import(self):
        """
        When the linked-to datatypes still exist in the source project,
        importing the destination project should recreate both links.
        """
        export_file = self._export_and_remove_dest()
        imported_proj_id = self._import_dest(export_file)
        # both datatypes are visible in the imported project...
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        # ...and both are links, not copies
        links = dao.get_linked_datatypes_for_project(imported_proj_id)
        self.assertEqual(2, len(links))

    def test_datatypes_recreated_on_import(self):
        """
        When the source project has been deleted as well, importing the
        destination project must materialize the links as real datatypes.
        """
        export_file = self._export_and_remove_dest()
        self.project_service.remove_project(self.src_project.id)
        # both projects have been deleted
        # import should recreate links as datatypes
        imported_proj_id = self._import_dest(export_file)
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        # no links should remain — everything is owned by the imported project
        links = dao.get_linked_datatypes_for_project(imported_proj_id)
        self.assertEqual(0, len(links))

    def test_datatypes_and_links_recreated_on_import(self):
        """
        Mixed case: one link target removed from the source project, one kept.
        The removed one must be imported as a real datatype, the other as a link.
        """
        export_file = self._export_and_remove_dest()
        # remove datatype 2 from source project
        self.project_service.remove_datatype(self.src_project.id, self.blue_datatype.gid)
        imported_proj_id = self._import_dest(export_file)
        # both datatypes should be recreated
        self.assertEqual(1, self.red_datatypes_in(imported_proj_id))
        self.assertEqual(1, self.blue_datatypes_in(imported_proj_id))
        # only datatype 1 should be a link
        links = dao.get_linked_datatypes_for_project(imported_proj_id)
        self.assertEqual(1, len(links))
        # assertEquals is a deprecated alias of assertEqual
        self.assertEqual(self.red_datatype.gid, links[0].gid)