Example #1
class TestDatatypeResource(RestResourceTest):
    def transactional_setup_method(self):
        self.test_user = TestFactory.create_user('Rest_User')
        self.test_project = TestFactory.create_project(
            self.test_user, 'Rest_Project', users=[self.test_user.id])
        self.retrieve_resource = RetrieveDatatypeResource()
        self.get_operations_resource = GetOperationsForDatatypeResource()
        self.get_data_in_project_resource = GetDataInProjectResource()

    def test_server_retrieve_datatype_inexistent_gid(self, mocker):
        self._mock_user(mocker)
        datatype_gid = "inexistent-gid"
        with pytest.raises(InvalidIdentifierException):
            self.retrieve_resource.get(datatype_gid=datatype_gid)

    def test_server_retrieve_datatype(self, mocker):
        self._mock_user(mocker)
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)

        datatypes_in_project = self.get_data_in_project_resource.get(
            project_gid=self.test_project.gid)
        assert type(datatypes_in_project) is list
        assert len(datatypes_in_project) == 1
        assert datatypes_in_project[0].type == ConnectivityIndex().display_type

        def send_file_dummy(path, as_attachment, attachment_filename):
            return (path, as_attachment, attachment_filename)

        # Mock flask.send_file to behave like send_file_dummy
        mocker.patch('flask.send_file', send_file_dummy)
        result = self.retrieve_resource.get(
            datatype_gid=datatypes_in_project[0].gid)

        assert type(result) is tuple
        assert result[1] is True
        assert os.path.basename(result[0]) == os.path.basename(result[2])

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_server_get_operations_for_datatype(self, mocker):
        self._mock_user(mocker)
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_96.zip')
        TestFactory.import_zip_connectivity(self.test_user, self.test_project,
                                            zip_path)

        datatypes_in_project = self.get_data_in_project_resource.get(
            project_gid=self.test_project.gid)
        assert type(datatypes_in_project) is list
        assert len(datatypes_in_project) == 1
        assert datatypes_in_project[0].type == ConnectivityIndex().display_type

        result = self.get_operations_resource.get(
            datatype_gid=datatypes_in_project[0].gid)
        assert type(result) is list
        assert len(result) > 3
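
The test above swaps flask.send_file for a stub so it can assert on the call arguments instead of streaming a real file. A self-contained sketch of that pattern, assuming only pytest-mock (the mocker fixture) and a pre-2.0 Flask whose send_file still accepts attachment_filename:

import flask


def download(path):
    # Code under test: would normally stream the file back to the client.
    return flask.send_file(path, as_attachment=True,
                           attachment_filename='connectivity.zip')


def test_download_returns_attachment(mocker):
    # Replace flask.send_file with a stub that echoes its arguments,
    # so the test can inspect them without doing any real I/O.
    def send_file_dummy(path, as_attachment, attachment_filename):
        return path, as_attachment, attachment_filename

    mocker.patch('flask.send_file', send_file_dummy)
    result = download('/tmp/connectivity.zip')
    assert result == ('/tmp/connectivity.zip', True, 'connectivity.zip')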
Example #2
class TestBCT(TransactionalTestCase):
    """
    Test that all BCT analyzers are executed without error.
    We do not verify that the algorithms are correct, because that is outside the scope of the TVB framework.
    """
    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def transactional_setup_method(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a list of BCT adapters;
        imports a CFF data-set
        """
        self.test_user = TestFactory.create_user("BCT_User")
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "BCT-Project")
        # Make sure Connectivity is in DB
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_76.zip')
        self.connectivity = TestFactory.import_zip_connectivity(
            self.test_user, self.test_project, zip_path)

        algorithms = dao.get_generic_entity(Algorithm,
                                            'Brain Connectivity Toolbox',
                                            'group_description')
        assert algorithms is not None
        assert len(algorithms) > 5

        self.bct_adapters = []
        for algo in algorithms:
            self.bct_adapters.append(ABCAdapter.build_adapter(algo))

    def transactional_teardown_method(self):
        """
        Cleans the database after the tests
        """
        self.clean_database(True)

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_bct_all(self):
        """
        Iterate all BCT algorithms and execute them.
        """

        view_model = BaseBCTModel()
        view_model.connectivity = self.connectivity.gid
        algo_category = dao.get_category_by_id(
            self.bct_adapters[0].stored_adapter.fk_category)

        for adapter_instance in self.bct_adapters:
            results = TestFactory.launch_synchronously(self.test_user.id,
                                                       self.test_project,
                                                       adapter_instance,
                                                       view_model)
            assert len(results) > 0

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_bct_descriptions(self):
        """
        Iterate all BCT algorithms and check that description has been extracted from *.m files.
        """
        for adapter_instance in self.bct_adapters:
            assert len(adapter_instance.stored_adapter.description) > 10, "Description was not loaded properly for " \
                                                                          "algorithm %s" % (str(adapter_instance))
Example #3
class TestProjectStructure(TransactionalTestCase):
    """
    Test ProjectService methods (part related to Project Data Structure).
    """
    def transactional_setup_method(self):
        """
        Prepare before each test.
        """
        self.project_service = ProjectService()

        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user,
                                                       "ProjectStructure")

        self.relevant_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.RELEVANT_VIEW)
        self.full_filter = StaticFiltersFactory.build_datatype_filters(
            single_filter=StaticFiltersFactory.FULL_VIEW)

    def transactional_teardown_method(self):
        """
        Clear project folders after testing
        """
        self.delete_project_folders()

    def test_set_operation_visibility(self):
        """
        Check if the visibility for an operation is set correctly.
        """
        self.__init_algorithm()
        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op1 = dao.store_entity(op1)
        assert op1.visible, "The operation should be visible."
        self.project_service.set_operation_and_group_visibility(op1.gid, False)
        updated_op = dao.get_operation_by_id(op1.id)
        assert not updated_op.visible, "The operation should not be visible."

    def test_set_op_and_group_visibility(self, datatype_group_factory):
        """
        When changing the visibility for an operation that belongs to an operation group, we
        should also change the visibility for the entire group of operations.
        """
        group = datatype_group_factory()
        list_of_operations = dao.get_operations_in_group(group.id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        self.project_service.set_operation_and_group_visibility(
            list_of_operations[0].gid, False)
        operations = dao.get_operations_in_group(group.id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."

    def test_set_op_group_visibility(self, datatype_group_factory):
        """
        Tests if the visibility for an operation group is set correctly.
        """
        group = datatype_group_factory()
        list_of_operations = dao.get_operations_in_group(group.id)
        for operation in list_of_operations:
            assert operation.visible, "The operation should be visible."
        op_group = dao.get_operationgroup_by_id(group.id)
        self.project_service.set_operation_and_group_visibility(
            op_group.gid, False, True)
        operations = dao.get_operations_in_group(group.id)
        for operation in operations:
            assert not operation.visible, "The operation should not be visible."

    def test_is_upload_operation(self):
        """
        Tests that upload and non-upload algorithms are created and run accordingly
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()
        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op2 = Operation(None, self.test_user.id, self.test_project.id,
                        upload_algo.id)
        operations = dao.store_entities([op1, op2])
        is_upload_operation = self.project_service.is_upload_operation(
            operations[0].gid)
        assert not is_upload_operation, "The operation should not be an upload operation."
        is_upload_operation = self.project_service.is_upload_operation(
            operations[1].gid)
        assert is_upload_operation, "The operation should be an upload operation."

    def test_get_upload_operations(self):
        """
        Test get_all when filter is for Upload category.
        """
        self.__init_algorithm()
        upload_algo = self._create_algo_for_upload()

        project = Project("test_proj_2", self.test_user.id, "desc")
        project = dao.store_entity(project)

        op1 = Operation(None, self.test_user.id, self.test_project.id,
                        self.algo_inst.id)
        op2 = Operation(None,
                        self.test_user.id,
                        project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        op3 = Operation(None, self.test_user.id, self.test_project.id,
                        upload_algo.id)
        op4 = Operation(None,
                        self.test_user.id,
                        self.test_project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        op5 = Operation(None,
                        self.test_user.id,
                        self.test_project.id,
                        upload_algo.id,
                        status=STATUS_FINISHED)
        operations = dao.store_entities([op1, op2, op3, op4, op5])

        upload_operations = self.project_service.get_all_operations_for_uploaders(
            self.test_project.id)
        assert 2 == len(
            upload_operations), "Wrong number of upload operations."
        upload_ids = [operation.id for operation in upload_operations]
        for i in [3, 4]:
            assert operations[i].id in upload_ids, \
                "The operation should be an upload operation."
        for i in [0, 1, 2]:
            assert not operations[i].id in upload_ids, \
                "The operation should not be an upload operation."

    def test_is_datatype_group(self, datatype_group_factory):
        """
        Tests whether a datatype is a datatype group.
        """
        group = datatype_group_factory()
        dt_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
        is_dt_group = self.project_service.is_datatype_group(dt_group.gid)
        assert is_dt_group, "The datatype should be a datatype group."
        datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)
        is_dt_group = self.project_service.is_datatype_group(datatypes[0].gid)
        assert not is_dt_group, "The datatype should not be a datatype group."

    def test_count_datatypes_in_group(self, datatype_group_factory):
        """ Test that counting dataTypes is correct. Happy flow."""
        group = datatype_group_factory()
        count = dao.count_datatypes_in_group(group.id)
        assert count == group.count_results
        assert count == 6
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        count = dao.count_datatypes_in_group(datatypes[0].id)
        assert count == 0, "There should be no dataType."

    def test_set_datatype_visibility(self, dummy_datatype_index_factory):
        """
        Check if the visibility for a datatype is set correct.
        """
        # it's a list of 3 elem.
        dummy_dt_index = dummy_datatype_index_factory()
        is_visible = dummy_dt_index.visible
        assert is_visible, "The data type should be visible."

        self.project_service.set_datatype_visibility(dummy_dt_index.gid, False)
        is_visible = dao.get_datatype_by_id(dummy_dt_index.id).visible
        assert not is_visible, "The data type should not be visible."

    def test_set_visibility_for_dt_in_group(self, datatype_group_factory):
        """
        Check if the visibility for a datatype from a datatype group is set correctly.
        """
        group = datatype_group_factory()
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        assert datatypes[0].visible, "The data type should be visible."
        assert datatypes[1].visible, "The data type should be visible."
        self.project_service.set_datatype_visibility(datatypes[0].gid, False)

        db_dt_group = self.project_service.get_datatype_by_id(group.id)
        db_first_dt = self.project_service.get_datatype_by_id(datatypes[0].id)
        db_second_dt = self.project_service.get_datatype_by_id(datatypes[1].id)

        assert not db_dt_group.visible, "The data type group should not be visible."
        assert not db_first_dt.visible, "The data type should not be visible."
        assert not db_second_dt.visible, "The data type should not be visible."

    def test_set_visibility_for_group(self, datatype_group_factory):
        """
        Check if the visibility for a datatype group is set correctly.
        """
        group = datatype_group_factory()
        dt_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
        datatypes = dao.get_datatypes_from_datatype_group(dt_group.id)

        assert dt_group.visible, "The data type group should be visible."
        assert datatypes[0].visible, "The data type should be visible."
        assert datatypes[1].visible, "The data type should be visible."
        self.project_service.set_datatype_visibility(dt_group.gid, False)

        updated_dt_group = self.project_service.get_datatype_by_id(dt_group.id)
        updated_first_dt = self.project_service.get_datatype_by_id(
            datatypes[0].id)
        updated_second_dt = self.project_service.get_datatype_by_id(
            datatypes[1].id)

        assert not updated_dt_group.visible, "The data type group should not be visible."
        assert not updated_first_dt.visible, "The data type should not be visible."
        assert not updated_second_dt.visible, "The data type should not be visible."

    def test_getdatatypes_from_dtgroup(self, datatype_group_factory):
        """
        Validate that we can retrieve all DTs from a DT_Group
        """
        group = datatype_group_factory()
        exp_datatypes = dao.get_datatypes_from_datatype_group(group.id)
        datatypes = self.project_service.get_datatypes_from_datatype_group(
            group.id)
        assert len(datatypes) == group.count_results, \
            "The number of datatypes should match the group's count_results."
        expected_dict = {
            exp_datatypes[0].id: exp_datatypes[0],
            exp_datatypes[1].id: exp_datatypes[1]
        }
        actual_dict = {
            datatypes[0].id: datatypes[0],
            datatypes[1].id: datatypes[1]
        }

        for key in expected_dict:
            expected = expected_dict[key]
            actual = actual_dict[key]
            assert expected.id == actual.id, "Not the same id."
            assert expected.gid == actual.gid, "Not the same gid."
            assert expected.type == actual.type, "Not the same type."
            assert expected.subject == actual.subject, "Not the same subject."
            assert expected.state == actual.state, "Not the same state."
            assert expected.visible == actual.visible, "The datatype visibility is not correct."
            assert expected.module == actual.module, "Not the same module."
            assert expected.user_tag_1 == actual.user_tag_1, "Not the same user_tag_1."
            assert expected.invalid == actual.invalid, "The invalid field value is not correct."
            assert expected.is_nan == actual.is_nan, "The is_nan field value is not correct."

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_get_inputs_for_operation(self):
        """
        Tests method get_datatype_and_datatypegroup_inputs_for_operation.
        Verifies filters' influence over results is as expected
        """
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        conn = TestFactory.import_zip_connectivity(self.test_user,
                                                   self.test_project, zip_path)
        view_model = BaseBCTModel()
        view_model.connectivity = conn.gid
        adapter = ABCAdapter.build_adapter_from_class(
            TransitivityBinaryDirected)
        result = OperationService().fire_operation(adapter,
                                                   self.test_user,
                                                   self.test_project.id,
                                                   view_model=view_model)

        conn.visible = False
        dao.store_entity(conn)
        operation = dao.get_operation_by_id(result[0].id)

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.relevant_filter)
        assert len(inputs) == 0

        inputs = self.project_service.get_datatype_and_datatypegroup_inputs_for_operation(
            operation.gid, self.full_filter)
        assert len(inputs) == 1, "Incorrect number of inputs."
        assert conn.id == inputs[0].id, "Retrieved wrong input dataType."

    def test_remove_datatype(self, array_factory):
        """
        Tests the deletion of a datatype.
        """
        # array_factory returns a list of 3 datatype wrappers
        array_wrappers = array_factory(self.test_project)
        dt_list = []
        for array_wrapper in array_wrappers:
            dt_list.append(dao.get_datatype_by_id(array_wrapper[0]))

        self.project_service.remove_datatype(self.test_project.id,
                                             dt_list[0].gid)
        self._check_if_datatype_was_removed(dt_list[0])

    def test_remove_datatype_from_group(self, datatype_group_factory,
                                        project_factory, user_factory):
        """
        Tests the deletion of a datatype group.
        """
        user = user_factory()
        project = project_factory(user)
        group = datatype_group_factory(project=project)

        datatype_group = dao.get_generic_entity(DataTypeGroup, group.id)[0]
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        datatype_measure = dao.get_generic_entity(DatatypeMeasureIndex,
                                                  datatypes[0].gid,
                                                  "fk_source_gid")[0]

        # When trying to delete one entity in a group the entire group will be removed
        #  First remove the DTMeasures, to avoid FK failures
        self.project_service.remove_datatype(project.id, datatype_measure.gid)
        self.project_service.remove_datatype(project.id, datatypes[0].gid)
        self._check_if_datatype_was_removed(datatypes[0])
        self._check_if_datatype_was_removed(datatypes[1])
        self._check_if_datatype_was_removed(datatype_group)
        self._check_if_datatype_was_removed(datatype_measure)
        self._check_datatype_group_removed(group.id,
                                           datatype_group.fk_operation_group)

    def test_remove_datatype_group(self, datatype_group_factory,
                                   project_factory, user_factory):
        """
        Tests the deletion of a datatype group.
        """
        user = user_factory()
        project = project_factory(user)
        group = datatype_group_factory(project=project)

        datatype_groups = self.get_all_entities(DataTypeGroup)
        datatypes = dao.get_datatypes_from_datatype_group(group.id)
        assert 2 == len(datatype_groups)

        self.project_service.remove_datatype(project.id,
                                             datatype_groups[1].gid)
        self.project_service.remove_datatype(project.id,
                                             datatype_groups[0].gid)
        self._check_if_datatype_was_removed(datatypes[0])
        self._check_if_datatype_was_removed(datatypes[1])
        self._check_if_datatype_was_removed(datatype_groups[0])
        self._check_if_datatype_was_removed(datatype_groups[1])
        self._check_datatype_group_removed(
            group.id, datatype_groups[0].fk_operation_group)

    @pytest.fixture()
    def array_factory(self, operation_factory, connectivity_index_factory):
        def _create_measure(conn, op, op_dir, project_id):
            conn_measure = ConnectivityMeasure()
            conn_measure.connectivity = h5.load_from_index(conn)
            conn_measure.array_data = numpy.array(conn.number_of_regions)

            conn_measure_db = h5.store_complete(conn_measure, op_dir)
            conn_measure_db.fk_from_operation = op.id
            dao.store_entity(conn_measure_db)

            count = dao.count_datatypes(project_id, DataTypeMatrix)
            return count

        def build(project):
            count = dao.count_datatypes(project.id, DataTypeMatrix)
            assert count == 0

            op = operation_factory(test_project=project)
            conn = connectivity_index_factory(op=op)
            storage_path = FilesHelper().get_project_folder(
                op.project, str(op.id))

            count = _create_measure(conn, op, storage_path, project.id)
            assert count == 1

            count = _create_measure(conn, op, storage_path, project.id)
            assert count == 2

            count = _create_measure(conn, op, storage_path, project.id)
            assert count == 3

            return get_filtered_datatypes(project.id, DataTypeMatrix)[0]

        return build

    def _check_if_datatype_was_removed(self, datatype):
        """
        Check that a datatype and its parent operation can no longer be retrieved.
        """
        # The lookup may either raise or return None once the entity is gone
        removed_datatype = None
        try:
            removed_datatype = dao.get_datatype_by_id(datatype.id)
        except Exception:
            pass
        assert removed_datatype is None, "The datatype was not deleted."

        removed_operation = None
        try:
            removed_operation = dao.get_operation_by_id(datatype.fk_from_operation)
        except Exception:
            pass
        assert removed_operation is None, "The operation was not deleted."

    def _check_datatype_group_removed(self, datatype_group_id,
                                      operation_group_id):
        """
        Checks that the DataTypeGroup and the OperationGroup were removed.
        """
        # get_generic_entity returns a list of matches, so expect it to be empty
        assert len(dao.get_generic_entity(DataTypeGroup, datatype_group_id)) == 0, \
            "The DataTypeGroup entity was not removed."

        removed_group = None
        try:
            removed_group = dao.get_operationgroup_by_id(operation_group_id)
        except Exception:
            pass
        assert removed_group is None, "The OperationGroup entity was not removed."

    def __init_algorithm(self):
        """
        Insert some starting data in the database.
        """
        categ1 = AlgorithmCategory('one', True)
        self.categ1 = dao.store_entity(categ1)
        ad = Algorithm(TEST_ADAPTER_VALID_MODULE, TEST_ADAPTER_VALID_CLASS,
                       self.categ1.id)
        self.algo_inst = dao.store_entity(ad)

    @staticmethod
    def _create_algo_for_upload():
        """ Creates a fake algorithm for an upload category. """
        category = dao.store_entity(
            AlgorithmCategory("upload_category", rawinput=True))
        return dao.store_entity(Algorithm("module", "classname", category.id))
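
TestProjectStructure above leans on "factory" fixtures (array_factory, datatype_group_factory, dummy_datatype_index_factory) that return a build function rather than a finished object, so each test creates exactly the data it needs. A minimal sketch of that pytest pattern, with illustrative names:

import pytest


@pytest.fixture()
def record_factory():
    created = []

    def build(name):
        # Each call creates one independent record; a test may call this many times.
        record = {'name': name}
        created.append(record)
        return record

    yield build
    # Teardown runs after the test and can clean up everything the factory built.
    created.clear()


def test_uses_factory(record_factory):
    first = record_factory('a')
    second = record_factory('b')
    assert first is not second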
Example #4
class TestImportService(BaseTestCase):
    """
    This class contains tests for the tvb.core.services.import_service module.
    """
    def setup_method(self):
        """
        Prepare the services used by each test.
        """
        self.import_service = ImportService()
        self.project_service = ProjectService()
        self.zip_path = None

    def teardown_method(self):
        """
        Clean up temporary folders and exported files when the test is done.
        """
        # Delete TEMP folder
        if os.path.exists(TvbProfile.current.TVB_TEMP_FOLDER):
            shutil.rmtree(TvbProfile.current.TVB_TEMP_FOLDER)

        # Delete folder where data was exported
        if self.zip_path and os.path.exists(self.zip_path):
            shutil.rmtree(os.path.split(self.zip_path)[0])

        self.delete_project_folders()

    @pytest.mark.skipif(no_matlab(), reason="Matlab or Octave not installed!")
    def test_import_export(self, user_factory, project_factory,
                           value_wrapper_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains a Connectivity and a ValueWrapper data type.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport",
                                       "test_desc")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)
        value_wrapper = value_wrapper_factory(test_user, test_project)
        ProjectService.set_datatype_visibility(value_wrapper.gid, False)

        result = self.get_all_datatypes()
        expected_results = {}
        for one_data in result:
            expected_results[one_data.gid] = (one_data.module, one_data.type)

        # Export project as ZIP
        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        # Remove the original project
        self.project_service.remove_project(test_project.id)
        result, lng_ = self.project_service.retrieve_projects_for_user(
            test_user.id)
        assert 0 == len(result), "Project Not removed!"
        assert 0 == lng_, "Project Not removed!"

        # Now import the project again
        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        result = self.project_service.retrieve_projects_for_user(
            test_user.id)[0]
        assert len(result) == 1, "There should be only one project."
        assert result[0].name == "TestImportExport", "The project name is not correct."
        assert result[0].description == "test_desc", "The project description is not correct."
        test_project = result[0]

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)

        # Two operations are expected: the connectivity import and the BCT analyzer run
        assert 2 == count_operations, "Invalid ops number after export and import !"
        for gid in expected_results:
            datatype = dao.get_datatype_by_gid(gid)
            assert datatype.module == expected_results[gid][0], 'DataTypes not imported correctly'
            assert datatype.type == expected_results[gid][1], 'DataTypes not imported correctly'
        # check the value wrapper
        new_val = try_get_last_datatype(test_project.id, ValueWrapperIndex)
        assert value_wrapper.data_value == new_val.data_value, "Data value incorrect"
        assert value_wrapper.data_type == new_val.data_type, "Data type incorrect"
        assert value_wrapper.data_name == new_val.data_name, "Data name incorrect"
        assert not new_val.visible, "Visibility incorrectly restored"

    def test_import_export_existing(self, user_factory, project_factory):
        """
        Test the import/export mechanism for a project structure.
        The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestImportExport2")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'connectivity_66.zip')
        TestFactory.import_zip_connectivity(test_user, test_project, zip_path)

        count_operations = dao.get_filtered_operations(test_project.id,
                                                       None,
                                                       is_count=True)
        assert 1 == count_operations, "Invalid ops before export!"

        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"

        with pytest.raises(ImportException):
            self.import_service.import_project_structure(
                self.zip_path, test_user.id)

    def test_export_import_burst(self, user_factory, project_factory,
                                 simulation_launch):
        """
        Test that fk_parent_burst is correctly preserved after export/import
        """
        test_user = user_factory()
        test_project = project_factory(test_user, "TestIESim")
        sim_op = simulation_launch(test_user,
                                   test_project,
                                   simulation_length=10)
        tries = 5
        while not sim_op.has_finished and tries > 0:
            sleep(5)
            tries = tries - 1
            sim_op = dao.get_operation_by_id(sim_op.id)
        assert sim_op.has_finished, "Simulation did not finish in the given time"

        self.zip_path = ExportManager().export_project(test_project)
        assert self.zip_path is not None, "Exported file is none"
        self.project_service.remove_project(test_project.id)

        self.import_service.import_project_structure(self.zip_path,
                                                     test_user.id)
        retrieved_project = self.project_service.retrieve_projects_for_user(
            test_user.id)[0][0]
        ts = try_get_last_datatype(retrieved_project.id, TimeSeriesRegionIndex)
        bursts = dao.get_bursts_for_project(retrieved_project.id)
        assert 1 == len(bursts)
        assert ts.fk_parent_burst == bursts[0].gid

    def test_export_import_figures(self, user_factory, project_factory):
        """
        Test that ResultFigure instances are correctly restored after exporting and re-importing a project
        """
        # Prepare data
        user = user_factory()
        project = project_factory(user, "TestImportExportFigures")
        zip_path = os.path.join(os.path.dirname(tvb_data.__file__),
                                'connectivity', 'paupau.zip')
        TestFactory.import_zip_connectivity(user, project, zip_path)

        figure_service = FigureService()
        figure_service.store_result_figure(project, user, "png", IMG_DATA,
                                           "bla")
        figure_service.store_result_figure(project, user, "png", IMG_DATA,
                                           "bla")
        figures = list(
            figure_service.retrieve_result_figures(project,
                                                   user)[0].values())[0]
        assert 2 == len(figures)

        # Export, delete, then re-import the project
        self.zip_path = ExportManager().export_project(project)
        assert self.zip_path is not None, "Exported file is none"
        self.project_service.remove_project(project.id)

        self.import_service.import_project_structure(self.zip_path, user.id)

        # Check that state is as before export: one operation, one DT, 2 figures
        retrieved_project = self.project_service.retrieve_projects_for_user(
            user.id)[0][0]
        count_operations = dao.get_filtered_operations(retrieved_project.id,
                                                       None,
                                                       is_count=True)
        assert 1 == count_operations
        count_datatypes = dao.count_datatypes(retrieved_project.id, DataType)
        assert 1 == count_datatypes

        figures = list(
            figure_service.retrieve_result_figures(retrieved_project,
                                                   user)[0].values())[0]
        assert 2 == len(figures)
        assert "bla" in figures[0].name
        assert "bla" in figures[1].name
        image_path = utils.url2path(figures[0].file_path)
        img_data = Image.open(image_path).load()
        assert img_data is not None
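
test_export_import_burst above waits for the simulation operation with a bounded sleep-and-retry loop. The same idea extracted into a reusable helper, a sketch with illustrative names only:

from time import sleep


def wait_until(condition, retries=5, delay=5.0):
    # Re-evaluate condition until it returns truthy or the retries run out.
    for _ in range(retries):
        if condition():
            return True
        sleep(delay)
    return bool(condition())


# Usage in the spirit of the test above:
# assert wait_until(lambda: dao.get_operation_by_id(sim_op.id).has_finished), \
#     "Simulation did not finish in the given time"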